<|file_name|>fktv.py<|end_file_name|>
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
clean_html,
determine_ext,
js_to_json,
)
class FKTVIE(InfoExtractor):
IE_NAME = 'fernsehkritik.tv'
_VALID_URL = r'http://(?:www\.)?fernsehkritik\.tv/folge-(?P<id>[0-9]+)(?:/.*)?'
_TEST = {
'url': 'http://fernsehkritik.tv/folge-1',
'md5': '21f0b0c99bce7d5b524eb1b17b1c6d79',
'info_dict': {
'id': '1',
            'ext': 'mp4',
            'title': 'Folge 1 vom 10. April 2007',
            'thumbnail': 're:^https?://.*\.jpg$',
        },
    }

def _real_extract(self, url):
episode = self._match_id(url)
webpage = self._download_webpage(
'http://fernsehkritik.tv/folge-%s/play' % episode, episode)
title = clean_html(self._html_search_regex(
'<h3>([^<]+)</h3>', webpage, 'title'))
thumbnail = self._search_regex(r'POSTER\s*=\s*"([^"]+)', webpage, 'thumbnail', fatal=False)
sources = self._parse_json(self._search_regex(r'(?s)MEDIA\s*=\s*(\[.+?\]);', webpage, 'media'), episode, js_to_json)
formats = []
for source in sources:
furl = source.get('src')
if furl:
formats.append({
'url': furl,
'format_id': determine_ext(furl),
})
self._sort_formats(formats)
return {
'id': episode,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
        }
<|file_name|>ndarray.native.js<|end_file_name|>
/**
* @license Apache-2.0
*
* Copyright (c) 2020 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var Float32Array = require( '@stdlib/array/float32' );
var addon = require( './smin.native.js' );
// MAIN //
/**
* Computes the minimum value of a single-precision floating-point strided array.
*
* @param {PositiveInteger} N - number of indexed elements
* @param {Float32Array} x - input array
* @param {integer} stride - stride length
* @param {NonNegativeInteger} offset - starting index
* @returns {number} minimum value
*
* @example
* var Float32Array = require( '@stdlib/array/float32' );
* var floor = require( '@stdlib/math/base/special/floor' );
*
* var x = new Float32Array( [ 2.0, 1.0, 2.0, -2.0, -2.0, 2.0, 3.0, 4.0 ] );
* var N = floor( x.length / 2 );
*
* var v = smin( N, x, 2, 1 );
* // returns -2.0
*/
function smin( N, x, stride, offset ) {
var view;
if ( stride < 0 ) {
offset += (N-1) * stride;
}
view = new Float32Array( x.buffer, x.byteOffset+(x.BYTES_PER_ELEMENT*offset), x.length-offset ); // eslint-disable-line max-len
return addon( N, view, stride );
}
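// Illustrative note (not part of the original file): for a negative stride,
// e.g. smin( 3, x, -1, 2 ) with x = new Float32Array( [ 1.0, -2.0, 3.0 ] ),
// the offset is first moved to the lowest element accessed (offset becomes 0),
// so the typed-array view handed to the native addon still spans x[0]..x[2]
// and the returned minimum is -2.0.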
// EXPORTS //
module.exports = smin;
<|file_name|>constant_op.py<|end_file_name|>
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Constant Value Tensors
TensorFlow provides several operations that you can use to generate constants.
@@zeros
@@zeros_like
@@ones
@@ones_like
@@fill
@@constant
## Sequences
@@linspace
@@range
## Random Tensors
TensorFlow has several ops that create random tensors with different
distributions. The random ops are stateful, and create new random values each
time they are evaluated.
The `seed` keyword argument in these functions acts in conjunction with
the graph-level random seed. Changing either the graph-level seed using
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed) or the
op-level seed will change the underlying seed of these operations. Setting
neither graph-level nor op-level seed results in a random seed for all
operations.
See [`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for details on the interaction between operation-level and graph-level random
seeds.
### Examples:
```python
# Create a tensor of shape [2, 3] consisting of random normal values, with mean
# -1 and standard deviation 4.
norm = tf.random_normal([2, 3], mean=-1, stddev=4)
# Shuffle the first dimension of a tensor
c = tf.constant([[1, 2], [3, 4], [5, 6]])
shuff = tf.random_shuffle(c)
# Each time we run these ops, different results are generated
sess = tf.Session()
print(sess.run(norm))
print(sess.run(norm))
# Set an op-level seed to generate repeatable sequences across sessions.
c = tf.constant([[1, 2], [3, 4], [5, 6]])
sess = tf.Session()
norm = tf.random_normal(c, seed=1234)
print(sess.run(norm))
print(sess.run(norm))
```
Another common use of random values is the initialization of variables. Also see
the [Variables How To](../../how_tos/variables/index.md).
```python
# Use random uniform values in [0, 1) as the initializer for a variable of shape
# [2, 3]. The default type is float32.
var = tf.Variable(tf.random_uniform([2, 3]), name="var")
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
print(sess.run(var))
```
@@random_normal
@@truncated_normal
@@random_uniform
@@random_shuffle
@@set_random_seed
"""
# Must be separate from array_ops to avoid a cyclic dependency.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
def constant(value, dtype=None, shape=None, name="Const"):
"""Creates a constant tensor.
The resulting tensor is populated with values of type `dtype`, as
specified by arguments `value` and (optionally) `shape` (see examples
below).
The argument `value` can be a constant value, or a list of values of type
`dtype`. If `value` is a list, then the length of the list must be less
than or equal to the number of elements implied by the `shape` argument (if
specified). In the case where the list length is less than the number of
elements specified by `shape`, the last element in the list will be used
to fill the remaining entries.
The argument `shape` is optional. If present, it specifies the dimensions
of the resulting tensor. If not present, then the tensor is a scalar (0-D)
if `value` is a scalar, or 1-D otherwise.
If the argument `dtype` is not specified, then the type is inferred from
the type of `value`.
For example:
```python
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.]
[-1. -1. -1.]]
```
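For instance, a value list shorter than `shape` is padded with its last
element, following the fill rule described above (illustrative example, not
from the original docstring):

```python
tensor = tf.constant([1, 2], shape=[2, 3]) => [[1 2 2]
                                               [2 2 2]]
```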
Args:
value: A constant value (or list) of output type `dtype`.
dtype: The type of the elements of the resulting tensor.
shape: Optional dimensions of resulting tensor.
name: Optional name for the tensor.
Returns:
A Constant Tensor.
"""
g = ops.get_default_graph()
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(
tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape))
dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
const_tensor = g.create_op(
"Const", [], [dtype_value.type],
attrs={"value": tensor_value, "dtype": dtype_value}, name=name).outputs[0]
return const_tensor


@ops.RegisterShape("Const")
def _ConstantShape(op):
return [tensor_shape.TensorShape(
[d.size for d in op.get_attr("value").tensor_shape.dim])]
def _constant_tensor_conversion_function(v, dtype=None, name=None,
as_ref=False):
_ = as_ref
return constant(v, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
(list, tuple), _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.ndarray, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
np.generic, _constant_tensor_conversion_function, 100)
ops.register_tensor_conversion_function(
object, _constant_tensor_conversion_function, 200)
def _tensor_shape_tensor_conversion_function(s, dtype=None, name=None,
as_ref=False):
_ = as_ref
if not s.is_fully_defined():
raise ValueError(
"Cannot convert a partially known TensorShape to a Tensor: %s" % s)
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a TensorShape to dtype: %s" % dtype)
else:
dtype = dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(s.as_list(), dtype=dtype, name=name)
ops.register_tensor_conversion_function(
tensor_shape.TensorShape, _tensor_shape_tensor_conversion_function, 100)
def _dimension_tensor_conversion_function(d, dtype=None, name=None,
as_ref=False):
_ = as_ref
if d.value is None:
raise ValueError("Cannot convert an unknown Dimension to a Tensor: %s" % d)
if dtype is not None:
if dtype not in (dtypes.int32, dtypes.int64):
raise TypeError("Cannot convert a TensorShape to dtype: %s" % dtype)
else:
dtype = dtypes.int32
if name is None:
name = "shape_as_tensor"
return constant(d.value, dtype=dtype, name=name)
ops.register_tensor_conversion_function(
    tensor_shape.Dimension, _dimension_tensor_conversion_function, 100)
<|file_name|>Bibliography.py<|end_file_name|>
#!/usr/bin/env python
"""
C.11.3 Bibliography and Citation (p208)
"""
import plasTeX, codecs
from plasTeX.Base.LaTeX.Sectioning import chapter, section
from plasTeX import Command, Environment
from Lists import List
log = plasTeX.Logging.getLogger()
class bibliography(chapter):
args = 'files:str'
linkType = 'bibliography'
def invoke(self, tex):
res = chapter.invoke(self, tex)
self.title = self.ownerDocument.createElement('bibname').expand(tex)
self.loadBibliographyFile(tex)
return res
def loadBibliographyFile(self, tex):
# Load bibtex file
try:
file = tex.kpsewhich(tex.jobname+'.bbl')
tex.input(codecs.open(file, 'r', self.ownerDocument.config['files']['input-encoding']))
except OSError, msg:
log.warning(msg)
class bibliographystyle(Command):
args = 'style'
class thebibliography(List):
args = 'widelabel'
linkType = 'bibliography'
class bibitem(List.item):
args = '[ label ] key:str'
def invoke(self, tex):
res = List.item.invoke(self, tex)
a = self.attributes
# Put the entry into the global bibliography
doc = self.ownerDocument
bibitems = doc.userdata.getPath('bibliography/bibitems', {})
bibitems[a['key']] = self
doc.userdata.setPath('bibliography/bibitems', bibitems)
self.ref = str(len([x for x in bibitems.values()
if not x.attributes['label']]))
key = a['key']
label = a.get('label')
bibcites = doc.userdata.getPath('bibliography/bibcites', {})
if not bibcites.has_key(key):
if label is None:
label = doc.createDocumentFragment()
label.extend(self.ref)
bibcites[key] = label
doc.userdata.setPath('bibliography/bibcites', bibcites)
return res
@property
def id(self):
return self.attributes['key']
@property
def bibcite(self):
doc = self.ownerDocument
res = doc.createDocumentFragment()
bibcites = doc.userdata.getPath('bibliography/bibcites', {})
res.extend(bibcites.get(self.attributes['key']))
return res
def digest(self, tokens):
if self.macroMode == Command.MODE_END:
return
for tok in tokens:
if not isinstance(tok, thebibliography.bibitem):
continue
tokens.push(tok)
break
return List.digest(self, tokens)
class cite(Command):
    args = '[ text ] bibkeys:list:str'

    @property
    def bibitems(self):
# Get all referenced items
output = []
doc = self.ownerDocument
for x in self.attributes['bibkeys']:
item = doc.userdata.getPath('bibliography/bibitems', {}).get(x)
if item is None:
log.warning('Bibliography item "%s" has no entry', x)
else:
output.append(item)
return output
@property
def postnote(self):
a = self.attributes
if a['text'] is not None:
return a['text']
return ''
def citation(self):
""" (Jones et al., 1990) """
res = self.ownerDocument.createDocumentFragment()
i = 0
res.append('[')
for i, item in enumerate(self.bibitems):
node = self.ownerDocument.createElement('bgroup')
node.extend(item.bibcite)
node.idref['bibitem'] = item
res.append(node)
if i < (len(self.bibitems)-1):
res.append(', ')
else:
if self.postnote:
res.append(', ')
res.append(self.postnote)
res.append(']')
return res
class nocite(Command):
args = 'bibkeys:str'
class bibcite(Command):
args = 'key:str info'
def invoke(self, tex):
Command.invoke(self, tex)
value = self.attributes['info'].firstChild
doc = self.ownerDocument
bibcites = doc.userdata.getPath('bibliography/bibcites', {})
bibcites[self.attributes['key']] = value
doc.userdata.setPath('bibliography/bibcites', bibcites)
class citation(Command):
pass
class bibstyle(Command):
pass
class bibdata(Command):
pass
class newblock(Command):
pass
class bibliographyref(Command):
    pass
<|file_name|>item_func.cc<|end_file_name|>
/* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
/**
@file
@brief
This file defines all numerical functions
*/
#include "my_global.h" /* NO_EMBEDDED_ACCESS_CHECKS */
#include "sql_priv.h"
/*
It is necessary to include set_var.h instead of item.h because there
are dependencies on include order for set_var.h and item.h. This
will be resolved later.
*/
#include "sql_class.h" // set_var.h: THD
#include "set_var.h"
#include "rpl_slave.h" // for wait_for_master_pos
#include "sql_show.h" // append_identifier
#include "strfunc.h" // find_type
#include "sql_parse.h" // is_update_query
#include "sql_acl.h" // EXECUTE_ACL
#include "mysqld.h" // LOCK_uuid_generator
#include "rpl_mi.h"
#include "sql_time.h"
#include <m_ctype.h>
#include <hash.h>
#include <time.h>
#include <ft_global.h>
#include <my_bit.h>
#include "sp_head.h"
#include "sp_rcontext.h"
#include "sp.h"
#include "set_var.h"
#include "debug_sync.h"
#include <mysql/plugin.h>
#include <mysql/service_thd_wait.h>
#include "rpl_gtid.h"
using std::min;
using std::max;
bool check_reserved_words(LEX_STRING *name)
{
if (!my_strcasecmp(system_charset_info, name->str, "GLOBAL") ||
!my_strcasecmp(system_charset_info, name->str, "LOCAL") ||
!my_strcasecmp(system_charset_info, name->str, "SESSION"))
return TRUE;
return FALSE;
}
/**
@return
TRUE if item is a constant
*/
bool
eval_const_cond(Item *cond)
{
return ((Item_func*) cond)->val_int() ? TRUE : FALSE;
}
/**
Test if the sum of arguments overflows the ulonglong range.
*/
static inline bool test_if_sum_overflows_ull(ulonglong arg1, ulonglong arg2)
{
return ULONGLONG_MAX - arg1 < arg2;
}
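/*
  Illustrative: test_if_sum_overflows_ull(ULONGLONG_MAX, 1) returns true,
  since ULONGLONG_MAX - ULONGLONG_MAX == 0 < 1, i.e. the sum would wrap.
*/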
void Item_func::set_arguments(List<Item> &list)
{
allowed_arg_cols= 1;
arg_count=list.elements;
args= tmp_arg; // If 2 arguments
if (arg_count <= 2 || (args=(Item**) sql_alloc(sizeof(Item*)*arg_count)))
{
List_iterator_fast<Item> li(list);
Item *item;
Item **save_args= args;
while ((item=li++))
{
*(save_args++)= item;
with_sum_func|=item->with_sum_func;
}
}
list.empty(); // Fields are used
}
Item_func::Item_func(List<Item> &list)
:allowed_arg_cols(1)
{
set_arguments(list);
}
Item_func::Item_func(THD *thd, Item_func *item)
:Item_result_field(thd, item),
const_item_cache(0),
allowed_arg_cols(item->allowed_arg_cols),
used_tables_cache(item->used_tables_cache),
not_null_tables_cache(item->not_null_tables_cache),
arg_count(item->arg_count)
{
if (arg_count)
{
if (arg_count <=2)
args= tmp_arg;
else
{
if (!(args=(Item**) thd->alloc(sizeof(Item*)*arg_count)))
return;
}
memcpy((char*) args, (char*) item->args, sizeof(Item*)*arg_count);
}
}
/*
Resolve references to table column for a function and its argument
SYNOPSIS:
fix_fields()
thd Thread object
ref Pointer to where this object is used. This reference
is used if we want to replace this object with another
one (for example in the summary functions).
DESCRIPTION
Call fix_fields() for all arguments to the function. The main intention
is to allow all Item_field() objects to setup pointers to the table fields.
Sets as a side effect the following class variables:
maybe_null Set if any argument may return NULL
with_sum_func Set if any of the arguments contains a sum function
used_tables_cache Set to union of the tables used by arguments
str_value.charset If this is a string function, set this to the
character set for the first argument.
If any argument is binary, this is set to binary
If for any item any of the defaults are wrong, then this can
be fixed in the fix_length_and_dec() function that is called
after this one or by writing a specialized fix_fields() for the
item.
RETURN VALUES
FALSE ok
TRUE Got error. Stored with my_error().
*/
bool
Item_func::fix_fields(THD *thd, Item **ref)
{
DBUG_ASSERT(fixed == 0 || basic_const_item());
Item **arg,**arg_end;
uchar buff[STACK_BUFF_ALLOC]; // Max argument in function
Switch_resolve_place SRP(thd->lex->current_select ?
&thd->lex->current_select->resolve_place : NULL,
st_select_lex::RESOLVE_NONE,
thd->lex->current_select);
used_tables_cache= get_initial_pseudo_tables();
not_null_tables_cache= 0;
const_item_cache=1;
/*
Use stack limit of STACK_MIN_SIZE * 2 since
on some platforms a recursive call to fix_fields
requires more than STACK_MIN_SIZE bytes (e.g. for
MIPS, it takes about 22kB to make one recursive
call to Item_func::fix_fields())
*/
if (check_stack_overrun(thd, STACK_MIN_SIZE * 2, buff))
return TRUE; // Fatal error if flag is set!
if (arg_count)
{ // Print purify happy
for (arg=args, arg_end=args+arg_count; arg != arg_end ; arg++)
{
Item *item;
/*
We can't yet set item to *arg as fix_fields may change *arg
We shouldn't call fix_fields() twice, so check 'fixed' field first
*/
if ((!(*arg)->fixed && (*arg)->fix_fields(thd, arg)))
return TRUE; /* purecov: inspected */
item= *arg;
if (allowed_arg_cols)
{
if (item->check_cols(allowed_arg_cols))
return 1;
}
else
{
/* we have to fetch allowed_arg_cols from first argument */
DBUG_ASSERT(arg == args); // it is first argument
allowed_arg_cols= item->cols();
DBUG_ASSERT(allowed_arg_cols); // Can't be 0 any more
}
if (item->maybe_null)
maybe_null=1;
with_sum_func= with_sum_func || item->with_sum_func;
used_tables_cache|= item->used_tables();
not_null_tables_cache|= item->not_null_tables();
const_item_cache&= item->const_item();
with_subselect|= item->has_subquery();
with_stored_program|= item->has_stored_program();
}
}
fix_length_and_dec();
  if (thd->is_error()) // An error inside fix_length_and_dec occurred
return TRUE;
fixed= 1;
return FALSE;
}
void Item_func::fix_after_pullout(st_select_lex *parent_select,
st_select_lex *removed_select)
{
Item **arg,**arg_end;
used_tables_cache= get_initial_pseudo_tables();
not_null_tables_cache= 0;
const_item_cache=1;
if (arg_count)
{
for (arg=args, arg_end=args+arg_count; arg != arg_end ; arg++)
{
Item *const item= *arg;
item->fix_after_pullout(parent_select, removed_select);
used_tables_cache|= item->used_tables();
not_null_tables_cache|= item->not_null_tables();
const_item_cache&= item->const_item();
}
}
}
bool Item_func::walk(Item_processor processor, bool walk_subquery,
uchar *argument)
{
if (arg_count)
{
Item **arg,**arg_end;
for (arg= args, arg_end= args+arg_count; arg != arg_end; arg++)
{
if ((*arg)->walk(processor, walk_subquery, argument))
return 1;
}
}
return (this->*processor)(argument);
}
void Item_func::traverse_cond(Cond_traverser traverser,
void *argument, traverse_order order)
{
if (arg_count)
{
Item **arg,**arg_end;
switch (order) {
case(PREFIX):
(*traverser)(this, argument);
for (arg= args, arg_end= args+arg_count; arg != arg_end; arg++)
{
(*arg)->traverse_cond(traverser, argument, order);
}
break;
case (POSTFIX):
for (arg= args, arg_end= args+arg_count; arg != arg_end; arg++)
{
(*arg)->traverse_cond(traverser, argument, order);
}
(*traverser)(this, argument);
}
}
else
(*traverser)(this, argument);
}
/**
Transform an Item_func object with a transformer callback function.
The function recursively applies the transform method to each
argument of the Item_func node.
If the call of the method for an argument item returns a new item
the old item is substituted for a new one.
After this the transformer is applied to the root node
of the Item_func object.
@param transformer the transformer callback function to be applied to
the nodes of the tree of the object
@param argument parameter to be passed to the transformer
@return
Item returned as the result of transformation of the root node
*/
Item *Item_func::transform(Item_transformer transformer, uchar *argument)
{
DBUG_ASSERT(!current_thd->stmt_arena->is_stmt_prepare());
if (arg_count)
{
Item **arg,**arg_end;
for (arg= args, arg_end= args+arg_count; arg != arg_end; arg++)
{
Item *new_item= (*arg)->transform(transformer, argument);
if (!new_item)
return 0;
/*
THD::change_item_tree() should be called only if the tree was
really transformed, i.e. when a new item has been created.
Otherwise we'll be allocating a lot of unnecessary memory for
change records at each execution.
*/
if (*arg != new_item)
current_thd->change_item_tree(arg, new_item);
}
}
return (this->*transformer)(argument);
}
/**
Compile Item_func object with a processor and a transformer
callback functions.
First the function applies the analyzer to the root node of
  the Item_func object. Then if the analyzer succeeds (returns TRUE)
the function recursively applies the compile method to each argument
of the Item_func node.
If the call of the method for an argument item returns a new item
the old item is substituted for a new one.
After this the transformer is applied to the root node
of the Item_func object.
@param analyzer the analyzer callback function to be applied to the
nodes of the tree of the object
@param[in,out] arg_p parameter to be passed to the processor
@param transformer the transformer callback function to be applied to the
nodes of the tree of the object
@param arg_t parameter to be passed to the transformer
@return Item returned as result of transformation of the node,
the same item if no transformation applied, or NULL if
transformation caused an error.
*/
Item *Item_func::compile(Item_analyzer analyzer, uchar **arg_p,
Item_transformer transformer, uchar *arg_t)
{
if (!(this->*analyzer)(arg_p))
return this;
if (arg_count)
{
Item **arg,**arg_end;
for (arg= args, arg_end= args+arg_count; arg != arg_end; arg++)
{
/*
The same parameter value of arg_p must be passed
to analyze any argument of the condition formula.
*/
uchar *arg_v= *arg_p;
Item *new_item= (*arg)->compile(analyzer, &arg_v, transformer, arg_t);
if (new_item == NULL)
return NULL;
if (*arg != new_item)
current_thd->change_item_tree(arg, new_item);
}
}
return (this->*transformer)(arg_t);
}
/**
See comments in Item_cmp_func::split_sum_func()
*/
void Item_func::split_sum_func(THD *thd, Ref_ptr_array ref_pointer_array,
List<Item> &fields)
{
Item **arg, **arg_end;
for (arg= args, arg_end= args+arg_count; arg != arg_end ; arg++)
(*arg)->split_sum_func2(thd, ref_pointer_array, fields, arg, TRUE);
}
void Item_func::update_used_tables()
{
used_tables_cache= get_initial_pseudo_tables();
const_item_cache=1;
with_subselect= false;
with_stored_program= false;
for (uint i=0 ; i < arg_count ; i++)
{
args[i]->update_used_tables();
used_tables_cache|=args[i]->used_tables();
const_item_cache&=args[i]->const_item();
with_subselect|= args[i]->has_subquery();
with_stored_program|= args[i]->has_stored_program();
}
}
table_map Item_func::used_tables() const
{
return used_tables_cache;
}
table_map Item_func::not_null_tables() const
{
return not_null_tables_cache;
}
void Item_func::print(String *str, enum_query_type query_type)
{
str->append(func_name());
str->append('(');
print_args(str, 0, query_type);
str->append(')');
}
void Item_func::print_args(String *str, uint from, enum_query_type query_type)
{
for (uint i=from ; i < arg_count ; i++)
{
if (i != from)
str->append(',');
args[i]->print(str, query_type);
}
}
void Item_func::print_op(String *str, enum_query_type query_type)
{
str->append('(');
for (uint i=0 ; i < arg_count-1 ; i++)
{
args[i]->print(str, query_type);
str->append(' ');
str->append(func_name());
str->append(' ');
}
args[arg_count-1]->print(str, query_type);
str->append(')');
}
bool Item_func::eq(const Item *item, bool binary_cmp) const
{
/* Assume we don't have rtti */
if (this == item)
return 1;
if (item->type() != FUNC_ITEM)
return 0;
Item_func *item_func=(Item_func*) item;
Item_func::Functype func_type;
if ((func_type= functype()) != item_func->functype() ||
arg_count != item_func->arg_count ||
(func_type != Item_func::FUNC_SP &&
func_name() != item_func->func_name()) ||
(func_type == Item_func::FUNC_SP &&
my_strcasecmp(system_charset_info, func_name(), item_func->func_name())))
return 0;
for (uint i=0; i < arg_count ; i++)
if (!args[i]->eq(item_func->args[i], binary_cmp))
return 0;
return 1;
}
Field *Item_func::tmp_table_field(TABLE *table)
{
Field *field= NULL;
switch (result_type()) {
case INT_RESULT:
if (max_char_length() > MY_INT32_NUM_DECIMAL_DIGITS)
field= new Field_longlong(max_char_length(), maybe_null, item_name.ptr(),
unsigned_flag);
else
field= new Field_long(max_char_length(), maybe_null, item_name.ptr(),
unsigned_flag);
break;
case REAL_RESULT:
field= new Field_double(max_char_length(), maybe_null, item_name.ptr(), decimals);
break;
case STRING_RESULT:
return make_string_field(table);
break;
case DECIMAL_RESULT:
field= Field_new_decimal::create_from_item(this);
break;
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
field= 0;
break;
}
if (field)
field->init(table);
return field;
}
my_decimal *Item_func::val_decimal(my_decimal *decimal_value)
{
DBUG_ASSERT(fixed);
longlong nr= val_int();
if (null_value)
return 0; /* purecov: inspected */
int2my_decimal(E_DEC_FATAL_ERROR, nr, unsigned_flag, decimal_value);
return decimal_value;
}
String *Item_real_func::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
double nr= val_real();
if (null_value)
return 0; /* purecov: inspected */
str->set_real(nr, decimals, collation.collation);
return str;
}
my_decimal *Item_real_func::val_decimal(my_decimal *decimal_value)
{
DBUG_ASSERT(fixed);
double nr= val_real();
if (null_value)
return 0; /* purecov: inspected */
double2my_decimal(E_DEC_FATAL_ERROR, nr, decimal_value);
return decimal_value;
}
void Item_func::fix_num_length_and_dec()
{
uint fl_length= 0;
decimals=0;
for (uint i=0 ; i < arg_count ; i++)
{
set_if_bigger(decimals,args[i]->decimals);
set_if_bigger(fl_length, args[i]->max_length);
}
max_length=float_length(decimals);
if (fl_length > max_length)
{
decimals= NOT_FIXED_DEC;
max_length= float_length(NOT_FIXED_DEC);
}
}
void Item_func_numhybrid::fix_num_length_and_dec()
{}
/**
Count max_length and decimals for temporal functions.
@param item Argument array
@param nitems Number of arguments in the array.
*/
void Item_func::count_datetime_length(Item **item, uint nitems)
{
unsigned_flag= 0;
decimals= 0;
if (field_type() != MYSQL_TYPE_DATE)
{
for (uint i= 0; i < nitems; i++)
set_if_bigger(decimals,
field_type() == MYSQL_TYPE_TIME ?
item[i]->time_precision() : item[i]->datetime_precision());
}
set_if_smaller(decimals, DATETIME_MAX_DECIMALS);
uint len= decimals ? (decimals + 1) : 0;
switch (field_type())
{
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
len+= MAX_DATETIME_WIDTH;
break;
case MYSQL_TYPE_DATE:
case MYSQL_TYPE_NEWDATE:
len+= MAX_DATE_WIDTH;
break;
case MYSQL_TYPE_TIME:
len+= MAX_TIME_WIDTH;
break;
default:
DBUG_ASSERT(0);
}
fix_char_length(len);
}
/**
Set max_length/decimals of function if function is fixed point and
result length/precision depends on argument ones.
*/
void Item_func::count_decimal_length()
{
int max_int_part= 0;
decimals= 0;
unsigned_flag= 1;
for (uint i=0 ; i < arg_count ; i++)
{
set_if_bigger(decimals, args[i]->decimals);
set_if_bigger(max_int_part, args[i]->decimal_int_part());
set_if_smaller(unsigned_flag, args[i]->unsigned_flag);
}
int precision= min(max_int_part + decimals, DECIMAL_MAX_PRECISION);
fix_char_length(my_decimal_precision_to_length_no_truncation(precision,
decimals,
unsigned_flag));
}
/**
  Set max_length to the maximum length of the arguments.
*/
void Item_func::count_only_length(Item **item, uint nitems)
{
uint32 char_length= 0;
unsigned_flag= 1;
for (uint i= 0; i < nitems; i++)
{
set_if_bigger(char_length, item[i]->max_char_length());
set_if_smaller(unsigned_flag, item[i]->unsigned_flag);
}
fix_char_length(char_length);
}
/**
Set max_length/decimals of function if function is floating point and
result length/precision depends on argument ones.
*/
void Item_func::count_real_length()
{
uint32 length= 0;
decimals= 0;
max_length= 0;
for (uint i=0 ; i < arg_count ; i++)
{
if (decimals != NOT_FIXED_DEC)
{
set_if_bigger(decimals, args[i]->decimals);
set_if_bigger(length, (args[i]->max_length - args[i]->decimals));
}
set_if_bigger(max_length, args[i]->max_length);
}
if (decimals != NOT_FIXED_DEC)
{
max_length= length;
length+= decimals;
if (length < max_length) // If previous operation gave overflow
max_length= UINT_MAX32;
else
max_length= length;
}
}
/**
Calculate max_length and decimals for STRING_RESULT functions.
@param field_type Field type.
@param items Argument array.
@param nitems Number of arguments.
@retval False on success, true on error.
*/
bool Item_func::count_string_result_length(enum_field_types field_type,
Item **items, uint nitems)
{
if (agg_arg_charsets_for_string_result(collation, items, nitems))
return true;
if (is_temporal_type(field_type))
count_datetime_length(items, nitems);
else
{
decimals= NOT_FIXED_DEC;
count_only_length(items, nitems);
}
return false;
}
void Item_func::signal_divide_by_null()
{
THD *thd= current_thd;
if (thd->variables.sql_mode & MODE_ERROR_FOR_DIVISION_BY_ZERO)
push_warning(thd, Sql_condition::WARN_LEVEL_WARN, ER_DIVISION_BY_ZERO,
ER(ER_DIVISION_BY_ZERO));
null_value= 1;
}
Item *Item_func::get_tmp_table_item(THD *thd)
{
if (!with_sum_func && !const_item())
return new Item_field(result_field);
return copy_or_same(thd);
}
double Item_int_func::val_real()
{
DBUG_ASSERT(fixed == 1);
return unsigned_flag ? (double) ((ulonglong) val_int()) : (double) val_int();
}
String *Item_int_func::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
longlong nr=val_int();
if (null_value)
return 0;
str->set_int(nr, unsigned_flag, collation.collation);
return str;
}
void Item_func_connection_id::fix_length_and_dec()
{
Item_int_func::fix_length_and_dec();
unsigned_flag= 1;
}
bool Item_func_connection_id::fix_fields(THD *thd, Item **ref)
{
if (Item_int_func::fix_fields(thd, ref))
return TRUE;
thd->thread_specific_used= TRUE;
value= thd->variables.pseudo_thread_id;
return FALSE;
}
/**
Check arguments here to determine result's type for a numeric
function of two arguments.
*/
void Item_num_op::find_num_type(void)
{
DBUG_ENTER("Item_num_op::find_num_type");
DBUG_PRINT("info", ("name %s", func_name()));
DBUG_ASSERT(arg_count == 2);
Item_result r0= args[0]->numeric_context_result_type();
Item_result r1= args[1]->numeric_context_result_type();
DBUG_ASSERT(r0 != STRING_RESULT && r1 != STRING_RESULT);
if (r0 == REAL_RESULT || r1 == REAL_RESULT)
{
/*
Since DATE/TIME/DATETIME data types return INT_RESULT/DECIMAL_RESULT
type codes, we should never get to here when both fields are temporal.
*/
DBUG_ASSERT(!args[0]->is_temporal() || !args[1]->is_temporal());
count_real_length();
max_length= float_length(decimals);
hybrid_type= REAL_RESULT;
}
else if (r0 == DECIMAL_RESULT || r1 == DECIMAL_RESULT)
{
hybrid_type= DECIMAL_RESULT;
result_precision();
}
else
{
DBUG_ASSERT(r0 == INT_RESULT && r1 == INT_RESULT);
decimals= 0;
hybrid_type=INT_RESULT;
result_precision();
}
DBUG_PRINT("info", ("Type: %s",
(hybrid_type == REAL_RESULT ? "REAL_RESULT" :
hybrid_type == DECIMAL_RESULT ? "DECIMAL_RESULT" :
hybrid_type == INT_RESULT ? "INT_RESULT" :
"--ILLEGAL!!!--")));
DBUG_VOID_RETURN;
}
/**
Set result type for a numeric function of one argument
(can be also used by a numeric function of many arguments, if the result
type depends only on the first argument)
*/
void Item_func_num1::find_num_type()
{
DBUG_ENTER("Item_func_num1::find_num_type");
DBUG_PRINT("info", ("name %s", func_name()));
switch (hybrid_type= args[0]->result_type()) {
case INT_RESULT:
unsigned_flag= args[0]->unsigned_flag;
break;
case STRING_RESULT:
case REAL_RESULT:
hybrid_type= REAL_RESULT;
max_length= float_length(decimals);
break;
case DECIMAL_RESULT:
break;
default:
DBUG_ASSERT(0);
}
DBUG_PRINT("info", ("Type: %s",
(hybrid_type == REAL_RESULT ? "REAL_RESULT" :
hybrid_type == DECIMAL_RESULT ? "DECIMAL_RESULT" :
hybrid_type == INT_RESULT ? "INT_RESULT" :
"--ILLEGAL!!!--")));
DBUG_VOID_RETURN;
}
void Item_func_num1::fix_num_length_and_dec()
{
decimals= args[0]->decimals;
max_length= args[0]->max_length;
}
void Item_func_numhybrid::fix_length_and_dec()
{
fix_num_length_and_dec();
find_num_type();
}
String *Item_func_numhybrid::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
switch (hybrid_type) {
case DECIMAL_RESULT:
{
my_decimal decimal_value, *val;
if (!(val= decimal_op(&decimal_value)))
return 0; // null is set
my_decimal_round(E_DEC_FATAL_ERROR, val, decimals, FALSE, val);
str->set_charset(collation.collation);
my_decimal2string(E_DEC_FATAL_ERROR, val, 0, 0, 0, str);
break;
}
case INT_RESULT:
{
longlong nr= int_op();
if (null_value)
return 0; /* purecov: inspected */
str->set_int(nr, unsigned_flag, collation.collation);
break;
}
case REAL_RESULT:
{
double nr= real_op();
if (null_value)
return 0; /* purecov: inspected */
str->set_real(nr, decimals, collation.collation);
break;
}
case STRING_RESULT:
switch (field_type()) {
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return val_string_from_datetime(str);
case MYSQL_TYPE_DATE:
return val_string_from_date(str);
case MYSQL_TYPE_TIME:
return val_string_from_time(str);
default:
break;
}
return str_op(&str_value);
default:
DBUG_ASSERT(0);
}
return str;
}
double Item_func_numhybrid::val_real()
{
DBUG_ASSERT(fixed == 1);
switch (hybrid_type) {
case DECIMAL_RESULT:
{
my_decimal decimal_value, *val;
double result;
if (!(val= decimal_op(&decimal_value)))
return 0.0; // null is set
my_decimal2double(E_DEC_FATAL_ERROR, val, &result);
return result;
}
case INT_RESULT:
{
longlong result= int_op();
return unsigned_flag ? (double) ((ulonglong) result) : (double) result;
}
case REAL_RESULT:
return real_op();
case STRING_RESULT:
{
switch (field_type())
{
case MYSQL_TYPE_TIME:
case MYSQL_TYPE_DATE:
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return val_real_from_decimal();
default:
break;
}
char *end_not_used;
int err_not_used;
String *res= str_op(&str_value);
return (res ? my_strntod(res->charset(), (char*) res->ptr(), res->length(),
&end_not_used, &err_not_used) : 0.0);
}
default:
DBUG_ASSERT(0);
}
return 0.0;
}
longlong Item_func_numhybrid::val_int()
{
DBUG_ASSERT(fixed == 1);
switch (hybrid_type) {
case DECIMAL_RESULT:
{
my_decimal decimal_value, *val;
if (!(val= decimal_op(&decimal_value)))
return 0; // null is set
longlong result;
my_decimal2int(E_DEC_FATAL_ERROR, val, unsigned_flag, &result);
return result;
}
case INT_RESULT:
return int_op();
case REAL_RESULT:
return (longlong) rint(real_op());
case STRING_RESULT:
{
switch (field_type())
{
case MYSQL_TYPE_DATE:
return val_int_from_date();
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return val_int_from_datetime();
case MYSQL_TYPE_TIME:
return val_int_from_time();
default:
break;
}
int err_not_used;
String *res;
if (!(res= str_op(&str_value)))
return 0;
char *end= (char*) res->ptr() + res->length();
const CHARSET_INFO *cs= res->charset();
return (*(cs->cset->strtoll10))(cs, res->ptr(), &end, &err_not_used);
}
default:
DBUG_ASSERT(0);
}
return 0;
}
my_decimal *Item_func_numhybrid::val_decimal(my_decimal *decimal_value)
{
my_decimal *val= decimal_value;
DBUG_ASSERT(fixed == 1);
switch (hybrid_type) {
case DECIMAL_RESULT:
val= decimal_op(decimal_value);
break;
case INT_RESULT:
{
longlong result= int_op();
int2my_decimal(E_DEC_FATAL_ERROR, result, unsigned_flag, decimal_value);
break;
}
case REAL_RESULT:
{
double result= (double)real_op();
double2my_decimal(E_DEC_FATAL_ERROR, result, decimal_value);
break;
}
case STRING_RESULT:
{
switch (field_type())
{
case MYSQL_TYPE_DATE:
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return val_decimal_from_date(decimal_value);
case MYSQL_TYPE_TIME:
return val_decimal_from_time(decimal_value);
default:
break;
}
String *res;
if (!(res= str_op(&str_value)))
return NULL;
str2my_decimal(E_DEC_FATAL_ERROR, (char*) res->ptr(),
res->length(), res->charset(), decimal_value);
break;
}
case ROW_RESULT:
default:
DBUG_ASSERT(0);
}
return val;
}
bool Item_func_numhybrid::get_date(MYSQL_TIME *ltime, uint fuzzydate)
{
DBUG_ASSERT(fixed == 1);
switch (field_type())
{
case MYSQL_TYPE_DATE:
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return date_op(ltime, fuzzydate);
case MYSQL_TYPE_TIME:
return get_date_from_time(ltime);
default:
return Item::get_date_from_non_temporal(ltime, fuzzydate);
}
}
bool Item_func_numhybrid::get_time(MYSQL_TIME *ltime)
{
DBUG_ASSERT(fixed == 1);
switch (field_type())
{
case MYSQL_TYPE_TIME:
return time_op(ltime);
case MYSQL_TYPE_DATE:
return get_time_from_date(ltime);
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
return get_time_from_datetime(ltime);
default:
return Item::get_time_from_non_temporal(ltime);
}
}
void Item_func_signed::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("cast("));
args[0]->print(str, query_type);
str->append(STRING_WITH_LEN(" as signed)"));
}
longlong Item_func_signed::val_int_from_str(int *error)
{
char buff[MAX_FIELD_WIDTH], *end, *start;
uint32 length;
String tmp(buff,sizeof(buff), &my_charset_bin), *res;
longlong value;
const CHARSET_INFO *cs;
/*
For a string result, we must first get the string and then convert it
to a longlong
*/
if (!(res= args[0]->val_str(&tmp)))
{
null_value= 1;
*error= 0;
return 0;
}
null_value= 0;
start= (char *)res->ptr();
length= res->length();
cs= res->charset();
end= start + length;
value= cs->cset->strtoll10(cs, start, &end, error);
if (*error > 0 || end != start+ length)
{
ErrConvString err(res);
push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
ER_TRUNCATED_WRONG_VALUE,
ER(ER_TRUNCATED_WRONG_VALUE), "INTEGER",
err.ptr());
}
return value;
}
longlong Item_func_signed::val_int()
{
longlong value;
int error;
if (args[0]->cast_to_int_type() != STRING_RESULT ||
args[0]->is_temporal())
{
value= args[0]->val_int();
null_value= args[0]->null_value;
return value;
}
value= val_int_from_str(&error);
if (value < 0 && error == 0)
{
push_warning(current_thd, Sql_condition::WARN_LEVEL_WARN, ER_UNKNOWN_ERROR,
"Cast to signed converted positive out-of-range integer to "
"it's negative complement");
}
return value;
}
void Item_func_unsigned::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("cast("));
args[0]->print(str, query_type);
str->append(STRING_WITH_LEN(" as unsigned)"));
}
longlong Item_func_unsigned::val_int()
{
longlong value;
int error;
if (args[0]->cast_to_int_type() == DECIMAL_RESULT)
{
my_decimal tmp, *dec= args[0]->val_decimal(&tmp);
if (!(null_value= args[0]->null_value))
my_decimal2int(E_DEC_FATAL_ERROR, dec, 1, &value);
else
value= 0;
return value;
}
else if (args[0]->cast_to_int_type() != STRING_RESULT ||
args[0]->is_temporal())
{
value= args[0]->val_int();
null_value= args[0]->null_value;
return value;
}
value= val_int_from_str(&error);
if (error < 0)
push_warning(current_thd, Sql_condition::WARN_LEVEL_WARN, ER_UNKNOWN_ERROR,
"Cast to unsigned converted negative integer to it's "
"positive complement");
return value;
}
String *Item_decimal_typecast::val_str(String *str)
{
my_decimal tmp_buf, *tmp= val_decimal(&tmp_buf);
if (null_value)
return NULL;
my_decimal2string(E_DEC_FATAL_ERROR, tmp, 0, 0, 0, str);
return str;
}
double Item_decimal_typecast::val_real()
{
my_decimal tmp_buf, *tmp= val_decimal(&tmp_buf);
double res;
if (null_value)
return 0.0;
my_decimal2double(E_DEC_FATAL_ERROR, tmp, &res);
return res;
}
longlong Item_decimal_typecast::val_int()
{
my_decimal tmp_buf, *tmp= val_decimal(&tmp_buf);
longlong res;
if (null_value)
return 0;
my_decimal2int(E_DEC_FATAL_ERROR, tmp, unsigned_flag, &res);
return res;
}
my_decimal *Item_decimal_typecast::val_decimal(my_decimal *dec)
{
my_decimal tmp_buf, *tmp= args[0]->val_decimal(&tmp_buf);
bool sign;
uint precision;
if ((null_value= args[0]->null_value))
return NULL;
my_decimal_round(E_DEC_FATAL_ERROR, tmp, decimals, FALSE, dec);
sign= dec->sign();
if (unsigned_flag)
{
if (sign)
{
my_decimal_set_zero(dec);
goto err;
}
}
precision= my_decimal_length_to_precision(max_length,
decimals, unsigned_flag);
if (precision - decimals < (uint) my_decimal_intg(dec))
{
max_my_decimal(dec, precision, decimals);
dec->sign(sign);
goto err;
}
return dec;
err:
push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
ER_WARN_DATA_OUT_OF_RANGE,
ER(ER_WARN_DATA_OUT_OF_RANGE),
item_name.ptr(), 1L);
return dec;
}
void Item_decimal_typecast::print(String *str, enum_query_type query_type)
{
char len_buf[20*3 + 1];
char *end;
uint precision= my_decimal_length_to_precision(max_length, decimals,
unsigned_flag);
str->append(STRING_WITH_LEN("cast("));
args[0]->print(str, query_type);
str->append(STRING_WITH_LEN(" as decimal("));
end=int10_to_str(precision, len_buf,10);
str->append(len_buf, (uint32) (end - len_buf));
str->append(',');
end=int10_to_str(decimals, len_buf,10);
str->append(len_buf, (uint32) (end - len_buf));
str->append(')');
str->append(')');
}
double Item_func_plus::real_op()
{
double value= args[0]->val_real() + args[1]->val_real();
if ((null_value=args[0]->null_value || args[1]->null_value))
return 0.0;
return check_float_overflow(value);
}
longlong Item_func_plus::int_op()
{
longlong val0= args[0]->val_int();
longlong val1= args[1]->val_int();
longlong res= val0 + val1;
bool res_unsigned= FALSE;
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0;
/*
First check whether the result can be represented as a
(bool unsigned_flag, longlong value) pair, then check if it is compatible
with this Item's unsigned_flag by calling check_integer_overflow().
*/
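  /*
    Illustrative case: if both operands are unsigned and equal to 2^63,
    their sum (2^64) no longer fits in a ulonglong;
    test_if_sum_overflows_ull() below detects this and we raise an
    integer-overflow error.
  */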
if (args[0]->unsigned_flag)
{
if (args[1]->unsigned_flag || val1 >= 0)
{
if (test_if_sum_overflows_ull((ulonglong) val0, (ulonglong) val1))
goto err;
res_unsigned= TRUE;
}
else
{
/* val1 is negative */
if ((ulonglong) val0 > (ulonglong) LONGLONG_MAX)
res_unsigned= TRUE;
}
}
else
{
if (args[1]->unsigned_flag)
{
if (val0 >= 0)
{
if (test_if_sum_overflows_ull((ulonglong) val0, (ulonglong) val1))
goto err;
res_unsigned= TRUE;
}
else
{
if ((ulonglong) val1 > (ulonglong) LONGLONG_MAX)
res_unsigned= TRUE;
}
}
else
{
if (val0 >=0 && val1 >= 0)
res_unsigned= TRUE;
else if (val0 < 0 && val1 < 0 && res >= 0)
goto err;
}
}
return check_integer_overflow(res, res_unsigned);
err:
return raise_integer_overflow();
}
/**
Calculate plus of two decimals.
@param decimal_value Buffer that can be used to store result
@retval
0 Value was NULL; In this case null_value is set
@retval
\# Value of operation as a decimal
*/
my_decimal *Item_func_plus::decimal_op(my_decimal *decimal_value)
{
my_decimal value1, *val1;
my_decimal value2, *val2;
val1= args[0]->val_decimal(&value1);
if ((null_value= args[0]->null_value))
return 0;
val2= args[1]->val_decimal(&value2);
if (!(null_value= (args[1]->null_value ||
check_decimal_overflow(my_decimal_add(E_DEC_FATAL_ERROR &
~E_DEC_OVERFLOW,
decimal_value,
val1, val2)) > 3)))
return decimal_value;
return 0;
}
/**
Set precision of results for additive operations (+ and -)
*/
void Item_func_additive_op::result_precision()
{
decimals= max(args[0]->decimals, args[1]->decimals);
int arg1_int= args[0]->decimal_precision() - args[0]->decimals;
int arg2_int= args[1]->decimal_precision() - args[1]->decimals;
int precision= max(arg1_int, arg2_int) + 1 + decimals;
/* Integer operations keep unsigned_flag if one of arguments is unsigned */
if (result_type() == INT_RESULT)
unsigned_flag= args[0]->unsigned_flag | args[1]->unsigned_flag;
else
unsigned_flag= args[0]->unsigned_flag & args[1]->unsigned_flag;
max_length= my_decimal_precision_to_length_no_truncation(precision, decimals,
unsigned_flag);
}
/**
The following function is here to allow the user to force
subtraction of UNSIGNED BIGINT to return negative values.
*/
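/*
  Illustrative: with sql_mode NO_UNSIGNED_SUBTRACTION enabled,
  SELECT CAST(0 AS UNSIGNED) - 1 returns -1 instead of raising an
  unsigned out-of-range error.
*/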
void Item_func_additive_op::fix_length_and_dec()
{
Item_num_op::fix_length_and_dec();
if (unsigned_flag &&
(current_thd->variables.sql_mode & MODE_NO_UNSIGNED_SUBTRACTION))
unsigned_flag=0;
}
double Item_func_minus::real_op()
{
double value= args[0]->val_real() - args[1]->val_real();
if ((null_value=args[0]->null_value || args[1]->null_value))
return 0.0;
return check_float_overflow(value);
}
longlong Item_func_minus::int_op()
{
longlong val0= args[0]->val_int();
longlong val1= args[1]->val_int();
longlong res= val0 - val1;
bool res_unsigned= FALSE;
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0;
/*
First check whether the result can be represented as a
(bool unsigned_flag, longlong value) pair, then check if it is compatible
with this Item's unsigned_flag by calling check_integer_overflow().
*/
if (args[0]->unsigned_flag)
{
if (args[1]->unsigned_flag)
{
if ((ulonglong) val0 < (ulonglong) val1)
{
if (res >= 0)
goto err;
}
else
res_unsigned= TRUE;
}
else
{
if (val1 >= 0)
{
if ((ulonglong) val0 > (ulonglong) val1)
res_unsigned= TRUE;
}
else
{
if (test_if_sum_overflows_ull((ulonglong) val0, (ulonglong) -val1))
goto err;
res_unsigned= TRUE;
}
}
}
else
{
if (args[1]->unsigned_flag)
{
if ((ulonglong) (val0 - LONGLONG_MIN) < (ulonglong) val1)
goto err;
}
else
{
if (val0 > 0 && val1 < 0)
res_unsigned= TRUE;
else if (val0 < 0 && val1 > 0 && res >= 0)
goto err;
}
}
return check_integer_overflow(res, res_unsigned);
err:
return raise_integer_overflow();
}
/**
See Item_func_plus::decimal_op for comments.
*/
my_decimal *Item_func_minus::decimal_op(my_decimal *decimal_value)
{
my_decimal value1, *val1;
  my_decimal value2, *val2;
val1= args[0]->val_decimal(&value1);
if ((null_value= args[0]->null_value))
return 0;
val2= args[1]->val_decimal(&value2);
if (!(null_value= (args[1]->null_value ||
(check_decimal_overflow(my_decimal_sub(E_DEC_FATAL_ERROR &
~E_DEC_OVERFLOW,
decimal_value, val1,
val2)) > 3))))
return decimal_value;
return 0;
}
double Item_func_mul::real_op()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real() * args[1]->val_real();
if ((null_value=args[0]->null_value || args[1]->null_value))
return 0.0;
return check_float_overflow(value);
}
longlong Item_func_mul::int_op()
{
DBUG_ASSERT(fixed == 1);
longlong a= args[0]->val_int();
longlong b= args[1]->val_int();
longlong res;
ulonglong res0, res1;
ulong a0, a1, b0, b1;
bool res_unsigned= FALSE;
bool a_negative= FALSE, b_negative= FALSE;
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0;
/*
First check whether the result can be represented as a
(bool unsigned_flag, longlong value) pair, then check if it is compatible
with this Item's unsigned_flag by calling check_integer_overflow().
Let a = a1 * 2^32 + a0 and b = b1 * 2^32 + b0. Then
a * b = (a1 * 2^32 + a0) * (b1 * 2^32 + b0) = a1 * b1 * 2^64 +
+ (a1 * b0 + a0 * b1) * 2^32 + a0 * b0;
We can determine if the above sum overflows the ulonglong range by
sequentially checking the following conditions:
1. If both a1 and b1 are non-zero.
2. Otherwise, if (a1 * b0 + a0 * b1) is greater than ULONG_MAX.
3. Otherwise, if (a1 * b0 + a0 * b1) * 2^32 + a0 * b0 is greater than
ULONGLONG_MAX.
Since we also have to take the unsigned_flag for a and b into account,
it is easier to first work with absolute values and set the
correct sign later.
*/
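  /*
    Illustrative case: a = 2^33 (a1 = 2, a0 = 0) and b = 2^31 (b1 = 0,
    b0 = 2^31) give a1 * b0 + a0 * b1 = 2^32 > 0xFFFFFFFF, so condition 2
    above fires and the multiplication is reported as an overflow
    (a * b = 2^64 indeed exceeds ULONGLONG_MAX).
  */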
if (!args[0]->unsigned_flag && a < 0)
{
a_negative= TRUE;
a= -a;
}
if (!args[1]->unsigned_flag && b < 0)
{
b_negative= TRUE;
b= -b;
}
a0= 0xFFFFFFFFUL & a;
a1= ((ulonglong) a) >> 32;
b0= 0xFFFFFFFFUL & b;
b1= ((ulonglong) b) >> 32;
if (a1 && b1)
goto err;
res1= (ulonglong) a1 * b0 + (ulonglong) a0 * b1;
if (res1 > 0xFFFFFFFFUL)
goto err;
res1= res1 << 32;
res0= (ulonglong) a0 * b0;
if (test_if_sum_overflows_ull(res1, res0))
goto err;
res= res1 + res0;
if (a_negative != b_negative)
{
if ((ulonglong) res > (ulonglong) LONGLONG_MIN + 1)
goto err;
res= -res;
}
else
res_unsigned= TRUE;
return check_integer_overflow(res, res_unsigned);
err:
return raise_integer_overflow();
}
/** See Item_func_plus::decimal_op for comments. */
my_decimal *Item_func_mul::decimal_op(my_decimal *decimal_value)
{
my_decimal value1, *val1;
my_decimal value2, *val2;
val1= args[0]->val_decimal(&value1);
if ((null_value= args[0]->null_value))
return 0;
val2= args[1]->val_decimal(&value2);
if (!(null_value= (args[1]->null_value ||
(check_decimal_overflow(my_decimal_mul(E_DEC_FATAL_ERROR &
~E_DEC_OVERFLOW,
decimal_value, val1,
val2)) > 3))))
return decimal_value;
return 0;
}
void Item_func_mul::result_precision()
{
/* Integer operations keep unsigned_flag if one of arguments is unsigned */
if (result_type() == INT_RESULT)
unsigned_flag= args[0]->unsigned_flag | args[1]->unsigned_flag;
else
unsigned_flag= args[0]->unsigned_flag & args[1]->unsigned_flag;
decimals= min(args[0]->decimals + args[1]->decimals, DECIMAL_MAX_SCALE);
uint est_prec = args[0]->decimal_precision() + args[1]->decimal_precision();
uint precision= min<uint>(est_prec, DECIMAL_MAX_PRECISION);
max_length= my_decimal_precision_to_length_no_truncation(precision, decimals,
unsigned_flag);
}
double Item_func_div::real_op()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
double val2= args[1]->val_real();
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0.0;
if (val2 == 0.0)
{
signal_divide_by_null();
return 0.0;
}
return check_float_overflow(value/val2);
}
my_decimal *Item_func_div::decimal_op(my_decimal *decimal_value)
{
my_decimal value1, *val1;
my_decimal value2, *val2;
int err;
val1= args[0]->val_decimal(&value1);
if ((null_value= args[0]->null_value))
return 0;
val2= args[1]->val_decimal(&value2);
if ((null_value= args[1]->null_value))
return 0;
if ((err= check_decimal_overflow(my_decimal_div(E_DEC_FATAL_ERROR &
~E_DEC_OVERFLOW &
~E_DEC_DIV_ZERO,
decimal_value,
val1, val2,
prec_increment))) > 3)
{
if (err == E_DEC_DIV_ZERO)
signal_divide_by_null();
null_value= 1;
return 0;
}
return decimal_value;
}
void Item_func_div::result_precision()
{
uint precision= min<uint>(args[0]->decimal_precision() +
args[1]->decimals + prec_increment,
DECIMAL_MAX_PRECISION);
/* Integer operations keep unsigned_flag if one of arguments is unsigned */
if (result_type() == INT_RESULT)
unsigned_flag= args[0]->unsigned_flag | args[1]->unsigned_flag;
else
unsigned_flag= args[0]->unsigned_flag & args[1]->unsigned_flag;
decimals= min<uint>(args[0]->decimals + prec_increment, DECIMAL_MAX_SCALE);
max_length= my_decimal_precision_to_length_no_truncation(precision, decimals,
unsigned_flag);
}
void Item_func_div::fix_length_and_dec()
{
DBUG_ENTER("Item_func_div::fix_length_and_dec");
prec_increment= current_thd->variables.div_precincrement;
Item_num_op::fix_length_and_dec();
switch(hybrid_type) {
case REAL_RESULT:
{
decimals=max(args[0]->decimals,args[1]->decimals)+prec_increment;
set_if_smaller(decimals, NOT_FIXED_DEC);
uint tmp=float_length(decimals);
if (decimals == NOT_FIXED_DEC)
max_length= tmp;
else
{
max_length=args[0]->max_length - args[0]->decimals + decimals;
set_if_smaller(max_length,tmp);
}
break;
}
case INT_RESULT:
hybrid_type= DECIMAL_RESULT;
DBUG_PRINT("info", ("Type changed: DECIMAL_RESULT"));
result_precision();
break;
case DECIMAL_RESULT:
result_precision();
break;
default:
DBUG_ASSERT(0);
}
  maybe_null= 1; // division by zero
DBUG_VOID_RETURN;
}
/* Integer division */
longlong Item_func_int_div::val_int()
{
DBUG_ASSERT(fixed == 1);
/*
Perform division using DECIMAL math if either of the operands has a
non-integer type
*/
if (args[0]->result_type() != INT_RESULT ||
args[1]->result_type() != INT_RESULT)
{
my_decimal tmp;
my_decimal *val0p= args[0]->val_decimal(&tmp);
if ((null_value= args[0]->null_value))
return 0;
my_decimal val0= *val0p;
my_decimal *val1p= args[1]->val_decimal(&tmp);
if ((null_value= args[1]->null_value))
return 0;
my_decimal val1= *val1p;
int err;
if ((err= my_decimal_div(E_DEC_FATAL_ERROR & ~E_DEC_DIV_ZERO, &tmp,
&val0, &val1, 0)) > 3)
{
if (err == E_DEC_DIV_ZERO)
signal_divide_by_null();
return 0;
}
my_decimal truncated;
const bool do_truncate= true;
if (my_decimal_round(E_DEC_FATAL_ERROR, &tmp, 0, do_truncate, &truncated))
DBUG_ASSERT(false);
longlong res;
if (my_decimal2int(E_DEC_FATAL_ERROR, &truncated, unsigned_flag, &res) &
E_DEC_OVERFLOW)
raise_integer_overflow();
return res;
}
longlong val0=args[0]->val_int();
longlong val1=args[1]->val_int();
bool val0_negative, val1_negative, res_negative;
ulonglong uval0, uval1, res;
if ((null_value= (args[0]->null_value || args[1]->null_value)))
return 0;
if (val1 == 0)
{
signal_divide_by_null();
return 0;
}
val0_negative= !args[0]->unsigned_flag && val0 < 0;
val1_negative= !args[1]->unsigned_flag && val1 < 0;
res_negative= val0_negative != val1_negative;
uval0= (ulonglong) (val0_negative ? -val0 : val0);
uval1= (ulonglong) (val1_negative ? -val1 : val1);
res= uval0 / uval1;
if (res_negative)
{
if (res > (ulonglong) LONGLONG_MAX)
return raise_integer_overflow();
res= (ulonglong) (-(longlong) res);
}
return check_integer_overflow(res, !res_negative);
}
void Item_func_int_div::fix_length_and_dec()
{
Item_result argtype= args[0]->result_type();
  /* use precision only for the data types where it is applicable and valid */
max_length=args[0]->max_length -
(argtype == DECIMAL_RESULT || argtype == INT_RESULT ?
args[0]->decimals : 0);
maybe_null=1;
unsigned_flag=args[0]->unsigned_flag | args[1]->unsigned_flag;
}
longlong Item_func_mod::int_op()
{
DBUG_ASSERT(fixed == 1);
longlong val0= args[0]->val_int();
longlong val1= args[1]->val_int();
bool val0_negative, val1_negative;
ulonglong uval0, uval1;
ulonglong res;
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0; /* purecov: inspected */
if (val1 == 0)
{
signal_divide_by_null();
return 0;
}
/*
'%' is calculated by integer division internally. Since dividing
LONGLONG_MIN by -1 generates SIGFPE, we calculate using unsigned values and
then adjust the sign appropriately.
*/
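  /*
    For example (illustrative): (-7) % 3 is computed as 7 % 3 = 1 on unsigned
    values, and the dividend's sign is then restored, giving -1.
  */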
val0_negative= !args[0]->unsigned_flag && val0 < 0;
val1_negative= !args[1]->unsigned_flag && val1 < 0;
uval0= (ulonglong) (val0_negative ? -val0 : val0);
uval1= (ulonglong) (val1_negative ? -val1 : val1);
res= uval0 % uval1;
return check_integer_overflow(val0_negative ? -(longlong) res : res,
!val0_negative);
}
double Item_func_mod::real_op()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
double val2= args[1]->val_real();
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0.0; /* purecov: inspected */
if (val2 == 0.0)
{
signal_divide_by_null();
return 0.0;
}
return fmod(value,val2);
}
my_decimal *Item_func_mod::decimal_op(my_decimal *decimal_value)
{
my_decimal value1, *val1;
my_decimal value2, *val2;
val1= args[0]->val_decimal(&value1);
if ((null_value= args[0]->null_value))
return 0;
val2= args[1]->val_decimal(&value2);
if ((null_value= args[1]->null_value))
return 0;
switch (my_decimal_mod(E_DEC_FATAL_ERROR & ~E_DEC_DIV_ZERO, decimal_value,
val1, val2)) {
case E_DEC_TRUNCATED:
case E_DEC_OK:
return decimal_value;
case E_DEC_DIV_ZERO:
signal_divide_by_null();
default:
null_value= 1;
return 0;
}
}
void Item_func_mod::result_precision()
{
decimals= max(args[0]->decimals, args[1]->decimals);
max_length= max(args[0]->max_length, args[1]->max_length);
}
void Item_func_mod::fix_length_and_dec()
{
Item_num_op::fix_length_and_dec();
maybe_null= 1;
unsigned_flag= args[0]->unsigned_flag;
}
double Item_func_neg::real_op()
{
double value= args[0]->val_real();
null_value= args[0]->null_value;
return -value;
}
longlong Item_func_neg::int_op()
{
longlong value= args[0]->val_int();
if ((null_value= args[0]->null_value))
return 0;
if (args[0]->unsigned_flag &&
(ulonglong) value > (ulonglong) LONGLONG_MAX + 1ULL)
return raise_integer_overflow();
// For some platforms we need special handling of LONGLONG_MIN to
// guarantee overflow.
if (value == LONGLONG_MIN &&
!args[0]->unsigned_flag &&
!unsigned_flag)
return raise_integer_overflow();
return check_integer_overflow(-value, !args[0]->unsigned_flag && value < 0);
}
my_decimal *Item_func_neg::decimal_op(my_decimal *decimal_value)
{
my_decimal val, *value= args[0]->val_decimal(&val);
if (!(null_value= args[0]->null_value))
{
my_decimal2decimal(value, decimal_value);
my_decimal_neg(decimal_value);
return decimal_value;
}
return 0;
}
void Item_func_neg::fix_num_length_and_dec()
{
decimals= args[0]->decimals;
/* Add 1 because a sign can appear */
max_length= args[0]->max_length + 1;
}
void Item_func_neg::fix_length_and_dec()
{
DBUG_ENTER("Item_func_neg::fix_length_and_dec");
Item_func_num1::fix_length_and_dec();
/*
If this is in an integer context, keep the context as integer if possible
(this is how multiplication and other integer functions work).
Use val() to get the value, as arg_type doesn't mean that the item is
Item_int or Item_real, due to the existence of Item_param.
*/
if (hybrid_type == INT_RESULT && args[0]->const_item())
{
longlong val= args[0]->val_int();
if ((ulonglong) val >= (ulonglong) LONGLONG_MIN &&
((ulonglong) val != (ulonglong) LONGLONG_MIN ||
args[0]->type() != INT_ITEM))
{
/*
Ensure that result is converted to DECIMAL, as longlong can't hold
the negated number
*/
hybrid_type= DECIMAL_RESULT;
DBUG_PRINT("info", ("Type changed: DECIMAL_RESULT"));
}
}
unsigned_flag= 0;
DBUG_VOID_RETURN;
}
double Item_func_abs::real_op()
{
double value= args[0]->val_real();
null_value= args[0]->null_value;
return fabs(value);
}
longlong Item_func_abs::int_op()
{
longlong value= args[0]->val_int();
if ((null_value= args[0]->null_value))
return 0;
if (unsigned_flag)
return value;
/* -LONGLONG_MIN = LONGLONG_MAX + 1 => outside of signed longlong range */
if (value == LONGLONG_MIN)
return raise_integer_overflow();
return (value >= 0) ? value : -value;
}
my_decimal *Item_func_abs::decimal_op(my_decimal *decimal_value)
{
my_decimal val, *value= args[0]->val_decimal(&val);
if (!(null_value= args[0]->null_value))
{
my_decimal2decimal(value, decimal_value);
if (decimal_value->sign())
my_decimal_neg(decimal_value);
return decimal_value;
}
return 0;
}
void Item_func_abs::fix_length_and_dec()
{
Item_func_num1::fix_length_and_dec();
unsigned_flag= args[0]->unsigned_flag;
}
/** Gateway to natural LOG function. */
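/*
Note: for x <= 0 the result is SQL NULL; signal_divide_by_null() sets
null_value and may also raise a warning or error depending on the SQL mode.
*/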
double Item_func_ln::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value= args[0]->null_value))
return 0.0;
if (value <= 0.0)
{
signal_divide_by_null();
return 0.0;
}
return log(value);
}
/**
Extended but slower LOG function.
We have to check that all values are > zero and that the first one is not
one, since in those cases the result is not a number.
*/
double Item_func_log::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value= args[0]->null_value))
return 0.0;
if (value <= 0.0)
{
signal_divide_by_null();
return 0.0;
}
if (arg_count == 2)
{
double value2= args[1]->val_real();
if ((null_value= args[1]->null_value))
return 0.0;
if (value2 <= 0.0 || value == 1.0)
{
signal_divide_by_null();
return 0.0;
}
return log(value2) / log(value);
}
return log(value);
}
double Item_func_log2::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
if (value <= 0.0)
{
signal_divide_by_null();
return 0.0;
}
return log(value) / M_LN2;
}
double Item_func_log10::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value= args[0]->null_value))
return 0.0;
if (value <= 0.0)
{
signal_divide_by_null();
return 0.0;
}
return log10(value);
}
double Item_func_exp::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0; /* purecov: inspected */
return check_float_overflow(exp(value));
}
double Item_func_sqrt::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=(args[0]->null_value || value < 0)))
return 0.0; /* purecov: inspected */
return sqrt(value);
}
double Item_func_pow::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
double val2= args[1]->val_real();
if ((null_value=(args[0]->null_value || args[1]->null_value)))
return 0.0; /* purecov: inspected */
return check_float_overflow(pow(value,val2));
}
// Trigonometric functions
double Item_func_acos::val_real()
{
DBUG_ASSERT(fixed == 1);
/* One can use this to defer SELECT processing. */
DEBUG_SYNC(current_thd, "before_acos_function");
// The volatile is for BUG #2338, to calm the optimizer down (because of a gcc bug)
volatile double value= args[0]->val_real();
if ((null_value=(args[0]->null_value || (value < -1.0 || value > 1.0))))
return 0.0;
return acos(value);
}
double Item_func_asin::val_real()
{
DBUG_ASSERT(fixed == 1);
// The volatile is for BUG #2338, to calm the optimizer down (because of a gcc bug)
volatile double value= args[0]->val_real();
if ((null_value=(args[0]->null_value || (value < -1.0 || value > 1.0))))
return 0.0;
return asin(value);
}
double Item_func_atan::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
if (arg_count == 2)
{
double val2= args[1]->val_real();
if ((null_value=args[1]->null_value))
return 0.0;
return check_float_overflow(atan2(value, val2));
}
return atan(value);
}
double Item_func_cos::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
return cos(value);
}
double Item_func_sin::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
return sin(value);
}
double Item_func_tan::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
return check_float_overflow(tan(value));
}
double Item_func_cot::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0.0;
return check_float_overflow(1.0 / tan(value));
}
// Shift-functions, same as << and >> in C/C++
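// For example, 1 << 2 = 4 and 16 >> 2 = 4; a shift count of 64 or more yields 0 (see the range check below).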
longlong Item_func_shift_left::val_int()
{
DBUG_ASSERT(fixed == 1);
uint shift;
ulonglong res= ((ulonglong) args[0]->val_int() <<
(shift=(uint) args[1]->val_int()));
if (args[0]->null_value || args[1]->null_value)
{
null_value=1;
return 0;
}
null_value=0;
return (shift < sizeof(longlong)*8 ? (longlong) res : LL(0));
}
longlong Item_func_shift_right::val_int()
{
DBUG_ASSERT(fixed == 1);
uint shift;
ulonglong res= (ulonglong) args[0]->val_int() >>
(shift=(uint) args[1]->val_int());
if (args[0]->null_value || args[1]->null_value)
{
null_value=1;
return 0;
}
null_value=0;
return (shift < sizeof(longlong)*8 ? (longlong) res : LL(0));
}
longlong Item_func_bit_neg::val_int()
{
DBUG_ASSERT(fixed == 1);
ulonglong res= (ulonglong) args[0]->val_int();
if ((null_value=args[0]->null_value))
return 0;
return ~res;
}
// Conversion functions
void Item_func_integer::fix_length_and_dec()
{
max_length=args[0]->max_length - args[0]->decimals+1;
uint tmp=float_length(decimals);
set_if_smaller(max_length,tmp);
decimals=0;
}
void Item_func_int_val::fix_num_length_and_dec()
{
ulonglong tmp_max_length= (ulonglong ) args[0]->max_length -
(args[0]->decimals ? args[0]->decimals + 1 : 0) + 2;
max_length= tmp_max_length > (ulonglong) 4294967295U ?
(uint32) 4294967295U : (uint32) tmp_max_length;
uint tmp= float_length(decimals);
set_if_smaller(max_length,tmp);
decimals= 0;
}
void Item_func_int_val::find_num_type()
{
DBUG_ENTER("Item_func_int_val::find_num_type");
DBUG_PRINT("info", ("name %s", func_name()));
switch(hybrid_type= args[0]->result_type())
{
case STRING_RESULT:
case REAL_RESULT:
hybrid_type= REAL_RESULT;
max_length= float_length(decimals);
break;
case INT_RESULT:
case DECIMAL_RESULT:
/*
-2 because the most significant digit position of a longlong cannot hold an
arbitrary digit, and one position is reserved for the value growing during
the operation
*/
if ((args[0]->max_length - args[0]->decimals) >=
(DECIMAL_LONGLONG_DIGITS - 2))
{
hybrid_type= DECIMAL_RESULT;
}
else
{
unsigned_flag= args[0]->unsigned_flag;
hybrid_type= INT_RESULT;
}
break;
default:
DBUG_ASSERT(0);
}
DBUG_PRINT("info", ("Type: %s",
(hybrid_type == REAL_RESULT ? "REAL_RESULT" :
hybrid_type == DECIMAL_RESULT ? "DECIMAL_RESULT" :
hybrid_type == INT_RESULT ? "INT_RESULT" :
"--ILLEGAL!!!--")));
DBUG_VOID_RETURN;
}
longlong Item_func_ceiling::int_op()
{
longlong result;
switch (args[0]->result_type()) {
case INT_RESULT:
result= args[0]->val_int();
null_value= args[0]->null_value;
break;
case DECIMAL_RESULT:
{
my_decimal dec_buf, *dec;
if ((dec= Item_func_ceiling::decimal_op(&dec_buf)))
my_decimal2int(E_DEC_FATAL_ERROR, dec, unsigned_flag, &result);
else
result= 0;
break;
}
default:
result= (longlong)Item_func_ceiling::real_op();
};
return result;
}
double Item_func_ceiling::real_op()
{
/*
The volatile is for BUG #3051, to calm the optimizer down (because of a
gcc bug)
*/
volatile double value= args[0]->val_real();
null_value= args[0]->null_value;
return ceil(value);
}
my_decimal *Item_func_ceiling::decimal_op(my_decimal *decimal_value)
{
my_decimal val, *value= args[0]->val_decimal(&val);
if (!(null_value= (args[0]->null_value ||
my_decimal_ceiling(E_DEC_FATAL_ERROR, value,
decimal_value) > 1)))
return decimal_value;
return 0;
}
longlong Item_func_floor::int_op()
{
longlong result;
switch (args[0]->result_type()) {
case INT_RESULT:
result= args[0]->val_int();
null_value= args[0]->null_value;
break;
case DECIMAL_RESULT:
{
my_decimal dec_buf, *dec;
if ((dec= Item_func_floor::decimal_op(&dec_buf)))
my_decimal2int(E_DEC_FATAL_ERROR, dec, unsigned_flag, &result);
else
result= 0;
break;
}
default:
result= (longlong)Item_func_floor::real_op();
};
return result;
}
double Item_func_floor::real_op()
{
/*
The volatile is for BUG #3051, to calm the optimizer down (because of a
gcc bug)
*/
volatile double value= args[0]->val_real();
null_value= args[0]->null_value;
return floor(value);
}
my_decimal *Item_func_floor::decimal_op(my_decimal *decimal_value)
{
my_decimal val, *value= args[0]->val_decimal(&val);
if (!(null_value= (args[0]->null_value ||
my_decimal_floor(E_DEC_FATAL_ERROR, value,
decimal_value) > 1)))
return decimal_value;
return 0;
}
void Item_func_round::fix_length_and_dec()
{
int decimals_to_set;
longlong val1;
bool val1_unsigned;
unsigned_flag= args[0]->unsigned_flag;
if (!args[1]->const_item())
{
decimals= args[0]->decimals;
max_length= float_length(decimals);
if (args[0]->result_type() == DECIMAL_RESULT)
{
max_length++;
hybrid_type= DECIMAL_RESULT;
}
else
hybrid_type= REAL_RESULT;
return;
}
val1= args[1]->val_int();
if ((null_value= args[1]->is_null()))
return;
val1_unsigned= args[1]->unsigned_flag;
if (val1 < 0)
decimals_to_set= val1_unsigned ? INT_MAX : 0;
else
decimals_to_set= (val1 > INT_MAX) ? INT_MAX : (int) val1;
if (args[0]->decimals == NOT_FIXED_DEC)
{
decimals= min(decimals_to_set, NOT_FIXED_DEC);
max_length= float_length(decimals);
hybrid_type= REAL_RESULT;
return;
}
switch (args[0]->result_type()) {
case REAL_RESULT:
case STRING_RESULT:
hybrid_type= REAL_RESULT;
decimals= min(decimals_to_set, NOT_FIXED_DEC);
max_length= float_length(decimals);
break;
case INT_RESULT:
if ((!decimals_to_set && truncate) || (args[0]->decimal_precision() < DECIMAL_LONGLONG_DIGITS))
{
int length_can_increase= MY_TEST(!truncate && (val1 < 0) && !val1_unsigned);
max_length= args[0]->max_length + length_can_increase;
/* Here we can keep INT_RESULT */
hybrid_type= INT_RESULT;
decimals= 0;
break;
}
/* fall through */
case DECIMAL_RESULT:
{
hybrid_type= DECIMAL_RESULT;
decimals_to_set= min(DECIMAL_MAX_SCALE, decimals_to_set);
int decimals_delta= args[0]->decimals - decimals_to_set;
int precision= args[0]->decimal_precision();
int length_increase= ((decimals_delta <= 0) || truncate) ? 0:1;
precision-= decimals_delta - length_increase;
decimals= min(decimals_to_set, DECIMAL_MAX_SCALE);
max_length= my_decimal_precision_to_length_no_truncation(precision,
decimals,
unsigned_flag);
break;
}
default:
DBUG_ASSERT(0); /* This result type isn't handled */
}
}
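/*
Round 'value' to 'dec' decimal places (to the left of the decimal point when
'dec' is negative). With truncate=true the value is truncated towards zero
instead of rounded, e.g. 2.787 with dec=2 gives 2.78 when truncating and
2.79 when rounding.
*/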
double my_double_round(double value, longlong dec, bool dec_unsigned,
bool truncate)
{
double tmp;
bool dec_negative= (dec < 0) && !dec_unsigned;
ulonglong abs_dec= dec_negative ? -dec : dec;
/*
tmp2 is here to avoid returning the value with 80-bit precision.
This ensures that the test round(0.1,1) = round(0.1,1) is true.
Tagging with volatile is no guarantee; it may still be optimized away...
*/
volatile double tmp2;
tmp=(abs_dec < array_elements(log_10) ?
log_10[abs_dec] : pow(10.0,(double) abs_dec));
// Pre-compute these, to avoid optimizing away e.g. 'floor(v/tmp) * tmp'.
volatile double value_div_tmp= value / tmp;
volatile double value_mul_tmp= value * tmp;
if (dec_negative && my_isinf(tmp))
tmp2= 0.0;
else if (!dec_negative && my_isinf(value_mul_tmp))
tmp2= value;
else if (truncate)
{
if (value >= 0.0)
tmp2= dec < 0 ? floor(value_div_tmp) * tmp : floor(value_mul_tmp) / tmp;
else
tmp2= dec < 0 ? ceil(value_div_tmp) * tmp : ceil(value_mul_tmp) / tmp;
}
else
tmp2=dec < 0 ? rint(value_div_tmp) * tmp : rint(value_mul_tmp) / tmp;
return tmp2;
}
double Item_func_round::real_op()
{
double value= args[0]->val_real();
if (!(null_value= args[0]->null_value || args[1]->null_value))
return my_double_round(value, args[1]->val_int(), args[1]->unsigned_flag,
truncate);
return 0.0;
}
/*
Rounds a given value to the nearest multiple of the 'to' argument (a power
of 10), avoiding overflows when the value is close to the ulonglong range
boundary.
*/
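/* For example, my_unsigned_round(155, 10) returns 160 and my_unsigned_round(154, 10) returns 150. */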
static inline ulonglong my_unsigned_round(ulonglong value, ulonglong to)
{
ulonglong tmp= value / to * to;
return (value - tmp < (to >> 1)) ? tmp : tmp + to;
}
longlong Item_func_round::int_op()
{
longlong value= args[0]->val_int();
longlong dec= args[1]->val_int();
decimals= 0;
ulonglong abs_dec;
if ((null_value= args[0]->null_value || args[1]->null_value))
return 0;
if ((dec >= 0) || args[1]->unsigned_flag)
return value; // integers have no digits after the decimal point
abs_dec= -dec;
longlong tmp;
if(abs_dec >= array_elements(log_10_int))
return 0;
tmp= log_10_int[abs_dec];
if (truncate)
value= (unsigned_flag) ?
((ulonglong) value / tmp) * tmp : (value / tmp) * tmp;
else
value= (unsigned_flag || value >= 0) ?
my_unsigned_round((ulonglong) value, tmp) :
-(longlong) my_unsigned_round((ulonglong) -value, tmp);
return value;
}
my_decimal *Item_func_round::decimal_op(my_decimal *decimal_value)
{
my_decimal val, *value= args[0]->val_decimal(&val);
longlong dec= args[1]->val_int();
if (dec >= 0 || args[1]->unsigned_flag)
dec= min<ulonglong>(dec, decimals);
else if (dec < INT_MIN)
dec= INT_MIN;
if (!(null_value= (args[0]->null_value || args[1]->null_value ||
my_decimal_round(E_DEC_FATAL_ERROR, value, (int) dec,
truncate, decimal_value) > 1)))
return decimal_value;
return 0;
}
void Item_func_rand::seed_random(Item *arg)
{
/*
TODO: do not reinitialize 'rand' for every execution of a PS/SP if
args[0] is a constant.
*/
uint32 tmp= (uint32) arg->val_int();
randominit(rand, (uint32) (tmp*0x10001L+55555555L),
(uint32) (tmp*0x10000001L));
}
bool Item_func_rand::fix_fields(THD *thd,Item **ref)
{
if (Item_real_func::fix_fields(thd, ref))
return TRUE;
if (arg_count)
{ // Only use argument once in query
/*
Allocate rand structure once: we must use thd->stmt_arena
to create rand in proper mem_root if it's a prepared statement or
stored procedure.
No need to send a Rand log event if a seed was given, e.g. RAND(seed),
as it will be replicated in the query as such.
*/
if (!rand && !(rand= (struct rand_struct*)
thd->stmt_arena->alloc(sizeof(*rand))))
return TRUE;
}
else
{
/*
Save the seed only the first time RAND() is used in the query
Once events are forwarded rather than recreated,
the following can be skipped if inside the slave thread
*/
if (!thd->rand_used)
{
thd->rand_used= 1;
thd->rand_saved_seed1= thd->rand.seed1;
thd->rand_saved_seed2= thd->rand.seed2;
}
rand= &thd->rand;
}
return FALSE;
}
double Item_func_rand::val_real()
{
DBUG_ASSERT(fixed == 1);
if (arg_count)
{
if (!args[0]->const_item())
seed_random(args[0]);
else if (first_eval)
{
/*
Constantness of args[0] may be set during JOIN::optimize(), if args[0]
is a field item of a "constant" table. Thus, we have to evaluate
seed_random() for a constant argument there and not in the fix_fields method.
*/
first_eval= FALSE;
seed_random(args[0]);
}
}
return my_rnd(rand);
}
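/* SIGN(x): returns -1, 0 or 1 according to the sign of x, e.g. SIGN(-32) returns -1. */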
longlong Item_func_sign::val_int()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
null_value=args[0]->null_value;
return value < 0.0 ? -1 : (value > 0 ? 1 : 0);
}
double Item_func_units::val_real()
{
DBUG_ASSERT(fixed == 1);
double value= args[0]->val_real();
if ((null_value=args[0]->null_value))
return 0;
return check_float_overflow(value * mul + add);
}
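/*
Type resolution for LEAST()/GREATEST(). The comparison type and the result
metadata are aggregated over all arguments: string comparison is used only
when every argument is a string, and DATE/DATETIME arguments switch the
comparison to the temporal code path. For example, GREATEST(2, 0) returns 2
and LEAST('B', 'A', 'C') returns 'A'.
*/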
void Item_func_min_max::fix_length_and_dec()
{
uint string_arg_count= 0;
int max_int_part=0;
bool datetime_found= FALSE;
decimals=0;
max_length=0;
maybe_null=0;
cmp_type= args[0]->temporal_with_date_as_number_result_type();
for (uint i=0 ; i < arg_count ; i++)
{
set_if_bigger(max_length, args[i]->max_length);
set_if_bigger(decimals, args[i]->decimals);
set_if_bigger(max_int_part, args[i]->decimal_int_part());
if (args[i]->maybe_null)
maybe_null=1;
cmp_type= item_cmp_type(cmp_type,
args[i]->temporal_with_date_as_number_result_type());
if (args[i]->result_type() == STRING_RESULT)
string_arg_count++;
if (args[i]->result_type() != ROW_RESULT &&
args[i]->is_temporal_with_date())
{
datetime_found= TRUE;
if (!datetime_item || args[i]->field_type() == MYSQL_TYPE_DATETIME)
datetime_item= args[i];
}
}
if (string_arg_count == arg_count)
{
// We compare as strings only if all arguments were strings.
agg_arg_charsets_for_string_result_with_comparison(collation,
args, arg_count);
if (datetime_found)
{
thd= current_thd;
compare_as_dates= TRUE;
/*
We should not do this:
cached_field_type= datetime_item->field_type();
count_datetime_length(args, arg_count);
because compare_as_dates can be TRUE but
result type can still be VARCHAR.
*/
}
}
else if ((cmp_type == DECIMAL_RESULT) || (cmp_type == INT_RESULT))
{
collation.set_numeric();
fix_char_length(my_decimal_precision_to_length_no_truncation(max_int_part +
decimals,
decimals,
unsigned_flag));
}
else if (cmp_type == REAL_RESULT)
fix_char_length(float_length(decimals));
cached_field_type= agg_field_type(args, arg_count);
}
/*
Compare item arguments in the DATETIME context.
SYNOPSIS
cmp_datetimes()
value [out] found least/greatest DATE/DATETIME value
DESCRIPTION
Compare item arguments as DATETIME values and return the index of the
least/greatest argument in the arguments array.
The correct integer DATE/DATETIME value of the found argument is
stored to the value pointer, if the latter is provided.
RETURN
0 If one of the arguments is NULL or there was an execution error
# index of the least/greatest argument
*/
uint Item_func_min_max::cmp_datetimes(longlong *value)
{
longlong UNINIT_VAR(min_max);
uint min_max_idx= 0;
for (uint i=0; i < arg_count ; i++)
{
Item **arg= args + i;
bool is_null;
longlong res= get_datetime_value(thd, &arg, 0, datetime_item, &is_null);
/* Check if we need to stop (because of error or KILL) and stop the loop */
if (thd->is_error())
{
null_value= 1;
return 0;
}
if ((null_value= args[i]->null_value))
return 0;
if (i == 0 || (res < min_max ? cmp_sign : -cmp_sign) > 0)
{
min_max= res;
min_max_idx= i;
}
}
if (value)
*value= min_max;
return min_max_idx;
}
uint Item_func_min_max::cmp_times(longlong *value)
{
longlong UNINIT_VAR(min_max);
uint min_max_idx= 0;
for (uint i=0; i < arg_count ; i++)
{
longlong res= args[i]->val_time_temporal();
if ((null_value= args[i]->null_value))
return 0;
if (i == 0 || (res < min_max ? cmp_sign : -cmp_sign) > 0)
{
min_max= res;
min_max_idx= i;
}
}
if (value)
*value= min_max;
return min_max_idx;
}
String *Item_func_min_max::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
if (compare_as_dates)
{
if (is_temporal())
{
/*
In case of temporal data types, we always return
string value according the format of the data type.
For example, in case of LEAST(time_column, datetime_column)
the result date type is DATETIME,
so we return a 'YYYY-MM-DD hh:mm:ss' string even if time_column wins
(conversion from TIME to DATETIME happens in this case).
*/
longlong result;
cmp_datetimes(&result);
if (null_value)
return 0;
MYSQL_TIME ltime;
TIME_from_longlong_packed(<ime, field_type(), result);
return (null_value= my_TIME_to_str(<ime, str, decimals)) ?
(String *) 0 : str;
}
else
{
/*
In case of VARCHAR result type we just return val_str()
value of the winning item AS IS, without conversion.
*/
String *str_res;
uint min_max_idx= cmp_datetimes(NULL);
if (null_value)
return 0;
str_res= args[min_max_idx]->val_str(str);
if (args[min_max_idx]->null_value)
{
// check if the call to val_str() above returns a NULL value
null_value= 1;
return NULL;
}
str_res->set_charset(collation.collation);
return str_res;
}
}
switch (cmp_type) {
case INT_RESULT:
{
longlong nr=val_int();
if (null_value)
return 0;
str->set_int(nr, unsigned_flag, collation.collation);
return str;
}
case DECIMAL_RESULT:
{
my_decimal dec_buf, *dec_val= val_decimal(&dec_buf);
if (null_value)
return 0;
my_decimal2string(E_DEC_FATAL_ERROR, dec_val, 0, 0, 0, str);
return str;
}
case REAL_RESULT:
{
double nr= val_real();
if (null_value)
return 0; /* purecov: inspected */
str->set_real(nr, decimals, collation.collation);
return str;
}
case STRING_RESULT:
{
String *UNINIT_VAR(res);
for (uint i=0; i < arg_count ; i++)
{
if (i == 0)
res=args[i]->val_str(str);
else
{
String *res2;
res2= args[i]->val_str(res == str ? &tmp_value : str);
if (res2)
{
int cmp= sortcmp(res,res2,collation.collation);
if ((cmp_sign < 0 ? cmp : -cmp) < 0)
res=res2;
}
}
if ((null_value= args[i]->null_value))
return 0;
}
res->set_charset(collation.collation);
return res;
}
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
return 0;
}
return 0; // Keep compiler happy
}
bool Item_func_min_max::get_date(MYSQL_TIME *ltime, uint fuzzydate)
{
DBUG_ASSERT(fixed == 1);
if (compare_as_dates)
{
longlong result;
cmp_datetimes(&result);
if (null_value)
return true;
TIME_from_longlong_packed(ltime, datetime_item->field_type(), result);
int warnings;
return check_date(ltime, non_zero_date(ltime), fuzzydate, &warnings);
}
switch (field_type())
{
case MYSQL_TYPE_TIME:
return get_date_from_time(ltime);
case MYSQL_TYPE_DATETIME:
case MYSQL_TYPE_TIMESTAMP:
case MYSQL_TYPE_DATE:
DBUG_ASSERT(0); // Should have been processed in "compare_as_dates" block.
default:
return get_date_from_non_temporal(ltime, fuzzydate);
}
}
bool Item_func_min_max::get_time(MYSQL_TIME *ltime)
{
DBUG_ASSERT(fixed == 1);
if (compare_as_dates)
{
longlong result;
cmp_datetimes(&result);
if (null_value)
return true;
TIME_from_longlong_packed(ltime, datetime_item->field_type(), result);
datetime_to_time(ltime);
return false;
}
switch (field_type())
{
case MYSQL_TYPE_TIME:
{
longlong result;
cmp_times(&result);
if (null_value)
return true;
TIME_from_longlong_time_packed(ltime, result);
return false;
}
break;
case MYSQL_TYPE_DATE:
case MYSQL_TYPE_TIMESTAMP:
case MYSQL_TYPE_DATETIME:
DBUG_ASSERT(0); // Should have been processed in "compare_as_dates" block.
default:
return get_time_from_non_temporal(ltime);
break;
}
}
double Item_func_min_max::val_real()
{
DBUG_ASSERT(fixed == 1);
double value=0.0;
if (compare_as_dates)
{
longlong result= 0;
(void)cmp_datetimes(&result);
return double_from_datetime_packed(datetime_item->field_type(), result);
}
for (uint i=0; i < arg_count ; i++)
{
if (i == 0)
value= args[i]->val_real();
else
{
double tmp= args[i]->val_real();
if (!args[i]->null_value && (tmp < value ? cmp_sign : -cmp_sign) > 0)
value=tmp;
}
if ((null_value= args[i]->null_value))
break;
}
return value;
}
longlong Item_func_min_max::val_int()
{
DBUG_ASSERT(fixed == 1);
longlong value=0;
if (compare_as_dates)
{
longlong result= 0;
(void)cmp_datetimes(&result);
return longlong_from_datetime_packed(datetime_item->field_type(), result);
}
/*
TS-TODO: val_str decides which type to use using cmp_type.
val_int, val_decimal, val_real do not check cmp_type and
decide data type according to the method type.
This is probably not good:
mysql> select least('11', '2'), least('11', '2')+0, concat(least(11,2));
+------------------+--------------------+---------------------+
| least('11', '2') | least('11', '2')+0 | concat(least(11,2)) |
+------------------+--------------------+---------------------+
| 11 | 2 | 2 |
+------------------+--------------------+---------------------+
1 row in set (0.00 sec)
Should not the second column return 11?
I.e. compare as strings and return '11', then convert to number.
*/
for (uint i=0; i < arg_count ; i++)
{
if (i == 0)
value=args[i]->val_int();
else
{
longlong tmp=args[i]->val_int();
if (!args[i]->null_value && (tmp < value ? cmp_sign : -cmp_sign) > 0)
value=tmp;
}
if ((null_value= args[i]->null_value))
break;
}
return value;
}
my_decimal *Item_func_min_max::val_decimal(my_decimal *dec)
{
DBUG_ASSERT(fixed == 1);
my_decimal tmp_buf, *tmp, *UNINIT_VAR(res);
if (compare_as_dates)
{
longlong value= 0;
(void)cmp_datetimes(&value);
return my_decimal_from_datetime_packed(dec, datetime_item->field_type(),
value);
}
for (uint i=0; i < arg_count ; i++)
{
if (i == 0)
res= args[i]->val_decimal(dec);
else
{
tmp= args[i]->val_decimal(&tmp_buf); // Zero if NULL
if (tmp && (my_decimal_cmp(tmp, res) * cmp_sign) < 0)
{
if (tmp == &tmp_buf)
{
/* Move value out of tmp_buf as this will be reused on next loop */
my_decimal2decimal(tmp, dec);
res= dec;
}
else
res= tmp;
}
}
if ((null_value= args[i]->null_value))
{
res= 0;
break;
}
}
if (res)
{
/*
Need this to make val_str() always return fixed
number of fractional digits, according to "decimals".
*/
my_decimal_round(E_DEC_FATAL_ERROR, res, decimals, false, res);
}
return res;
}
longlong Item_func_length::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
if (!res)
{
null_value=1;
return 0; /* purecov: inspected */
}
null_value=0;
return (longlong) res->length();
}
longlong Item_func_char_length::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
if (!res)
{
null_value=1;
return 0; /* purecov: inspected */
}
null_value=0;
return (longlong) res->numchars();
}
longlong Item_func_coercibility::val_int()
{
DBUG_ASSERT(fixed == 1);
null_value= 0;
return (longlong) args[0]->collation.derivation;
}
void Item_func_locate::fix_length_and_dec()
{
max_length= MY_INT32_NUM_DECIMAL_DIGITS;
agg_arg_charsets_for_comparison(cmp_collation, args, 2);
}
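/*
LOCATE(substr, str [, pos]): returns the 1-based position of the first
occurrence of substr in str (starting the search at pos when given), or 0
when not found. For example, LOCATE('bar', 'foobarbar') returns 4 and
LOCATE('bar', 'foobarbar', 5) returns 7.
*/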
longlong Item_func_locate::val_int()
{
DBUG_ASSERT(fixed == 1);
String *a=args[0]->val_str(&value1);
String *b=args[1]->val_str(&value2);
if (!a || !b)
{
null_value=1;
return 0; /* purecov: inspected */
}
null_value=0;
/* must be longlong to avoid truncation */
longlong start= 0;
longlong start0= 0;
my_match_t match;
if (arg_count == 3)
{
start0= start= args[2]->val_int() - 1;
if ((start < 0) || (start > a->length()))
return 0;
/* start is now sufficiently valid to pass to charpos function */
start= a->charpos((int) start);
if (start + b->length() > a->length())
return 0;
}
if (!b->length()) // Found empty string at start
return start + 1;
if (!cmp_collation.collation->coll->instr(cmp_collation.collation,
a->ptr()+start,
(uint) (a->length()-start),
b->ptr(), b->length(),
&match, 1))
return 0;
return (longlong) match.mb_len + start0 + 1;
}
void Item_func_locate::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("locate("));
args[1]->print(str, query_type);
str->append(',');
args[0]->print(str, query_type);
if (arg_count == 3)
{
str->append(',');
args[2]->print(str, query_type);
}
str->append(')');
}
longlong Item_func_validate_password_strength::val_int()
{
String *field= args[0]->val_str(&value);
if ((null_value= args[0]->null_value))
return 0;
return (check_password_strength(field));
}
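/*
FIELD(str, str1, str2, ...): returns the 1-based index of str within the
list that follows, or 0 if it is not found (or str is NULL),
e.g. FIELD('b', 'a', 'b', 'c') returns 2.
*/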
longlong Item_func_field::val_int()
{
DBUG_ASSERT(fixed == 1);
if (cmp_type == STRING_RESULT)
{
String *field;
if (!(field= args[0]->val_str(&value)))
return 0;
for (uint i=1 ; i < arg_count ; i++)
{
String *tmp_value=args[i]->val_str(&tmp);
if (tmp_value && !sortcmp(field,tmp_value,cmp_collation.collation))
return (longlong) (i);
}
}
else if (cmp_type == INT_RESULT)
{
longlong val= args[0]->val_int();
if (args[0]->null_value)
return 0;
for (uint i=1; i < arg_count ; i++)
{
if (val == args[i]->val_int() && !args[i]->null_value)
return (longlong) (i);
}
}
else if (cmp_type == DECIMAL_RESULT)
{
my_decimal dec_arg_buf, *dec_arg,
dec_buf, *dec= args[0]->val_decimal(&dec_buf);
if (args[0]->null_value)
return 0;
for (uint i=1; i < arg_count; i++)
{
dec_arg= args[i]->val_decimal(&dec_arg_buf);
if (!args[i]->null_value && !my_decimal_cmp(dec_arg, dec))
return (longlong) (i);
}
}
else
{
double val= args[0]->val_real();
if (args[0]->null_value)
return 0;
for (uint i=1; i < arg_count ; i++)
{
if (val == args[i]->val_real() && !args[i]->null_value)
return (longlong) (i);
}
}
return 0;
}
void Item_func_field::fix_length_and_dec()
{
maybe_null=0; max_length=3;
cmp_type= args[0]->result_type();
for (uint i=1; i < arg_count ; i++)
cmp_type= item_cmp_type(cmp_type, args[i]->result_type());
if (cmp_type == STRING_RESULT)
agg_arg_charsets_for_comparison(cmp_collation, args, arg_count);
}
longlong Item_func_ascii::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
if (!res)
{
null_value=1;
return 0;
}
null_value=0;
return (longlong) (res->length() ? (uchar) (*res)[0] : (uchar) 0);
}
longlong Item_func_ord::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
if (!res)
{
null_value=1;
return 0;
}
null_value=0;
if (!res->length()) return 0;
#ifdef USE_MB
if (use_mb(res->charset()))
{
register const char *str=res->ptr();
register uint32 n=0, l=my_ismbchar(res->charset(),str,str+res->length());
if (!l)
return (longlong)((uchar) *str);
while (l--)
n=(n<<8)|(uint32)((uchar) *str++);
return (longlong) n;
}
#endif
return (longlong) ((uchar) (*res)[0]);
}
/* Search for a string in a comma-separated string of strings */
/* Returns the 1-based position of the found string, or 0 if not found */
/* This optimizes searching in enums to bit testing! */
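/* For example, FIND_IN_SET('b', 'a,b,c') returns 2 and FIND_IN_SET('d', 'a,b,c') returns 0 */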
void Item_func_find_in_set::fix_length_and_dec()
{
decimals=0;
max_length=3; // 1-999
if (args[0]->const_item() && args[1]->type() == FIELD_ITEM)
{
Field *field= ((Item_field*) args[1])->field;
if (field->real_type() == MYSQL_TYPE_SET)
{
String *find=args[0]->val_str(&value);
if (find)
{
// find is not a NULL pointer, so args[0] is not a null value
DBUG_ASSERT(!args[0]->null_value);
enum_value= find_type(((Field_enum*) field)->typelib,find->ptr(),
find->length(), 0);
enum_bit=0;
if (enum_value)
enum_bit=LL(1) << (enum_value-1);
}
}
}
agg_arg_charsets_for_comparison(cmp_collation, args, 2);
}
static const char separator=',';
longlong Item_func_find_in_set::val_int()
{
DBUG_ASSERT(fixed == 1);
if (enum_value)
{
// enum_value is set iff args[0]->const_item() in fix_length_and_dec().
DBUG_ASSERT(args[0]->const_item());
ulonglong tmp= (ulonglong) args[1]->val_int();
null_value= args[1]->null_value;
/*
No need to check args[0]->null_value since enum_value is set iff
args[0] is a non-null const item. Note: no DBUG_ASSERT on
args[0]->null_value here because args[0] may have been replaced
by an Item_cache on which val_int() has not been called. See
BUG#11766317
*/
if (!null_value)
{
if (tmp & enum_bit)
return enum_value;
}
return 0L;
}
String *find=args[0]->val_str(&value);
String *buffer=args[1]->val_str(&value2);
if (!find || !buffer)
{
null_value=1;
return 0; /* purecov: inspected */
}
null_value=0;
int diff;
if ((diff=buffer->length() - find->length()) >= 0)
{
my_wc_t wc= 0;
const CHARSET_INFO *cs= cmp_collation.collation;
const char *str_begin= buffer->ptr();
const char *str_end= buffer->ptr();
const char *real_end= str_end+buffer->length();
const uchar *find_str= (const uchar *) find->ptr();
uint find_str_len= find->length();
int position= 0;
while (1)
{
int symbol_len;
if ((symbol_len= cs->cset->mb_wc(cs, &wc, (uchar*) str_end,
(uchar*) real_end)) > 0)
{
const char *substr_end= str_end + symbol_len;
bool is_last_item= (substr_end == real_end);
bool is_separator= (wc == (my_wc_t) separator);
if (is_separator || is_last_item)
{
position++;
if (is_last_item && !is_separator)
str_end= substr_end;
if (!my_strnncoll(cs, (const uchar *) str_begin,
(uint) (str_end - str_begin),
find_str, find_str_len))
return (longlong) position;
else
str_begin= substr_end;
}
str_end= substr_end;
}
else if (str_end - str_begin == 0 &&
find_str_len == 0 &&
wc == (my_wc_t) separator)
return (longlong) ++position;
else
return LL(0);
}
}
return 0;
}
longlong Item_func_bit_count::val_int()
{
DBUG_ASSERT(fixed == 1);
ulonglong value= (ulonglong) args[0]->val_int();
if ((null_value= args[0]->null_value))
return 0; /* purecov: inspected */
return (longlong) my_count_bits(value);
}
/****************************************************************************
** Functions to handle dynamically loadable functions
** Original source by: Alexis Mikhailov <[email protected]>
** Rewritten by monty.
****************************************************************************/
#ifdef HAVE_DLOPEN
void udf_handler::cleanup()
{
if (!not_original)
{
if (initialized)
{
if (u_d->func_deinit != NULL)
{
Udf_func_deinit deinit= u_d->func_deinit;
(*deinit)(&initid);
}
free_udf(u_d);
initialized= FALSE;
}
if (buffers) // Because of bug in ecc
delete [] buffers;
buffers= 0;
}
}
bool
udf_handler::fix_fields(THD *thd, Item_result_field *func,
uint arg_count, Item **arguments)
{
uchar buff[STACK_BUFF_ALLOC]; // Max argument in function
DBUG_ENTER("Item_udf_func::fix_fields");
if (check_stack_overrun(thd, STACK_MIN_SIZE, buff))
DBUG_RETURN(TRUE); // Fatal error flag is set!
udf_func *tmp_udf=find_udf(u_d->name.str,(uint) u_d->name.length,1);
if (!tmp_udf)
{
my_error(ER_CANT_FIND_UDF, MYF(0), u_d->name.str);
DBUG_RETURN(TRUE);
}
u_d=tmp_udf;
args=arguments;
/* Fix all arguments */
func->maybe_null=0;
used_tables_cache=0;
const_item_cache=1;
if ((f_args.arg_count=arg_count))
{
if (!(f_args.arg_type= (Item_result*)
sql_alloc(f_args.arg_count*sizeof(Item_result))))
{
free_udf(u_d);
DBUG_RETURN(TRUE);
}
uint i;
Item **arg,**arg_end;
for (i=0, arg=arguments, arg_end=arguments+arg_count;
arg != arg_end ;
arg++,i++)
{
if (!(*arg)->fixed &&
(*arg)->fix_fields(thd, arg))
DBUG_RETURN(1);
// we can't assign 'item' before, because fix_fields() can change arg
Item *item= *arg;
if (item->check_cols(1))
DBUG_RETURN(TRUE);
/*
TODO: We should think about this. It is not always the
right way just to set a UDF result to return my_charset_bin
if one argument has a binary sorting order.
The result collation should be calculated according to argument
derivations in some cases and should not in other cases.
Moreover, some arguments can represent numeric input,
which doesn't affect the result character set and collation.
There is no general rule for UDFs. Everything depends on
the particular user-defined function.
*/
if (item->collation.collation->state & MY_CS_BINSORT)
func->collation.set(&my_charset_bin);
if (item->maybe_null)
func->maybe_null=1;
func->with_sum_func= func->with_sum_func || item->with_sum_func;
used_tables_cache|=item->used_tables();
const_item_cache&=item->const_item();
f_args.arg_type[i]=item->result_type();
}
// TODO: why is all the following memory not allocated with one call of sql_alloc?
if (!(buffers=new String[arg_count]) ||
!(f_args.args= (char**) sql_alloc(arg_count * sizeof(char *))) ||
!(f_args.lengths= (ulong*) sql_alloc(arg_count * sizeof(long))) ||
!(f_args.maybe_null= (char*) sql_alloc(arg_count * sizeof(char))) ||
!(num_buffer= (char*) sql_alloc(arg_count *
ALIGN_SIZE(sizeof(double)))) ||
!(f_args.attributes= (char**) sql_alloc(arg_count * sizeof(char *))) ||
!(f_args.attribute_lengths= (ulong*) sql_alloc(arg_count *
sizeof(long))))
{
free_udf(u_d);
DBUG_RETURN(TRUE);
}
}
func->fix_length_and_dec();
initid.max_length=func->max_length;
initid.maybe_null=func->maybe_null;
initid.const_item=const_item_cache;
initid.decimals=func->decimals;
initid.ptr=0;
if (u_d->func_init)
{
char init_msg_buff[MYSQL_ERRMSG_SIZE];
char *to=num_buffer;
for (uint i=0; i < arg_count; i++)
{
/*
For a constant argument i, args->args[i] points to the argument value.
For non-constant, args->args[i] is NULL.
*/
f_args.args[i]= NULL; /* Non-const unless updated below. */
f_args.lengths[i]= arguments[i]->max_length;
f_args.maybe_null[i]= (char) arguments[i]->maybe_null;
f_args.attributes[i]= (char*) arguments[i]->item_name.ptr();
f_args.attribute_lengths[i]= arguments[i]->item_name.length();
if (arguments[i]->const_item())
{
switch (arguments[i]->result_type())
{
case STRING_RESULT:
case DECIMAL_RESULT:
{
String *res= arguments[i]->val_str(&buffers[i]);
if (arguments[i]->null_value)
continue;
f_args.args[i]= (char*) res->c_ptr_safe();
f_args.lengths[i]= res->length();
break;
}
case INT_RESULT:
*((longlong*) to)= arguments[i]->val_int();
if (arguments[i]->null_value)
continue;
f_args.args[i]= to;
to+= ALIGN_SIZE(sizeof(longlong));
break;
case REAL_RESULT:
*((double*) to)= arguments[i]->val_real();
if (arguments[i]->null_value)
continue;
f_args.args[i]= to;
to+= ALIGN_SIZE(sizeof(double));
break;
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
break;
}
}
}
Udf_func_init init= u_d->func_init;
if ((error=(uchar) init(&initid, &f_args, init_msg_buff)))
{
my_error(ER_CANT_INITIALIZE_UDF, MYF(0),
u_d->name.str, init_msg_buff);
free_udf(u_d);
DBUG_RETURN(TRUE);
}
func->max_length= min<size_t>(initid.max_length, MAX_BLOB_WIDTH);
func->maybe_null=initid.maybe_null;
const_item_cache=initid.const_item;
/*
Keep used_tables_cache in sync with const_item_cache.
See the comment in Item_udf_func::update_used_tables().
*/
if (!const_item_cache && !used_tables_cache)
used_tables_cache= RAND_TABLE_BIT;
func->decimals= min<uint>(initid.decimals, NOT_FIXED_DEC);
}
initialized=1;
if (error)
{
my_error(ER_CANT_INITIALIZE_UDF, MYF(0),
u_d->name.str, ER(ER_UNKNOWN_ERROR));
DBUG_RETURN(TRUE);
}
DBUG_RETURN(FALSE);
}
bool udf_handler::get_arguments()
{
if (error)
return 1; // Got an error earlier
char *to= num_buffer;
uint str_count=0;
for (uint i=0; i < f_args.arg_count; i++)
{
f_args.args[i]=0;
switch (f_args.arg_type[i]) {
case STRING_RESULT:
case DECIMAL_RESULT:
{
String *res=args[i]->val_str(&buffers[str_count++]);
if (!(args[i]->null_value))
{
f_args.args[i]= (char*) res->ptr();
f_args.lengths[i]= res->length();
break;
}
}
case INT_RESULT:
*((longlong*) to) = args[i]->val_int();
if (!args[i]->null_value)
{
f_args.args[i]=to;
to+= ALIGN_SIZE(sizeof(longlong));
}
break;
case REAL_RESULT:
*((double*) to)= args[i]->val_real();
if (!args[i]->null_value)
{
f_args.args[i]=to;
to+= ALIGN_SIZE(sizeof(double));
}
break;
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
break;
}
}
return 0;
}
/**
@return
(String*)NULL in case of NULL values
*/
String *udf_handler::val_str(String *str,String *save_str)
{
uchar is_null_tmp=0;
ulong res_length;
DBUG_ENTER("udf_handler::val_str");
if (get_arguments())
DBUG_RETURN(0);
char * (*func)(UDF_INIT *, UDF_ARGS *, char *, ulong *, uchar *, uchar *)=
(char* (*)(UDF_INIT *, UDF_ARGS *, char *, ulong *, uchar *, uchar *))
u_d->func;
if ((res_length=str->alloced_length()) < MAX_FIELD_WIDTH)
{ // This happens VERY seldom
if (str->alloc(MAX_FIELD_WIDTH))
{
error=1;
DBUG_RETURN(0);
}
}
char *res=func(&initid, &f_args, (char*) str->ptr(), &res_length,
&is_null_tmp, &error);
DBUG_PRINT("info", ("udf func returned, res_length: %lu", res_length));
if (is_null_tmp || !res || error) // The !res is for safety
{
DBUG_PRINT("info", ("Null or error"));
DBUG_RETURN(0);
}
if (res == str->ptr())
{
str->length(res_length);
DBUG_PRINT("exit", ("str: %.*s", (int) str->length(), str->ptr()));
DBUG_RETURN(str);
}
save_str->set(res, res_length, str->charset());
DBUG_PRINT("exit", ("save_str: %s", save_str->ptr()));
DBUG_RETURN(save_str);
}
/*
For the moment, UDFs return DECIMAL values as strings
*/
my_decimal *udf_handler::val_decimal(my_bool *null_value, my_decimal *dec_buf)
{
char buf[DECIMAL_MAX_STR_LENGTH+1], *end;
ulong res_length= DECIMAL_MAX_STR_LENGTH;
if (get_arguments())
{
*null_value=1;
return 0;
}
char *(*func)(UDF_INIT *, UDF_ARGS *, char *, ulong *, uchar *, uchar *)=
(char* (*)(UDF_INIT *, UDF_ARGS *, char *, ulong *, uchar *, uchar *))
u_d->func;
char *res= func(&initid, &f_args, buf, &res_length, &is_null, &error);
if (is_null || error)
{
*null_value= 1;
return 0;
}
end= res+ res_length;
str2my_decimal(E_DEC_FATAL_ERROR, res, dec_buf, &end);
return dec_buf;
}
void Item_udf_func::cleanup()
{
udf.cleanup();
Item_func::cleanup();
}
void Item_udf_func::print(String *str, enum_query_type query_type)
{
str->append(func_name());
str->append('(');
for (uint i=0 ; i < arg_count ; i++)
{
if (i != 0)
str->append(',');
args[i]->print_item_w_name(str, query_type);
}
str->append(')');
}
double Item_func_udf_float::val_real()
{
DBUG_ASSERT(fixed == 1);
DBUG_ENTER("Item_func_udf_float::val");
DBUG_PRINT("info",("result_type: %d arg_count: %d",
args[0]->result_type(), arg_count));
DBUG_RETURN(udf.val(&null_value));
}
String *Item_func_udf_float::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
double nr= val_real();
if (null_value)
return 0; /* purecov: inspected */
str->set_real(nr,decimals,&my_charset_bin);
return str;
}
longlong Item_func_udf_int::val_int()
{
DBUG_ASSERT(fixed == 1);
DBUG_ENTER("Item_func_udf_int::val_int");
DBUG_RETURN(udf.val_int(&null_value));
}
String *Item_func_udf_int::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
longlong nr=val_int();
if (null_value)
return 0;
str->set_int(nr, unsigned_flag, &my_charset_bin);
return str;
}
longlong Item_func_udf_decimal::val_int()
{
my_decimal dec_buf, *dec= udf.val_decimal(&null_value, &dec_buf);
longlong result;
if (null_value)
return 0;
my_decimal2int(E_DEC_FATAL_ERROR, dec, unsigned_flag, &result);
return result;
}
double Item_func_udf_decimal::val_real()
{
my_decimal dec_buf, *dec= udf.val_decimal(&null_value, &dec_buf);
double result;
if (null_value)
return 0.0;
my_decimal2double(E_DEC_FATAL_ERROR, dec, &result);
return result;
}
my_decimal *Item_func_udf_decimal::val_decimal(my_decimal *dec_buf)
{
DBUG_ASSERT(fixed == 1);
DBUG_ENTER("Item_func_udf_decimal::val_decimal");
DBUG_PRINT("info",("result_type: %d arg_count: %d",
args[0]->result_type(), arg_count));
DBUG_RETURN(udf.val_decimal(&null_value, dec_buf));
}
String *Item_func_udf_decimal::val_str(String *str)
{
my_decimal dec_buf, *dec= udf.val_decimal(&null_value, &dec_buf);
if (null_value)
return 0;
if (str->length() < DECIMAL_MAX_STR_LENGTH)
str->length(DECIMAL_MAX_STR_LENGTH);
my_decimal_round(E_DEC_FATAL_ERROR, dec, decimals, FALSE, &dec_buf);
my_decimal2string(E_DEC_FATAL_ERROR, &dec_buf, 0, 0, '0', str);
return str;
}
void Item_func_udf_decimal::fix_length_and_dec()
{
fix_num_length_and_dec();
}
/* Default max_length is max argument length */
void Item_func_udf_str::fix_length_and_dec()
{
DBUG_ENTER("Item_func_udf_str::fix_length_and_dec");
max_length=0;
for (uint i = 0; i < arg_count; i++)
set_if_bigger(max_length,args[i]->max_length);
DBUG_VOID_RETURN;
}
String *Item_func_udf_str::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
String *res=udf.val_str(str,&str_value);
null_value = !res;
return res;
}
/**
@note
This has to come last in the udf_handler methods, or C for AIX
version 6.0.0.0 fails to compile with debugging enabled. (Yes, really.)
*/
udf_handler::~udf_handler()
{
/* Everything should be properly cleaned up by this moment. */
DBUG_ASSERT(not_original || !(initialized || buffers));
}
#else
bool udf_handler::get_arguments() { return 0; }
#endif /* HAVE_DLOPEN */
/*
** User level locks
*/
mysql_mutex_t LOCK_user_locks;
static HASH hash_user_locks;
class User_level_lock
{
uchar *key;
size_t key_length;
public:
int count;
bool locked;
mysql_cond_t cond;
my_thread_id thread_id;
void set_thread(THD *thd) { thread_id= thd->thread_id; }
User_level_lock(const uchar *key_arg,uint length, ulong id)
:key_length(length),count(1),locked(1), thread_id(id)
{
key= (uchar*) my_memdup(key_arg,length,MYF(0));
mysql_cond_init(key_user_level_lock_cond, &cond, NULL);
if (key)
{
if (my_hash_insert(&hash_user_locks,(uchar*) this))
{
my_free(key);
key=0;
}
}
}
~User_level_lock()
{
if (key)
{
my_hash_delete(&hash_user_locks,(uchar*) this);
my_free(key);
}
mysql_cond_destroy(&cond);
}
inline bool initialized() { return key != 0; }
friend void item_user_lock_release(User_level_lock *ull);
friend uchar *ull_get_key(const User_level_lock *ull, size_t *length,
my_bool not_used);
};
uchar *ull_get_key(const User_level_lock *ull, size_t *length,
my_bool not_used __attribute__((unused)))
{
*length= ull->key_length;
return ull->key;
}
#ifdef HAVE_PSI_INTERFACE
static PSI_mutex_key key_LOCK_user_locks;
static PSI_mutex_info all_user_mutexes[]=
{
{ &key_LOCK_user_locks, "LOCK_user_locks", PSI_FLAG_GLOBAL}
};
static void init_user_lock_psi_keys(void)
{
int count;
count= array_elements(all_user_mutexes);
mysql_mutex_register("sql", all_user_mutexes, count);
}
#endif
static bool item_user_lock_inited= 0;
void item_user_lock_init(void)
{
#ifdef HAVE_PSI_INTERFACE
init_user_lock_psi_keys();
#endif
mysql_mutex_init(key_LOCK_user_locks, &LOCK_user_locks, MY_MUTEX_INIT_SLOW);
my_hash_init(&hash_user_locks,system_charset_info,
16,0,0,(my_hash_get_key) ull_get_key,NULL,0);
item_user_lock_inited= 1;
}
void item_user_lock_free(void)
{
if (item_user_lock_inited)
{
item_user_lock_inited= 0;
my_hash_free(&hash_user_locks);
mysql_mutex_destroy(&LOCK_user_locks);
}
}
void item_user_lock_release(User_level_lock *ull)
{
ull->locked=0;
ull->thread_id= 0;
if (--ull->count)
mysql_cond_signal(&ull->cond);
else
delete ull;
}
/**
Wait until we are at or past the given position in the master binlog
on the slave.
*/
longlong Item_master_pos_wait::val_int()
{
DBUG_ASSERT(fixed == 1);
THD* thd = current_thd;
String *log_name = args[0]->val_str(&value);
int event_count= 0;
null_value=0;
if (thd->slave_thread || !log_name || !log_name->length())
{
null_value = 1;
return 0;
}
#ifdef HAVE_REPLICATION
longlong pos = (ulong)args[1]->val_int();
longlong timeout = (arg_count==3) ? args[2]->val_int() : 0 ;
if (active_mi == NULL ||
(event_count = active_mi->rli->wait_for_pos(thd, log_name, pos, timeout)) == -2)
{
null_value = 1;
event_count=0;
}
#endif
return event_count;
}
longlong Item_master_gtid_set_wait::val_int()
{
DBUG_ASSERT(fixed == 1);
THD* thd = current_thd;
String *gtid= args[0]->val_str(&value);
int event_count= 0;
null_value=0;
if (thd->slave_thread || !gtid || 0 == gtid_mode)
{
null_value = 1;
return event_count;
}
#if defined(HAVE_REPLICATION)
longlong timeout = (arg_count== 2) ? args[1]->val_int() : 0;
if (active_mi && active_mi->rli)
{
if ((event_count = active_mi->rli->wait_for_gtid_set(thd, gtid, timeout))
== -2)
{
null_value = 1;
event_count=0;
}
}
else
/*
Replication has not been set up, so we should return NULL.
*/
null_value = 1;
#endif
return event_count;
}
/**
Return 1 if both arguments are Gtid_sets and the first is a subset
of the second. Generate an error if any of the arguments is not a
Gtid_set.
*/
longlong Item_func_gtid_subset::val_int()
{
DBUG_ENTER("Item_func_gtid_subset::val_int()");
if (args[0]->null_value || args[1]->null_value)
{
null_value= true;
DBUG_RETURN(0);
}
String *string1, *string2;
const char *charp1, *charp2;
int ret= 1;
enum_return_status status;
// get strings without lock
if ((string1= args[0]->val_str(&buf1)) != NULL &&
(charp1= string1->c_ptr_safe()) != NULL &&
(string2= args[1]->val_str(&buf2)) != NULL &&
(charp2= string2->c_ptr_safe()) != NULL)
{
Sid_map sid_map(NULL/*no rwlock*/);
// compute sets while holding locks
const Gtid_set sub_set(&sid_map, charp1, &status);
if (status == RETURN_STATUS_OK)
{
const Gtid_set super_set(&sid_map, charp2, &status);
if (status == RETURN_STATUS_OK)
ret= sub_set.is_subset(&super_set) ? 1 : 0;
}
}
DBUG_RETURN(ret);
}
/**
Enables a session to wait on a condition until a timeout or a network
disconnect occurs.
@remark The connection is polled every m_interrupt_interval nanoseconds.
*/
class Interruptible_wait
{
THD *m_thd;
struct timespec m_abs_timeout;
static const ulonglong m_interrupt_interval;
public:
Interruptible_wait(THD *thd)
: m_thd(thd) {}
~Interruptible_wait() {}
public:
/**
Set the absolute timeout.
@param timeout The amount of time in nanoseconds to wait
*/
void set_timeout(ulonglong timeout)
{
/*
Calculate the absolute system time at the start so it can
be controlled in slices. It relies on the fact that once
the absolute time passes, the timed wait call will fail
automatically with a timeout error.
*/
set_timespec_nsec(m_abs_timeout, timeout);
}
/** The timed wait. */
int wait(mysql_cond_t *, mysql_mutex_t *);
};
/** Time to wait before polling the connection status. */
const ulonglong Interruptible_wait::m_interrupt_interval= 5 * ULL(1000000000);
/**
Wait for a given condition to be signaled.
@param cond The condition variable to wait on.
@param mutex The associated mutex.
@remark The absolute timeout is preserved across calls.
@retval return value from mysql_cond_timedwait
*/
int Interruptible_wait::wait(mysql_cond_t *cond, mysql_mutex_t *mutex)
{
int error;
struct timespec timeout;
while (1)
{
/* Wait for a fixed interval. */
set_timespec_nsec(timeout, m_interrupt_interval);
/* But only if not past the absolute timeout. */
if (cmp_timespec(timeout, m_abs_timeout) > 0)
timeout= m_abs_timeout;
error= mysql_cond_timedwait(cond, mutex, &timeout);
if (error == ETIMEDOUT || error == ETIME)
{
/* Return error if timed out or connection is broken. */
if (!cmp_timespec(timeout, m_abs_timeout) || !m_thd->is_connected())
break;
}
/* Otherwise, propagate status to the caller. */
else
break;
}
return error;
}
/**
Get a user level lock. If the thread has an old lock this is first released.
@retval
1 : Got lock
@retval
0 : Timeout
@retval
NULL : Error
*/
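/*
Typical usage: SELECT GET_LOCK('my_lock', 10); ... do the protected work ...;
SELECT RELEASE_LOCK('my_lock');
*/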
longlong Item_func_get_lock::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
ulonglong timeout= args[1]->val_int();
THD *thd=current_thd;
User_level_lock *ull;
int error;
Interruptible_wait timed_cond(thd);
DBUG_ENTER("Item_func_get_lock::val_int");
/*
In a slave thread there is no need to get locks, everything is serialized. Anyway
there is no way to make GET_LOCK() work on slave like it did on master
(i.e. make it return exactly the same value) because we don't have the
same other concurrent threads environment. No matter what we return here,
it's not guaranteed to be same as on master.
*/
if (thd->slave_thread)
DBUG_RETURN(1);
mysql_mutex_lock(&LOCK_user_locks);
if (!res || !res->length())
{
mysql_mutex_unlock(&LOCK_user_locks);
null_value=1;
DBUG_RETURN(0);
}
DBUG_PRINT("info", ("lock %.*s, thd=%ld", res->length(), res->ptr(),
(long) thd->real_id));
null_value=0;
if (thd->ull)
{
item_user_lock_release(thd->ull);
thd->ull=0;
}
if (!(ull= ((User_level_lock *) my_hash_search(&hash_user_locks,
(uchar*) res->ptr(),
(size_t) res->length()))))
{
ull= new User_level_lock((uchar*) res->ptr(), (size_t) res->length(),
thd->thread_id);
if (!ull || !ull->initialized())
{
delete ull;
mysql_mutex_unlock(&LOCK_user_locks);
null_value=1; // Probably out of memory
DBUG_RETURN(0);
}
ull->set_thread(thd);
thd->ull=ull;
mysql_mutex_unlock(&LOCK_user_locks);
DBUG_PRINT("info", ("made new lock"));
DBUG_RETURN(1); // Got new lock
}
ull->count++;
DBUG_PRINT("info", ("ull->count=%d", ull->count));
/*
Structure is now initialized. Try to get the lock.
Set up control struct to allow others to abort locks.
*/
THD_STAGE_INFO(thd, stage_user_lock);
thd->mysys_var->current_mutex= &LOCK_user_locks;
thd->mysys_var->current_cond= &ull->cond;
timed_cond.set_timeout(timeout * ULL(1000000000));
error= 0;
thd_wait_begin(thd, THD_WAIT_USER_LOCK);
while (ull->locked && !thd->killed)
{
DBUG_PRINT("info", ("waiting on lock"));
error= timed_cond.wait(&ull->cond, &LOCK_user_locks);
if (error == ETIMEDOUT || error == ETIME)
{
DBUG_PRINT("info", ("lock wait timeout"));
break;
}
error= 0;
}
thd_wait_end(thd);
if (ull->locked)
{
if (!--ull->count)
{
DBUG_ASSERT(0);
delete ull; // Should never happen
}
if (!error) // Killed (thd->killed != 0)
{
error=1;
null_value=1; // Return NULL
}
}
else // We got the lock
{
ull->locked=1;
ull->set_thread(thd);
ull->thread_id= thd->thread_id;
thd->ull=ull;
error=0;
DBUG_PRINT("info", ("got the lock"));
}
mysql_mutex_unlock(&LOCK_user_locks);
mysql_mutex_lock(&thd->mysys_var->mutex);
thd->mysys_var->current_mutex= 0;
thd->mysys_var->current_cond= 0;
mysql_mutex_unlock(&thd->mysys_var->mutex);
DBUG_RETURN(!error ? 1 : 0);
}
/**
Release a user level lock.
@return
- 1 if lock released
- 0 if lock wasn't held
- (SQL) NULL if no such lock
*/
longlong Item_func_release_lock::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
User_level_lock *ull;
longlong result;
THD *thd=current_thd;
DBUG_ENTER("Item_func_release_lock::val_int");
if (!res || !res->length())
{
null_value=1;
DBUG_RETURN(0);
}
DBUG_PRINT("info", ("lock %.*s", res->length(), res->ptr()));
null_value=0;
result=0;
mysql_mutex_lock(&LOCK_user_locks);
if (!(ull= ((User_level_lock*) my_hash_search(&hash_user_locks,
(const uchar*) res->ptr(),
(size_t) res->length()))))
{
null_value=1;
}
else
{
DBUG_PRINT("info", ("ull->locked=%d ull->thread=%lu thd=%lu",
(int) ull->locked,
(long)ull->thread_id,
(long)thd->thread_id));
if (ull->locked && current_thd->thread_id == ull->thread_id)
{
DBUG_PRINT("info", ("release lock"));
result=1; // Release is ok
item_user_lock_release(ull);
thd->ull=0;
}
}
mysql_mutex_unlock(&LOCK_user_locks);
DBUG_RETURN(result);
}
longlong Item_func_last_insert_id::val_int()
{
THD *thd= current_thd;
DBUG_ASSERT(fixed == 1);
if (arg_count)
{
longlong value= args[0]->val_int();
null_value= args[0]->null_value;
/*
LAST_INSERT_ID(X) must affect the client's mysql_insert_id() as
documented in the manual. We don't want to touch
first_successful_insert_id_in_cur_stmt because it would make
LAST_INSERT_ID(X) take precedence over a generated auto_increment
value for this row.
*/
thd->arg_of_last_insert_id_function= TRUE;
thd->first_successful_insert_id_in_prev_stmt= value;
return value;
}
return
static_cast<longlong>(thd->read_first_successful_insert_id_in_prev_stmt());
}
bool Item_func_last_insert_id::fix_fields(THD *thd, Item **ref)
{
thd->lex->uncacheable(UNCACHEABLE_SIDEEFFECT);
return Item_int_func::fix_fields(thd, ref);
}
/* This function is just used to test the speed of different functions */
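/* For example, BENCHMARK(1000000, MD5('test')) evaluates the expression a million times; the function itself always returns 0. */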
longlong Item_func_benchmark::val_int()
{
DBUG_ASSERT(fixed == 1);
char buff[MAX_FIELD_WIDTH];
String tmp(buff,sizeof(buff), &my_charset_bin);
my_decimal tmp_decimal;
THD *thd=current_thd;
ulonglong loop_count;
loop_count= (ulonglong) args[0]->val_int();
if (args[0]->null_value ||
(!args[0]->unsigned_flag && (((longlong) loop_count) < 0)))
{
if (!args[0]->null_value)
{
char buff[22];
llstr(((longlong) loop_count), buff);
push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
ER_WRONG_VALUE_FOR_TYPE, ER(ER_WRONG_VALUE_FOR_TYPE),
"count", buff, "benchmark");
}
null_value= 1;
return 0;
}
null_value=0;
for (ulonglong loop=0 ; loop < loop_count && !thd->killed; loop++)
{
switch (args[1]->result_type()) {
case REAL_RESULT:
(void) args[1]->val_real();
break;
case INT_RESULT:
(void) args[1]->val_int();
break;
case STRING_RESULT:
(void) args[1]->val_str(&tmp);
break;
case DECIMAL_RESULT:
(void) args[1]->val_decimal(&tmp_decimal);
break;
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
return 0;
}
}
return 0;
}
void Item_func_benchmark::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("benchmark("));
args[0]->print(str, query_type);
str->append(',');
args[1]->print(str, query_type);
str->append(')');
}
/** This function is just used to create tests with time gaps. */
longlong Item_func_sleep::val_int()
{
THD *thd= current_thd;
Interruptible_wait timed_cond(thd);
mysql_cond_t cond;
double timeout;
int error;
DBUG_ASSERT(fixed == 1);
timeout= args[0]->val_real();
/*
On 64-bit OSX mysql_cond_timedwait() waits forever
if the passed abstime has already been exceeded by
the system time.
When given a very short timeout (< 10 microseconds) just return
immediately.
We assume that the lines between this test and the call
to mysql_cond_timedwait() will be executed in less than 0.00001 sec.
*/
if (timeout < 0.00001)
return 0;
timed_cond.set_timeout((ulonglong) (timeout * 1000000000.0));
mysql_cond_init(key_item_func_sleep_cond, &cond, NULL);
mysql_mutex_lock(&LOCK_user_locks);
THD_STAGE_INFO(thd, stage_user_sleep);
thd->mysys_var->current_mutex= &LOCK_user_locks;
thd->mysys_var->current_cond= &cond;
error= 0;
thd_wait_begin(thd, THD_WAIT_SLEEP);
while (!thd->killed)
{
error= timed_cond.wait(&cond, &LOCK_user_locks);
if (error == ETIMEDOUT || error == ETIME)
break;
error= 0;
}
thd_wait_end(thd);
mysql_mutex_unlock(&LOCK_user_locks);
mysql_mutex_lock(&thd->mysys_var->mutex);
thd->mysys_var->current_mutex= 0;
thd->mysys_var->current_cond= 0;
mysql_mutex_unlock(&thd->mysys_var->mutex);
mysql_cond_destroy(&cond);
return MY_TEST(!error); // Return 1 if the wait was killed
}
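/*
Description added for clarity: look up a user variable entry in 'hash' by
name, optionally creating it. Returns NULL if the entry does not exist and
creation was not requested, or if creation failed.
*/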
static user_var_entry *get_variable(HASH *hash, const Name_string &name,
bool create_if_not_exists)
{
user_var_entry *entry;
if (!(entry = (user_var_entry*) my_hash_search(hash, (uchar*) name.ptr(),
name.length())) &&
create_if_not_exists)
{
if (!my_hash_inited(hash))
return 0;
if (!(entry= user_var_entry::create(name)))
return 0;
if (my_hash_insert(hash,(uchar*) entry))
{
my_free(entry);
return 0;
}
}
return entry;
}
void Item_func_set_user_var::cleanup()
{
Item_func::cleanup();
entry= NULL;
}
bool Item_func_set_user_var::set_entry(THD *thd, bool create_if_not_exists)
{
if (entry && thd->thread_id == entry_thread_id)
goto end; // update entry->update_query_id for PS
if (!(entry= get_variable(&thd->user_vars, name, create_if_not_exists)))
{
entry_thread_id= 0;
return TRUE;
}
entry_thread_id= thd->thread_id;
end:
/*
Remember the last query which updated it; this way a query can later know
if this variable is a constant item in the query (it is if update_query_id
is different from query_id).
If this object has delayed setting of non-constness, we delay this
until Item_func_set_user_var::save_item_result().
*/
if (!delayed_non_constness)
entry->update_query_id= thd->query_id;
return FALSE;
}
/*
When a user variable is updated (in a SET command or a query like
SELECT @a:= ).
*/
bool Item_func_set_user_var::fix_fields(THD *thd, Item **ref)
{
DBUG_ASSERT(fixed == 0);
/* fix_fields will call Item_func_set_user_var::fix_length_and_dec */
if (Item_func::fix_fields(thd, ref) || set_entry(thd, TRUE))
return TRUE;
/*
As it is wrong and confusing to associate any
character set with NULL, @a should be latin2
after this query sequence:
SET @a=_latin2'string';
SET @a=NULL;
I.e. the second query should not change the charset
to the current default value, but should keep the
original value assigned during the first query.
In order to do it, we don't copy charset
from the argument if the argument is NULL
and the variable has previously been initialized.
*/
null_item= (args[0]->type() == NULL_ITEM);
if (!entry->collation.collation || !null_item)
entry->collation.set(args[0]->collation.derivation == DERIVATION_NUMERIC ?
default_charset() : args[0]->collation.collation,
DERIVATION_IMPLICIT);
collation.set(entry->collation.collation, DERIVATION_IMPLICIT);
cached_result_type= args[0]->result_type();
return FALSE;
}
void
Item_func_set_user_var::fix_length_and_dec()
{
maybe_null=args[0]->maybe_null;
decimals=args[0]->decimals;
collation.set(DERIVATION_IMPLICIT);
if (args[0]->collation.derivation == DERIVATION_NUMERIC)
fix_length_and_charset(args[0]->max_char_length(), default_charset());
else
{
fix_length_and_charset(args[0]->max_char_length(),
args[0]->collation.collation);
}
unsigned_flag= args[0]->unsigned_flag;
}
/*
Mark field in read_map
NOTES
This is used by filesort to register used fields in a temporary
column read set or to register used fields in a view
*/
bool Item_func_set_user_var::register_field_in_read_map(uchar *arg)
{
if (result_field)
{
TABLE *table= (TABLE *) arg;
if (result_field->table == table || !table)
bitmap_set_bit(result_field->table->read_set, result_field->field_index);
}
return 0;
}
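/*
Description added for clarity: make sure the entry's value buffer can hold
'length' bytes. Small values are kept in the internal buffer (extra_size);
larger values get an external my_realloc'ed buffer. Returns true on
allocation failure.
*/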
bool user_var_entry::realloc(uint length)
{
if (length <= extra_size)
{
/* Enough space to store value in value struct */
free_value();
m_ptr= internal_buffer_ptr();
}
else
{
/* Allocate an external buffer */
if (m_length != length)
{
if (m_ptr == internal_buffer_ptr())
m_ptr= 0;
if (!(m_ptr= (char*) my_realloc(m_ptr, length,
MYF(MY_ALLOW_ZERO_PTR | MY_WME |
ME_FATALERROR))))
return true;
}
}
return false;
}
/**
Set value to user variable.
@param ptr pointer to buffer with new value
@param length length of new value
@param type type of new value
@retval false on success
@retval true on allocation error
*/
bool user_var_entry::store(void *from, uint length, Item_result type)
{
// Store strings with end \0
if (realloc(length + MY_TEST(type == STRING_RESULT)))
return true;
if (type == STRING_RESULT)
m_ptr[length]= 0; // Store end \0
memmove(m_ptr, from, length);
if (type == DECIMAL_RESULT)
((my_decimal*) m_ptr)->fix_buffer_pointer();
m_length= length;
m_type= type;
return false;
}
/**
Set value to user variable.
@param ptr pointer to buffer with new value
@param length length of new value
@param type type of new value
@param cs charset info for new value
@param dv derivation for new value
@param unsigned_arg indicates if a value of type INT_RESULT is unsigned
@note Sets error and fatal error if allocation fails.
@retval
false success
@retval
true failure
*/
bool user_var_entry::store(void *ptr, uint length, Item_result type,
const CHARSET_INFO *cs, Derivation dv,
bool unsigned_arg)
{
if (store(ptr, length, type))
return true;
collation.set(cs, dv);
unsigned_flag= unsigned_arg;
return false;
}
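/*
Description added for clarity: store the already-evaluated value into the
user variable entry owned by this item and update null_value. Returns 0 on
success and 1 on allocation failure (see user_var_entry::store() above).
*/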
bool
Item_func_set_user_var::update_hash(void *ptr, uint length,
Item_result res_type,
const CHARSET_INFO *cs, Derivation dv,
bool unsigned_arg)
{
/*
If we set a variable explicitly to NULL then keep the old
result type of the variable
*/
// args[0]->null_value could be outdated
if (args[0]->type() == Item::FIELD_ITEM)
null_value= ((Item_field*)args[0])->field->is_null();
else
null_value= args[0]->null_value;
if (null_value && null_item)
res_type= entry->type(); // Don't change type of item
if (null_value)
entry->set_null_value(res_type);
else if (entry->store(ptr, length, res_type, cs, dv, unsigned_arg))
{
null_value= 1;
return 1;
}
return 0;
}
/** Get the value of a variable as a double. */
double user_var_entry::val_real(my_bool *null_value)
{
if ((*null_value= (m_ptr == 0)))
return 0.0;
switch (m_type) {
case REAL_RESULT:
return *(double*) m_ptr;
case INT_RESULT:
return (double) *(longlong*) m_ptr;
case DECIMAL_RESULT:
{
double result;
my_decimal2double(E_DEC_FATAL_ERROR, (my_decimal *) m_ptr, &result);
return result;
}
case STRING_RESULT:
return my_atof(m_ptr); // This is null terminated
case ROW_RESULT:
DBUG_ASSERT(0); // Impossible
break;
}
return 0.0; // Impossible
}
/** Get the value of a variable as an integer. */
longlong user_var_entry::val_int(my_bool *null_value) const
{
if ((*null_value= (m_ptr == 0)))
return LL(0);
switch (m_type) {
case REAL_RESULT:
return (longlong) *(double*) m_ptr;
case INT_RESULT:
return *(longlong*) m_ptr;
case DECIMAL_RESULT:
{
longlong result;
my_decimal2int(E_DEC_FATAL_ERROR, (my_decimal *) m_ptr, 0, &result);
return result;
}
case STRING_RESULT:
{
int error;
return my_strtoll10(m_ptr, (char**) 0, &error);// String is null terminated
}
case ROW_RESULT:
DBUG_ASSERT(0); // Impossible
break;
}
return LL(0); // Impossible
}
/** Get the value of a variable as a string. */
String *user_var_entry::val_str(my_bool *null_value, String *str,
uint decimals)
{
if ((*null_value= (m_ptr == 0)))
return (String*) 0;
switch (m_type) {
case REAL_RESULT:
str->set_real(*(double*) m_ptr, decimals, collation.collation);
break;
case INT_RESULT:
if (!unsigned_flag)
str->set(*(longlong*) m_ptr, collation.collation);
else
str->set(*(ulonglong*) m_ptr, collation.collation);
break;
case DECIMAL_RESULT:
str_set_decimal((my_decimal *) m_ptr, str, collation.collation);
break;
case STRING_RESULT:
if (str->copy(m_ptr, m_length, collation.collation))
str= 0; // EOM error
break;
case ROW_RESULT:
DBUG_ASSERT(0); // Impossible
break;
}
return(str);
}
/** Get the value of a variable as a decimal. */
my_decimal *user_var_entry::val_decimal(my_bool *null_value, my_decimal *val)
{
if ((*null_value= (m_ptr == 0)))
return 0;
switch (m_type) {
case REAL_RESULT:
double2my_decimal(E_DEC_FATAL_ERROR, *(double*) m_ptr, val);
break;
case INT_RESULT:
int2my_decimal(E_DEC_FATAL_ERROR, *(longlong*) m_ptr, 0, val);
break;
case DECIMAL_RESULT:
my_decimal2decimal((my_decimal *) m_ptr, val);
break;
case STRING_RESULT:
str2my_decimal(E_DEC_FATAL_ERROR, m_ptr, m_length,
collation.collation, val);
break;
case ROW_RESULT:
DBUG_ASSERT(0); // Impossible
break;
}
return(val);
}
/**
This function is invoked on SET \@variable or
\@variable:= expression.
Evaluate (and check) the expression, then store the result.
@note
For now it always returns OK. All problems with value evaluation
will be caught by the thd->is_error() check in sql_set_variables().
@retval
FALSE OK.
*/
bool
Item_func_set_user_var::check(bool use_result_field)
{
DBUG_ENTER("Item_func_set_user_var::check");
if (use_result_field && !result_field)
use_result_field= FALSE;
switch (cached_result_type) {
case REAL_RESULT:
{
save_result.vreal= use_result_field ? result_field->val_real() :
args[0]->val_real();
break;
}
case INT_RESULT:
{
save_result.vint= use_result_field ? result_field->val_int() :
args[0]->val_int();
unsigned_flag= use_result_field ? ((Field_num*)result_field)->unsigned_flag:
args[0]->unsigned_flag;
break;
}
case STRING_RESULT:
{
save_result.vstr= use_result_field ? result_field->val_str(&value) :
args[0]->val_str(&value);
break;
}
case DECIMAL_RESULT:
{
save_result.vdec= use_result_field ?
result_field->val_decimal(&decimal_buff) :
args[0]->val_decimal(&decimal_buff);
break;
}
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
break;
}
DBUG_RETURN(FALSE);
}
/**
@brief Evaluate and store item's result.
This function is invoked on "SELECT ... INTO @var ...".
@param item An item to get value from.
*/
void Item_func_set_user_var::save_item_result(Item *item)
{
DBUG_ENTER("Item_func_set_user_var::save_item_result");
switch (cached_result_type) {
case REAL_RESULT:
save_result.vreal= item->val_result();
break;
case INT_RESULT:
save_result.vint= item->val_int_result();
unsigned_flag= item->unsigned_flag;
break;
case STRING_RESULT:
save_result.vstr= item->str_result(&value);
break;
case DECIMAL_RESULT:
save_result.vdec= item->val_decimal_result(&decimal_buff);
break;
case ROW_RESULT:
default:
// Should never happen
DBUG_ASSERT(0);
break;
}
/*
Set the ID of the query that last updated this variable. This is
usually set by Item_func_set_user_var::set_entry(), but if this
item has delayed setting of non-constness, we must do it now.
*/
if (delayed_non_constness)
entry->update_query_id= current_thd->query_id;
DBUG_VOID_RETURN;
}
/**
This function is invoked on
SET \@variable or \@variable:= expression.
@note
We have to store the expression as such in the variable, independent of
the value method used by the user
@retval
0 OK
@retval
1 EOM Error
*/
bool
Item_func_set_user_var::update()
{
bool res= 0;
DBUG_ENTER("Item_func_set_user_var::update");
switch (cached_result_type) {
case REAL_RESULT:
{
res= update_hash((void*) &save_result.vreal,sizeof(save_result.vreal),
REAL_RESULT, default_charset(), DERIVATION_IMPLICIT, 0);
break;
}
case INT_RESULT:
{
res= update_hash((void*) &save_result.vint, sizeof(save_result.vint),
INT_RESULT, default_charset(), DERIVATION_IMPLICIT,
unsigned_flag);
break;
}
case STRING_RESULT:
{
if (!save_result.vstr) // Null value
res= update_hash((void*) 0, 0, STRING_RESULT, &my_charset_bin,
DERIVATION_IMPLICIT, 0);
else
res= update_hash((void*) save_result.vstr->ptr(),
save_result.vstr->length(), STRING_RESULT,
save_result.vstr->charset(),
DERIVATION_IMPLICIT, 0);
break;
}
case DECIMAL_RESULT:
{
if (!save_result.vdec) // Null value
res= update_hash((void*) 0, 0, DECIMAL_RESULT, &my_charset_bin,
DERIVATION_IMPLICIT, 0);
else
res= update_hash((void*) save_result.vdec,
sizeof(my_decimal), DECIMAL_RESULT,
default_charset(), DERIVATION_IMPLICIT, 0);
break;
}
case ROW_RESULT:
default:
// This case should never be chosen
DBUG_ASSERT(0);
break;
}
DBUG_RETURN(res);
}
double Item_func_set_user_var::val_real()
{
DBUG_ASSERT(fixed == 1);
check(0);
update(); // Store expression
return entry->val_real(&null_value);
}
longlong Item_func_set_user_var::val_int()
{
DBUG_ASSERT(fixed == 1);
check(0);
update(); // Store expression
return entry->val_int(&null_value);
}
String *Item_func_set_user_var::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
check(0);
update(); // Store expression
return entry->val_str(&null_value, str, decimals);
}
my_decimal *Item_func_set_user_var::val_decimal(my_decimal *val)
{
DBUG_ASSERT(fixed == 1);
check(0);
update(); // Store expression
return entry->val_decimal(&null_value, val);
}
double Item_func_set_user_var::val_result()
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return entry->val_real(&null_value);
}
longlong Item_func_set_user_var::val_int_result()
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return entry->val_int(&null_value);
}
bool Item_func_set_user_var::val_bool_result()
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return entry->val_int(&null_value) != 0;
}
String *Item_func_set_user_var::str_result(String *str)
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return entry->val_str(&null_value, str, decimals);
}
my_decimal *Item_func_set_user_var::val_decimal_result(my_decimal *val)
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return entry->val_decimal(&null_value, val);
}
bool Item_func_set_user_var::is_null_result()
{
DBUG_ASSERT(fixed == 1);
check(TRUE);
update(); // Store expression
return is_null();
}
// just the assignment, for use in "SET @a:=5" type self-prints
void Item_func_set_user_var::print_assignment(String *str,
enum_query_type query_type)
{
str->append(STRING_WITH_LEN("@"));
str->append(name);
str->append(STRING_WITH_LEN(":="));
args[0]->print(str, query_type);
}
// parenthesize assignment for use in "EXPLAIN EXTENDED SELECT (@e:=80)+5"
void Item_func_set_user_var::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("("));
print_assignment(str, query_type);
str->append(STRING_WITH_LEN(")"));
}
bool Item_func_set_user_var::send(Protocol *protocol, String *str_arg)
{
if (result_field)
{
check(1);
update();
return protocol->store(result_field);
}
return Item::send(protocol, str_arg);
}
void Item_func_set_user_var::make_field(Send_field *tmp_field)
{
if (result_field)
{
result_field->make_field(tmp_field);
DBUG_ASSERT(tmp_field->table_name != 0);
if (Item::item_name.is_set())
tmp_field->col_name=Item::item_name.ptr(); // Use user supplied name
}
else
Item::make_field(tmp_field);
}
/*
Save the value of a user variable into a field
SYNOPSIS
save_in_field()
field target field to save the value to
no_conversion flag indicating whether conversions are allowed
DESCRIPTION
Save the function value into a field and update the user variable
accordingly. If a result field is defined and the target field doesn't
coincide with it then the value from the result field will be used as
the new value of the user variable.
The reason to have this method rather than simply using the result
field in the val_xxx() methods is that the value from the result field
cannot always be used when the result field is defined.
Let's consider the following cases:
1) when filling a tmp table the result field is defined but the value of it
is undefined because it has yet to be produced. Thus we can't use it.
2) on execution of an INSERT ... SELECT statement the save_in_field()
function will be called to fill the data in the new record. If the SELECT
part uses a tmp table then the result field is defined and should be
used in order to get the correct result.
The difference between the SET_USER_VAR function and regular functions
like CONCAT is that the Item_func objects for the regular functions are
replaced by Item_field objects after the values of these functions have
been stored in a tmp table. Yet an object of the Item_field class cannot
be used to update a user variable.
Due to this we have to handle the result field in a special way here and
in the Item_func_set_user_var::send() function.
RETURN VALUES
FALSE Ok
TRUE Error
*/
type_conversion_status
Item_func_set_user_var::save_in_field(Field *field, bool no_conversions,
bool can_use_result_field)
{
bool use_result_field= (!can_use_result_field ? 0 :
(result_field && result_field != field));
type_conversion_status error;
/* Update the value of the user variable */
check(use_result_field);
update();
if (result_type() == STRING_RESULT ||
(result_type() == REAL_RESULT &&
field->result_type() == STRING_RESULT))
{
String *result;
const CHARSET_INFO *cs= collation.collation;
char buff[MAX_FIELD_WIDTH]; // Alloc buffer for small columns
str_value.set_quick(buff, sizeof(buff), cs);
result= entry->val_str(&null_value, &str_value, decimals);
if (null_value)
{
str_value.set_quick(0, 0, cs);
return set_field_to_null_with_conversions(field, no_conversions);
}
/* NOTE: If null_value == FALSE, "result" must not be NULL. */
field->set_notnull();
error=field->store(result->ptr(),result->length(),cs);
str_value.set_quick(0, 0, cs);
}
else if (result_type() == REAL_RESULT)
{
double nr= entry->val_real(&null_value);
if (null_value)
return set_field_to_null(field);
field->set_notnull();
error=field->store(nr);
}
else if (result_type() == DECIMAL_RESULT)
{
my_decimal decimal_value;
my_decimal *val= entry->val_decimal(&null_value, &decimal_value);
if (null_value)
return set_field_to_null(field);
field->set_notnull();
error=field->store_decimal(val);
}
else
{
longlong nr= entry->val_int(&null_value);
if (null_value)
return set_field_to_null_with_conversions(field, no_conversions);
field->set_notnull();
error=field->store(nr, unsigned_flag);
}
return error;
}
String *
Item_func_get_user_var::val_str(String *str)
{
DBUG_ASSERT(fixed == 1);
DBUG_ENTER("Item_func_get_user_var::val_str");
if (!var_entry)
DBUG_RETURN((String*) 0); // No such variable
DBUG_RETURN(var_entry->val_str(&null_value, str, decimals));
}
double Item_func_get_user_var::val_real()
{
DBUG_ASSERT(fixed == 1);
if (!var_entry)
return 0.0; // No such variable
return (var_entry->val_real(&null_value));
}
my_decimal *Item_func_get_user_var::val_decimal(my_decimal *dec)
{
DBUG_ASSERT(fixed == 1);
if (!var_entry)
return 0;
return var_entry->val_decimal(&null_value, dec);
}
longlong Item_func_get_user_var::val_int()
{
DBUG_ASSERT(fixed == 1);
if (!var_entry)
return LL(0); // No such variable
return (var_entry->val_int(&null_value));
}
/**
Get variable by name and, if necessary, put the record of variable
use into the binary log.
When a user variable is invoked from an update query (INSERT, UPDATE etc),
stores this variable and its value in thd->user_var_events, so that it can be
written to the binlog (will be written just before the query is written, see
log.cc).
@param thd Current thread
@param name Variable name
@param[out] out_entry variable structure or NULL. The pointer is set
regardless of whether function succeeded or not.
@retval
0 OK
@retval
1 Failed to put appropriate record into binary log
*/
static int
get_var_with_binlog(THD *thd, enum_sql_command sql_command,
Name_string &name, user_var_entry **out_entry)
{
BINLOG_USER_VAR_EVENT *user_var_event;
user_var_entry *var_entry;
var_entry= get_variable(&thd->user_vars, name, 0);
/*
Any reference to user-defined variable which is done from stored
function or trigger affects their execution and the execution of the
calling statement. We must log all such variables even if they are
not involved in table-updating statements.
*/
if (!(opt_bin_log &&
(is_update_query(sql_command) || thd->in_sub_stmt)))
{
*out_entry= var_entry;
return 0;
}
if (!var_entry)
{
/*
If the variable does not exist, it's NULL, but we want to create it so
that it gets into the binlog (if it didn't, the slave could be
influenced by a variable of the same name previously set by another
thread).
We create it as if it had been explicitly set with SET before.
The 'new' mimics what sql_yacc.yy does when 'SET @a=10;'.
sql_set_variables() is what is called from 'case SQLCOM_SET_OPTION'
in dispatch_command()). Instead of building a one-element list to pass to
sql_set_variables(), we could instead manually call check() and update();
this would save memory and time; but calling sql_set_variables() makes
one unique place to maintain (sql_set_variables()).
Manipulation of the lex is necessary since free_underlaid_joins
is going to release memory belonging to the main query.
*/
List<set_var_base> tmp_var_list;
LEX *sav_lex= thd->lex, lex_tmp;
thd->lex= &lex_tmp;
lex_start(thd);
tmp_var_list.push_back(new set_var_user(new Item_func_set_user_var(name,
new Item_null(),
false)));
/* Create the variable */
if (sql_set_variables(thd, &tmp_var_list))
{
thd->lex= sav_lex;
goto err;
}
thd->lex= sav_lex;
if (!(var_entry= get_variable(&thd->user_vars, name, 0)))
goto err;
}
else if (var_entry->used_query_id == thd->query_id ||
mysql_bin_log.is_query_in_union(thd, var_entry->used_query_id))
{
/*
If this variable was already stored in user_var_events by this query
(because it's used in more than one place in the query), don't store
it.
*/
*out_entry= var_entry;
return 0;
}
uint size;
/*
First we need to store the value of var_entry when the following situation
occurs:
> set @a:=1;
> insert into t1 values (@a), (@a:=@a+1), (@a:=@a+1);
We have to write the value @a= 1 to the binlog.
We allocate the user_var_event on the user_var_events_alloc pool, not on
the this-statement-execution pool, because in SPs user_var_event objects
may need to be valid after the current [SP] statement execution pool is
destroyed.
*/
size= ALIGN_SIZE(sizeof(BINLOG_USER_VAR_EVENT)) + var_entry->length();
if (!(user_var_event= (BINLOG_USER_VAR_EVENT *)
alloc_root(thd->user_var_events_alloc, size)))
goto err;
user_var_event->value= (char*) user_var_event +
ALIGN_SIZE(sizeof(BINLOG_USER_VAR_EVENT));
user_var_event->user_var_event= var_entry;
user_var_event->type= var_entry->type();
user_var_event->charset_number= var_entry->collation.collation->number;
user_var_event->unsigned_flag= var_entry->unsigned_flag;
if (!var_entry->ptr())
{
/* NULL value*/
user_var_event->length= 0;
user_var_event->value= 0;
}
else
{
user_var_event->length= var_entry->length();
memcpy(user_var_event->value, var_entry->ptr(),
var_entry->length());
}
/* Mark that this variable has been used by this query */
var_entry->used_query_id= thd->query_id;
if (insert_dynamic(&thd->user_var_events, &user_var_event))
goto err;
*out_entry= var_entry;
return 0;
err:
*out_entry= var_entry;
return 1;
}
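/*
Description added for clarity: derive the result type, length and collation
of @variable from the current contents of the variable, looking it up (and,
for binlogging purposes, possibly creating it) via get_var_with_binlog().
If the variable does not exist, the item behaves as a NULL of string type.
*/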
void Item_func_get_user_var::fix_length_and_dec()
{
THD *thd=current_thd;
int error;
maybe_null=1;
decimals=NOT_FIXED_DEC;
max_length=MAX_BLOB_WIDTH;
error= get_var_with_binlog(thd, thd->lex->sql_command, name, &var_entry);
/*
If the variable didn't exist, it has been created as a STRING-type variable.
'var_entry' is NULL only if an error occurred during the call to
get_var_with_binlog.
*/
if (!error && var_entry)
{
m_cached_result_type= var_entry->type();
unsigned_flag= var_entry->unsigned_flag;
max_length= var_entry->length();
collation.set(var_entry->collation);
switch(m_cached_result_type) {
case REAL_RESULT:
fix_char_length(DBL_DIG + 8);
break;
case INT_RESULT:
fix_char_length(MAX_BIGINT_WIDTH);
decimals=0;
break;
case STRING_RESULT:
max_length= MAX_BLOB_WIDTH - 1;
break;
case DECIMAL_RESULT:
fix_char_length(DECIMAL_MAX_STR_LENGTH);
decimals= DECIMAL_MAX_SCALE;
break;
case ROW_RESULT: // Keep compiler happy
default:
DBUG_ASSERT(0);
break;
}
}
else
{
collation.set(&my_charset_bin, DERIVATION_IMPLICIT);
null_value= 1;
m_cached_result_type= STRING_RESULT;
max_length= MAX_BLOB_WIDTH;
}
}
bool Item_func_get_user_var::const_item() const
{
return (!var_entry || current_thd->query_id != var_entry->update_query_id);
}
enum Item_result Item_func_get_user_var::result_type() const
{
return m_cached_result_type;
}
void Item_func_get_user_var::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("(@"));
append_identifier(current_thd, str, name);
str->append(')');
}
bool Item_func_get_user_var::eq(const Item *item, bool binary_cmp) const
{
/* Assume we don't have rtti */
if (this == item)
return 1; // Same item is same.
/* Check if the other item is also a get_user_var() object */
if (item->type() != FUNC_ITEM ||
((Item_func*) item)->functype() != functype())
return 0;
Item_func_get_user_var *other=(Item_func_get_user_var*) item;
return name.eq_bin(other->name);
}
bool Item_func_get_user_var::set_value(THD *thd,
sp_rcontext * /*ctx*/, Item **it)
{
Item_func_set_user_var *suv= new Item_func_set_user_var(name, *it, false);
/*
Item_func_set_user_var is not fixed after construction, call
fix_fields().
*/
return (!suv || suv->fix_fields(thd, it) || suv->check(0) || suv->update());
}
bool Item_user_var_as_out_param::fix_fields(THD *thd, Item **ref)
{
DBUG_ASSERT(fixed == 0);
DBUG_ASSERT(thd->lex->exchange);
if (Item::fix_fields(thd, ref) ||
!(entry= get_variable(&thd->user_vars, name, 1)))
return TRUE;
entry->set_type(STRING_RESULT);
/*
Let us set the same collation which is used for loading
of fields in LOAD DATA INFILE.
(Since Item_user_var_as_out_param is used only there).
*/
entry->collation.set(thd->lex->exchange->cs ?
thd->lex->exchange->cs :
thd->variables.collation_database);
entry->update_query_id= thd->query_id;
return FALSE;
}
void Item_user_var_as_out_param::set_null_value(const CHARSET_INFO* cs)
{
entry->set_null_value(STRING_RESULT);
}
void Item_user_var_as_out_param::set_value(const char *str, uint length,
const CHARSET_INFO* cs)
{
entry->store((void*) str, length, STRING_RESULT, cs,
DERIVATION_IMPLICIT, 0 /* unsigned_arg */);
}
double Item_user_var_as_out_param::val_real()
{
DBUG_ASSERT(0);
return 0.0;
}
longlong Item_user_var_as_out_param::val_int()
{
DBUG_ASSERT(0);
return 0;
}
String* Item_user_var_as_out_param::val_str(String *str)
{
DBUG_ASSERT(0);
return 0;
}
my_decimal* Item_user_var_as_out_param::val_decimal(my_decimal *decimal_buffer)
{
DBUG_ASSERT(0);
return 0;
}
void Item_user_var_as_out_param::print(String *str, enum_query_type query_type)
{
str->append('@');
append_identifier(current_thd, str, name);
}
Item_func_get_system_var::
Item_func_get_system_var(sys_var *var_arg, enum_var_type var_type_arg,
LEX_STRING *component_arg, const char *name_arg,
size_t name_len_arg)
:var(var_arg), var_type(var_type_arg), orig_var_type(var_type_arg),
component(*component_arg), cache_present(0)
{
/* copy() will allocate the name */
item_name.copy(name_arg, (uint) name_len_arg);
}
bool Item_func_get_system_var::is_written_to_binlog()
{
return var->is_written_to_binlog(var_type);
}
void Item_func_get_system_var::update_null_value()
{
THD *thd= current_thd;
int save_no_errors= thd->no_errors;
thd->no_errors= TRUE;
Item::update_null_value();
thd->no_errors= save_no_errors;
}
void Item_func_get_system_var::fix_length_and_dec()
{
char *cptr;
maybe_null= TRUE;
max_length= 0;
if (var->check_type(var_type))
{
if (var_type != OPT_DEFAULT)
{
my_error(ER_INCORRECT_GLOBAL_LOCAL_VAR, MYF(0),
var->name.str, var_type == OPT_GLOBAL ? "SESSION" : "GLOBAL");
return;
}
/* As there was no local variable, return the global value */
var_type= OPT_GLOBAL;
}
switch (var->show_type())
{
case SHOW_LONG:
case SHOW_INT:
case SHOW_HA_ROWS:
case SHOW_LONGLONG:
unsigned_flag= TRUE;
collation.set_numeric();
fix_char_length(MY_INT64_NUM_DECIMAL_DIGITS);
decimals=0;
break;
case SHOW_SIGNED_LONG:
unsigned_flag= FALSE;
collation.set_numeric();
fix_char_length(MY_INT64_NUM_DECIMAL_DIGITS);
decimals=0;
break;
case SHOW_CHAR:
case SHOW_CHAR_PTR:
mysql_mutex_lock(&LOCK_global_system_variables);
cptr= var->show_type() == SHOW_CHAR ?
(char*) var->value_ptr(current_thd, var_type, &component) :
*(char**) var->value_ptr(current_thd, var_type, &component);
if (cptr)
max_length= system_charset_info->cset->numchars(system_charset_info,
cptr,
cptr + strlen(cptr));
mysql_mutex_unlock(&LOCK_global_system_variables);
collation.set(system_charset_info, DERIVATION_SYSCONST);
max_length*= system_charset_info->mbmaxlen;
decimals=NOT_FIXED_DEC;
break;
case SHOW_LEX_STRING:
{
mysql_mutex_lock(&LOCK_global_system_variables);
LEX_STRING *ls= ((LEX_STRING*)var->value_ptr(current_thd, var_type, &component));
max_length= system_charset_info->cset->numchars(system_charset_info,
ls->str,
ls->str + ls->length);
mysql_mutex_unlock(&LOCK_global_system_variables);
collation.set(system_charset_info, DERIVATION_SYSCONST);
max_length*= system_charset_info->mbmaxlen;
decimals=NOT_FIXED_DEC;
}
break;
case SHOW_BOOL:
case SHOW_MY_BOOL:
unsigned_flag= FALSE;
collation.set_numeric();
fix_char_length(1);
decimals=0;
break;
case SHOW_DOUBLE:
unsigned_flag= FALSE;
decimals= 6;
collation.set_numeric();
fix_char_length(DBL_DIG + 6);
break;
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
break;
}
}
void Item_func_get_system_var::print(String *str, enum_query_type query_type)
{
str->append(item_name);
}
enum Item_result Item_func_get_system_var::result_type() const
{
switch (var->show_type())
{
case SHOW_BOOL:
case SHOW_MY_BOOL:
case SHOW_INT:
case SHOW_LONG:
case SHOW_SIGNED_LONG:
case SHOW_LONGLONG:
case SHOW_HA_ROWS:
return INT_RESULT;
case SHOW_CHAR:
case SHOW_CHAR_PTR:
case SHOW_LEX_STRING:
return STRING_RESULT;
case SHOW_DOUBLE:
return REAL_RESULT;
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
return STRING_RESULT; // keep the compiler happy
}
}
enum_field_types Item_func_get_system_var::field_type() const
{
switch (var->show_type())
{
case SHOW_BOOL:
case SHOW_MY_BOOL:
case SHOW_INT:
case SHOW_LONG:
case SHOW_SIGNED_LONG:
case SHOW_LONGLONG:
case SHOW_HA_ROWS:
return MYSQL_TYPE_LONGLONG;
case SHOW_CHAR:
case SHOW_CHAR_PTR:
case SHOW_LEX_STRING:
return MYSQL_TYPE_VARCHAR;
case SHOW_DOUBLE:
return MYSQL_TYPE_DOUBLE;
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
return MYSQL_TYPE_VARCHAR; // keep the compiler happy
}
}
/*
Uses var, var_type, component, cache_present, used_query_id, thd,
cached_llval, null_value, cached_null_value
*/
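/*
Sketch of one expansion (illustrative only): for "case SHOW_INT:
get_sys_var_safe (uint);" the macro reads the variable as a uint under
LOCK_global_system_variables, caches it in cached_llval as a longlong,
records the query id and the cached null flag, and returns the value.
*/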
#define get_sys_var_safe(type) \
do { \
type value; \
mysql_mutex_lock(&LOCK_global_system_variables); \
value= *(type*) var->value_ptr(thd, var_type, &component); \
mysql_mutex_unlock(&LOCK_global_system_variables); \
cache_present |= GET_SYS_VAR_CACHE_LONG; \
used_query_id= thd->query_id; \
cached_llval= null_value ? 0 : (longlong) value; \
cached_null_value= null_value; \
return cached_llval; \
} while (0)
longlong Item_func_get_system_var::val_int()
{
THD *thd= current_thd;
if (cache_present && thd->query_id == used_query_id)
{
if (cache_present & GET_SYS_VAR_CACHE_LONG)
{
null_value= cached_null_value;
return cached_llval;
}
else if (cache_present & GET_SYS_VAR_CACHE_DOUBLE)
{
null_value= cached_null_value;
cached_llval= (longlong) cached_dval;
cache_present|= GET_SYS_VAR_CACHE_LONG;
return cached_llval;
}
else if (cache_present & GET_SYS_VAR_CACHE_STRING)
{
null_value= cached_null_value;
if (!null_value)
cached_llval= longlong_from_string_with_check (cached_strval.charset(),
cached_strval.c_ptr(),
cached_strval.c_ptr() +
cached_strval.length());
else
cached_llval= 0;
cache_present|= GET_SYS_VAR_CACHE_LONG;
return cached_llval;
}
}
switch (var->show_type())
{
case SHOW_INT: get_sys_var_safe (uint);
case SHOW_LONG: get_sys_var_safe (ulong);
case SHOW_SIGNED_LONG: get_sys_var_safe (long);
case SHOW_LONGLONG: get_sys_var_safe (ulonglong);
case SHOW_HA_ROWS: get_sys_var_safe (ha_rows);
case SHOW_BOOL: get_sys_var_safe (bool);
case SHOW_MY_BOOL: get_sys_var_safe (my_bool);
case SHOW_DOUBLE:
{
double dval= val_real();
used_query_id= thd->query_id;
cached_llval= (longlong) dval;
cache_present|= GET_SYS_VAR_CACHE_LONG;
return cached_llval;
}
case SHOW_CHAR:
case SHOW_CHAR_PTR:
case SHOW_LEX_STRING:
{
String *str_val= val_str(NULL);
// Treat empty strings as NULL, like val_real() does.
if (str_val && str_val->length())
cached_llval= longlong_from_string_with_check (system_charset_info,
str_val->c_ptr(),
str_val->c_ptr() +
str_val->length());
else
{
null_value= TRUE;
cached_llval= 0;
}
cache_present|= GET_SYS_VAR_CACHE_LONG;
return cached_llval;
}
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
return 0; // keep the compiler happy
}
}
String* Item_func_get_system_var::val_str(String* str)
{
THD *thd= current_thd;
if (cache_present && thd->query_id == used_query_id)
{
if (cache_present & GET_SYS_VAR_CACHE_STRING)
{
null_value= cached_null_value;
return null_value ? NULL : &cached_strval;
}
else if (cache_present & GET_SYS_VAR_CACHE_LONG)
{
null_value= cached_null_value;
if (!null_value)
cached_strval.set (cached_llval, collation.collation);
cache_present|= GET_SYS_VAR_CACHE_STRING;
return null_value ? NULL : &cached_strval;
}
else if (cache_present & GET_SYS_VAR_CACHE_DOUBLE)
{
null_value= cached_null_value;
if (!null_value)
cached_strval.set_real (cached_dval, decimals, collation.collation);
cache_present|= GET_SYS_VAR_CACHE_STRING;
return null_value ? NULL : &cached_strval;
}
}
str= &cached_strval;
switch (var->show_type())
{
case SHOW_CHAR:
case SHOW_CHAR_PTR:
case SHOW_LEX_STRING:
{
mysql_mutex_lock(&LOCK_global_system_variables);
char *cptr= var->show_type() == SHOW_CHAR ?
(char*) var->value_ptr(thd, var_type, &component) :
*(char**) var->value_ptr(thd, var_type, &component);
if (cptr)
{
size_t len= var->show_type() == SHOW_LEX_STRING ?
((LEX_STRING*)(var->value_ptr(thd, var_type, &component)))->length :
strlen(cptr);
if (str->copy(cptr, len, collation.collation))
{
null_value= TRUE;
str= NULL;
}
}
else
{
null_value= TRUE;
str= NULL;
}
mysql_mutex_unlock(&LOCK_global_system_variables);
break;
}
case SHOW_INT:
case SHOW_LONG:
case SHOW_SIGNED_LONG:
case SHOW_LONGLONG:
case SHOW_HA_ROWS:
case SHOW_BOOL:
case SHOW_MY_BOOL:
str->set (val_int(), collation.collation);
break;
case SHOW_DOUBLE:
str->set_real (val_real(), decimals, collation.collation);
break;
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
str= NULL;
break;
}
cache_present|= GET_SYS_VAR_CACHE_STRING;
used_query_id= thd->query_id;
cached_null_value= null_value;
return str;
}
double Item_func_get_system_var::val_real()
{
THD *thd= current_thd;
if (cache_present && thd->query_id == used_query_id)
{
if (cache_present & GET_SYS_VAR_CACHE_DOUBLE)
{
null_value= cached_null_value;
return cached_dval;
}
else if (cache_present & GET_SYS_VAR_CACHE_LONG)
{
null_value= cached_null_value;
cached_dval= (double)cached_llval;
cache_present|= GET_SYS_VAR_CACHE_DOUBLE;
return cached_dval;
}
else if (cache_present & GET_SYS_VAR_CACHE_STRING)
{
null_value= cached_null_value;
if (!null_value)
cached_dval= double_from_string_with_check (cached_strval.charset(),
cached_strval.c_ptr(),
cached_strval.c_ptr() +
cached_strval.length());
else
cached_dval= 0;
cache_present|= GET_SYS_VAR_CACHE_DOUBLE;
return cached_dval;
}
}
switch (var->show_type())
{
case SHOW_DOUBLE:
mysql_mutex_lock(&LOCK_global_system_variables);
cached_dval= *(double*) var->value_ptr(thd, var_type, &component);
mysql_mutex_unlock(&LOCK_global_system_variables);
used_query_id= thd->query_id;
cached_null_value= null_value;
if (null_value)
cached_dval= 0;
cache_present|= GET_SYS_VAR_CACHE_DOUBLE;
return cached_dval;
case SHOW_CHAR:
case SHOW_LEX_STRING:
case SHOW_CHAR_PTR:
{
mysql_mutex_lock(&LOCK_global_system_variables);
char *cptr= var->show_type() == SHOW_CHAR ?
(char*) var->value_ptr(thd, var_type, &component) :
*(char**) var->value_ptr(thd, var_type, &component);
// Treat empty strings as NULL, like val_int() does.
if (cptr && *cptr)
cached_dval= double_from_string_with_check (system_charset_info,
cptr, cptr + strlen (cptr));
else
{
null_value= TRUE;
cached_dval= 0;
}
mysql_mutex_unlock(&LOCK_global_system_variables);
used_query_id= thd->query_id;
cached_null_value= null_value;
cache_present|= GET_SYS_VAR_CACHE_DOUBLE;
return cached_dval;
}
case SHOW_INT:
case SHOW_LONG:
case SHOW_SIGNED_LONG:
case SHOW_LONGLONG:
case SHOW_HA_ROWS:
case SHOW_BOOL:
case SHOW_MY_BOOL:
cached_dval= (double) val_int();
cache_present|= GET_SYS_VAR_CACHE_DOUBLE;
used_query_id= thd->query_id;
cached_null_value= null_value;
return cached_dval;
default:
my_error(ER_VAR_CANT_BE_READ, MYF(0), var->name.str);
return 0;
}
}
bool Item_func_get_system_var::eq(const Item *item, bool binary_cmp) const
{
/* Assume we don't have rtti */
if (this == item)
return 1; // Same item is same.
/* Check if the other item is also a get_system_var() object */
if (item->type() != FUNC_ITEM ||
((Item_func*) item)->functype() != functype())
return 0;
Item_func_get_system_var *other=(Item_func_get_system_var*) item;
return (var == other->var && var_type == other->var_type);
}
void Item_func_get_system_var::cleanup()
{
Item_func::cleanup();
cache_present= 0;
var_type= orig_var_type;
cached_strval.free();
}
void Item_func_match::init_search(bool no_order)
{
DBUG_ENTER("Item_func_match::init_search");
/*
We will skip execution if the item is not fixed
with fix_fields()
*/
if (!fixed)
DBUG_VOID_RETURN;
/* Check if init_search() has been called before */
if (ft_handler)
{
/*
We should reset ft_handler as it is cleaned up
on destruction of FT_SELECT object
(necessary in case of re-execution of subquery).
TODO: FT_SELECT should not clean up ft_handler.
*/
if (join_key)
table->file->ft_handler= ft_handler;
DBUG_VOID_RETURN;
}
if (key == NO_SUCH_KEY)
{
List<Item> fields;
fields.push_back(new Item_string(" ",1, cmp_collation.collation));
for (uint i=1; i < arg_count; i++)
fields.push_back(args[i]);
concat_ws=new Item_func_concat_ws(fields);
/*
The above function is used only to get a value and does not need fix_fields():
Item_string - basic constant
fields - fix_fields() was already called for these arguments
Item_func_concat_ws - does not need fix_fields() to produce a value
*/
concat_ws->quick_fix_field();
}
if (master)
{
join_key=master->join_key=join_key|master->join_key;
master->init_search(no_order);
ft_handler=master->ft_handler;
join_key=master->join_key;
DBUG_VOID_RETURN;
}
String *ft_tmp= 0;
// MATCH ... AGAINST (NULL) is meaningless, but possible
if (!(ft_tmp=key_item()->val_str(&value)))
{
ft_tmp= &value;
value.set("",0,cmp_collation.collation);
}
if (ft_tmp->charset() != cmp_collation.collation)
{
uint dummy_errors;
search_value.copy(ft_tmp->ptr(), ft_tmp->length(), ft_tmp->charset(),
cmp_collation.collation, &dummy_errors);
ft_tmp= &search_value;
}
if (join_key && !no_order)
flags|=FT_SORTED;
ft_handler=table->file->ft_init_ext(flags, key, ft_tmp);
if (join_key)
table->file->ft_handler=ft_handler;
DBUG_VOID_RETURN;
}
bool Item_func_match::fix_fields(THD *thd, Item **ref)
{
DBUG_ASSERT(fixed == 0);
Item *UNINIT_VAR(item); // Safe as arg_count is > 1
maybe_null=1;
join_key=0;
/*
const_item is assumed in quite a few places, so it would be difficult
to remove; if it were ever to be removed, this should include
modifications to find_best and auto_close as a complement to the auto_init
code above.
*/
if (Item_func::fix_fields(thd, ref) ||
!args[0]->const_during_execution())
{
my_error(ER_WRONG_ARGUMENTS,MYF(0),"AGAINST");
return TRUE;
}
bool allows_multi_table_search= true;
const_item_cache=0;
for (uint i=1 ; i < arg_count ; i++)
{
item=args[i];
if (item->type() == Item::REF_ITEM)
args[i]= item= *((Item_ref *)item)->ref;
if (item->type() != Item::FIELD_ITEM)
{
my_error(ER_WRONG_ARGUMENTS, MYF(0), "AGAINST");
return TRUE;
}
allows_multi_table_search &=
allows_search_on_non_indexed_columns(((Item_field *)item)->field->table);
}
/*
Check that all columns come from the same table.
We've already checked that columns in MATCH are fields so
PARAM_TABLE_BIT can only appear from AGAINST argument.
*/
if ((used_tables_cache & ~PARAM_TABLE_BIT) != item->used_tables())
key=NO_SUCH_KEY;
if (key == NO_SUCH_KEY && !allows_multi_table_search)
{
my_error(ER_WRONG_ARGUMENTS,MYF(0),"MATCH");
return TRUE;
}
table=((Item_field *)item)->field->table;
if (!(table->file->ha_table_flags() & HA_CAN_FULLTEXT))
{
my_error(ER_TABLE_CANT_HANDLE_FT, MYF(0));
return 1;
}
table->fulltext_searched=1;
return agg_item_collations_for_comparison(cmp_collation, func_name(),
args+1, arg_count-1, 0);
}
bool Item_func_match::fix_index()
{
Item_field *item;
uint ft_to_key[MAX_KEY], ft_cnt[MAX_KEY], fts=0, keynr;
uint max_cnt=0, mkeys=0, i;
/*
We will skip execution if the item is not fixed
with fix_fields()
*/
if (!fixed)
return false;
if (key == NO_SUCH_KEY)
return 0;
if (!table)
goto err;
for (keynr=0 ; keynr < table->s->keys ; keynr++)
{
if ((table->key_info[keynr].flags & HA_FULLTEXT) &&
(flags & FT_BOOL ? table->keys_in_use_for_query.is_set(keynr) :
table->s->keys_in_use.is_set(keynr)))
{
ft_to_key[fts]=keynr;
ft_cnt[fts]=0;
fts++;
}
}
if (!fts)
goto err;
for (i=1; i < arg_count; i++)
{
item=(Item_field*)args[i];
for (keynr=0 ; keynr < fts ; keynr++)
{
KEY *ft_key=&table->key_info[ft_to_key[keynr]];
uint key_parts=ft_key->user_defined_key_parts;
for (uint part=0 ; part < key_parts ; part++)
{
if (item->field->eq(ft_key->key_part[part].field))
ft_cnt[keynr]++;
}
}
}
for (keynr=0 ; keynr < fts ; keynr++)
{
if (ft_cnt[keynr] > max_cnt)
{
mkeys=0;
max_cnt=ft_cnt[mkeys]=ft_cnt[keynr];
ft_to_key[mkeys]=ft_to_key[keynr];
continue;
}
if (max_cnt && ft_cnt[keynr] == max_cnt)
{
mkeys++;
ft_cnt[mkeys]=ft_cnt[keynr];
ft_to_key[mkeys]=ft_to_key[keynr];
continue;
}
}
for (keynr=0 ; keynr <= mkeys ; keynr++)
{
// partial keys don't work
if (max_cnt < arg_count-1 ||
max_cnt < table->key_info[ft_to_key[keynr]].user_defined_key_parts)
continue;
key=ft_to_key[keynr];
return 0;
}
err:
if (allows_search_on_non_indexed_columns(table))
{
key=NO_SUCH_KEY;
return 0;
}
my_message(ER_FT_MATCHING_KEY_NOT_FOUND,
ER(ER_FT_MATCHING_KEY_NOT_FOUND), MYF(0));
return 1;
}
bool Item_func_match::eq(const Item *item, bool binary_cmp) const
{
/* We ignore the FT_SORTED flag when checking for equality since the result
is equivalent regardless of sorting */
if (item->type() != FUNC_ITEM ||
((Item_func*)item)->functype() != FT_FUNC ||
(flags | FT_SORTED) != (((Item_func_match*)item)->flags | FT_SORTED))
return 0;
Item_func_match *ifm=(Item_func_match*) item;
if (key == ifm->key && table == ifm->table &&
key_item()->eq(ifm->key_item(), binary_cmp))
return 1;
return 0;
}
double Item_func_match::val_real()
{
DBUG_ASSERT(fixed == 1);
DBUG_ENTER("Item_func_match::val");
if (ft_handler == NULL)
DBUG_RETURN(-1.0);
if (key != NO_SUCH_KEY && table->null_row) /* NULL row from an outer join */
DBUG_RETURN(0.0);
if (join_key)
{
if (table->file->ft_handler)
DBUG_RETURN(ft_handler->please->get_relevance(ft_handler));
join_key=0;
}
if (key == NO_SUCH_KEY)
{
String *a= concat_ws->val_str(&value);
if ((null_value= (a == 0)) || !a->length())
DBUG_RETURN(0);
DBUG_RETURN(ft_handler->please->find_relevance(ft_handler,
(uchar *)a->ptr(), a->length()));
}
DBUG_RETURN(ft_handler->please->find_relevance(ft_handler,
table->record[0], 0));
}
void Item_func_match::print(String *str, enum_query_type query_type)
{
str->append(STRING_WITH_LEN("(match "));
print_args(str, 1, query_type);
str->append(STRING_WITH_LEN(" against ("));
args[0]->print(str, query_type);
if (flags & FT_BOOL)
str->append(STRING_WITH_LEN(" in boolean mode"));
else if (flags & FT_EXPAND)
str->append(STRING_WITH_LEN(" with query expansion"));
str->append(STRING_WITH_LEN("))"));
}
longlong Item_func_bit_xor::val_int()
{
DBUG_ASSERT(fixed == 1);
ulonglong arg1= (ulonglong) args[0]->val_int();
ulonglong arg2= (ulonglong) args[1]->val_int();
if ((null_value= (args[0]->null_value || args[1]->null_value)))
return 0;
return (longlong) (arg1 ^ arg2);
}
/***************************************************************************
System variables
****************************************************************************/
/**
Return the value of a system variable base[.name] as a constant item.
@param thd Thread handler
@param var_type global / session
@param name Name of base or system variable
@param component Component.
@note
If component.str = 0 then the variable name is in 'name'
@return
- 0 : error
- # : constant item
*/
Item *get_system_var(THD *thd, enum_var_type var_type, LEX_STRING name,
LEX_STRING component)
{
sys_var *var;
LEX_STRING *base_name, *component_name;
if (component.str)
{
base_name= &component;
component_name= &name;
}
else
{
base_name= &name;
component_name= &component; // Empty string
}
if (!(var= find_sys_var(thd, base_name->str, base_name->length)))
return 0;
if (component.str)
{
if (!var->is_struct())
{
my_error(ER_VARIABLE_IS_NOT_STRUCT, MYF(0), base_name->str);
return 0;
}
}
thd->lex->uncacheable(UNCACHEABLE_SIDEEFFECT);
set_if_smaller(component_name->length, MAX_SYS_VAR_LENGTH);
var->do_deprecated_warning(thd);
return new Item_func_get_system_var(var, var_type, component_name,
NULL, 0);
}
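/*
Illustrative note (not taken from this source): a reference such as
@@global.max_connections in a statement is resolved through
get_system_var() above into an Item_func_get_system_var instance.
*/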
/**
Check a user level lock.
Sets null_value=TRUE on error.
@retval
1 Available
@retval
0 Already taken, or error
*/
longlong Item_func_is_free_lock::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
User_level_lock *ull;
null_value=0;
if (!res || !res->length())
{
null_value=1;
return 0;
}
mysql_mutex_lock(&LOCK_user_locks);
ull= (User_level_lock *) my_hash_search(&hash_user_locks, (uchar*) res->ptr(),
(size_t) res->length());
mysql_mutex_unlock(&LOCK_user_locks);
if (!ull || !ull->locked)
return 1;
return 0;
}
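/*
Illustrative usage of the two lock-inspection functions (not taken from
this source):
SELECT IS_FREE_LOCK('my_lock'); -- 1 if available, 0 if taken, NULL on error
SELECT IS_USED_LOCK('my_lock'); -- connection id of the holder, or NULL
IS_USED_LOCK() is implemented by val_int() below.
*/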
longlong Item_func_is_used_lock::val_int()
{
DBUG_ASSERT(fixed == 1);
String *res=args[0]->val_str(&value);
User_level_lock *ull;
null_value=1;
if (!res || !res->length())
return 0;
mysql_mutex_lock(&LOCK_user_locks);
ull= (User_level_lock *) my_hash_search(&hash_user_locks, (uchar*) res->ptr(),
(size_t) res->length());
mysql_mutex_unlock(&LOCK_user_locks);
if (!ull || !ull->locked)
return 0;
null_value=0;
return ull->thread_id;
}
longlong Item_func_row_count::val_int()
{
DBUG_ASSERT(fixed == 1);
THD *thd= current_thd;
return thd->get_row_count_func();
}
Item_func_sp::Item_func_sp(Name_resolution_context *context_arg, sp_name *name)
:Item_func(), context(context_arg), m_name(name), m_sp(NULL), sp_result_field(NULL)
{
maybe_null= 1;
m_name->init_qname(current_thd);
dummy_table= (TABLE*) sql_calloc(sizeof(TABLE)+ sizeof(TABLE_SHARE));
dummy_table->s= (TABLE_SHARE*) (dummy_table+1);
with_stored_program= true;
}
Item_func_sp::Item_func_sp(Name_resolution_context *context_arg,
sp_name *name, List<Item> &list)
:Item_func(list), context(context_arg), m_name(name), m_sp(NULL),sp_result_field(NULL)
{
maybe_null= 1;
m_name->init_qname(current_thd);
dummy_table= (TABLE*) sql_calloc(sizeof(TABLE)+ sizeof(TABLE_SHARE));
dummy_table->s= (TABLE_SHARE*) (dummy_table+1);
with_stored_program= true;
}
void
Item_func_sp::cleanup()
{
if (sp_result_field)
{
delete sp_result_field;
sp_result_field= NULL;
}
m_sp= NULL;
dummy_table->alias= NULL;
Item_func::cleanup();
tables_locked_cache= false;
with_stored_program= true;
}
const char *
Item_func_sp::func_name() const
{
THD *thd= current_thd;
/* Calculate length to avoid reallocation of string for sure */
uint len= (((m_name->m_explicit_name ? m_name->m_db.length : 0) +
m_name->m_name.length)*2 + //characters*quoting
2 + // ` and `
(m_name->m_explicit_name ?
3 : 0) + // '`', '`' and '.' for the db
1 + // end of string
ALIGN_SIZE(1)); // to avoid String reallocation
String qname((char *)alloc_root(thd->mem_root, len), len,
system_charset_info);
qname.length(0);
if (m_name->m_explicit_name)
{
append_identifier(thd, &qname, m_name->m_db.str, m_name->m_db.length);
qname.append('.');
}
append_identifier(thd, &qname, m_name->m_name.str, m_name->m_name.length);
return qname.ptr();
}
table_map Item_func_sp::get_initial_pseudo_tables() const
{
return m_sp->m_chistics->detistic ? 0 : RAND_TABLE_BIT;
}
void my_missing_function_error(const LEX_STRING &token, const char *func_name)
{
if (token.length && is_lex_native_function (&token))
my_error(ER_FUNC_INEXISTENT_NAME_COLLISION, MYF(0), func_name);
else
my_error(ER_SP_DOES_NOT_EXIST, MYF(0), "FUNCTION", func_name);
}
/**
@brief Initialize the result field by creating a temporary dummy table
and assigning it to a newly created field object. Metadata used to
create the field is fetched from the sp_head belonging to the stored
procedure found in the stored procedure function cache.
@note This function should be called from fix_fields to init the result
field. It is somewhat related to Item_field.
@see Item_field
@param thd A pointer to the session and thread context.
@return Function return error status.
@retval TRUE is returned on an error
@retval FALSE is returned on success.
*/
bool
Item_func_sp::init_result_field(THD *thd)
{
LEX_STRING empty_name= { C_STRING_WITH_LEN("") };
TABLE_SHARE *share;
DBUG_ENTER("Item_func_sp::init_result_field");
DBUG_ASSERT(m_sp == NULL);
DBUG_ASSERT(sp_result_field == NULL);
if (!(m_sp= sp_find_routine(thd, SP_TYPE_FUNCTION, m_name,
&thd->sp_func_cache, TRUE)))
{
my_missing_function_error (m_name->m_name, m_name->m_qname.str);
context->process_error(thd);
DBUG_RETURN(TRUE);
}
/*
A Field need to be attached to a Table.
Below we "create" a dummy table by initializing
the needed pointers.
*/
share= dummy_table->s;
dummy_table->alias = "";
dummy_table->maybe_null = maybe_null;
dummy_table->in_use= thd;
dummy_table->copy_blobs= TRUE;
share->table_cache_key = empty_name;
share->table_name = empty_name;
if (!(sp_result_field= m_sp->create_result_field(max_length, item_name.ptr(),
dummy_table)))
{
DBUG_RETURN(TRUE);
}
if (sp_result_field->pack_length() > sizeof(result_buf))
{
void *tmp;
if (!(tmp= sql_alloc(sp_result_field->pack_length())))
DBUG_RETURN(TRUE);
sp_result_field->move_field((uchar*) tmp);
}
else
sp_result_field->move_field(result_buf);
sp_result_field->set_null_ptr((uchar *) &null_value, 1);
DBUG_RETURN(FALSE);
}
/**
@brief Initialize local members with values from the Field interface.
@note called from Item::fix_fields.
*/
void Item_func_sp::fix_length_and_dec()
{
DBUG_ENTER("Item_func_sp::fix_length_and_dec");
DBUG_ASSERT(sp_result_field);
decimals= sp_result_field->decimals();
max_length= sp_result_field->field_length;
collation.set(sp_result_field->charset());
maybe_null= 1;
unsigned_flag= MY_TEST(sp_result_field->flags & UNSIGNED_FLAG);
DBUG_VOID_RETURN;
}
void Item_func_sp::update_null_value()
{
/*
This method is called when we try to check if the item value is NULL.
We call Item_func_sp::execute() to get value of null_value attribute
as a side effect of its execution.
We ignore any error since update_null_value() doesn't return a value.
We used to delegate the nullability check to Item::update_null_value as
a result of a chain of function calls:
Item_func_isnull::val_int --> Item_func::is_null -->
Item::update_null_value --> Item_func_sp::val_int -->
Field_varstring::val_int
Such an approach resulted in a call of push_warning_printf() in case
a stored program value couldn't be cast to integer (for example, when
a stored function was declared as returning varchar(1) and its
implementation returned "Y" from its body).
*/
execute();
}
/**
@brief Execute function & store value in field.
@return Function returns error status.
@retval FALSE on success.
@retval TRUE if an error occurred.
*/
bool
Item_func_sp::execute()
{
THD *thd= current_thd;
/* Execute function and store the return value in the field. */
if (execute_impl(thd))
{
null_value= 1;
context->process_error(thd);
if (thd->killed)
thd->send_kill_message();
return TRUE;
}
/* Check that the field (the value) is not NULL. */
null_value= sp_result_field->is_null();
return null_value;
}
/**
@brief Execute function and store the return value in the field.
@note This function was intended to be the concrete implementation of
the interface function execute. This was never realized.
@return The error state.
@retval FALSE on success
@retval TRUE if an error occurred.
*/
bool
Item_func_sp::execute_impl(THD *thd)
{
bool err_status= TRUE;
Sub_statement_state statement_state;
#ifndef NO_EMBEDDED_ACCESS_CHECKS
Security_context *save_security_ctx= thd->security_ctx;
#endif
enum enum_sp_data_access access=
(m_sp->m_chistics->daccess == SP_DEFAULT_ACCESS) ?
SP_DEFAULT_ACCESS_MAPPING : m_sp->m_chistics->daccess;
DBUG_ENTER("Item_func_sp::execute_impl");
#ifndef NO_EMBEDDED_ACCESS_CHECKS
if (context->security_ctx)
{
/* Set view definer security context */
thd->security_ctx= context->security_ctx;
}
#endif
if (sp_check_access(thd))
goto error;
/*
Throw an error if a non-deterministic function is called while
statement-based replication (SBR) is active.
*/
if (!m_sp->m_chistics->detistic && !trust_function_creators &&
(access == SP_CONTAINS_SQL || access == SP_MODIFIES_SQL_DATA) &&
(mysql_bin_log.is_open() &&
thd->variables.binlog_format == BINLOG_FORMAT_STMT))
{
my_error(ER_BINLOG_UNSAFE_ROUTINE, MYF(0));
goto error;
}
/*
Disable the binlogging if this is not a SELECT statement. If this is a
SELECT, leave binlogging on, so execute_function() code writes the
function call into binlog.
*/
thd->reset_sub_statement_state(&statement_state, SUB_STMT_FUNCTION);
err_status= m_sp->execute_function(thd, args, arg_count, sp_result_field);
thd->restore_sub_statement_state(&statement_state);
error:
#ifndef NO_EMBEDDED_ACCESS_CHECKS
thd->security_ctx= save_security_ctx;
#endif
DBUG_RETURN(err_status);
}
void
Item_func_sp::make_field(Send_field *tmp_field)
{
DBUG_ENTER("Item_func_sp::make_field");
DBUG_ASSERT(sp_result_field);
sp_result_field->make_field(tmp_field);
if (item_name.is_set())
tmp_field->col_name= item_name.ptr();
DBUG_VOID_RETURN;
}
enum enum_field_types
Item_func_sp::field_type() const
{
DBUG_ENTER("Item_func_sp::field_type");
DBUG_ASSERT(sp_result_field);
DBUG_RETURN(sp_result_field->type());
}
Item_result
Item_func_sp::result_type() const
{
DBUG_ENTER("Item_func_sp::result_type");
DBUG_PRINT("info", ("m_sp = %p", (void *) m_sp));
DBUG_ASSERT(sp_result_field);
DBUG_RETURN(sp_result_field->result_type());
}
longlong Item_func_found_rows::val_int()
{
DBUG_ASSERT(fixed == 1);
return current_thd->found_rows();
}
Field *
Item_func_sp::tmp_table_field(TABLE *t_arg)
{
DBUG_ENTER("Item_func_sp::tmp_table_field");
DBUG_ASSERT(sp_result_field);
DBUG_RETURN(sp_result_field);
}
/**
@brief Checks if the requested access to the function can be granted to the user.
If the function isn't found yet, it searches for the function first.
If the function can't be found or the user doesn't have the requested access,
an error is raised.
@param thd thread handler
@return Indication if the access was granted or not.
@retval FALSE Access is granted.
@retval TRUE Requested access can't be granted or the function doesn't exist.
*/
bool
Item_func_sp::sp_check_access(THD *thd)
{
DBUG_ENTER("Item_func_sp::sp_check_access");
DBUG_ASSERT(m_sp);
#ifndef NO_EMBEDDED_ACCESS_CHECKS
if (check_routine_access(thd, EXECUTE_ACL,
m_sp->m_db.str, m_sp->m_name.str, 0, FALSE))
DBUG_RETURN(TRUE);
#endif
DBUG_RETURN(FALSE);
}
bool
Item_func_sp::fix_fields(THD *thd, Item **ref)
{
bool res;
#ifndef NO_EMBEDDED_ACCESS_CHECKS
Security_context *save_security_ctx= thd->security_ctx;
#endif
DBUG_ENTER("Item_func_sp::fix_fields");
DBUG_ASSERT(fixed == 0);
#ifndef NO_EMBEDDED_ACCESS_CHECKS
/*
Check privileges to execute the function while creating a view and
while executing the function from a SELECT.
*/
if (!(thd->lex->context_analysis_only & CONTEXT_ANALYSIS_ONLY_VIEW) ||
(thd->lex->sql_command == SQLCOM_CREATE_VIEW))
{
if (context->security_ctx)
{
/* Set view definer security context */
thd->security_ctx= context->security_ctx;
}
/*
Check whether user has execute privilege or not
*/
res= check_routine_access(thd, EXECUTE_ACL, m_name->m_db.str,
m_name->m_name.str, 0, FALSE);
thd->security_ctx= save_security_ctx;
if (res)
{
context->process_error(thd);
DBUG_RETURN(res);
}
}
#endif
/*
We must call init_result_field before Item_func::fix_fields()
to make m_sp and result_field members available to fix_length_and_dec(),
which is called from Item_func::fix_fields().
*/
res= init_result_field(thd);
if (res)
DBUG_RETURN(res);
res= Item_func::fix_fields(thd, ref);
/* This is reset/set by Item_func::fix_fields. */
with_stored_program= true;
if (!m_sp->m_chistics->detistic || !tables_locked_cache)
const_item_cache= false;
if (res)
DBUG_RETURN(res);
if (thd->lex->context_analysis_only & CONTEXT_ANALYSIS_ONLY_VIEW)
{
/*
Here we check privileges of the stored routine only during view
creation, in order to validate the view. A runtime check is
performed in Item_func_sp::execute(), and this method is not
called during context analysis. Notice that during view
creation we do not look into stored routine bodies and do not
check privileges of its statements, which would probably be a
good idea especially if the view has SQL SECURITY DEFINER and
the used stored procedure has SQL SECURITY DEFINER.
*/
res= sp_check_access(thd);
#ifndef NO_EMBEDDED_ACCESS_CHECKS
/*
Try to set and restore the security context to see whether it's valid
*/
Security_context *save_secutiry_ctx;
res= m_sp->set_security_ctx(thd, &save_secutiry_ctx);
if (!res)
m_sp->m_security_ctx.restore_security_context(thd, save_secutiry_ctx);
#endif /* ! NO_EMBEDDED_ACCESS_CHECKS */
}
DBUG_RETURN(res);
}
void Item_func_sp::update_used_tables()
{
Item_func::update_used_tables();
if (!m_sp->m_chistics->detistic)
const_item_cache= false;
/* This is reset by Item_func::update_used_tables(). */
with_stored_program= true;
}
/*
uuid_short handling.
The short uuid is defined as a longlong that contains the following bytes:
Bytes Comment
1 Server_id & 255
4 Startup time of server in seconds
3 Incrementor
  This means that a uuid is guaranteed to be unique
  even in a replication environment if the following holds:
  - The last byte of the server id is unique
  - Between two shutdowns of the server, uuid_short() is not called more
    than an average of 2^24 = 16M times per second.
*/
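/*
  Illustrative sketch only (not part of the server logic): composing one
  value from the three fields described above.  The names mirror
  uuid_short_init() and Item_func_uuid_short::val_int() below; "incrementor"
  stands for the low 24-bit counter and is not a real variable here.

    ulonglong example=
      (((ulonglong) (server_id & 255)) << 56) +     1 byte:  server id
      (((ulonglong) server_start_time) << 24) +     4 bytes: startup time
      incrementor;                                  3 bytes: counter
*/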
ulonglong uuid_value;
void uuid_short_init()
{
uuid_value= ((((ulonglong) server_id) << 56) +
(((ulonglong) server_start_time) << 24));
}
longlong Item_func_uuid_short::val_int()
{
ulonglong val;
mysql_mutex_lock(&LOCK_uuid_generator);
val= uuid_value++;
mysql_mutex_unlock(&LOCK_uuid_generator);
return (longlong) val;
}<|fim▁end|> | return check_float_overflow(atan2(value,val2));
} |
<|file_name|>ppaux.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::def;
use middle::region;
use middle::subst::{VecPerParamSpace,Subst};
use middle::subst;
use middle::ty::{BoundRegion, BrAnon, BrNamed};
use middle::ty::{ReEarlyBound, BrFresh, ctxt};
use middle::ty::{ReFree, ReScope, ReInfer, ReStatic, Region, ReEmpty};
use middle::ty::{ReSkolemized, ReVar, BrEnv};
use middle::ty::{mt, Ty, ParamTy};
use middle::ty::{ty_bool, ty_char, ty_struct, ty_enum};
use middle::ty::{ty_err, ty_str, ty_vec, ty_float, ty_bare_fn};
use middle::ty::{ty_param, ty_ptr, ty_rptr, ty_tup};
use middle::ty::ty_closure;
use middle::ty::{ty_uniq, ty_trait, ty_int, ty_uint, ty_infer};
use middle::ty;
use middle::ty_fold::TypeFoldable;
use std::collections::HashMap;
use std::collections::hash_state::HashState;
use std::hash::Hash;
use std::rc::Rc;
use syntax::abi;
use syntax::ast_map;
use syntax::codemap::{Span, Pos};
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::{ast, ast_util};
use syntax::owned_slice::OwnedSlice;
/// Produces a string suitable for debugging output.
pub trait Repr<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String;
}
/// Produces a string suitable for showing to the user.
pub trait UserString<'tcx> : Repr<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String;
}
pub fn note_and_explain_region(cx: &ctxt,
prefix: &str,
region: ty::Region,
suffix: &str) -> Option<Span> {
match explain_region_and_span(cx, region) {
(ref str, Some(span)) => {
cx.sess.span_note(
span,
&format!("{}{}{}", prefix, *str, suffix));
Some(span)
}
(ref str, None) => {
cx.sess.note(
&format!("{}{}{}", prefix, *str, suffix));
None
}
}
}
/// When a free region is associated with `item`, how should we describe the item in the error
/// message.
fn item_scope_tag(item: &ast::Item) -> &'static str {
match item.node {
ast::ItemImpl(..) => "impl",
ast::ItemStruct(..) => "struct",
ast::ItemEnum(..) => "enum",
ast::ItemTrait(..) => "trait",
ast::ItemFn(..) => "function body",
_ => "item"
}
}
pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
-> (String, Option<Span>) {
return match region {
ReScope(scope) => {
let new_string;
let on_unknown_scope = || {
(format!("unknown scope: {:?}. Please report a bug.", scope), None)
};
let span = match scope.span(&cx.map) {
Some(s) => s,
None => return on_unknown_scope(),
};
let tag = match cx.map.find(scope.node_id()) {
Some(ast_map::NodeBlock(_)) => "block",
Some(ast_map::NodeExpr(expr)) => match expr.node {
ast::ExprCall(..) => "call",
ast::ExprMethodCall(..) => "method call",
ast::ExprMatch(_, _, ast::MatchSource::IfLetDesugar { .. }) => "if let",
ast::ExprMatch(_, _, ast::MatchSource::WhileLetDesugar) => "while let",
ast::ExprMatch(_, _, ast::MatchSource::ForLoopDesugar) => "for",
ast::ExprMatch(..) => "match",
_ => "expression",
},
Some(ast_map::NodeStmt(_)) => "statement",
Some(ast_map::NodeItem(it)) => item_scope_tag(&*it),
Some(_) | None => {
// this really should not happen
return on_unknown_scope();
}
};
let scope_decorated_tag = match scope {
region::CodeExtent::Misc(_) => tag,
region::CodeExtent::ParameterScope { .. } => {
"scope of parameters for function"
}
region::CodeExtent::DestructionScope(_) => {
new_string = format!("destruction scope surrounding {}", tag);
&*new_string
}
region::CodeExtent::Remainder(r) => {
new_string = format!("block suffix following statement {}",
r.first_statement_index);
&*new_string
}
};
explain_span(cx, scope_decorated_tag, span)
}
ReFree(ref fr) => {
let prefix = match fr.bound_region {
BrAnon(idx) => {
format!("the anonymous lifetime #{} defined on", idx + 1)
}
BrFresh(_) => "an anonymous lifetime defined on".to_string(),
_ => {
format!("the lifetime {} as defined on",
bound_region_ptr_to_string(cx, fr.bound_region))
}
};
match cx.map.find(fr.scope.node_id) {
Some(ast_map::NodeBlock(ref blk)) => {
let (msg, opt_span) = explain_span(cx, "block", blk.span);
(format!("{} {}", prefix, msg), opt_span)
}
Some(ast_map::NodeItem(it)) => {
let tag = item_scope_tag(&*it);
let (msg, opt_span) = explain_span(cx, tag, it.span);
(format!("{} {}", prefix, msg), opt_span)
}
Some(_) | None => {
// this really should not happen
(format!("{} unknown free region bounded by scope {:?}", prefix, fr.scope), None)
}
}
}
ReStatic => { ("the static lifetime".to_string(), None) }
ReEmpty => { ("the empty lifetime".to_string(), None) }
ReEarlyBound(ref data) => {
(format!("{}", token::get_name(data.name)), None)
}
// I believe these cases should not occur (except when debugging,
// perhaps)
ty::ReInfer(_) | ty::ReLateBound(..) => {
(format!("lifetime {:?}", region), None)
}
};
fn explain_span(cx: &ctxt, heading: &str, span: Span)
-> (String, Option<Span>) {
let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
(format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
Some(span))
}
}
pub fn bound_region_ptr_to_string(cx: &ctxt, br: BoundRegion) -> String {
bound_region_to_string(cx, "", false, br)
}
pub fn bound_region_to_string(cx: &ctxt,
prefix: &str, space: bool,
br: BoundRegion) -> String {
let space_str = if space { " " } else { "" };
if cx.sess.verbose() {
return format!("{}{}{}", prefix, br.repr(cx), space_str)
}
match br {
BrNamed(_, name) => {
format!("{}{}{}", prefix, token::get_name(name), space_str)
}
BrAnon(_) | BrFresh(_) | BrEnv => prefix.to_string()
}
}
// In general, if you are giving a region error message,
// you should use `explain_region()` or, better yet,
// `note_and_explain_region()`
pub fn region_ptr_to_string(cx: &ctxt, region: Region) -> String {
region_to_string(cx, "&", true, region)
}
pub fn region_to_string(cx: &ctxt, prefix: &str, space: bool, region: Region) -> String {
let space_str = if space { " " } else { "" };
if cx.sess.verbose() {
return format!("{}{}{}", prefix, region.repr(cx), space_str)
}
// These printouts are concise. They do not contain all the information
// the user might want to diagnose an error, but there is basically no way
// to fit that into a short string. Hence the recommendation to use
// `explain_region()` or `note_and_explain_region()`.
match region {
ty::ReScope(_) => prefix.to_string(),
ty::ReEarlyBound(ref data) => {
token::get_name(data.name).to_string()
}
ty::ReLateBound(_, br) => bound_region_to_string(cx, prefix, space, br),
ty::ReFree(ref fr) => bound_region_to_string(cx, prefix, space, fr.bound_region),
ty::ReInfer(ReSkolemized(_, br)) => {
bound_region_to_string(cx, prefix, space, br)
}
ty::ReInfer(ReVar(_)) => prefix.to_string(),
ty::ReStatic => format!("{}'static{}", prefix, space_str),
ty::ReEmpty => format!("{}'<empty>{}", prefix, space_str),
}
}
pub fn mutability_to_string(m: ast::Mutability) -> String {
match m {
ast::MutMutable => "mut ".to_string(),
ast::MutImmutable => "".to_string(),
}
}
pub fn mt_to_string<'tcx>(cx: &ctxt<'tcx>, m: &mt<'tcx>) -> String {
format!("{}{}",
mutability_to_string(m.mutbl),
ty_to_string(cx, m.ty))
}
pub fn vec_map_to_string<T, F>(ts: &[T], f: F) -> String where
F: FnMut(&T) -> String,
{
let tstrs = ts.iter().map(f).collect::<Vec<String>>();
format!("[{}]", tstrs.connect(", "))
}
pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {<|fim▁hole|> fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>,
opt_def_id: Option<ast::DefId>,
unsafety: ast::Unsafety,
abi: abi::Abi,
ident: Option<ast::Ident>,
sig: &ty::PolyFnSig<'tcx>)
-> String {
let mut s = String::new();
match unsafety {
ast::Unsafety::Normal => {}
ast::Unsafety::Unsafe => {
s.push_str(&unsafety.to_string());
s.push(' ');
}
};
if abi != abi::Rust {
s.push_str(&format!("extern {} ", abi.to_string()));
};
s.push_str("fn");
match ident {
Some(i) => {
s.push(' ');
s.push_str(&token::get_ident(i));
}
_ => { }
}
push_sig_to_string(cx, &mut s, '(', ')', sig);
match opt_def_id {
Some(def_id) => {
s.push_str(" {");
let path_str = ty::item_path_str(cx, def_id);
s.push_str(&path_str[..]);
s.push_str("}");
}
None => { }
}
s
}
fn closure_to_string<'tcx>(cx: &ctxt<'tcx>,
cty: &ty::ClosureTy<'tcx>,
did: &ast::DefId)
-> String {
let mut s = String::new();
s.push_str("[closure");
push_sig_to_string(cx, &mut s, '(', ')', &cty.sig);
if cx.sess.verbose() {
s.push_str(&format!(" id={:?}]", did));
} else {
s.push(']');
}
s
}
fn push_sig_to_string<'tcx>(cx: &ctxt<'tcx>,
s: &mut String,
bra: char,
ket: char,
sig: &ty::PolyFnSig<'tcx>) {
s.push(bra);
let strs = sig.0.inputs
.iter()
.map(|a| ty_to_string(cx, *a))
.collect::<Vec<_>>();
s.push_str(&strs.connect(", "));
if sig.0.variadic {
s.push_str(", ...");
}
s.push(ket);
match sig.0.output {
ty::FnConverging(t) => {
if !ty::type_is_nil(t) {
s.push_str(" -> ");
s.push_str(&ty_to_string(cx, t));
}
}
ty::FnDiverging => {
s.push_str(" -> !");
}
}
}
fn infer_ty_to_string(cx: &ctxt, ty: ty::InferTy) -> String {
let print_var_ids = cx.sess.verbose();
match ty {
ty::TyVar(ref vid) if print_var_ids => vid.repr(cx),
ty::IntVar(ref vid) if print_var_ids => vid.repr(cx),
ty::FloatVar(ref vid) if print_var_ids => vid.repr(cx),
ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => format!("_"),
ty::FreshTy(v) => format!("FreshTy({})", v),
ty::FreshIntTy(v) => format!("FreshIntTy({})", v),
ty::FreshFloatTy(v) => format!("FreshFloatTy({})", v)
}
}
// pretty print the structural type representation:
match typ.sty {
ty_bool => "bool".to_string(),
ty_char => "char".to_string(),
ty_int(t) => ast_util::int_ty_to_string(t, None).to_string(),
ty_uint(t) => ast_util::uint_ty_to_string(t, None).to_string(),
ty_float(t) => ast_util::float_ty_to_string(t).to_string(),
ty_uniq(typ) => format!("Box<{}>", ty_to_string(cx, typ)),
ty_ptr(ref tm) => {
format!("*{} {}", match tm.mutbl {
ast::MutMutable => "mut",
ast::MutImmutable => "const",
}, ty_to_string(cx, tm.ty))
}
ty_rptr(r, ref tm) => {
let mut buf = region_ptr_to_string(cx, *r);
buf.push_str(&mt_to_string(cx, tm));
buf
}
ty_tup(ref elems) => {
let strs = elems
.iter()
.map(|elem| ty_to_string(cx, *elem))
.collect::<Vec<_>>();
match &strs[..] {
[ref string] => format!("({},)", string),
strs => format!("({})", strs.connect(", "))
}
}
ty_bare_fn(opt_def_id, ref f) => {
bare_fn_to_string(cx, opt_def_id, f.unsafety, f.abi, None, &f.sig)
}
ty_infer(infer_ty) => infer_ty_to_string(cx, infer_ty),
ty_err => "[type error]".to_string(),
ty_param(ref param_ty) => param_ty.user_string(cx),
ty_enum(did, substs) | ty_struct(did, substs) => {
let base = ty::item_path_str(cx, did);
parameterized(cx, &base, substs, did, &[],
|| ty::lookup_item_type(cx, did).generics)
}
ty_trait(ref data) => {
data.user_string(cx)
}
ty::ty_projection(ref data) => {
format!("<{} as {}>::{}",
data.trait_ref.self_ty().user_string(cx),
data.trait_ref.user_string(cx),
data.item_name.user_string(cx))
}
ty_str => "str".to_string(),
ty_closure(ref did, substs) => {
let closure_tys = cx.closure_tys.borrow();
closure_tys.get(did).map(|closure_type| {
closure_to_string(cx, &closure_type.subst(cx, substs), did)
}).unwrap_or_else(|| {
let id_str = if cx.sess.verbose() {
format!(" id={:?}", did)
} else {
"".to_owned()
};
if did.krate == ast::LOCAL_CRATE {
let span = cx.map.span(did.node);
format!("[closure {}{}]", span.repr(cx), id_str)
} else {
format!("[closure{}]", id_str)
}
})
}
ty_vec(t, sz) => {
let inner_str = ty_to_string(cx, t);
match sz {
Some(n) => format!("[{}; {}]", inner_str, n),
None => format!("[{}]", inner_str),
}
}
}
}
pub fn explicit_self_category_to_str(category: &ty::ExplicitSelfCategory)
-> &'static str {
match *category {
ty::StaticExplicitSelfCategory => "static",
ty::ByValueExplicitSelfCategory => "self",
ty::ByReferenceExplicitSelfCategory(_, ast::MutMutable) => {
"&mut self"
}
ty::ByReferenceExplicitSelfCategory(_, ast::MutImmutable) => "&self",
ty::ByBoxExplicitSelfCategory => "Box<self>",
}
}
pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
base: &str,
substs: &subst::Substs<'tcx>,
did: ast::DefId,
projections: &[ty::ProjectionPredicate<'tcx>],
get_generics: GG)
-> String
where GG : FnOnce() -> ty::Generics<'tcx>
{
if cx.sess.verbose() {
let mut strings = vec![];
match substs.regions {
subst::ErasedRegions => {
strings.push(format!(".."));
}
subst::NonerasedRegions(ref regions) => {
for region in regions.iter() {
strings.push(region.repr(cx));
}
}
}
for ty in substs.types.iter() {
strings.push(ty.repr(cx));
}
for projection in projections.iter() {
strings.push(format!("{}={}",
projection.projection_ty.item_name.user_string(cx),
projection.ty.user_string(cx)));
}
return if strings.is_empty() {
format!("{}", base)
} else {
format!("{}<{}>", base, strings.connect(","))
};
}
let mut strs = Vec::new();
match substs.regions {
subst::ErasedRegions => { }
subst::NonerasedRegions(ref regions) => {
for &r in regions.iter() {
let s = region_to_string(cx, "", false, r);
if s.is_empty() {
// This happens when the value of the region
// parameter is not easily serialized. This may be
// because the user omitted it in the first place,
// or because it refers to some block in the code,
// etc. I'm not sure how best to serialize this.
strs.push(format!("'_"));
} else {
strs.push(s)
}
}
}
}
// It is important to execute this conditionally, only if -Z
// verbose is false. Otherwise, debug logs can sometimes cause
// ICEs trying to fetch the generics early in the pipeline. This
// is kind of a hacky workaround in that -Z verbose is required to
// avoid those ICEs.
let generics = get_generics();
let has_self = substs.self_ty().is_some();
let tps = substs.types.get_slice(subst::TypeSpace);
let ty_params = generics.types.get_slice(subst::TypeSpace);
let has_defaults = ty_params.last().map_or(false, |def| def.default.is_some());
let num_defaults = if has_defaults {
ty_params.iter().zip(tps.iter()).rev().take_while(|&(def, &actual)| {
match def.default {
Some(default) => {
if !has_self && ty::type_has_self(default) {
// In an object type, there is no `Self`, and
// thus if the default value references Self,
// the user will be required to give an
// explicit value. We can't even do the
// substitution below to check without causing
// an ICE. (#18956).
false
} else {
default.subst(cx, substs) == actual
}
}
None => false
}
}).count()
} else {
0
};
for t in &tps[..tps.len() - num_defaults] {
strs.push(ty_to_string(cx, *t))
}
for projection in projections {
strs.push(format!("{}={}",
projection.projection_ty.item_name.user_string(cx),
projection.ty.user_string(cx)));
}
if cx.lang_items.fn_trait_kind(did).is_some() && projections.len() == 1 {
let projection_ty = projections[0].ty;
let tail =
if ty::type_is_nil(projection_ty) {
format!("")
} else {
format!(" -> {}", projection_ty.user_string(cx))
};
format!("{}({}){}",
base,
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
&strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)'
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
&strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
} else {
&strs[0][..]
},
tail)
} else if !strs.is_empty() {
format!("{}<{}>", base, strs.connect(", "))
} else {
format!("{}", base)
}
}
pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String {
let mut s = typ.repr(cx).to_string();
if s.len() >= 32 {
s = (&s[0..32]).to_string();
}
return s;
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Option<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&None => "None".to_string(),
&Some(ref t) => t.repr(tcx),
}
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for P<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(**self).repr(tcx)
}
}
impl<'tcx,T:Repr<'tcx>,U:Repr<'tcx>> Repr<'tcx> for Result<T,U> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&Ok(ref t) => t.repr(tcx),
&Err(ref u) => format!("Err({})", u.repr(tcx))
}
}
}
impl<'tcx> Repr<'tcx> for () {
fn repr(&self, _tcx: &ctxt) -> String {
"()".to_string()
}
}
impl<'a, 'tcx, T: ?Sized +Repr<'tcx>> Repr<'tcx> for &'a T {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
Repr::repr(*self, tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Rc<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Box<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
(&**self).repr(tcx)
}
}
fn repr_vec<'tcx, T:Repr<'tcx>>(tcx: &ctxt<'tcx>, v: &[T]) -> String {
vec_map_to_string(v, |t| t.repr(tcx))
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, self)
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[..])
}
}
// This is necessary to handle types like Option<Vec<T>>, for which
// autoderef cannot convert the &[T] handler
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
repr_vec(tcx, &self[..])
}
}
impl<'tcx, T:UserString<'tcx>> UserString<'tcx> for Vec<T> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let strs: Vec<String> =
self.iter().map(|t| t.user_string(tcx)).collect();
strs.connect(", ")
}
}
impl<'tcx> Repr<'tcx> for def::Def {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
/// This curious type is here to help pretty-print trait objects. In
/// a trait object, the projections are stored separately from the
/// main trait bound, but in fact we want to package them together
/// when printing out; they also have separate binders, but we want
/// them to share a binder when we print them out. (And the binder
/// pretty-printing logic is kind of clever and we don't want to
/// reproduce it.) So we just repackage up the structure somewhat.
///
/// Right now there is only one trait in an object that can have
/// projection bounds, so we just stuff them all together. But in
/// reality we should eventually sort things out better.
type TraitAndProjections<'tcx> =
(ty::TraitRef<'tcx>, Vec<ty::ProjectionPredicate<'tcx>>);
impl<'tcx> UserString<'tcx> for TraitAndProjections<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let &(ref trait_ref, ref projection_bounds) = self;
let base = ty::item_path_str(tcx, trait_ref.def_id);
parameterized(tcx,
&base,
trait_ref.substs,
trait_ref.def_id,
&projection_bounds[..],
|| ty::lookup_trait_def(tcx, trait_ref.def_id).generics.clone())
}
}
impl<'tcx> UserString<'tcx> for ty::TyTrait<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let &ty::TyTrait { ref principal, ref bounds } = self;
let mut components = vec![];
let tap: ty::Binder<TraitAndProjections<'tcx>> =
ty::Binder((principal.0.clone(),
bounds.projection_bounds.iter().map(|x| x.0.clone()).collect()));
// Generate the main trait ref, including associated types.
components.push(tap.user_string(tcx));
// Builtin bounds.
for bound in &bounds.builtin_bounds {
components.push(bound.user_string(tcx));
}
// Region, if not obviously implied by builtin bounds.
if bounds.region_bound != ty::ReStatic {
// Region bound is implied by builtin bounds:
components.push(bounds.region_bound.user_string(tcx));
}
components.retain(|s| !s.is_empty());
components.connect(" + ")
}
}
impl<'tcx> Repr<'tcx> for ty::TypeParameterDef<'tcx> {
fn repr(&self, _tcx: &ctxt<'tcx>) -> String {
format!("TypeParameterDef({:?}, {:?}/{})",
self.def_id,
self.space,
self.index)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionParameterDef {
fn repr(&self, tcx: &ctxt) -> String {
format!("RegionParameterDef(name={}, def_id={}, bounds={})",
token::get_name(self.name),
self.def_id.repr(tcx),
self.bounds.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TyS<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for ty::mt<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
mt_to_string(tcx, self)
}
}
impl<'tcx> Repr<'tcx> for subst::Substs<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Substs[types={}, regions={}]",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for subst::VecPerParamSpace<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("[{};{};{}]",
self.get_slice(subst::TypeSpace).repr(tcx),
self.get_slice(subst::SelfSpace).repr(tcx),
self.get_slice(subst::FnSpace).repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemSubsts<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ItemSubsts({})", self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for subst::RegionSubsts {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
subst::ErasedRegions => "erased".to_string(),
subst::NonerasedRegions(ref regions) => regions.repr(tcx)
}
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBounds {
fn repr(&self, _tcx: &ctxt) -> String {
let mut res = Vec::new();
for b in self {
res.push(match b {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
});
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::ParamBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
res.push(self.builtin_bounds.repr(tcx));
for t in &self.trait_bounds {
res.push(t.repr(tcx));
}
res.connect("+")
}
}
impl<'tcx> Repr<'tcx> for ty::TraitRef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
// when printing out the debug representation, we don't need
// to enumerate the `for<...>` etc because the debruijn index
// tells you everything you need to know.
let base = ty::item_path_str(tcx, self.def_id);
let result = parameterized(tcx, &base, self.substs, self.def_id, &[],
|| ty::lookup_trait_def(tcx, self.def_id).generics.clone());
match self.substs.self_ty() {
None => result,
Some(sty) => format!("<{} as {}>", sty.repr(tcx), result)
}
}
}
impl<'tcx> Repr<'tcx> for ty::TraitDef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitDef(generics={}, trait_ref={})",
self.generics.repr(tcx),
self.trait_ref.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::TraitItem {
fn repr(&self, _tcx: &ctxt) -> String {
let kind = match self.node {
ast::ConstTraitItem(..) => "ConstTraitItem",
ast::MethodTraitItem(..) => "MethodTraitItem",
ast::TypeTraitItem(..) => "TypeTraitItem",
};
format!("{}({}, id={})", kind, self.ident, self.id)
}
}
impl<'tcx> Repr<'tcx> for ast::Expr {
fn repr(&self, _tcx: &ctxt) -> String {
format!("expr({}: {})", self.id, pprust::expr_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Path {
fn repr(&self, _tcx: &ctxt) -> String {
format!("path({})", pprust::path_to_string(self))
}
}
impl<'tcx> UserString<'tcx> for ast::Path {
fn user_string(&self, _tcx: &ctxt) -> String {
pprust::path_to_string(self)
}
}
impl<'tcx> Repr<'tcx> for ast::Ty {
fn repr(&self, _tcx: &ctxt) -> String {
format!("type({})", pprust::ty_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Item {
fn repr(&self, tcx: &ctxt) -> String {
format!("item({})", tcx.map.node_to_string(self.id))
}
}
impl<'tcx> Repr<'tcx> for ast::Lifetime {
fn repr(&self, _tcx: &ctxt) -> String {
format!("lifetime({}: {})", self.id, pprust::lifetime_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Stmt {
fn repr(&self, _tcx: &ctxt) -> String {
format!("stmt({}: {})",
ast_util::stmt_id(self),
pprust::stmt_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ast::Pat {
fn repr(&self, _tcx: &ctxt) -> String {
format!("pat({}: {})", self.id, pprust::pat_to_string(self))
}
}
impl<'tcx> Repr<'tcx> for ty::BoundRegion {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::BrAnon(id) => format!("BrAnon({})", id),
ty::BrNamed(id, name) => {
format!("BrNamed({}, {})", id.repr(tcx), token::get_name(name))
}
ty::BrFresh(id) => format!("BrFresh({})", id),
ty::BrEnv => "BrEnv".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::Region {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::ReEarlyBound(ref data) => {
format!("ReEarlyBound({}, {:?}, {}, {})",
data.param_id,
data.space,
data.index,
token::get_name(data.name))
}
ty::ReLateBound(binder_id, ref bound_region) => {
format!("ReLateBound({:?}, {})",
binder_id,
bound_region.repr(tcx))
}
ty::ReFree(ref fr) => fr.repr(tcx),
ty::ReScope(id) => {
format!("ReScope({:?})", id)
}
ty::ReStatic => {
"ReStatic".to_string()
}
ty::ReInfer(ReVar(ref vid)) => {
format!("{:?}", vid)
}
ty::ReInfer(ReSkolemized(id, ref bound_region)) => {
format!("re_skolemized({}, {})", id, bound_region.repr(tcx))
}
ty::ReEmpty => {
"ReEmpty".to_string()
}
}
}
}
impl<'tcx> UserString<'tcx> for ty::Region {
fn user_string(&self, tcx: &ctxt) -> String {
region_to_string(tcx, "", false, *self)
}
}
impl<'tcx> Repr<'tcx> for ty::FreeRegion {
fn repr(&self, tcx: &ctxt) -> String {
format!("ReFree({}, {})",
self.scope.repr(tcx),
self.bound_region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for region::CodeExtent {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
region::CodeExtent::ParameterScope { fn_id, body_id } =>
format!("ParameterScope({}, {})", fn_id, body_id),
region::CodeExtent::Misc(node_id) =>
format!("Misc({})", node_id),
region::CodeExtent::DestructionScope(node_id) =>
format!("DestructionScope({})", node_id),
region::CodeExtent::Remainder(rem) =>
format!("Remainder({}, {})", rem.block, rem.first_statement_index),
}
}
}
impl<'tcx> Repr<'tcx> for region::DestructionScopeData {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
region::DestructionScopeData{ node_id } =>
format!("DestructionScopeData {{ node_id: {} }}", node_id),
}
}
}
impl<'tcx> Repr<'tcx> for ast::DefId {
fn repr(&self, tcx: &ctxt) -> String {
// Unfortunately, there seems to be no way to attempt to print
// a path for a def-id, so I'll just make a best effort for now
        // and otherwise fall back to just printing the crate/node pair
if self.krate == ast::LOCAL_CRATE {
match tcx.map.find(self.node) {
Some(ast_map::NodeItem(..)) |
Some(ast_map::NodeForeignItem(..)) |
Some(ast_map::NodeImplItem(..)) |
Some(ast_map::NodeTraitItem(..)) |
Some(ast_map::NodeVariant(..)) |
Some(ast_map::NodeStructCtor(..)) => {
return format!(
"{:?}:{}",
*self,
ty::item_path_str(tcx, *self))
}
_ => {}
}
}
return format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::TypeScheme<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TypeScheme {{generics: {}, ty: {}}}",
self.generics.repr(tcx),
self.ty.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Generics<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Generics(types: {}, regions: {})",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::GenericPredicates<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("GenericPredicates(predicates: {})",
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::InstantiatedPredicates<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("InstantiatedPredicates({})",
self.predicates.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ItemVariances {
fn repr(&self, tcx: &ctxt) -> String {
format!("ItemVariances(types={}, \
regions={})",
self.types.repr(tcx),
self.regions.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Variance {
fn repr(&self, _: &ctxt) -> String {
// The first `.to_string()` returns a &'static str (it is not an implementation
// of the ToString trait). Because of that, we need to call `.to_string()` again
// if we want to have a `String`.
let result: &'static str = (*self).to_string();
result.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::ImplOrTraitItem<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("ImplOrTraitItem({})",
match *self {
ty::ImplOrTraitItem::MethodTraitItem(ref i) => i.repr(tcx),
ty::ImplOrTraitItem::ConstTraitItem(ref i) => i.repr(tcx),
ty::ImplOrTraitItem::TypeTraitItem(ref i) => i.repr(tcx),
})
}
}
impl<'tcx> Repr<'tcx> for ty::AssociatedConst<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("AssociatedConst(name: {}, ty: {}, vis: {}, def_id: {})",
self.name.repr(tcx),
self.ty.repr(tcx),
self.vis.repr(tcx),
self.def_id.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::AssociatedType<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("AssociatedType(name: {}, vis: {}, def_id: {})",
self.name.repr(tcx),
self.vis.repr(tcx),
self.def_id.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::Method<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Method(name: {}, generics: {}, predicates: {}, fty: {}, \
explicit_self: {}, vis: {}, def_id: {})",
self.name.repr(tcx),
self.generics.repr(tcx),
self.predicates.repr(tcx),
self.fty.repr(tcx),
self.explicit_self.repr(tcx),
self.vis.repr(tcx),
self.def_id.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ast::Name {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_name(*self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ast::Name {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(*self).to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::Ident {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_ident(*self).to_string()
}
}
impl<'tcx> Repr<'tcx> for ast::ExplicitSelf_ {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::Visibility {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BareFnTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("BareFnTy {{unsafety: {}, abi: {}, sig: {}}}",
self.unsafety,
self.abi.to_string(),
self.sig.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnSig<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("fn{} -> {}", self.inputs.repr(tcx), self.output.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::FnOutput<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::FnConverging(ty) =>
format!("FnConverging({0})", ty.repr(tcx)),
ty::FnDiverging =>
"FnDiverging".to_string()
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodCallee<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodCallee {{origin: {}, ty: {}, {}}}",
self.origin.repr(tcx),
self.ty.repr(tcx),
self.substs.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::MethodOrigin<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
match self {
&ty::MethodStatic(def_id) => {
format!("MethodStatic({})", def_id.repr(tcx))
}
&ty::MethodStaticClosure(def_id) => {
format!("MethodStaticClosure({})", def_id.repr(tcx))
}
&ty::MethodTypeParam(ref p) => {
p.repr(tcx)
}
&ty::MethodTraitObject(ref p) => {
p.repr(tcx)
}
}
}
}
impl<'tcx> Repr<'tcx> for ty::MethodParam<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodParam({},{})",
self.trait_ref.repr(tcx),
self.method_num)
}
}
impl<'tcx> Repr<'tcx> for ty::MethodObject<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("MethodObject({},{},{})",
self.trait_ref.repr(tcx),
self.method_num,
self.vtable_index)
}
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBound {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBound {
fn user_string(&self, _tcx: &ctxt) -> String {
match *self {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundSync => "Sync".to_string(),
}
}
}
impl<'tcx> Repr<'tcx> for Span {
fn repr(&self, tcx: &ctxt) -> String {
tcx.sess.codemap().span_to_string(*self).to_string()
}
}
impl<'tcx, A:UserString<'tcx>> UserString<'tcx> for Rc<A> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let this: &A = &**self;
this.user_string(tcx)
}
}
impl<'tcx> UserString<'tcx> for ty::ParamBounds<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let mut result = Vec::new();
let s = self.builtin_bounds.user_string(tcx);
if !s.is_empty() {
result.push(s);
}
for n in &self.trait_bounds {
result.push(n.user_string(tcx));
}
result.connect(" + ")
}
}
impl<'tcx> Repr<'tcx> for ty::ExistentialBounds<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
let region_str = self.region_bound.repr(tcx);
if !region_str.is_empty() {
res.push(region_str);
}
for bound in &self.builtin_bounds {
res.push(bound.repr(tcx));
}
for projection_bound in &self.projection_bounds {
res.push(projection_bound.repr(tcx));
}
res.connect("+")
}
}
impl<'tcx> UserString<'tcx> for ty::BuiltinBounds {
fn user_string(&self, tcx: &ctxt) -> String {
self.iter()
.map(|bb| bb.user_string(tcx))
.collect::<Vec<String>>()
.connect("+")
.to_string()
}
}
impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
where T : UserString<'tcx> + TypeFoldable<'tcx>
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
// Replace any anonymous late-bound regions with named
// variants, using gensym'd identifiers, so that we can
// clearly differentiate between named and unnamed regions in
// the output. We'll probably want to tweak this over time to
// decide just how much information to give.
let mut names = Vec::new();
let (unbound_value, _) = ty::replace_late_bound_regions(tcx, self, |br| {
ty::ReLateBound(ty::DebruijnIndex::new(1), match br {
ty::BrNamed(_, name) => {
names.push(token::get_name(name));
br
}
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => {
let name = token::gensym("'r");
names.push(token::get_name(name));
ty::BrNamed(ast_util::local_def(ast::DUMMY_NODE_ID), name)
}
})
});
let names: Vec<_> = names.iter().map(|s| &s[..]).collect();
let value_str = unbound_value.user_string(tcx);
if names.is_empty() {
value_str
} else {
format!("for<{}> {}", names.connect(","), value_str)
}
}
}
impl<'tcx> UserString<'tcx> for ty::TraitRef<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
let path_str = ty::item_path_str(tcx, self.def_id);
parameterized(tcx, &path_str, self.substs, self.def_id, &[],
|| ty::lookup_trait_def(tcx, self.def_id).generics.clone())
}
}
impl<'tcx> UserString<'tcx> for Ty<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
ty_to_string(tcx, *self)
}
}
impl<'tcx> UserString<'tcx> for ast::Ident {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(self.name).to_string()
}
}
impl<'tcx> Repr<'tcx> for abi::Abi {
fn repr(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> UserString<'tcx> for abi::Abi {
fn user_string(&self, _tcx: &ctxt) -> String {
self.to_string()
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarId {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarId({};`{}`;{})",
self.var_id,
ty::local_var_name_str(tcx, self.var_id),
self.closure_expr_id)
}
}
impl<'tcx> Repr<'tcx> for ast::Mutability {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::BorrowKind {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarBorrow {
fn repr(&self, tcx: &ctxt) -> String {
format!("UpvarBorrow({}, {})",
self.kind.repr(tcx),
self.region.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::UpvarCapture {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::UpvarCapture::ByValue => format!("ByValue"),
ty::UpvarCapture::ByRef(ref data) => format!("ByRef({})", data.repr(tcx)),
}
}
}
impl<'tcx> Repr<'tcx> for ty::IntVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::FloatVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::RegionVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::TyVid {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", self)
}
}
impl<'tcx> Repr<'tcx> for ty::IntVarValue {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::IntTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::UintTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ast::FloatTy {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
}
}
impl<'tcx> Repr<'tcx> for ty::ExplicitSelfCategory {
fn repr(&self, _: &ctxt) -> String {
explicit_self_category_to_str(self).to_string()
}
}
impl<'tcx> UserString<'tcx> for ParamTy {
fn user_string(&self, _tcx: &ctxt) -> String {
format!("{}", token::get_name(self.name))
}
}
impl<'tcx> Repr<'tcx> for ParamTy {
fn repr(&self, tcx: &ctxt) -> String {
let ident = self.user_string(tcx);
format!("{}/{:?}.{}", ident, self.space, self.idx)
}
}
impl<'tcx, A:Repr<'tcx>, B:Repr<'tcx>> Repr<'tcx> for (A,B) {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let &(ref a, ref b) = self;
format!("({},{})", a.repr(tcx), b.repr(tcx))
}
}
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for ty::Binder<T> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("Binder({})", self.0.repr(tcx))
}
}
impl<'tcx, S, K, V> Repr<'tcx> for HashMap<K, V, S>
where K: Hash + Eq + Repr<'tcx>,
V: Repr<'tcx>,
S: HashState,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("HashMap({})",
self.iter()
.map(|(k,v)| format!("{} => {}", k.repr(tcx), v.repr(tcx)))
.collect::<Vec<String>>()
.connect(", "))
}
}
impl<'tcx, T, U> Repr<'tcx> for ty::OutlivesPredicate<T,U>
where T : Repr<'tcx> + TypeFoldable<'tcx>,
U : Repr<'tcx> + TypeFoldable<'tcx>,
{
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("OutlivesPredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx, T, U> UserString<'tcx> for ty::OutlivesPredicate<T,U>
where T : UserString<'tcx> + TypeFoldable<'tcx>,
U : UserString<'tcx> + TypeFoldable<'tcx>,
{
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::EquatePredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("EquatePredicate({}, {})",
self.0.repr(tcx),
self.1.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::EquatePredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.0.user_string(tcx),
self.1.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::TraitPredicate<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("TraitPredicate({})",
self.trait_ref.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::TraitPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} : {}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionPredicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("{} == {}",
self.projection_ty.user_string(tcx),
self.ty.user_string(tcx))
}
}
impl<'tcx> Repr<'tcx> for ty::ProjectionTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
format!("{}::{}",
self.trait_ref.repr(tcx),
self.item_name.repr(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::ProjectionTy<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
format!("<{} as {}>::{}",
self.trait_ref.self_ty().user_string(tcx),
self.trait_ref.user_string(tcx),
self.item_name.user_string(tcx))
}
}
impl<'tcx> UserString<'tcx> for ty::Predicate<'tcx> {
fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
match *self {
ty::Predicate::Trait(ref data) => data.user_string(tcx),
ty::Predicate::Equate(ref predicate) => predicate.user_string(tcx),
ty::Predicate::RegionOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::TypeOutlives(ref predicate) => predicate.user_string(tcx),
ty::Predicate::Projection(ref predicate) => predicate.user_string(tcx),
}
}
}
impl<'tcx> Repr<'tcx> for ast::Unsafety {
fn repr(&self, _: &ctxt<'tcx>) -> String {
format!("{:?}", *self)
}
}<|fim▁end|> | |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"encoding/json"
"fmt"
"net/url"
"os"
"time"
"github.com/Sirupsen/logrus"
"github.com/codegangsta/cli"
"github.com/gin-gonic/gin"
"github.com/gorilla/schema"
"github.com/itsjamie/gin-cors"
"github.com/moul/as-a-service"
)
func main() {
app := cli.NewApp()
app.Name = "moul-as-a-service"
app.Usage = "moul, but as a service"
app.Commands = []cli.Command{}
for action := range moul.Actions() {
command := cli.Command{
Name: action,
Action: CliActionCallback,
}
app.Commands = append(app.Commands, command)
}
app.Commands = append(app.Commands, cli.Command{
Name: "server",
Description: "Run as a webserver",
Action: Daemon,
})
app.Run(os.Args)
}
func CliActionCallback(c *cli.Context) {
action := c.Command.Name
ret, err := moul.Actions()[action](c.Args())
if err != nil {
logrus.Fatalf("Failed to execute %q: %v", action, err)
}
out, err := json.MarshalIndent(ret, "", " ")
if err != nil {
logrus.Fatalf("Failed to marshal json: %v", err)
}
fmt.Printf("%s\n", out)
}
func Daemon(c *cli.Context) {
r := gin.Default()
r.Use(cors.Middleware(cors.Config{
Origins: "*",
Methods: "GET",
RequestHeaders: "Origin, Authorization, Content-Type",
ExposedHeaders: "",
MaxAge: 50 * time.Second,
Credentials: true,
ValidateHeaders: false,
}))
// Register index
r.GET("/", func(c *gin.Context) {
services := []string{}
for action := range moul.Actions() {
services = append(services, fmt.Sprintf("/%s", action))
}
c.JSON(200, gin.H{
"services": services,
})
})
// Register actions
for action, fn := range moul.Actions() {
fmt.Println(action, fn)
func(action string, fn moul.Action) {
callback := func(c *gin.Context) {
u, err := url.Parse(c.Request.URL.String())
if err != nil {
                    c.String(500, fmt.Sprintf("failed to parse url %q: %v", c.Request.URL.String(), err))
}<|fim▁hole|> // ret, err :- fn(u.RawQuery, c.Request.Body)
if err != nil {
c.JSON(500, gin.H{
"err": err,
})
return
}
// FIXME: handle content-types
m, err := url.ParseQuery(u.RawQuery)
if err != nil {
c.JSON(500, gin.H{
"err": err,
})
return
}
var opts struct {
Callback string `schema:"callback"`
}
if len(m) > 0 {
decoder := schema.NewDecoder()
if err := decoder.Decode(&opts, m); err != nil {
c.JSON(500, gin.H{
"err": err,
})
return
}
}
if opts.Callback != "" {
// JSONP
jsonBytes, err := json.Marshal(ret)
if err != nil {
c.JSON(500, gin.H{
"err": err,
})
return
}
jsonp := fmt.Sprintf("%s(%s)", opts.Callback, string(jsonBytes))
c.String(200, jsonp)
} else {
// Standard JSON
c.JSON(200, ret)
}
}
r.GET(fmt.Sprintf("/%s", action), callback)
// POST
}(action, fn)
}
// Start server
port := "8080"
if os.Getenv("PORT") != "" {
port = os.Getenv("PORT")
}
r.Run(fmt.Sprintf(":%s", port))
}<|fim▁end|> |
ret, err := fn(nil) |
<|file_name|>JadbDevice.java<|end_file_name|><|fim▁begin|>package se.vidstige.jadb;
import se.vidstige.jadb.managers.Bash;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
public class JadbDevice {
public enum State {
Unknown,
Offline,
Device,
Recovery,
BootLoader
};
private final String serial;
private final ITransportFactory transportFactory;
JadbDevice(String serial, String type, ITransportFactory tFactory) {
this.serial = serial;
this.transportFactory = tFactory;
}
static JadbDevice createAny(JadbConnection connection) {
return new JadbDevice(connection);
}
private JadbDevice(ITransportFactory tFactory) {
serial = null;
this.transportFactory = tFactory;
}
private State convertState(String type) {
switch (type) {
case "device": return State.Device;
case "offline": return State.Offline;
case "bootloader": return State.BootLoader;
case "recovery": return State.Recovery;
default: return State.Unknown;
}
}
private Transport getTransport() throws IOException, JadbException {
Transport transport = transportFactory.createTransport();
if (serial == null) {
transport.send("host:transport-any");
transport.verifyResponse();
} else {
transport.send("host:transport:" + serial);
transport.verifyResponse();
}
return transport;
}
public String getSerial() {
return serial;
}
public State getState() throws IOException, JadbException {
Transport transport = transportFactory.createTransport();
if (serial == null) {
transport.send("host:get-state");
transport.verifyResponse();
} else {
transport.send("host-serial:" + serial + ":get-state");
transport.verifyResponse();
}
State state = convertState(transport.readString());
transport.close();
return state;
}
/** <p>Execute a shell command.</p>
*
* <p>For Lollipop and later see: {@link #execute(String, String...)}</p>
*
* @param command main command to run. E.g. "ls"
* @param args arguments to the command.
* @return combined stdout/stderr stream.
* @throws IOException
* @throws JadbException
*/
public InputStream executeShell(String command, String... args) throws IOException, JadbException {
Transport transport = getTransport();
StringBuilder shellLine = buildCmdLine(command, args);
send(transport, "shell:" + shellLine.toString());
return new AdbFilterInputStream(new BufferedInputStream(transport.getInputStream()));
}
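    // Illustrative usage sketch. JadbConnection, getAnyDevice() and the
    // Stream.copy helper are assumed from the surrounding library and are
    // not defined in this file:
    //
    //   JadbConnection jadb = new JadbConnection();
    //   JadbDevice device = jadb.getAnyDevice();
    //   InputStream stdout = device.executeShell("ls", "-l", "/sdcard");
    //   Stream.copy(stdout, System.out);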
/**
*
* @deprecated Use InputStream executeShell(String command, String... args) method instead. Together with
* Stream.copy(in, out), it is possible to achieve the same effect.
*/
@Deprecated
public void executeShell(OutputStream output, String command, String... args) throws IOException, JadbException {
Transport transport = getTransport();
StringBuilder shellLine = buildCmdLine(command, args);
send(transport, "shell:" + shellLine.toString());
if (output != null) {
AdbFilterOutputStream out = new AdbFilterOutputStream(output);
try {
transport.readResponseTo(out);
} finally {
out.close();
}
}
}
/** <p>Execute a command with raw binary output.</p>
*
* <p>Support for this command was added in Lollipop (Android 5.0), and is the recommended way to transmit binary
* data with that version or later. For earlier versions of Android, use
* {@link #executeShell(String, String...)}.</p>
*
* @param command main command to run, e.g. "screencap"
* @param args arguments to the command, e.g. "-p".
* @return combined stdout/stderr stream.
* @throws IOException
* @throws JadbException
*/
public InputStream execute(String command, String... args) throws IOException, JadbException {
Transport transport = getTransport();
StringBuilder shellLine = buildCmdLine(command, args);
send(transport, "exec:" + shellLine.toString());
return new BufferedInputStream(transport.getInputStream());
}
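    // Illustrative sketch for the binary-safe variant above (the device
    // variable and the Stream.copy helper are assumptions):
    //
    //   try (InputStream png = device.execute("screencap", "-p");
    //        OutputStream file = new FileOutputStream("screen.png")) {
    //       Stream.copy(png, file);
    //   }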
/**
* Builds a command line string from the command and its arguments.
*
* @param command the command.
* @param args the list of arguments.
* @return the command line.
*/
private StringBuilder buildCmdLine(String command, String... args) {
StringBuilder shellLine = new StringBuilder(command);
for (String arg : args) {
shellLine.append(" ");
shellLine.append(Bash.quote(arg));
}
return shellLine;
}
public List<RemoteFile> list(String remotePath) throws IOException, JadbException {
Transport transport = getTransport();
SyncTransport sync = transport.startSync();
sync.send("LIST", remotePath);
List<RemoteFile> result = new ArrayList<RemoteFile>();
for (RemoteFileRecord dent = sync.readDirectoryEntry(); dent != RemoteFileRecord.DONE; dent = sync.readDirectoryEntry()) {
result.add(dent);
}
return result;
}
private int getMode(File file) {
//noinspection OctalInteger
return 0664;
}
public void push(InputStream source, long lastModified, int mode, RemoteFile remote) throws IOException, JadbException {
Transport transport = getTransport();
SyncTransport sync = transport.startSync();
sync.send("SEND", remote.getPath() + "," + Integer.toString(mode));
sync.sendStream(source);
sync.sendStatus("DONE", (int) lastModified);
sync.verifyStatus();
}
<|fim▁hole|> push(fileStream, local.lastModified(), getMode(local), remote);
fileStream.close();
}
public void pull(RemoteFile remote, OutputStream destination) throws IOException, JadbException {
Transport transport = getTransport();
SyncTransport sync = transport.startSync();
sync.send("RECV", remote.getPath());
sync.readChunksTo(destination);
}
public void pull(RemoteFile remote, File local) throws IOException, JadbException {
FileOutputStream fileStream = new FileOutputStream(local);
pull(remote, fileStream);
fileStream.close();
}
private void send(Transport transport, String command) throws IOException, JadbException {
transport.send(command);
transport.verifyResponse();
}
@Override
public String toString() {
return "Device: " + serial;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((serial == null) ? 0 : serial.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
JadbDevice other = (JadbDevice) obj;
if (serial == null) {
if (other.serial != null)
return false;
} else if (!serial.equals(other.serial))
return false;
return true;
}
}<|fim▁end|> | public void push(File local, RemoteFile remote) throws IOException, JadbException {
FileInputStream fileStream = new FileInputStream(local); |
<|file_name|>issue-21221-2.rs<|end_file_name|><|fim▁begin|>pub mod foo {
pub mod bar {<|fim▁hole|> }
// imports should be ignored:
use self::bar::T;
}
pub mod baz {
pub use foo;
pub use std::ops::{Mul as T};
}
struct Foo;
impl T for Foo { }
//~^ ERROR cannot find trait `T`
fn main() {}<|fim▁end|> | // note: trait T is not public, but being in the current
// crate, it's fine to show it, since the programmer can
// decide to make it public based on the suggestion ...
pub trait T {} |
<|file_name|>empty-allocation-non-null.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
pub fn main() {
assert!(Some(Box::new(())).is_some());
let xs: Box<[()]> = Box::<[(); 0]>::new([]);
assert!(Some(xs).is_some());
struct Foo;
assert!(Some(Box::new(Foo)).is_some());
let ys: Box<[Foo]> = Box::<[Foo; 0]>::new([]);
assert!(Some(ys).is_some());
}<|fim▁end|> | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. |
<|file_name|>nsd.go<|end_file_name|><|fim▁begin|>package main
import (
"crypto/sha1"
"encoding/hex"
"encoding/json"
"fmt"
"net"
"os"
"os/exec"
"text/template"
"github.com/golang/glog"
k8sexec "k8s.io/kubernetes/pkg/util/exec"
)
const (
nsdTmpl = `
`
)
type record struct {
name string
ip net.IP
}
type nsd struct {
ns []record
a []record
}
func (k *nsd) WriteCfg(svcs []vip) error {
w, err := os.Create("/etc/nsd/nsd.conf.")
if err != nil {
return err
}
defer w.Close()
t, err := template.New("nsd").Parse(nsdTmpl)
if err != nil {
return err
}
conf := make(map[string]interface{})
conf["ns"] = k.iface
conf["a"] = k.ip
b, _ := json.Marshal(conf)
glog.Infof("%v", string(b))
return t.Execute(w, conf)
}
func (k *nsd) Start() {
cmd := exec.Command("/usr/sbin/nsd",
"-d",
"-P", "/nsd.pid")
<|fim▁hole|>
if err := cmd.Start(); err != nil {
glog.Errorf("nsd error: %v", err)
}
if err := cmd.Wait(); err != nil {
glog.Fatalf("nsd error: %v", err)
}
}
func (k *nsd) Reload() error {
glog.Info("reloading nsd server")
_, err := k8sexec.New().Command("killall", "-1", "nsd").CombinedOutput()
if err != nil {
return fmt.Errorf("error reloading nsd: %v", err)
}
return nil
}<|fim▁end|> | cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr |
<|file_name|>na_to_nc.py<|end_file_name|><|fim▁begin|># Copyright (C) 2004 CCLRC & NERC( Natural Environment Research Council ).
# This software may be distributed under the terms of the
# Q Public License, version 1.0 or later. http://ndg.nerc.ac.uk/public_docs/QPublic_license.txt
"""
na_to_nc.py
===========
Contains the NAToNC class for converting a NASA Ames file to a NetCDF file.
"""
# Imports from python standard library
import logging
# Imports from external packages
try:
import cdms2 as cdms
except:
try:
import cdms
except:
raise Exception("Could not import third-party software. Nappy requires the CDMS and Numeric packages to be installed to convert to CDMS and NetCDF.")
# Import from nappy package
import nappy.nc_interface.na_to_cdms
from nappy.na_error import na_error
logging.basicConfig()
log = logging.getLogger(__name__)
class NAToNC(nappy.nc_interface.na_to_cdms.NADictToCdmsObjects):
"""
Converts a NASA Ames file to a NetCDF file.
"""
def __init__(self, na_file, variables=None, aux_variables=None,
global_attributes=[("Conventions","CF-1.0")],
time_units=None, time_warning=True,
rename_variables={}):
"""
        Sets up instance variables. Note that the argument 'na_file' has a relaxed definition
        and can be either a NASA Ames file object or the name of a NASA Ames file.
Typical usage is:
>>> import nappy.nc_interface.na_to_nc as na_to_nc
>>> c = na_to_nc.NAToNC("old_file.na")
>>> c.convert()
>>> c.writeNCFile("new_file.nc")
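        An illustrative call with an optional keyword argument (the value shown
        is an assumption, not taken from a real file):
        >>> c2 = na_to_nc.NAToNC("old_file.na", time_units="days since 2000-01-01")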
"""
# First open na_file if it is a file rather than an na_file object
na_file_obj = na_file
<|fim▁hole|> na_file_obj = nappy.openNAFile(na_file_obj)
nappy.nc_interface.na_to_cdms.NADictToCdmsObjects.__init__(self, na_file_obj, variables=variables,
aux_variables=aux_variables,
global_attributes=global_attributes,
time_units=time_units, time_warning=time_warning,
rename_variables=rename_variables)
def writeNCFile(self, file_name, mode="w"):
"""
Writes the NASA Ames content that has been converted into CDMS objects to a
NetCDF file of name 'file_name'. Note that mode can be set to append so you
can add the data to an existing file.
"""
if not self.converted:
self.convert()
# Create CDMS output file object
fout = cdms.open(file_name, mode=mode)
# Write main variables
for var in self.cdms_variables:
fout.write(var)
# Write aux variables
for avar in self.cdms_aux_variables:
fout.write(avar)
# Write global attributes
for (att, value) in self.global_attributes:
setattr(fout, att, value)
fout.close()
log.info("NetCDF file '%s' written successfully." % file_name)
return True<|fim▁end|> | print na_file_obj, type(na_file_obj)
if type(na_file_obj) == type("string"):
|
<|file_name|>observer.py<|end_file_name|><|fim▁begin|>#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp import error
class MetaObserver(object):
"""This is a simple facility for exposing internal SNMP Engine
working details to pysnmp applications. These details are
basically local scope variables at a fixed point of execution.
    Two modes of operation are offered:
    1. Consumer: an app can request an execution point context by execution point ID.
    2. Provider: an app can register its callback function (and context) to be invoked
       once execution reaches the specified point. All local scope variables
       will be passed to the callback as in #1.
    It's important to realize that the execution context is only guaranteed
    to exist for functions that are at the same or deeper level of invocation
    relative to the specified execution point.
"""
def __init__(self):
self.__observers = {}
self.__contexts = {}
self.__execpoints = {}
def registerObserver(self, cbFun, *execpoints, **kwargs):
if cbFun in self.__contexts:
raise error.PySnmpError('duplicate observer %s' % cbFun)
else:
self.__contexts[cbFun] = kwargs.get('cbCtx')
for execpoint in execpoints:
if execpoint not in self.__observers:
self.__observers[execpoint] = []
self.__observers[execpoint].append(cbFun)
def unregisterObserver(self, cbFun=None):
if cbFun is None:
self.__observers.clear()
self.__contexts.clear()
else:
for execpoint in dict(self.__observers):
if cbFun in self.__observers[execpoint]:
self.__observers[execpoint].remove(cbFun)
<|fim▁hole|>
def storeExecutionContext(self, snmpEngine, execpoint, variables):
self.__execpoints[execpoint] = variables
if execpoint in self.__observers:
for cbFun in self.__observers[execpoint]:
cbFun(snmpEngine, execpoint, variables, self.__contexts[cbFun])
def clearExecutionContext(self, snmpEngine, *execpoints):
if execpoints:
for execpoint in execpoints:
del self.__execpoints[execpoint]
else:
self.__execpoints.clear()
def getExecutionContext(self, execpoint):
return self.__execpoints[execpoint]<|fim▁end|> | if not self.__observers[execpoint]:
del self.__observers[execpoint] |
<|file_name|>auto_ml_client.go<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go_gapic. DO NOT EDIT.
package automl
import (
"context"
"fmt"
"math"
"net/url"
"time"
"cloud.google.com/go/longrunning"
lroauto "cloud.google.com/go/longrunning/autogen"
gax "github.com/googleapis/gax-go/v2"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
"google.golang.org/api/option/internaloption"
gtransport "google.golang.org/api/transport/grpc"
automlpb "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
longrunningpb "google.golang.org/genproto/googleapis/longrunning"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
"google.golang.org/protobuf/proto"
)
var newClientHook clientHook
// CallOptions contains the retry settings for each method of Client.
type CallOptions struct {
CreateDataset []gax.CallOption
GetDataset []gax.CallOption
ListDatasets []gax.CallOption
UpdateDataset []gax.CallOption
DeleteDataset []gax.CallOption
ImportData []gax.CallOption
ExportData []gax.CallOption
GetAnnotationSpec []gax.CallOption
GetTableSpec []gax.CallOption
ListTableSpecs []gax.CallOption
UpdateTableSpec []gax.CallOption
GetColumnSpec []gax.CallOption
ListColumnSpecs []gax.CallOption
UpdateColumnSpec []gax.CallOption
CreateModel []gax.CallOption
GetModel []gax.CallOption
ListModels []gax.CallOption
DeleteModel []gax.CallOption
DeployModel []gax.CallOption
UndeployModel []gax.CallOption
ExportModel []gax.CallOption
ExportEvaluatedExamples []gax.CallOption
GetModelEvaluation []gax.CallOption
ListModelEvaluations []gax.CallOption
}
func defaultGRPCClientOptions() []option.ClientOption {
return []option.ClientOption{
internaloption.WithDefaultEndpoint("automl.googleapis.com:443"),
internaloption.WithDefaultMTLSEndpoint("automl.mtls.googleapis.com:443"),
internaloption.WithDefaultAudience("https://automl.googleapis.com/"),
internaloption.WithDefaultScopes(DefaultAuthScopes()...),
internaloption.EnableJwtWithScope(),
option.WithGRPCDialOption(grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(math.MaxInt32))),
}
}
func defaultCallOptions() *CallOptions {
return &CallOptions{
CreateDataset: []gax.CallOption{},
GetDataset: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ListDatasets: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
UpdateDataset: []gax.CallOption{},
DeleteDataset: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ImportData: []gax.CallOption{},
ExportData: []gax.CallOption{},
GetAnnotationSpec: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
GetTableSpec: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ListTableSpecs: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
UpdateTableSpec: []gax.CallOption{},
GetColumnSpec: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ListColumnSpecs: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
UpdateColumnSpec: []gax.CallOption{},
CreateModel: []gax.CallOption{},
GetModel: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ListModels: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
DeleteModel: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
DeployModel: []gax.CallOption{},
UndeployModel: []gax.CallOption{},
ExportModel: []gax.CallOption{},
ExportEvaluatedExamples: []gax.CallOption{},
GetModelEvaluation: []gax.CallOption{
gax.WithRetry(func() gax.Retryer {
return gax.OnCodes([]codes.Code{
codes.Unavailable,
codes.DeadlineExceeded,
}, gax.Backoff{
Initial: 100 * time.Millisecond,
Max: 60000 * time.Millisecond,
Multiplier: 1.30,
})
}),
},
ListModelEvaluations: []gax.CallOption{},
}
}
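// Editorial sketch (not part of the generated surface): the retry/backoff
// values returned by defaultCallOptions are only defaults. A caller can
// override them for a single invocation by passing gax options at call time.
// The helper below is illustrative and assumes an already constructed *Client.
func exampleGetDatasetWithCustomRetry(ctx context.Context, c *Client, name string) (*automlpb.Dataset, error) {
	// Retry only on Unavailable, with a shorter backoff ceiling than the default.
	retry := gax.WithRetry(func() gax.Retryer {
		return gax.OnCodes([]codes.Code{
			codes.Unavailable,
		}, gax.Backoff{
			Initial:    200 * time.Millisecond,
			Max:        30000 * time.Millisecond,
			Multiplier: 1.5,
		})
	})
	return c.GetDataset(ctx, &automlpb.GetDatasetRequest{Name: name}, retry)
}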
// internalClient is an interface that defines the methods available from Cloud AutoML API.
type internalClient interface {
Close() error
setGoogleClientInfo(...string)
Connection() *grpc.ClientConn
CreateDataset(context.Context, *automlpb.CreateDatasetRequest, ...gax.CallOption) (*automlpb.Dataset, error)
GetDataset(context.Context, *automlpb.GetDatasetRequest, ...gax.CallOption) (*automlpb.Dataset, error)
ListDatasets(context.Context, *automlpb.ListDatasetsRequest, ...gax.CallOption) *DatasetIterator
UpdateDataset(context.Context, *automlpb.UpdateDatasetRequest, ...gax.CallOption) (*automlpb.Dataset, error)
DeleteDataset(context.Context, *automlpb.DeleteDatasetRequest, ...gax.CallOption) (*DeleteDatasetOperation, error)
DeleteDatasetOperation(name string) *DeleteDatasetOperation
ImportData(context.Context, *automlpb.ImportDataRequest, ...gax.CallOption) (*ImportDataOperation, error)
ImportDataOperation(name string) *ImportDataOperation
ExportData(context.Context, *automlpb.ExportDataRequest, ...gax.CallOption) (*ExportDataOperation, error)
ExportDataOperation(name string) *ExportDataOperation
GetAnnotationSpec(context.Context, *automlpb.GetAnnotationSpecRequest, ...gax.CallOption) (*automlpb.AnnotationSpec, error)
GetTableSpec(context.Context, *automlpb.GetTableSpecRequest, ...gax.CallOption) (*automlpb.TableSpec, error)
ListTableSpecs(context.Context, *automlpb.ListTableSpecsRequest, ...gax.CallOption) *TableSpecIterator
UpdateTableSpec(context.Context, *automlpb.UpdateTableSpecRequest, ...gax.CallOption) (*automlpb.TableSpec, error)
GetColumnSpec(context.Context, *automlpb.GetColumnSpecRequest, ...gax.CallOption) (*automlpb.ColumnSpec, error)
ListColumnSpecs(context.Context, *automlpb.ListColumnSpecsRequest, ...gax.CallOption) *ColumnSpecIterator
UpdateColumnSpec(context.Context, *automlpb.UpdateColumnSpecRequest, ...gax.CallOption) (*automlpb.ColumnSpec, error)
CreateModel(context.Context, *automlpb.CreateModelRequest, ...gax.CallOption) (*CreateModelOperation, error)
CreateModelOperation(name string) *CreateModelOperation
GetModel(context.Context, *automlpb.GetModelRequest, ...gax.CallOption) (*automlpb.Model, error)
ListModels(context.Context, *automlpb.ListModelsRequest, ...gax.CallOption) *ModelIterator
DeleteModel(context.Context, *automlpb.DeleteModelRequest, ...gax.CallOption) (*DeleteModelOperation, error)
DeleteModelOperation(name string) *DeleteModelOperation
DeployModel(context.Context, *automlpb.DeployModelRequest, ...gax.CallOption) (*DeployModelOperation, error)
DeployModelOperation(name string) *DeployModelOperation
UndeployModel(context.Context, *automlpb.UndeployModelRequest, ...gax.CallOption) (*UndeployModelOperation, error)
UndeployModelOperation(name string) *UndeployModelOperation
ExportModel(context.Context, *automlpb.ExportModelRequest, ...gax.CallOption) (*ExportModelOperation, error)
ExportModelOperation(name string) *ExportModelOperation
ExportEvaluatedExamples(context.Context, *automlpb.ExportEvaluatedExamplesRequest, ...gax.CallOption) (*ExportEvaluatedExamplesOperation, error)
ExportEvaluatedExamplesOperation(name string) *ExportEvaluatedExamplesOperation
GetModelEvaluation(context.Context, *automlpb.GetModelEvaluationRequest, ...gax.CallOption) (*automlpb.ModelEvaluation, error)
ListModelEvaluations(context.Context, *automlpb.ListModelEvaluationsRequest, ...gax.CallOption) *ModelEvaluationIterator
}
// Client is a client for interacting with Cloud AutoML API.
// Methods, except Close, may be called concurrently. However, fields must not be modified concurrently with method calls.
//
// AutoML Server API.
//
// The resource names are assigned by the server.
// The server never reuses names that it has created after the resources with
// those names are deleted.
//
// An ID of a resource is the last element of the item’s resource name. For example, for
// projects/{project_id}/locations/{location_id}/datasets/{dataset_id},
// the ID for the item is {dataset_id}.
//
// Currently the only supported location_id is “us-central1”.
//
// On any input that is documented to expect a string parameter in
// snake_case or kebab-case, either of those cases is accepted.
type Client struct {
// The internal transport-dependent client.
internalClient internalClient
// The call options for this service.
CallOptions *CallOptions
// LROClient is used internally to handle long-running operations.
// It is exposed so that its CallOptions can be modified if required.
// Users should not Close this client.
LROClient *lroauto.OperationsClient
}
// Wrapper methods routed to the internal client.
// Close closes the connection to the API service. The user should invoke this when
// the client is no longer required.
func (c *Client) Close() error {
return c.internalClient.Close()
}
// setGoogleClientInfo sets the name and version of the application in
// the `x-goog-api-client` header passed on each request. Intended for
// use by Google-written clients.
func (c *Client) setGoogleClientInfo(keyval ...string) {
c.internalClient.setGoogleClientInfo(keyval...)
}
// Connection returns a connection to the API service.
//
// Deprecated.
func (c *Client) Connection() *grpc.ClientConn {
return c.internalClient.Connection()
}
// CreateDataset creates a dataset.
func (c *Client) CreateDataset(ctx context.Context, req *automlpb.CreateDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
return c.internalClient.CreateDataset(ctx, req, opts...)
}
// GetDataset gets a dataset.
func (c *Client) GetDataset(ctx context.Context, req *automlpb.GetDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
return c.internalClient.GetDataset(ctx, req, opts...)
}
// ListDatasets lists datasets in a project.
func (c *Client) ListDatasets(ctx context.Context, req *automlpb.ListDatasetsRequest, opts ...gax.CallOption) *DatasetIterator {
return c.internalClient.ListDatasets(ctx, req, opts...)
}
// UpdateDataset updates a dataset.
func (c *Client) UpdateDataset(ctx context.Context, req *automlpb.UpdateDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
return c.internalClient.UpdateDataset(ctx, req, opts...)
}
// DeleteDataset deletes a dataset and all of its contents.
// Returns empty response in the
// response field when it completes,
// and delete_details in the
// metadata field.
func (c *Client) DeleteDataset(ctx context.Context, req *automlpb.DeleteDatasetRequest, opts ...gax.CallOption) (*DeleteDatasetOperation, error) {
return c.internalClient.DeleteDataset(ctx, req, opts...)
}
// DeleteDatasetOperation returns a new DeleteDatasetOperation from a given name.
// The name must be that of a previously created DeleteDatasetOperation, possibly from a different process.
func (c *Client) DeleteDatasetOperation(name string) *DeleteDatasetOperation {
return c.internalClient.DeleteDatasetOperation(name)
}
// ImportData imports data into a dataset.
// For Tables this method can only be called on an empty Dataset.
//
// For Tables:
//
// A
// schema_inference_version
// parameter must be explicitly set.
// Returns an empty response in the
// response field when it completes.
func (c *Client) ImportData(ctx context.Context, req *automlpb.ImportDataRequest, opts ...gax.CallOption) (*ImportDataOperation, error) {
return c.internalClient.ImportData(ctx, req, opts...)
}
// ImportDataOperation returns a new ImportDataOperation from a given name.
// The name must be that of a previously created ImportDataOperation, possibly from a different process.
func (c *Client) ImportDataOperation(name string) *ImportDataOperation {
return c.internalClient.ImportDataOperation(name)
}
// ExportData exports dataset’s data to the provided output location.
// Returns an empty response in the
// response field when it completes.
func (c *Client) ExportData(ctx context.Context, req *automlpb.ExportDataRequest, opts ...gax.CallOption) (*ExportDataOperation, error) {
return c.internalClient.ExportData(ctx, req, opts...)
}
// ExportDataOperation returns a new ExportDataOperation from a given name.
// The name must be that of a previously created ExportDataOperation, possibly from a different process.
func (c *Client) ExportDataOperation(name string) *ExportDataOperation {
return c.internalClient.ExportDataOperation(name)
}
// GetAnnotationSpec gets an annotation spec.
func (c *Client) GetAnnotationSpec(ctx context.Context, req *automlpb.GetAnnotationSpecRequest, opts ...gax.CallOption) (*automlpb.AnnotationSpec, error) {
return c.internalClient.GetAnnotationSpec(ctx, req, opts...)
}
// GetTableSpec gets a table spec.
func (c *Client) GetTableSpec(ctx context.Context, req *automlpb.GetTableSpecRequest, opts ...gax.CallOption) (*automlpb.TableSpec, error) {
return c.internalClient.GetTableSpec(ctx, req, opts...)
}
// ListTableSpecs lists table specs in a dataset.
func (c *Client) ListTableSpecs(ctx context.Context, req *automlpb.ListTableSpecsRequest, opts ...gax.CallOption) *TableSpecIterator {
return c.internalClient.ListTableSpecs(ctx, req, opts...)
}
// UpdateTableSpec updates a table spec.
func (c *Client) UpdateTableSpec(ctx context.Context, req *automlpb.UpdateTableSpecRequest, opts ...gax.CallOption) (*automlpb.TableSpec, error) {
return c.internalClient.UpdateTableSpec(ctx, req, opts...)
}
// GetColumnSpec gets a column spec.
func (c *Client) GetColumnSpec(ctx context.Context, req *automlpb.GetColumnSpecRequest, opts ...gax.CallOption) (*automlpb.ColumnSpec, error) {
return c.internalClient.GetColumnSpec(ctx, req, opts...)
}
// ListColumnSpecs lists column specs in a table spec.
func (c *Client) ListColumnSpecs(ctx context.Context, req *automlpb.ListColumnSpecsRequest, opts ...gax.CallOption) *ColumnSpecIterator {
return c.internalClient.ListColumnSpecs(ctx, req, opts...)
}
// UpdateColumnSpec updates a column spec.
func (c *Client) UpdateColumnSpec(ctx context.Context, req *automlpb.UpdateColumnSpecRequest, opts ...gax.CallOption) (*automlpb.ColumnSpec, error) {
return c.internalClient.UpdateColumnSpec(ctx, req, opts...)
}
// CreateModel creates a model.
// Returns a Model in the response
// field when it completes.
// When you create a model, several model evaluations are created for it:
// a global evaluation, and one evaluation for each annotation spec.
func (c *Client) CreateModel(ctx context.Context, req *automlpb.CreateModelRequest, opts ...gax.CallOption) (*CreateModelOperation, error) {
return c.internalClient.CreateModel(ctx, req, opts...)
}
// CreateModelOperation returns a new CreateModelOperation from a given name.
// The name must be that of a previously created CreateModelOperation, possibly from a different process.
func (c *Client) CreateModelOperation(name string) *CreateModelOperation {
return c.internalClient.CreateModelOperation(name)
}
// GetModel gets a model.
func (c *Client) GetModel(ctx context.Context, req *automlpb.GetModelRequest, opts ...gax.CallOption) (*automlpb.Model, error) {
return c.internalClient.GetModel(ctx, req, opts...)
}
// ListModels lists models.
func (c *Client) ListModels(ctx context.Context, req *automlpb.ListModelsRequest, opts ...gax.CallOption) *ModelIterator {
return c.internalClient.ListModels(ctx, req, opts...)
}
// DeleteModel deletes a model.
// Returns google.protobuf.Empty in the
// response field when it completes,
// and delete_details in the
// metadata field.
func (c *Client) DeleteModel(ctx context.Context, req *automlpb.DeleteModelRequest, opts ...gax.CallOption) (*DeleteModelOperation, error) {
return c.internalClient.DeleteModel(ctx, req, opts...)
}
// DeleteModelOperation returns a new DeleteModelOperation from a given name.
// The name must be that of a previously created DeleteModelOperation, possibly from a different process.
func (c *Client) DeleteModelOperation(name string) *DeleteModelOperation {
return c.internalClient.DeleteModelOperation(name)
}
// DeployModel deploys a model. If a model is already deployed, deploying it with the
// same parameters has no effect. Deploying with different parameters
// (for example, changing
//
// node_number)
// will reset the deployment state without pausing the model’s availability.
//
// Only applicable for Text Classification, Image Object Detection, Tables, and Image Segmentation; all other domains manage
// deployment automatically.
//
// Returns an empty response in the
// response field when it completes.
func (c *Client) DeployModel(ctx context.Context, req *automlpb.DeployModelRequest, opts ...gax.CallOption) (*DeployModelOperation, error) {
return c.internalClient.DeployModel(ctx, req, opts...)
}
// DeployModelOperation returns a new DeployModelOperation from a given name.
// The name must be that of a previously created DeployModelOperation, possibly from a different process.
func (c *Client) DeployModelOperation(name string) *DeployModelOperation {
return c.internalClient.DeployModelOperation(name)
}
// UndeployModel undeploys a model. If the model is not deployed this method has no effect.
//
// Only applicable for Text Classification, Image Object Detection and Tables;
// all other domains manage deployment automatically.
//
// Returns an empty response in the
// response field when it completes.
func (c *Client) UndeployModel(ctx context.Context, req *automlpb.UndeployModelRequest, opts ...gax.CallOption) (*UndeployModelOperation, error) {
return c.internalClient.UndeployModel(ctx, req, opts...)
}
// UndeployModelOperation returns a new UndeployModelOperation from a given name.
// The name must be that of a previously created UndeployModelOperation, possibly from a different process.
func (c *Client) UndeployModelOperation(name string) *UndeployModelOperation {
return c.internalClient.UndeployModelOperation(name)
}
// ExportModel exports a trained, “export-able” model to a user-specified Google Cloud
// Storage location. A model is considered export-able if and only if it has
// an export format defined for it in
//
// ModelExportOutputConfig.
//
// Returns an empty response in the
// response field when it completes.
func (c *Client) ExportModel(ctx context.Context, req *automlpb.ExportModelRequest, opts ...gax.CallOption) (*ExportModelOperation, error) {
return c.internalClient.ExportModel(ctx, req, opts...)
}
// ExportModelOperation returns a new ExportModelOperation from a given name.
// The name must be that of a previously created ExportModelOperation, possibly from a different process.
func (c *Client) ExportModelOperation(name string) *ExportModelOperation {
return c.internalClient.ExportModelOperation(name)
}
// ExportEvaluatedExamples exports examples on which the model was evaluated (i.e. which were in the
// TEST set of the dataset the model was created from), together with their
// ground truth annotations and the annotations created (predicted) by the
// model.
// The examples, ground truth and predictions are exported in the state
// they were at the moment the model was evaluated.
//
// This export is available only for 30 days after the model evaluation is
// created.
//
// Currently only available for Tables.
//
// Returns an empty response in the
// response field when it completes.
func (c *Client) ExportEvaluatedExamples(ctx context.Context, req *automlpb.ExportEvaluatedExamplesRequest, opts ...gax.CallOption) (*ExportEvaluatedExamplesOperation, error) {
return c.internalClient.ExportEvaluatedExamples(ctx, req, opts...)
}
// ExportEvaluatedExamplesOperation returns a new ExportEvaluatedExamplesOperation from a given name.
// The name must be that of a previously created ExportEvaluatedExamplesOperation, possibly from a different process.
func (c *Client) ExportEvaluatedExamplesOperation(name string) *ExportEvaluatedExamplesOperation {
return c.internalClient.ExportEvaluatedExamplesOperation(name)
}
// GetModelEvaluation gets a model evaluation.
func (c *Client) GetModelEvaluation(ctx context.Context, req *automlpb.GetModelEvaluationRequest, opts ...gax.CallOption) (*automlpb.ModelEvaluation, error) {
return c.internalClient.GetModelEvaluation(ctx, req, opts...)
}
// ListModelEvaluations lists model evaluations.
func (c *Client) ListModelEvaluations(ctx context.Context, req *automlpb.ListModelEvaluationsRequest, opts ...gax.CallOption) *ModelEvaluationIterator {
return c.internalClient.ListModelEvaluations(ctx, req, opts...)
}
// gRPCClient is a client for interacting with Cloud AutoML API over gRPC transport.
//
// Methods, except Close, may be called concurrently. However, fields must not be modified concurrently with method calls.
type gRPCClient struct {
// Connection pool of gRPC connections to the service.
connPool gtransport.ConnPool
// flag to opt out of default deadlines via GOOGLE_API_GO_EXPERIMENTAL_DISABLE_DEFAULT_DEADLINE
disableDeadlines bool
// Points back to the CallOptions field of the containing Client
CallOptions **CallOptions
// The gRPC API client.<|fim▁hole|>
// LROClient is used internally to handle long-running operations.
// It is exposed so that its CallOptions can be modified if required.
// Users should not Close this client.
LROClient **lroauto.OperationsClient
// The x-goog-* metadata to be sent with each request.
xGoogMetadata metadata.MD
}
// NewClient creates a new auto ml client based on gRPC.
// The returned client must be Closed when it is done being used to clean up its underlying connections.
//
// AutoML Server API.
//
// The resource names are assigned by the server.
// The server never reuses names that it has created after the resources with
// those names are deleted.
//
// An ID of a resource is the last element of the item’s resource name. For example, for
// projects/{project_id}/locations/{location_id}/datasets/{dataset_id},
// the ID for the item is {dataset_id}.
//
// Currently the only supported location_id is “us-central1”.
//
// On any input that is documented to expect a string parameter in
// snake_case or kebab-case, either of those cases is accepted.
func NewClient(ctx context.Context, opts ...option.ClientOption) (*Client, error) {
clientOpts := defaultGRPCClientOptions()
if newClientHook != nil {
hookOpts, err := newClientHook(ctx, clientHookParams{})
if err != nil {
return nil, err
}
clientOpts = append(clientOpts, hookOpts...)
}
disableDeadlines, err := checkDisableDeadlines()
if err != nil {
return nil, err
}
connPool, err := gtransport.DialPool(ctx, append(clientOpts, opts...)...)
if err != nil {
return nil, err
}
client := Client{CallOptions: defaultCallOptions()}
c := &gRPCClient{
connPool: connPool,
disableDeadlines: disableDeadlines,
client: automlpb.NewAutoMlClient(connPool),
CallOptions: &client.CallOptions,
}
c.setGoogleClientInfo()
client.internalClient = c
client.LROClient, err = lroauto.NewOperationsClient(ctx, gtransport.WithConnPool(connPool))
if err != nil {
// This error "should not happen", since we are just reusing old connection pool
// and never actually need to dial.
// If this does happen, we could leak the connection pool. However, we cannot close it:
// If the user invoked the constructor with option.WithGRPCConn,
// we would close a connection that's still in use.
// TODO: investigate error conditions.
return nil, err
}
c.LROClient = &client.LROClient
return &client, nil
}
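// Editorial sketch (not generated code): minimal client lifecycle with
// NewClient. Error handling is abbreviated and the resource name is a
// placeholder.
func exampleNewClientUsage(ctx context.Context) {
	c, err := NewClient(ctx)
	if err != nil {
		// TODO: handle error.
		return
	}
	defer c.Close()

	// A simple unary call; "us-central1" is currently the only supported
	// location, per the service documentation above.
	model, err := c.GetModel(ctx, &automlpb.GetModelRequest{
		Name: "projects/my-project/locations/us-central1/models/my-model",
	})
	if err != nil {
		// TODO: handle error.
		return
	}
	_ = model // use the model
}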
// Connection returns a connection to the API service.
//
// Deprecated.
func (c *gRPCClient) Connection() *grpc.ClientConn {
return c.connPool.Conn()
}
// setGoogleClientInfo sets the name and version of the application in
// the `x-goog-api-client` header passed on each request. Intended for
// use by Google-written clients.
func (c *gRPCClient) setGoogleClientInfo(keyval ...string) {
kv := append([]string{"gl-go", versionGo()}, keyval...)
kv = append(kv, "gapic", getVersionClient(), "gax", gax.Version, "grpc", grpc.Version)
c.xGoogMetadata = metadata.Pairs("x-goog-api-client", gax.XGoogHeader(kv...))
}
// Close closes the connection to the API service. The user should invoke this when
// the client is no longer required.
func (c *gRPCClient) Close() error {
return c.connPool.Close()
}
func (c *gRPCClient) CreateDataset(ctx context.Context, req *automlpb.CreateDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).CreateDataset[0:len((*c.CallOptions).CreateDataset):len((*c.CallOptions).CreateDataset)], opts...)
var resp *automlpb.Dataset
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.CreateDataset(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) GetDataset(ctx context.Context, req *automlpb.GetDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetDataset[0:len((*c.CallOptions).GetDataset):len((*c.CallOptions).GetDataset)], opts...)
var resp *automlpb.Dataset
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetDataset(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) ListDatasets(ctx context.Context, req *automlpb.ListDatasetsRequest, opts ...gax.CallOption) *DatasetIterator {
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ListDatasets[0:len((*c.CallOptions).ListDatasets):len((*c.CallOptions).ListDatasets)], opts...)
it := &DatasetIterator{}
req = proto.Clone(req).(*automlpb.ListDatasetsRequest)
it.InternalFetch = func(pageSize int, pageToken string) ([]*automlpb.Dataset, string, error) {
resp := &automlpb.ListDatasetsResponse{}
if pageToken != "" {
req.PageToken = pageToken
}
if pageSize > math.MaxInt32 {
req.PageSize = math.MaxInt32
} else if pageSize != 0 {
req.PageSize = int32(pageSize)
}
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ListDatasets(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, "", err
}
it.Response = resp
return resp.GetDatasets(), resp.GetNextPageToken(), nil
}
fetch := func(pageSize int, pageToken string) (string, error) {
items, nextPageToken, err := it.InternalFetch(pageSize, pageToken)
if err != nil {
return "", err
}
it.items = append(it.items, items...)
return nextPageToken, nil
}
it.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)
it.pageInfo.MaxSize = int(req.GetPageSize())
it.pageInfo.Token = req.GetPageToken()
return it
}
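// Editorial sketch (not generated code): the iterator returned by ListDatasets
// is consumed with Next until iterator.Done, as with other google-cloud-go
// iterators. The parent below is a placeholder.
func exampleListDatasetsIteration(ctx context.Context, c *Client) error {
	it := c.ListDatasets(ctx, &automlpb.ListDatasetsRequest{
		Parent: "projects/my-project/locations/us-central1",
	})
	for {
		d, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return err
		}
		_ = d // use each *automlpb.Dataset
	}
	return nil
}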
func (c *gRPCClient) UpdateDataset(ctx context.Context, req *automlpb.UpdateDatasetRequest, opts ...gax.CallOption) (*automlpb.Dataset, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "dataset.name", url.QueryEscape(req.GetDataset().GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).UpdateDataset[0:len((*c.CallOptions).UpdateDataset):len((*c.CallOptions).UpdateDataset)], opts...)
var resp *automlpb.Dataset
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.UpdateDataset(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) DeleteDataset(ctx context.Context, req *automlpb.DeleteDatasetRequest, opts ...gax.CallOption) (*DeleteDatasetOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).DeleteDataset[0:len((*c.CallOptions).DeleteDataset):len((*c.CallOptions).DeleteDataset)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.DeleteDataset(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &DeleteDatasetOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) ImportData(ctx context.Context, req *automlpb.ImportDataRequest, opts ...gax.CallOption) (*ImportDataOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ImportData[0:len((*c.CallOptions).ImportData):len((*c.CallOptions).ImportData)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ImportData(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &ImportDataOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) ExportData(ctx context.Context, req *automlpb.ExportDataRequest, opts ...gax.CallOption) (*ExportDataOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ExportData[0:len((*c.CallOptions).ExportData):len((*c.CallOptions).ExportData)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ExportData(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &ExportDataOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) GetAnnotationSpec(ctx context.Context, req *automlpb.GetAnnotationSpecRequest, opts ...gax.CallOption) (*automlpb.AnnotationSpec, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetAnnotationSpec[0:len((*c.CallOptions).GetAnnotationSpec):len((*c.CallOptions).GetAnnotationSpec)], opts...)
var resp *automlpb.AnnotationSpec
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetAnnotationSpec(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) GetTableSpec(ctx context.Context, req *automlpb.GetTableSpecRequest, opts ...gax.CallOption) (*automlpb.TableSpec, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetTableSpec[0:len((*c.CallOptions).GetTableSpec):len((*c.CallOptions).GetTableSpec)], opts...)
var resp *automlpb.TableSpec
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetTableSpec(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) ListTableSpecs(ctx context.Context, req *automlpb.ListTableSpecsRequest, opts ...gax.CallOption) *TableSpecIterator {
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ListTableSpecs[0:len((*c.CallOptions).ListTableSpecs):len((*c.CallOptions).ListTableSpecs)], opts...)
it := &TableSpecIterator{}
req = proto.Clone(req).(*automlpb.ListTableSpecsRequest)
it.InternalFetch = func(pageSize int, pageToken string) ([]*automlpb.TableSpec, string, error) {
resp := &automlpb.ListTableSpecsResponse{}
if pageToken != "" {
req.PageToken = pageToken
}
if pageSize > math.MaxInt32 {
req.PageSize = math.MaxInt32
} else if pageSize != 0 {
req.PageSize = int32(pageSize)
}
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ListTableSpecs(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, "", err
}
it.Response = resp
return resp.GetTableSpecs(), resp.GetNextPageToken(), nil
}
fetch := func(pageSize int, pageToken string) (string, error) {
items, nextPageToken, err := it.InternalFetch(pageSize, pageToken)
if err != nil {
return "", err
}
it.items = append(it.items, items...)
return nextPageToken, nil
}
it.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)
it.pageInfo.MaxSize = int(req.GetPageSize())
it.pageInfo.Token = req.GetPageToken()
return it
}
func (c *gRPCClient) UpdateTableSpec(ctx context.Context, req *automlpb.UpdateTableSpecRequest, opts ...gax.CallOption) (*automlpb.TableSpec, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "table_spec.name", url.QueryEscape(req.GetTableSpec().GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).UpdateTableSpec[0:len((*c.CallOptions).UpdateTableSpec):len((*c.CallOptions).UpdateTableSpec)], opts...)
var resp *automlpb.TableSpec
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.UpdateTableSpec(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) GetColumnSpec(ctx context.Context, req *automlpb.GetColumnSpecRequest, opts ...gax.CallOption) (*automlpb.ColumnSpec, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetColumnSpec[0:len((*c.CallOptions).GetColumnSpec):len((*c.CallOptions).GetColumnSpec)], opts...)
var resp *automlpb.ColumnSpec
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetColumnSpec(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) ListColumnSpecs(ctx context.Context, req *automlpb.ListColumnSpecsRequest, opts ...gax.CallOption) *ColumnSpecIterator {
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ListColumnSpecs[0:len((*c.CallOptions).ListColumnSpecs):len((*c.CallOptions).ListColumnSpecs)], opts...)
it := &ColumnSpecIterator{}
req = proto.Clone(req).(*automlpb.ListColumnSpecsRequest)
it.InternalFetch = func(pageSize int, pageToken string) ([]*automlpb.ColumnSpec, string, error) {
resp := &automlpb.ListColumnSpecsResponse{}
if pageToken != "" {
req.PageToken = pageToken
}
if pageSize > math.MaxInt32 {
req.PageSize = math.MaxInt32
} else if pageSize != 0 {
req.PageSize = int32(pageSize)
}
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ListColumnSpecs(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, "", err
}
it.Response = resp
return resp.GetColumnSpecs(), resp.GetNextPageToken(), nil
}
fetch := func(pageSize int, pageToken string) (string, error) {
items, nextPageToken, err := it.InternalFetch(pageSize, pageToken)
if err != nil {
return "", err
}
it.items = append(it.items, items...)
return nextPageToken, nil
}
it.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)
it.pageInfo.MaxSize = int(req.GetPageSize())
it.pageInfo.Token = req.GetPageToken()
return it
}
func (c *gRPCClient) UpdateColumnSpec(ctx context.Context, req *automlpb.UpdateColumnSpecRequest, opts ...gax.CallOption) (*automlpb.ColumnSpec, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "column_spec.name", url.QueryEscape(req.GetColumnSpec().GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).UpdateColumnSpec[0:len((*c.CallOptions).UpdateColumnSpec):len((*c.CallOptions).UpdateColumnSpec)], opts...)
var resp *automlpb.ColumnSpec
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.UpdateColumnSpec(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) CreateModel(ctx context.Context, req *automlpb.CreateModelRequest, opts ...gax.CallOption) (*CreateModelOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).CreateModel[0:len((*c.CallOptions).CreateModel):len((*c.CallOptions).CreateModel)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.CreateModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &CreateModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) GetModel(ctx context.Context, req *automlpb.GetModelRequest, opts ...gax.CallOption) (*automlpb.Model, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetModel[0:len((*c.CallOptions).GetModel):len((*c.CallOptions).GetModel)], opts...)
var resp *automlpb.Model
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) ListModels(ctx context.Context, req *automlpb.ListModelsRequest, opts ...gax.CallOption) *ModelIterator {
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ListModels[0:len((*c.CallOptions).ListModels):len((*c.CallOptions).ListModels)], opts...)
it := &ModelIterator{}
req = proto.Clone(req).(*automlpb.ListModelsRequest)
it.InternalFetch = func(pageSize int, pageToken string) ([]*automlpb.Model, string, error) {
resp := &automlpb.ListModelsResponse{}
if pageToken != "" {
req.PageToken = pageToken
}
if pageSize > math.MaxInt32 {
req.PageSize = math.MaxInt32
} else if pageSize != 0 {
req.PageSize = int32(pageSize)
}
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ListModels(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, "", err
}
it.Response = resp
return resp.GetModel(), resp.GetNextPageToken(), nil
}
fetch := func(pageSize int, pageToken string) (string, error) {
items, nextPageToken, err := it.InternalFetch(pageSize, pageToken)
if err != nil {
return "", err
}
it.items = append(it.items, items...)
return nextPageToken, nil
}
it.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)
it.pageInfo.MaxSize = int(req.GetPageSize())
it.pageInfo.Token = req.GetPageToken()
return it
}
func (c *gRPCClient) DeleteModel(ctx context.Context, req *automlpb.DeleteModelRequest, opts ...gax.CallOption) (*DeleteModelOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).DeleteModel[0:len((*c.CallOptions).DeleteModel):len((*c.CallOptions).DeleteModel)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.DeleteModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &DeleteModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) DeployModel(ctx context.Context, req *automlpb.DeployModelRequest, opts ...gax.CallOption) (*DeployModelOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).DeployModel[0:len((*c.CallOptions).DeployModel):len((*c.CallOptions).DeployModel)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.DeployModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &DeployModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) UndeployModel(ctx context.Context, req *automlpb.UndeployModelRequest, opts ...gax.CallOption) (*UndeployModelOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).UndeployModel[0:len((*c.CallOptions).UndeployModel):len((*c.CallOptions).UndeployModel)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.UndeployModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &UndeployModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) ExportModel(ctx context.Context, req *automlpb.ExportModelRequest, opts ...gax.CallOption) (*ExportModelOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ExportModel[0:len((*c.CallOptions).ExportModel):len((*c.CallOptions).ExportModel)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ExportModel(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &ExportModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) ExportEvaluatedExamples(ctx context.Context, req *automlpb.ExportEvaluatedExamplesRequest, opts ...gax.CallOption) (*ExportEvaluatedExamplesOperation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ExportEvaluatedExamples[0:len((*c.CallOptions).ExportEvaluatedExamples):len((*c.CallOptions).ExportEvaluatedExamples)], opts...)
var resp *longrunningpb.Operation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ExportEvaluatedExamples(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return &ExportEvaluatedExamplesOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, resp),
}, nil
}
func (c *gRPCClient) GetModelEvaluation(ctx context.Context, req *automlpb.GetModelEvaluationRequest, opts ...gax.CallOption) (*automlpb.ModelEvaluation, error) {
if _, ok := ctx.Deadline(); !ok && !c.disableDeadlines {
cctx, cancel := context.WithTimeout(ctx, 5000*time.Millisecond)
defer cancel()
ctx = cctx
}
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "name", url.QueryEscape(req.GetName())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).GetModelEvaluation[0:len((*c.CallOptions).GetModelEvaluation):len((*c.CallOptions).GetModelEvaluation)], opts...)
var resp *automlpb.ModelEvaluation
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.GetModelEvaluation(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, err
}
return resp, nil
}
func (c *gRPCClient) ListModelEvaluations(ctx context.Context, req *automlpb.ListModelEvaluationsRequest, opts ...gax.CallOption) *ModelEvaluationIterator {
md := metadata.Pairs("x-goog-request-params", fmt.Sprintf("%s=%v", "parent", url.QueryEscape(req.GetParent())))
ctx = insertMetadata(ctx, c.xGoogMetadata, md)
opts = append((*c.CallOptions).ListModelEvaluations[0:len((*c.CallOptions).ListModelEvaluations):len((*c.CallOptions).ListModelEvaluations)], opts...)
it := &ModelEvaluationIterator{}
req = proto.Clone(req).(*automlpb.ListModelEvaluationsRequest)
it.InternalFetch = func(pageSize int, pageToken string) ([]*automlpb.ModelEvaluation, string, error) {
resp := &automlpb.ListModelEvaluationsResponse{}
if pageToken != "" {
req.PageToken = pageToken
}
if pageSize > math.MaxInt32 {
req.PageSize = math.MaxInt32
} else if pageSize != 0 {
req.PageSize = int32(pageSize)
}
err := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {
var err error
resp, err = c.client.ListModelEvaluations(ctx, req, settings.GRPC...)
return err
}, opts...)
if err != nil {
return nil, "", err
}
it.Response = resp
return resp.GetModelEvaluation(), resp.GetNextPageToken(), nil
}
fetch := func(pageSize int, pageToken string) (string, error) {
items, nextPageToken, err := it.InternalFetch(pageSize, pageToken)
if err != nil {
return "", err
}
it.items = append(it.items, items...)
return nextPageToken, nil
}
it.pageInfo, it.nextFunc = iterator.NewPageInfo(fetch, it.bufLen, it.takeBuf)
it.pageInfo.MaxSize = int(req.GetPageSize())
it.pageInfo.Token = req.GetPageToken()
return it
}
// CreateModelOperation manages a long-running operation from CreateModel.
type CreateModelOperation struct {
lro *longrunning.Operation
}
// CreateModelOperation returns a new CreateModelOperation from a given name.
// The name must be that of a previously created CreateModelOperation, possibly from a different process.
func (c *gRPCClient) CreateModelOperation(name string) *CreateModelOperation {
return &CreateModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *CreateModelOperation) Wait(ctx context.Context, opts ...gax.CallOption) (*automlpb.Model, error) {
var resp automlpb.Model
if err := op.lro.WaitWithInterval(ctx, &resp, time.Minute, opts...); err != nil {
return nil, err
}
return &resp, nil
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *CreateModelOperation) Poll(ctx context.Context, opts ...gax.CallOption) (*automlpb.Model, error) {
var resp automlpb.Model
if err := op.lro.Poll(ctx, &resp, opts...); err != nil {
return nil, err
}
if !op.Done() {
return nil, nil
}
return &resp, nil
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *CreateModelOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *CreateModelOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *CreateModelOperation) Name() string {
return op.lro.Name()
}
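// Editorial sketch (not generated code): typical use of the long-running
// CreateModel flow — start the operation, then block on Wait for the resulting
// model. The request contents are supplied by the caller.
func exampleCreateModelAndWait(ctx context.Context, c *Client, req *automlpb.CreateModelRequest) (*automlpb.Model, error) {
	op, err := c.CreateModel(ctx, req)
	if err != nil {
		return nil, err
	}
	// Wait polls until the operation completes and unmarshals the response.
	return op.Wait(ctx)
}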
// DeleteDatasetOperation manages a long-running operation from DeleteDataset.
type DeleteDatasetOperation struct {
lro *longrunning.Operation
}
// DeleteDatasetOperation returns a new DeleteDatasetOperation from a given name.
// The name must be that of a previously created DeleteDatasetOperation, possibly from a different process.
func (c *gRPCClient) DeleteDatasetOperation(name string) *DeleteDatasetOperation {
return &DeleteDatasetOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *DeleteDatasetOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *DeleteDatasetOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *DeleteDatasetOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *DeleteDatasetOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *DeleteDatasetOperation) Name() string {
return op.lro.Name()
}
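// Editorial sketch (not generated code): resuming a previously started dataset
// deletion from another process, given only the operation name.
func exampleResumeDeleteDataset(ctx context.Context, c *Client, opName string) error {
	op := c.DeleteDatasetOperation(opName)
	// Wait blocks until the delete finishes; it returns only an error because
	// the operation's response is empty.
	return op.Wait(ctx)
}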
// DeleteModelOperation manages a long-running operation from DeleteModel.
type DeleteModelOperation struct {
lro *longrunning.Operation
}
// DeleteModelOperation returns a new DeleteModelOperation from a given name.
// The name must be that of a previously created DeleteModelOperation, possibly from a different process.
func (c *gRPCClient) DeleteModelOperation(name string) *DeleteModelOperation {
return &DeleteModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *DeleteModelOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *DeleteModelOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *DeleteModelOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *DeleteModelOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *DeleteModelOperation) Name() string {
return op.lro.Name()
}
// DeployModelOperation manages a long-running operation from DeployModel.
type DeployModelOperation struct {
lro *longrunning.Operation
}
// DeployModelOperation returns a new DeployModelOperation from a given name.
// The name must be that of a previously created DeployModelOperation, possibly from a different process.
func (c *gRPCClient) DeployModelOperation(name string) *DeployModelOperation {
return &DeployModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *DeployModelOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *DeployModelOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *DeployModelOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *DeployModelOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *DeployModelOperation) Name() string {
return op.lro.Name()
}
// ExportDataOperation manages a long-running operation from ExportData.
type ExportDataOperation struct {
lro *longrunning.Operation
}
// ExportDataOperation returns a new ExportDataOperation from a given name.
// The name must be that of a previously created ExportDataOperation, possibly from a different process.
func (c *gRPCClient) ExportDataOperation(name string) *ExportDataOperation {
return &ExportDataOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *ExportDataOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *ExportDataOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *ExportDataOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *ExportDataOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *ExportDataOperation) Name() string {
return op.lro.Name()
}
// ExportEvaluatedExamplesOperation manages a long-running operation from ExportEvaluatedExamples.
type ExportEvaluatedExamplesOperation struct {
lro *longrunning.Operation
}
// ExportEvaluatedExamplesOperation returns a new ExportEvaluatedExamplesOperation from a given name.
// The name must be that of a previously created ExportEvaluatedExamplesOperation, possibly from a different process.
func (c *gRPCClient) ExportEvaluatedExamplesOperation(name string) *ExportEvaluatedExamplesOperation {
return &ExportEvaluatedExamplesOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *ExportEvaluatedExamplesOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *ExportEvaluatedExamplesOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *ExportEvaluatedExamplesOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *ExportEvaluatedExamplesOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *ExportEvaluatedExamplesOperation) Name() string {
return op.lro.Name()
}
// ExportModelOperation manages a long-running operation from ExportModel.
type ExportModelOperation struct {
lro *longrunning.Operation
}
// ExportModelOperation returns a new ExportModelOperation from a given name.
// The name must be that of a previously created ExportModelOperation, possibly from a different process.
func (c *gRPCClient) ExportModelOperation(name string) *ExportModelOperation {
return &ExportModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *ExportModelOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *ExportModelOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *ExportModelOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *ExportModelOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *ExportModelOperation) Name() string {
return op.lro.Name()
}
// ImportDataOperation manages a long-running operation from ImportData.
type ImportDataOperation struct {
lro *longrunning.Operation
}
// ImportDataOperation returns a new ImportDataOperation from a given name.
// The name must be that of a previously created ImportDataOperation, possibly from a different process.
func (c *gRPCClient) ImportDataOperation(name string) *ImportDataOperation {
return &ImportDataOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *ImportDataOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *ImportDataOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *ImportDataOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *ImportDataOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *ImportDataOperation) Name() string {
return op.lro.Name()
}
// UndeployModelOperation manages a long-running operation from UndeployModel.
type UndeployModelOperation struct {
lro *longrunning.Operation
}
// UndeployModelOperation returns a new UndeployModelOperation from a given name.
// The name must be that of a previously created UndeployModelOperation, possibly from a different process.
func (c *gRPCClient) UndeployModelOperation(name string) *UndeployModelOperation {
return &UndeployModelOperation{
lro: longrunning.InternalNewOperation(*c.LROClient, &longrunningpb.Operation{Name: name}),
}
}
// Wait blocks until the long-running operation is completed, returning the response and any errors encountered.
//
// See documentation of Poll for error-handling information.
func (op *UndeployModelOperation) Wait(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.WaitWithInterval(ctx, nil, time.Minute, opts...)
}
// Poll fetches the latest state of the long-running operation.
//
// Poll also fetches the latest metadata, which can be retrieved by Metadata.
//
// If Poll fails, the error is returned and op is unmodified. If Poll succeeds and
// the operation has completed with failure, the error is returned and op.Done will return true.
// If Poll succeeds and the operation has completed successfully,
// op.Done will return true, and the response of the operation is returned.
// If Poll succeeds and the operation has not completed, the returned response and error are both nil.
func (op *UndeployModelOperation) Poll(ctx context.Context, opts ...gax.CallOption) error {
return op.lro.Poll(ctx, nil, opts...)
}
// Metadata returns metadata associated with the long-running operation.
// Metadata itself does not contact the server, but Poll does.
// To get the latest metadata, call this method after a successful call to Poll.
// If the metadata is not available, the returned metadata and error are both nil.
func (op *UndeployModelOperation) Metadata() (*automlpb.OperationMetadata, error) {
var meta automlpb.OperationMetadata
if err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {
return nil, nil
} else if err != nil {
return nil, err
}
return &meta, nil
}
// Done reports whether the long-running operation has completed.
func (op *UndeployModelOperation) Done() bool {
return op.lro.Done()
}
// Name returns the name of the long-running operation.
// The name is assigned by the server and is unique within the service from which the operation is created.
func (op *UndeployModelOperation) Name() string {
return op.lro.Name()
}
// ColumnSpecIterator manages a stream of *automlpb.ColumnSpec.
type ColumnSpecIterator struct {
items []*automlpb.ColumnSpec
pageInfo *iterator.PageInfo
nextFunc func() error
// Response is the raw response for the current page.
// It must be cast to the RPC response type.
// Calling Next() or InternalFetch() updates this value.
Response interface{}
// InternalFetch is for use by the Google Cloud Libraries only.
// It is not part of the stable interface of this package.
//
// InternalFetch returns results from a single call to the underlying RPC.
// The number of results is no greater than pageSize.
// If there are no more results, nextPageToken is empty and err is nil.
InternalFetch func(pageSize int, pageToken string) (results []*automlpb.ColumnSpec, nextPageToken string, err error)
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
func (it *ColumnSpecIterator) PageInfo() *iterator.PageInfo {
return it.pageInfo
}
// Next returns the next result. Its second return value is iterator.Done if there are no more
// results. Once Next returns Done, all subsequent calls will return Done.
func (it *ColumnSpecIterator) Next() (*automlpb.ColumnSpec, error) {
var item *automlpb.ColumnSpec
if err := it.nextFunc(); err != nil {
return item, err
}
item = it.items[0]
it.items = it.items[1:]
return item, nil
}
func (it *ColumnSpecIterator) bufLen() int {
return len(it.items)
}
func (it *ColumnSpecIterator) takeBuf() interface{} {
b := it.items
it.items = nil
return b
}
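// Illustrative usage sketch (assumption: it is a *ColumnSpecIterator obtained
// from a ListColumnSpecs call). iterator.Done signals normal exhaustion, so the
// loop below drains every page transparently.
//
//	for {
//		spec, err := it.Next()
//		if err == iterator.Done {
//			break
//		}
//		if err != nil {
//			return err
//		}
//		_ = spec // use the *automlpb.ColumnSpec
//	}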
// DatasetIterator manages a stream of *automlpb.Dataset.
type DatasetIterator struct {
items []*automlpb.Dataset
pageInfo *iterator.PageInfo
nextFunc func() error
// Response is the raw response for the current page.
// It must be cast to the RPC response type.
// Calling Next() or InternalFetch() updates this value.
Response interface{}
// InternalFetch is for use by the Google Cloud Libraries only.
// It is not part of the stable interface of this package.
//
// InternalFetch returns results from a single call to the underlying RPC.
// The number of results is no greater than pageSize.
// If there are no more results, nextPageToken is empty and err is nil.
InternalFetch func(pageSize int, pageToken string) (results []*automlpb.Dataset, nextPageToken string, err error)
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
func (it *DatasetIterator) PageInfo() *iterator.PageInfo {
return it.pageInfo
}
// Next returns the next result. Its second return value is iterator.Done if there are no more
// results. Once Next returns Done, all subsequent calls will return Done.
func (it *DatasetIterator) Next() (*automlpb.Dataset, error) {
var item *automlpb.Dataset
if err := it.nextFunc(); err != nil {
return item, err
}
item = it.items[0]
it.items = it.items[1:]
return item, nil
}
func (it *DatasetIterator) bufLen() int {
return len(it.items)
}
func (it *DatasetIterator) takeBuf() interface{} {
b := it.items
it.items = nil
return b
}
// ModelEvaluationIterator manages a stream of *automlpb.ModelEvaluation.
type ModelEvaluationIterator struct {
items []*automlpb.ModelEvaluation
pageInfo *iterator.PageInfo
nextFunc func() error
// Response is the raw response for the current page.
// It must be cast to the RPC response type.
// Calling Next() or InternalFetch() updates this value.
Response interface{}
// InternalFetch is for use by the Google Cloud Libraries only.
// It is not part of the stable interface of this package.
//
// InternalFetch returns results from a single call to the underlying RPC.
// The number of results is no greater than pageSize.
// If there are no more results, nextPageToken is empty and err is nil.
InternalFetch func(pageSize int, pageToken string) (results []*automlpb.ModelEvaluation, nextPageToken string, err error)
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
func (it *ModelEvaluationIterator) PageInfo() *iterator.PageInfo {
return it.pageInfo
}
// Next returns the next result. Its second return value is iterator.Done if there are no more
// results. Once Next returns Done, all subsequent calls will return Done.
func (it *ModelEvaluationIterator) Next() (*automlpb.ModelEvaluation, error) {
var item *automlpb.ModelEvaluation
if err := it.nextFunc(); err != nil {
return item, err
}
item = it.items[0]
it.items = it.items[1:]
return item, nil
}
func (it *ModelEvaluationIterator) bufLen() int {
return len(it.items)
}
func (it *ModelEvaluationIterator) takeBuf() interface{} {
b := it.items
it.items = nil
return b
}
// ModelIterator manages a stream of *automlpb.Model.
type ModelIterator struct {
items []*automlpb.Model
pageInfo *iterator.PageInfo
nextFunc func() error
// Response is the raw response for the current page.
// It must be cast to the RPC response type.
// Calling Next() or InternalFetch() updates this value.
Response interface{}
// InternalFetch is for use by the Google Cloud Libraries only.
// It is not part of the stable interface of this package.
//
// InternalFetch returns results from a single call to the underlying RPC.
// The number of results is no greater than pageSize.
// If there are no more results, nextPageToken is empty and err is nil.
InternalFetch func(pageSize int, pageToken string) (results []*automlpb.Model, nextPageToken string, err error)
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
func (it *ModelIterator) PageInfo() *iterator.PageInfo {
return it.pageInfo
}
// Next returns the next result. Its second return value is iterator.Done if there are no more
// results. Once Next returns Done, all subsequent calls will return Done.
func (it *ModelIterator) Next() (*automlpb.Model, error) {
var item *automlpb.Model
if err := it.nextFunc(); err != nil {
return item, err
}
item = it.items[0]
it.items = it.items[1:]
return item, nil
}
func (it *ModelIterator) bufLen() int {
return len(it.items)
}
func (it *ModelIterator) takeBuf() interface{} {
b := it.items
it.items = nil
return b
}
// TableSpecIterator manages a stream of *automlpb.TableSpec.
type TableSpecIterator struct {
items []*automlpb.TableSpec
pageInfo *iterator.PageInfo
nextFunc func() error
// Response is the raw response for the current page.
// It must be cast to the RPC response type.
// Calling Next() or InternalFetch() updates this value.
Response interface{}
// InternalFetch is for use by the Google Cloud Libraries only.
// It is not part of the stable interface of this package.
//
// InternalFetch returns results from a single call to the underlying RPC.
// The number of results is no greater than pageSize.
// If there are no more results, nextPageToken is empty and err is nil.
InternalFetch func(pageSize int, pageToken string) (results []*automlpb.TableSpec, nextPageToken string, err error)
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
func (it *TableSpecIterator) PageInfo() *iterator.PageInfo {
return it.pageInfo
}
// Next returns the next result. Its second return value is iterator.Done if there are no more
// results. Once Next returns Done, all subsequent calls will return Done.
func (it *TableSpecIterator) Next() (*automlpb.TableSpec, error) {
var item *automlpb.TableSpec
if err := it.nextFunc(); err != nil {
return item, err
}
item = it.items[0]
it.items = it.items[1:]
return item, nil
}
func (it *TableSpecIterator) bufLen() int {
return len(it.items)
}
func (it *TableSpecIterator) takeBuf() interface{} {
b := it.items
it.items = nil
return b
}<|fim▁end|> | client automlpb.AutoMlClient |
<|file_name|>UUIDDirectoryStoreFilenameDecoder.java<|end_file_name|><|fim▁begin|>package org.apache.activemq.nob.filestore.uuiddir;
import org.apache.activemq.nob.filestore.BrokerFilenameDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.UUID;
/**
* Decoder of filenames in a UUID-based filesystem store of broker configuration files. This store only supports
* broker IDs in the form of UUIDs.
*
* Created by art on 2/19/15.
*/
public class UUIDDirectoryStoreFilenameDecoder implements BrokerFilenameDecoder {
public static final String XBEAN_FILE_PATH_SUFFIX = "-xbean.xml";
private static final Logger DEFAULT_LOGGER = LoggerFactory.getLogger(UUIDDirectoryStoreFilenameDecoder.class);
private Logger LOG = DEFAULT_LOGGER;
/**
* Decode the pathname as a UUID if it is a regular file (i.e. not a directory) and return the UUID.
*
* @param brokerPath path to the candidate broker.
     * @return the broker ID as a String, or null if the path is a directory or its name does not parse as a UUID.
*/
@Override
public String extractIdFromFilename(File brokerPath) {
String result = null;
if ( ! brokerPath.isDirectory() ) {
try {
UUID uuid = UUID.fromString(brokerPath.getName());
if (uuid != null) {
result = uuid.toString();
}
} catch ( IllegalArgumentException illegalArgExc ) {
LOG.debug("invalid UUID {}", brokerPath.getName());
}
}
return result;
}
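    // Illustrative usage sketch; storeDir and the UUID literal are assumptions:
    //   BrokerFilenameDecoder decoder = new UUIDDirectoryStoreFilenameDecoder();
    //   File candidate = new File(storeDir, "550e8400-e29b-41d4-a716-446655440000");
    //   String brokerId = decoder.extractIdFromFilename(candidate); // null unless the name parses as a UUID
    //   File xbean = decoder.getBrokerXbeanFile(candidate);         // "<candidate>-xbean.xml", or null for an invalid ID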
/**
     * Locate the path to the xbean configuration file for the broker at the given path. This method also
     * validates the broker path, since the broker ID must be decoded from it.
*
* @param brokerPath path to the broker.
* @return path to the xbean configuration file, even if it does not exist.
*/
@Override
public File getBrokerXbeanFile(File brokerPath) {
File result = null;
String brokerId = this.extractIdFromFilename(brokerPath);
if ( brokerId != null ) {
result = new File(brokerPath.getPath() + XBEAN_FILE_PATH_SUFFIX);
}
return result;
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>borrowck-ref-mut-of-imm.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
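// Compile-fail test: `x` is an immutable by-value argument, so taking a
// `ref mut` binding to its contents in the match arm below must be rejected;
// the `//~ ERROR` annotation marks the line where the test harness expects
// the borrow-check diagnostic.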
fn destructure(x: Option<isize>) -> isize {
match x {
None => 0,
Some(ref mut v) => *v //~ ERROR cannot borrow
}
}
fn main() {
assert_eq!(destructure(Some(22)), 22);
}<|fim▁end|> | |
<|file_name|>initialise_variables.cpp<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------------
//
// Vampire - A code for atomistic simulation of magnetic materials
//
// Copyright (C) 2009-2012 R.F.L.Evans
//
// Email:[email protected]
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software Foundation,
// Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
//
// ----------------------------------------------------------------------------
//
// Headers
#include "errors.hpp"
#include "demag.hpp"
#include "voronoi.hpp"
#include "material.hpp"
#include "sim.hpp"
#include "random.hpp"
#include "vio.hpp"
#include "vmath.hpp"
#include "vmpi.hpp"
#include <cmath>
#include <iostream>
#include <sstream>
//==========================================================
// Namespace material_parameters
//==========================================================
namespace mp{
//----------------------------------
// Material Container
//----------------------------------
//const int max_materials=100;
int num_materials=1;
std::vector <materials_t> material(1);
//----------------------------------
//Input Integration parameters
//----------------------------------
double dt_SI;
double gamma_SI = 1.76E11;
//----------------------------------
//Derived Integration parameters
//----------------------------------
double dt;
double half_dt;
// Unrolled material parameters for speed
std::vector <double> MaterialMuSSIArray(0);
std::vector <zkval_t> MaterialScalarAnisotropyArray(0);
std::vector <zkten_t> MaterialTensorAnisotropyArray(0);
std::vector <double> material_second_order_anisotropy_constant_array(0);
std::vector <double> material_sixth_order_anisotropy_constant_array(0);
std::vector <double> material_spherical_harmonic_constants_array(0);
std::vector <double> MaterialCubicAnisotropyArray(0);
///
/// @brief Function to initialise program variables prior to system creation.
///
/// @section License
/// Use of this code, either in source or compiled form, is subject to license from the authors.
/// Copyright \htmlonly © \endhtmlonly Richard Evans, 2009-2010. All Rights Reserved.
///
/// @section Information
/// @author Richard Evans, [email protected]
/// @version 1.0
/// @date 19/01/2010
///
/// @param[in] infile Main input file name for system initialisation
/// @return EXIT_SUCCESS
///
/// @internal
/// Created: 19/01/2010
/// Revision: ---
///=====================================================================================
///
int initialise(std::string const infile){
//----------------------------------------------------------
// check calling of routine if error checking is activated
//----------------------------------------------------------
if(err::check==true){std::cout << "initialise_variables has been called" << std::endl;}
if(vmpi::my_rank==0){
std::cout << "================================================================================" << std::endl;
std::cout << "Initialising system variables" << std::endl;
}
// Setup default system settings
mp::default_system();
// Read values from input files
int iostat = vin::read(infile);
if(iostat==EXIT_FAILURE){
terminaltextcolor(RED);
std::cerr << "Error - input file \'" << infile << "\' not found, exiting" << std::endl;
terminaltextcolor(WHITE);
err::vexit();
}
// Print out material properties
//mp::material[0].print();
	// Check for keyword parameter override
if(cs::single_spin==true){
mp::single_spin_system();
}
// Set derived system parameters
mp::set_derived_parameters();
// Return
return EXIT_SUCCESS;
}
int default_system(){
// Initialise system creation flags to zero
for (int i=0;i<10;i++){
cs::system_creation_flags[i] = 0;
sim::hamiltonian_simulation_flags[i] = 0;
}
// Set system dimensions !Angstroms
cs::unit_cell_size[0] = 3.0;
cs::unit_cell_size[1] = 3.0;
cs::unit_cell_size[2] = 3.0;
cs::system_dimensions[0] = 100.0;
cs::system_dimensions[1] = 100.0;
cs::system_dimensions[2] = 100.0;
cs::particle_scale = 50.0;
cs::particle_spacing = 10.0;
cs::particle_creation_parity=0;
cs::crystal_structure = "sc";
// Voronoi Variables
create_voronoi::voronoi_sd=0.1;
create_voronoi::parity=0;
// Setup Hamiltonian Flags
sim::hamiltonian_simulation_flags[0] = 1; /// Exchange
sim::hamiltonian_simulation_flags[1] = 1; /// Anisotropy
sim::hamiltonian_simulation_flags[2] = 1; /// Applied
sim::hamiltonian_simulation_flags[3] = 1; /// Thermal
sim::hamiltonian_simulation_flags[4] = 0; /// Dipolar
//Integration parameters
dt_SI = 1.0e-15; // seconds
dt = dt_SI*mp::gamma_SI; // Must be set before Hth
half_dt = 0.5*dt;
//------------------------------------------------------------------------------
// Material Definitions
//------------------------------------------------------------------------------
num_materials=1;
material.resize(num_materials);
//-------------------------------------------------------
// Material 0
//-------------------------------------------------------
material[0].name="Co";
material[0].alpha=0.1;
material[0].Jij_matrix_SI[0]=-11.2e-21;
material[0].mu_s_SI=1.5*9.27400915e-24;
material[0].Ku1_SI=-4.644e-24;
material[0].gamma_rel=1.0;
material[0].element="Ag ";
// Disable Error Checking
err::check=false;
// Initialise random number generator
mtrandom::grnd.seed(2106975519);
return EXIT_SUCCESS;
}
int single_spin_system(){
// Reset system creation flags to zero
for (int i=0;i<10;i++){
cs::system_creation_flags[i] = 0;
}
// Set system dimensions !Angstroms
cs::unit_cell_size[0] = 3.0;
cs::unit_cell_size[1] = 3.0;
cs::unit_cell_size[2] = 3.0;
cs::system_dimensions[0] = 2.0;
cs::system_dimensions[1] = 2.0;
cs::system_dimensions[2] = 2.0;
cs::particle_scale = 50.0;
cs::particle_spacing = 10.0;
cs::particle_creation_parity=0;
cs::crystal_structure = "sc";
// Turn off multi-spin Flags
sim::hamiltonian_simulation_flags[0] = 0; /// Exchange
sim::hamiltonian_simulation_flags[4] = 0; /// Dipolar
// MPI Mode (Homogeneous execution)
//vmpi::mpi_mode=0;
//mpi_create_variables::mpi_interaction_range=2; // Unit cells
//mpi_create_variables::mpi_comms_identify=false;
return EXIT_SUCCESS;
}
// Simple function to check for valid input for hysteresis loop parameters
void check_hysteresis_loop_parameters(){
// Only applies to hysteresis loop programs, all others return
if(sim::program!=12) return;
double min=sim::Hmin;
double max=sim::Hmax;
double inc=sim::Hinc;
// + + +
if(min>=0 && max>=0 && inc>0){
if(max<min){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum and maximum fields are both positive, but minimum > maximum with a positive increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;
zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum and maximum fields are both positive, but minimum > maximum with a positive increment, causing an infinite loop. Exiting." << std::endl;
err::vexit();
}
}
}
// + + -
else if(min>=0 && max>=0 && inc<0){
if(max>min){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum and maximum fields are both positive, but maximum > minimum with a negative increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;<|fim▁hole|> }
}
}
// + - +
else if(min>=0 && max<0 && inc>0){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum field is positive and maximum field is negative with a positive increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;
zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum field is positive and maximum field is negative with a positive increment, causing an infinite loop. Exiting." << std::endl;
err::vexit();
}
}
// - + -
else if(min<0 && max>=0 && inc<0){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum field is negative and maximum field is positive with a negative increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;
zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum field is negative and maximum field is positive with a negative increment, causing an infinite loop. Exiting." << std::endl;
err::vexit();
}
}
// - - -
else if(min<0 && max<0 && inc<0){
if(max>min){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum and maximum fields are both negative, but minimum < maximum with a negative increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;
zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum and maximum fields are both negative, but minimum < maximum with a negative increment, causing an infinite loop. Exiting." << std::endl;
err::vexit();
}
}
}
// - - +
else if(min<0 && max<0 && inc>0){
if(max<min){
if(vmpi::my_rank==0){
terminaltextcolor(RED);
std::cout << "Error in hysteresis-loop parameters:" << std::endl;
std::cout << "\t sim:minimum-applied-field-strength = " << min << std::endl;
std::cout << "\t sim:maximum-applied-field-strength = " << max << std::endl;
std::cout << "\t sim:applied-field-strength-increment = " << inc << std::endl;
std::cout << "Minimum and maximum fields are both negative, but maximum < minimum with a positive increment, causing an infinite loop. Exiting." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error in hysteresis-loop parameters:" << std::endl;
zlog << zTs() << "\t sim:minimum-applied-field-strength = " << min << std::endl;
zlog << zTs() << "\t sim:maximum-applied-field-strength = " << max << std::endl;
zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum and maximum fields are both positive, but maximum < minimum with a positive increment, causing an infinite loop. Exiting." << std::endl;
err::vexit();
}
}
}
return;
}
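// Worked example (comment only): sim::Hmin = -1.0, sim::Hmax = +1.0 and
// sim::Hinc = +0.1 sweep the applied field from the minimum up to the maximum
// in 21 values and pass every check above; the same bounds with
// sim::Hinc = -0.1 could never reach the maximum field and are rejected by the
// "- + -" branch.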
int set_derived_parameters(){
// Set integration constants
mp::dt = mp::dt_SI*mp::gamma_SI; // Must be set before Hth
mp::half_dt = 0.5*mp::dt;
// Check to see if field direction is set by angle
if(sim::applied_field_set_by_angle){
sim::H_vec[0]=sin(sim::applied_field_angle_phi*M_PI/180.0)*cos(sim::applied_field_angle_theta*M_PI/180.0);
sim::H_vec[1]=sin(sim::applied_field_angle_phi*M_PI/180.0)*sin(sim::applied_field_angle_theta*M_PI/180.0);
sim::H_vec[2]=cos(sim::applied_field_angle_phi*M_PI/180.0);
}
// Check for valid particle array offsets
if(cs::particle_array_offset_x >= cs::system_dimensions[0]){
terminaltextcolor(RED);
std::cerr << "Warning: requested particle-array-offset-x is greater than system dimensions." << std::endl;
std::cerr << "Info: This will probably lead to no particles being created and generate an error." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Warning: requested particle-array-offset-x is greater than system dimensions." << std::endl;
zlog << zTs() << "Info: This will probably lead to no particles being created and generate an error." << std::endl;
}
if(cs::particle_array_offset_y >= cs::system_dimensions[1]){
terminaltextcolor(RED);
std::cerr << "Warning: requested particle-array-offset-y is greater than system dimensions." << std::endl;
std::cerr << "Info: This will probably lead to no particles being created and generate an error." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Warning: requested particle-array-offset-y is greater than system dimensions." << std::endl;
zlog << zTs() << "Info: This will probably lead to no particles being created and generate an error." << std::endl;
}
check_hysteresis_loop_parameters();
// Ensure H vector is unit length
// **RE edit 21.11.12 - no longer necessary as value checked on user input**
//double mod_H=1.0/sqrt(sim::H_vec[0]*sim::H_vec[0]+sim::H_vec[1]*sim::H_vec[1]+sim::H_vec[2]*sim::H_vec[2]);
//sim::H_vec[0]*=mod_H;
//sim::H_vec[1]*=mod_H;
//sim::H_vec[2]*=mod_H;
// Calculate moment, magnetisation, and anisotropy constants
/*for(int mat=0;mat<mp::num_materials;mat++){
double V=cs::unit_cell_size[0]*cs::unit_cell_size[1]*cs::unit_cell_size[2];
// Set magnetisation from mu_s and a
if(material[mat].moment_flag==true){
//material[mat].magnetisation=num_atoms_per_unit_cell*material[mat].mu_s_SI/V;
}
// Set mu_s from magnetisation and a
else {
//material[mat].mu_s_SI=material[mat].magnetisation*V/num_atoms_per_unit_cell;
}
// Set K as energy/atom
if(material[mat].anis_flag==false){
material[mat].Ku1_SI=material[mat].Ku1_SI*V/num_atoms_per_unit_cell;
std::cout << "setting " << material[mat].Ku1_SI << std::endl;
}
}*/
const string blank="";
// Check for symmetry of exchange matrix
for(int mi = 0; mi < mp::num_materials; mi++){
for(int mj = 0; mj < mp::num_materials; mj++){
// Check for non-zero value (avoids divide by zero)
if(fabs(material[mi].Jij_matrix_SI[mj]) > 0.0){
// Calculate ratio of i->j / j-> exchange constants
double ratio = material[mj].Jij_matrix_SI[mi]/material[mi].Jij_matrix_SI[mj];
// Check that ratio ~ 1.0 for symmetric exchange interactions
if( (ratio < 0.99999) || (ratio > 1.00001) ){
// Error found - report to user and terminate program
terminaltextcolor(RED);
std::cerr << "Error! Non-symmetric exchange interactions for materials " << mi+1 << " and " << mj+1 << ". Exiting" << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Error! Non-symmetric exchange interactions for materials " << mi+1 << " and " << mj+1 << std::endl;
zlog << zTs() << "\tmaterial[" << mi+1 << "]:exchange-matrix[" << mj+1 << "] = " << material[mi].Jij_matrix_SI[mj] << std::endl;
zlog << zTs() << "\tmaterial[" << mj+1 << "]:exchange-matrix[" << mi+1 << "] = " << material[mj].Jij_matrix_SI[mi] << std::endl;
zlog << zTs() << "\tThe definition of Heisenberg exchange requires that these values are the same. Exiting." << std::endl;
err::vexit();
}
}
}
}
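	// Worked example (comment only): material[1]:exchange-matrix[2] = 6.0e-21 J
	// requires material[2]:exchange-matrix[1] = 6.0e-21 J as well; any asymmetry
	// larger than about one part in 1e5 of the i->j value fails the ratio test above.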
// Set derived material parameters
for(int mat=0;mat<mp::num_materials;mat++){
mp::material[mat].one_oneplusalpha_sq = -mp::material[mat].gamma_rel/(1.0+mp::material[mat].alpha*mp::material[mat].alpha);
mp::material[mat].alpha_oneplusalpha_sq = mp::material[mat].alpha*mp::material[mat].one_oneplusalpha_sq;
for(int j=0;j<mp::num_materials;j++){
material[mat].Jij_matrix[j] = mp::material[mat].Jij_matrix_SI[j]/mp::material[mat].mu_s_SI;
}
mp::material[mat].Ku = mp::material[mat].Ku1_SI/mp::material[mat].mu_s_SI;
mp::material[mat].Ku2 = mp::material[mat].Ku2_SI/mp::material[mat].mu_s_SI;
mp::material[mat].Ku3 = mp::material[mat].Ku3_SI/mp::material[mat].mu_s_SI;
mp::material[mat].Klatt = mp::material[mat].Klatt_SI/mp::material[mat].mu_s_SI;
mp::material[mat].Kc = mp::material[mat].Kc1_SI/mp::material[mat].mu_s_SI;
mp::material[mat].Ks = mp::material[mat].Ks_SI/mp::material[mat].mu_s_SI;
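		// Thermal (Langevin) field width per spin: sigma = sqrt(2*alpha*k_B/(mu_s*gamma*dt)),
		// with k_B = 1.3806503e-23 J/K and dt the reduced integration timestep set above.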
mp::material[mat].H_th_sigma = sqrt(2.0*mp::material[mat].alpha*1.3806503e-23/
(mp::material[mat].mu_s_SI*mp::material[mat].gamma_rel*dt));
// Rename un-named materials with material id
std::string defname="material#n";
if(mp::material[mat].name==defname){
std::stringstream newname;
newname << "material" << mat+1;
mp::material[mat].name=newname.str();
}
// initialise lattice anisotropy initialisation
if(sim::lattice_anisotropy_flag==true) mp::material[mat].lattice_anisotropy.set_interpolation_table();
// output interpolated data to file
//mp::material[mat].lattice_anisotropy.output_interpolated_function(mat);
}
// Check for which anisotropy function(s) are to be used
if(sim::TensorAnisotropy==true){
sim::UniaxialScalarAnisotropy=false; // turn off scalar anisotropy calculation
// loop over materials and convert all scalar anisotropy to tensor (along z)
for(int mat=0;mat<mp::num_materials; mat++){
const double one_o_mu=1.0/mp::material[mat].mu_s_SI;
// If tensor is unset
if(mp::material.at(mat).KuVec_SI.size()==0){
const double ex = mp::material.at(mat).UniaxialAnisotropyUnitVector.at(0);
const double ey = mp::material.at(mat).UniaxialAnisotropyUnitVector.at(1);
const double ez = mp::material.at(mat).UniaxialAnisotropyUnitVector.at(2);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ex*ex);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ex*ey);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ex*ez);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ey*ex);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ey*ey);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ey*ez);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ez*ex);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ez*ey);
mp::material.at(mat).KuVec.push_back(mp::material[mat].Ku*ez*ez);
}
else if(mp::material.at(mat).KuVec_SI.size()==9){
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(0)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(1)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(2)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(3)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(4)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(5)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(6)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(7)*one_o_mu);
mp::material.at(mat).KuVec.push_back(mp::material.at(mat).KuVec_SI.at(8)*one_o_mu);
}
}
}
// Unroll anisotropy values for speed
if(sim::UniaxialScalarAnisotropy==true){
zlog << zTs() << "Setting scalar uniaxial anisotropy." << std::endl;
// Set global anisotropy type
sim::AnisotropyType=0;
MaterialScalarAnisotropyArray.resize(mp::num_materials);
for(int mat=0;mat<mp::num_materials; mat++) MaterialScalarAnisotropyArray[mat].K=mp::material[mat].Ku;
}
else if(sim::TensorAnisotropy==true){
zlog << zTs() << "Setting tensor uniaxial anisotropy." << std::endl;
// Set global anisotropy type
sim::AnisotropyType=1;
MaterialTensorAnisotropyArray.resize(mp::num_materials);
for(int mat=0;mat<mp::num_materials; mat++){
MaterialTensorAnisotropyArray[mat].K[0][0]=mp::material.at(mat).KuVec.at(0);
MaterialTensorAnisotropyArray[mat].K[0][1]=mp::material.at(mat).KuVec.at(1);
MaterialTensorAnisotropyArray[mat].K[0][2]=mp::material.at(mat).KuVec.at(2);
MaterialTensorAnisotropyArray[mat].K[1][0]=mp::material.at(mat).KuVec.at(3);
MaterialTensorAnisotropyArray[mat].K[1][1]=mp::material.at(mat).KuVec.at(4);
MaterialTensorAnisotropyArray[mat].K[1][2]=mp::material.at(mat).KuVec.at(5);
MaterialTensorAnisotropyArray[mat].K[2][0]=mp::material.at(mat).KuVec.at(6);
MaterialTensorAnisotropyArray[mat].K[2][1]=mp::material.at(mat).KuVec.at(7);
MaterialTensorAnisotropyArray[mat].K[2][2]=mp::material.at(mat).KuVec.at(8);
}
}
// Unroll second order uniaxial anisotropy values for speed
if(sim::second_order_uniaxial_anisotropy==true){
zlog << zTs() << "Setting scalar second order uniaxial anisotropy." << std::endl;
mp::material_second_order_anisotropy_constant_array.resize(mp::num_materials);
for(int mat=0;mat<mp::num_materials; mat++) mp::material_second_order_anisotropy_constant_array.at(mat)=mp::material[mat].Ku2;
}
// Unroll sixth order uniaxial anisotropy values for speed
	if(sim::sixth_order_uniaxial_anisotropy==true){ // assumed flag name, mirroring sim::second_order_uniaxial_anisotropy above
zlog << zTs() << "Setting scalar sixth order uniaxial anisotropy." << std::endl;
mp::material_sixth_order_anisotropy_constant_array.resize(mp::num_materials);
for(int mat=0;mat<mp::num_materials; mat++) mp::material_sixth_order_anisotropy_constant_array.at(mat)=mp::material[mat].Ku3;
}
// Unroll spherical harmonic anisotropy constants for speed
if(sim::spherical_harmonics==true){
zlog << zTs() << "Setting spherical harmonics for uniaxial anisotropy" << std::endl;
mp::material_spherical_harmonic_constants_array.resize(3*mp::num_materials);
for(int mat=0; mat<mp::num_materials; mat++){
mp::material_spherical_harmonic_constants_array.at(3*mat+0)=mp::material[mat].sh2/mp::material[mat].mu_s_SI;
mp::material_spherical_harmonic_constants_array.at(3*mat+1)=mp::material[mat].sh4/mp::material[mat].mu_s_SI;
mp::material_spherical_harmonic_constants_array.at(3*mat+2)=mp::material[mat].sh6/mp::material[mat].mu_s_SI;
}
}
// Unroll cubic anisotropy values for speed
if(sim::CubicScalarAnisotropy==true){
zlog << zTs() << "Setting scalar cubic anisotropy." << std::endl;
MaterialCubicAnisotropyArray.resize(mp::num_materials);
for(int mat=0;mat<mp::num_materials; mat++) MaterialCubicAnisotropyArray.at(mat)=mp::material[mat].Kc;
}
// Loop over materials to check for invalid input and warn appropriately
for(int mat=0;mat<mp::num_materials;mat++){
const double lmin=material[mat].min;
const double lmax=material[mat].max;
for(int nmat=0;nmat<mp::num_materials;nmat++){
if(nmat!=mat){
double min=material[nmat].min;
double max=material[nmat].max;
if(((lmin>min) && (lmin<max)) || ((lmax>min) && (lmax<max))){
terminaltextcolor(RED);
std::cerr << "Warning: Overlapping material heights found. Check log for details." << std::endl;
terminaltextcolor(WHITE);
zlog << zTs() << "Warning: material " << mat+1 << " overlaps material " << nmat+1 << "." << std::endl;
zlog << zTs() << "If you have defined geometry then this may be OK, or possibly you meant to specify alloy keyword instead." << std::endl;
zlog << zTs() << "----------------------------------------------------" << std::endl;
zlog << zTs() << " Material "<< mat+1 << ":minimum-height = " << lmin << std::endl;
zlog << zTs() << " Material "<< mat+1 << ":maximum-height = " << lmax << std::endl;
zlog << zTs() << " Material "<< nmat+1 << ":minimum-height = " << min << std::endl;
zlog << zTs() << " Material "<< nmat+1 << ":maximum-height = " << max << std::endl;
}
}
}
}
return EXIT_SUCCESS;
}
} // end of namespace mp<|fim▁end|> | zlog << zTs() << "\t sim:applied-field-strength-increment = " << inc << std::endl;
zlog << zTs() << "Minimum and maximum fields are both positive, but maximum > minimum with a negative increment, causing an infinite loop. Exiting." << std::endl;
err::vexit(); |
<|file_name|>mbcsgroupprober.js<|end_file_name|><|fim▁begin|>/*
* The Original Code is Mozilla Universal charset detector code.
*
* The Initial Developer of the Original Code is<|fim▁hole|> * Contributor(s):
* António Afonso (antonio.afonso gmail.com) - port to JavaScript
* Mark Pilgrim - port to Python
* Shy Shalom - original C code
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*/
!function(jschardet) {
jschardet.MBCSGroupProber = function() {
jschardet.CharSetGroupProber.apply(this);
this._mProbers = [
new jschardet.UTF8Prober(),
new jschardet.SJISProber(),
new jschardet.EUCJPProber(),
new jschardet.GB2312Prober(),
new jschardet.EUCKRProber(),
new jschardet.Big5Prober(),
new jschardet.EUCTWProber()
];
this.reset();
}
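    // Behaviour note: everything beyond the prober list is inherited from
    // CharSetGroupProber, which feeds the same byte stream to every sub-prober
    // and reports the candidate encoding with the highest confidence.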
jschardet.MBCSGroupProber.prototype = new jschardet.CharSetGroupProber();
}((typeof process !== 'undefined' && typeof process.title !== 'undefined') ? module.parent.exports : jschardet);<|fim▁end|> | * Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 2001
* the Initial Developer. All Rights Reserved.
* |
<|file_name|>tmpfs_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package provider_test
import (
"github.com/juju/errors"
"github.com/juju/names"
jc "github.com/juju/testing/checkers"
gc "gopkg.in/check.v1"
"github.com/juju/juju/storage"
"github.com/juju/juju/storage/provider"
"github.com/juju/juju/testing"
)
var _ = gc.Suite(&tmpfsSuite{})
type tmpfsSuite struct {
testing.BaseSuite
storageDir string
commands *mockRunCommand
}
func (s *tmpfsSuite) SetUpTest(c *gc.C) {
s.BaseSuite.SetUpTest(c)
s.storageDir = c.MkDir()
}
func (s *tmpfsSuite) TearDownTest(c *gc.C) {
s.commands.assertDrained()
s.BaseSuite.TearDownTest(c)
}
func (s *tmpfsSuite) tmpfsProvider(c *gc.C) storage.Provider {
s.commands = &mockRunCommand{c: c}
return provider.TmpfsProvider(s.commands.run)
}
func (s *tmpfsSuite) TestFilesystemSource(c *gc.C) {
p := s.tmpfsProvider(c)
cfg, err := storage.NewConfig("name", provider.TmpfsProviderType, map[string]interface{}{})
c.Assert(err, jc.ErrorIsNil)
_, err = p.FilesystemSource(nil, cfg)
c.Assert(err, gc.ErrorMatches, "storage directory not specified")
cfg, err = storage.NewConfig("name", provider.TmpfsProviderType, map[string]interface{}{
"storage-dir": c.MkDir(),
})
c.Assert(err, jc.ErrorIsNil)
_, err = p.FilesystemSource(nil, cfg)
c.Assert(err, jc.ErrorIsNil)
}
func (s *tmpfsSuite) TestValidateConfig(c *gc.C) {
p := s.tmpfsProvider(c)
cfg, err := storage.NewConfig("name", provider.TmpfsProviderType, map[string]interface{}{})
c.Assert(err, jc.ErrorIsNil)
err = p.ValidateConfig(cfg)
// The tmpfs provider does not have any user
// configuration, so an empty map will pass.
c.Assert(err, jc.ErrorIsNil)
}
func (s *tmpfsSuite) TestSupports(c *gc.C) {
p := s.tmpfsProvider(c)
c.Assert(p.Supports(storage.StorageKindBlock), jc.IsFalse)
c.Assert(p.Supports(storage.StorageKindFilesystem), jc.IsTrue)
}
func (s *tmpfsSuite) tmpfsFilesystemSource(c *gc.C) storage.FilesystemSource {
s.commands = &mockRunCommand{c: c}
return provider.TmpfsFilesystemSource(
s.storageDir,
s.commands.run,
)
}
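// The tests below drive the filesystem source against a scripted command
// runner: each expect(...) call queues the exact argv the provider is expected
// to execute and respond(...) supplies the stdout or error it will observe.
// A 2 MiB create, for example, must issue
// "mount -t tmpfs none <path> -o size=2097152" and then "df --output=size <path>".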
func (s *tmpfsSuite) TestCreateFilesystems(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
cmd := s.commands.expect("mount", "-t", "tmpfs", "none", "/mnt/bar", "-o", "size=2097152")
cmd.respond("", nil)
cmd = s.commands.expect("df", "--output=size", "/mnt/bar")
cmd.respond("1K-blocks\n2048", nil)
filesystems, filesystemAttachments, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 2,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id",
},
Path: "/mnt/bar",
},
}})
c.Assert(err, jc.ErrorIsNil)
mountedDirs := provider.MountedDirs(source)
c.Assert(mountedDirs.Size(), gc.Equals, 1)
c.Assert(mountedDirs.Contains("/mnt/bar"), jc.IsTrue)
c.Assert(filesystems, gc.HasLen, 1)
c.Assert(filesystemAttachments, gc.HasLen, 1)
c.Assert(filesystems[0], gc.Equals, storage.Filesystem{
Tag: names.NewFilesystemTag("6"),
Size: 2,
})
c.Assert(filesystemAttachments[0], gc.Equals, storage.FilesystemAttachment{
Path: "/mnt/bar",
Filesystem: names.NewFilesystemTag("6"),
Machine: names.NewMachineTag("1"),<|fim▁hole|>}
func (s *tmpfsSuite) TestCreateFilesystemsIsUse(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{
{
Tag: names.NewFilesystemTag("6"),
Size: 1,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id1",
},
Path: "/mnt/notempty",
},
}, {
Tag: names.NewFilesystemTag("6"),
Size: 2,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("2"),
InstanceId: "instance-id2",
},
Path: "/mnt/notempty",
},
}})
c.Assert(err, gc.ErrorMatches, ".* path must be empty")
}
func (s *tmpfsSuite) TestCreateFilesystemsPathNotDir(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 2,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id",
},
Path: "file",
},
}})
c.Assert(err, gc.ErrorMatches, `.* path "file" must be a directory`)
}
func (s *tmpfsSuite) TestCreateFilesystemsNotEnoughSpace(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
cmd := s.commands.expect("mount", "-t", "tmpfs", "none", "/var/lib/juju/storage/fs/foo", "-o", "size=4194304")
cmd.respond("", nil)
cmd = s.commands.expect("df", "--output=size", "/var/lib/juju/storage/fs/foo")
cmd.respond("1K-blocks\n2048", nil)
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 4,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id",
},
Path: "/var/lib/juju/storage/fs/foo",
},
}})
c.Assert(err, gc.ErrorMatches, ".* filesystem is not big enough \\(2M < 4M\\)")
}
func (s *tmpfsSuite) TestCreateFilesystemsInvalidPath(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
cmd := s.commands.expect("mount", "-t", "tmpfs", "none", "/mnt/bad-dir", "-o", "size=2097152")
cmd.respond("", nil)
cmd = s.commands.expect("df", "--output=size", "/mnt/bad-dir")
cmd.respond("", errors.New("error creating directory"))
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 2,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id",
},
Path: "/mnt/bad-dir",
},
}})
c.Assert(err, gc.ErrorMatches, ".* error creating directory")
}
func (s *tmpfsSuite) TestCreateFilesystemsNoAttachment(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 2,
}})
c.Assert(err, gc.ErrorMatches, ".* creating filesystem without machine attachment not supported")
}
func (s *tmpfsSuite) TestCreateFilesystemsNoPathSpecified(c *gc.C) {
source := s.tmpfsFilesystemSource(c)
_, _, err := source.CreateFilesystems([]storage.FilesystemParams{{
Tag: names.NewFilesystemTag("6"),
Size: 2,
Attachment: &storage.FilesystemAttachmentParams{
AttachmentParams: storage.AttachmentParams{
Machine: names.NewMachineTag("1"),
InstanceId: "instance-id",
},
},
}})
c.Assert(err, gc.ErrorMatches, ".* cannot create a filesystem mount without specifying a path")
}<|fim▁end|> | }) |
<|file_name|>update_roles.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Django appschema released under the MIT license.
# See the LICENSE for more information.
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.contenttypes.models import ContentType
from django.utils.importlib import import_module
import yaml
import os.path
from scholrroles.models import Role, Permission
class Command(BaseCommand):
    help = "Synchronise roles and permissions from each installed app's roles.yml and permissions.yml files"
option_list = BaseCommand.option_list + (
make_option('--file', action='store', dest='role_file',
default=None, help='Select a file with roles. '
'Defaults to the settings.ROLE_FILE.'),
)
def handle(self, *args, **options):
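        # Two passes: first sync Role objects from every installed app's roles.yml, then sync Permission objects from every app's permissions.yml.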
files, roles = self.get_roles_files(), []
for role_file in files:
stream = open(role_file, 'r')
data = yaml.load(stream)
roles.extend(data['roles'])
self.update_roles(roles)
files, perms = self.get_permission_files(), []
for perm_file in files:
stream = open(perm_file, 'r')
data = yaml.load(stream)
perms.extend(data['perms'])
self.update_perms(perms)
def get_roles_files(self):
files = []
for app in settings.INSTALLED_APPS:
module = import_module(app)
pth = os.path.abspath(module.__path__[0])
if os.path.isfile(pth + '/roles.yml'):
files.append(pth + '/roles.yml')
return files
def get_permission_files(self):
files = []
for app in settings.INSTALLED_APPS:
module = import_module(app)
pth = os.path.abspath(module.__path__[0])
if os.path.isfile(pth + '/permissions.yml'):
files.append(pth + '/permissions.yml')
return files
def update_roles(self, roles):
existing_roles = Role.objects.all().values_list('name', flat=True)
print """
--------------------
Create Roles
--------------------
"""
for role in roles:
if role not in existing_roles:
print role
Role.objects.create(name = role)
to_delete = [x for x in existing_roles if x not in roles]
if to_delete:
print """
--------------------
Delete Roles
--------------------
"""
for role in to_delete:
print role
Role.objects.filter(name__in = to_delete).delete()
def update_perms(self, perms):
existing_perms = Permission.objects.all()
dont_delete = []
for perm in perms:
existing_perm = existing_perms.filter(content_type=ContentType.objects.get_by_natural_key(perm['app_label'], perm['model']),
name = perm['name'], instance_perm = perm['instance_perm'])
if existing_perm:
self.update_perm_roles(perm, existing_perm[0])
dont_delete.append(existing_perm[0].pk)
else:
existing_perm = Permission.objects.create(content_type=ContentType.objects.get_by_natural_key(perm['app_label'], perm['model']),
name = perm['name'], instance_perm = perm['instance_perm'])
dont_delete.append(existing_perm.pk)
                print u"    Created Permission: {}".format(existing_perm)
self.update_perm_roles(perm, existing_perm)
to_delete = Permission.objects.exclude(pk__in=dont_delete)
if to_delete:
print """
--------------------
Delete Permissions
--------------------
"""
for perm in to_delete:
print perm
to_delete.delete()
<|fim▁hole|> print " Adding roles to: {}".format(existing_perm)
for role in perm['roles']:
if not existing_perm.roles.filter(name=role).exists():
print " Adding role: {}".format(role)
existing_perm.roles.add(Role.objects.get(name=role))
to_delete = existing_perm.roles.exclude(name__in = perm['roles'])
for role in to_delete:
print u" Deleting role from: {}, {}".format(existing_perm,role)
existing_perm.roles.remove(role)<|fim▁end|> | def update_perm_roles(self, perm, existing_perm):
if existing_perm.roles.filter(name__in=perm['roles']).count() < len(perm['roles']): |
<|file_name|>treatment_prescription.js<|end_file_name|><|fim▁begin|>var mongoose = require('mongoose');
var TreatmentPrescription = mongoose.model('TreatmentPrescription', require('../models/TreatmentPrescription').TreatmentPrescriptionSchema);
function list(response, params){
TreatmentPrescription.find(params)
.sort('created_at')
.exec(function(error, prescriptions){
if(error){
console.log(error);
prescriptions = [];
}
response.json(prescriptions);
});
}
function create_from_body(request){
return {
doctor: request.user._id,
treatment: request.param('treatment'),
items: request.body.items
};
}
exports.list = function(request, response){
var params = { treatment: request.param('treatment') };
list(response, params);
};
exports.single = function(request, response){
if(request.param('prescription') === 'new')
{
response.json(new TreatmentPrescription({ treatment: request.param('treatment') }));
return;
}
var params = { _id: request.param('prescription'), treatment: request.param('treatment') };
TreatmentPrescription.findOne(params, function(error, prescription){
if(error){
console.log(error);
prescription = null;
}
response.json(prescription);
});
};
exports.create = function(request, response){
var values = create_from_body(request);
var treatment = new TreatmentPrescription(values);
treatment.save(function(error, document){
if(error || !document){
response.json({ error: error });
} else {
response.json(document);
}
});
};<|fim▁hole|>exports.edit = function(request, response){
var values = create_from_body(request);
TreatmentPrescription.findByIdAndUpdate(request.body._id, values, function(error, document){
if(error){
response.json({ error: error });
return;
}
response.json(document);
});
};
exports.remove = function(request, response){
if(request.user.type === 'doctor'){
TreatmentPrescription.findByIdAndRemove(request.param('prescription'), function(error){
if(error){
console.log(error);
response.send(400);
return;
}
response.send(200)
});
} else {
response.send(200)
}
};<|fim▁end|> | |
<|file_name|>store_runtime.go<|end_file_name|><|fim▁begin|>package session
import (
"container/list"
"sync"
"time"
)
// RuntimeStore implements the session store provider interface, keeping all state in memory
type RuntimeStore struct {
lock *sync.RWMutex // locker
sessions map[string]*list.Element // map in memory
list *list.List // for gc
maxlifetime int64
}
func NewRuntimeStore(config *StoreConfig) *RuntimeStore {
return &RuntimeStore{
lock: new(sync.RWMutex),
sessions: make(map[string]*list.Element),
list: new(list.List),
maxlifetime: config.Maxlifetime,
}
}
// SessionRead get session state by sessionId
func (store *RuntimeStore) SessionRead(sessionId string) (*SessionState, error) {
store.lock.RLock()
if element, ok := store.sessions[sessionId]; ok {
go store.SessionAccess(sessionId)
store.lock.RUnlock()
return element.Value.(*SessionState), nil
}
store.lock.RUnlock()
//if sessionId of state not exist, create a new state
state := NewSessionState(store, sessionId, make(map[interface{}]interface{}))
store.lock.Lock()
element := store.list.PushFront(state)
store.sessions[sessionId] = element
store.lock.Unlock()
return state, nil
}
// SessionExist check session state exist by sessionId
func (store *RuntimeStore) SessionExist(sessionId string) bool {
store.lock.RLock()
defer store.lock.RUnlock()
if _, ok := store.sessions[sessionId]; ok {
return true
}
return false
}
//SessionUpdate update session state in store
func (store *RuntimeStore) SessionUpdate(state *SessionState) error {
store.lock.RLock()
if element, ok := store.sessions[state.sessionId]; ok { //state has exist
go store.SessionAccess(state.sessionId)
store.lock.RUnlock()
element.Value.(*SessionState).values = state.values //only assist update whole session state
return nil
}
store.lock.RUnlock()
//if sessionId of state not exist, create a new state
new_state := NewSessionState(store, state.sessionId, state.values)
store.lock.Lock()
new_element := store.list.PushFront(new_state)
store.sessions[state.sessionId] = new_element
store.lock.Unlock()
return nil
}
// SessionRemove delete session state in store
func (store *RuntimeStore) SessionRemove(sessionId string) error {
store.lock.Lock()
defer store.lock.Unlock()
if element, ok := store.sessions[sessionId]; ok {
delete(store.sessions, sessionId)
store.list.Remove(element)
return nil
}
return nil
}
// SessionGC clean expired session stores in memory session
func (store *RuntimeStore) SessionGC() int {
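	// Entries are kept in access order (SessionAccess moves them to the front), so the scan from the back can stop at the first session that has not yet expired.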
num := 0
store.lock.RLock()
for {
element := store.list.Back()
if element == nil {
break
}
if (element.Value.(*SessionState).timeAccessed.Unix() + store.maxlifetime) < time.Now().Unix() {
store.lock.RUnlock()
store.lock.Lock()
store.list.Remove(element)
delete(store.sessions, element.Value.(*SessionState).SessionID())
num += 1
store.lock.Unlock()
store.lock.RLock()
} else {
break
}
}
store.lock.RUnlock()
return num
}
// SessionCount returns the number of sessions currently held in memory
func (store *RuntimeStore) SessionCount() int {
return store.list.Len()<|fim▁hole|>}
// SessionAccess expand time of session store by id in memory session
func (store *RuntimeStore) SessionAccess(sessionId string) error {
store.lock.Lock()
defer store.lock.Unlock()
if element, ok := store.sessions[sessionId]; ok {
element.Value.(*SessionState).timeAccessed = time.Now()
store.list.MoveToFront(element)
return nil
}
return nil
}<|fim▁end|> | |
<|file_name|>dynamic.py<|end_file_name|><|fim▁begin|>'''
@Summary: Utility Methods for dynamic actions during program operation.
@Author: devopsec
'''
import os, sys, socket, inspect
def getWorkingDirs():
''' Returns project dir, parent dir, and current dir of calling script as dict'''
    project_dir = parent_dir = current_dir = None
if os.path.exists(os.path.dirname(__file__)):
current_dir = os.path.abspath(os.path.dirname(__file__))
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
else:
current_dir = os.path.abspath(os.getcwd())
parent_dir = os.path.abspath(os.path.join(os.getcwd(), '..'))
check = os.path.exists(parent_dir)
while (check != False):
project_dir = os.getcwd()
os.chdir("..")
check = os.path.exists(os.path.join(os.getcwd(), '..'))
return {
'project_dir' : project_dir,
'parent_dir' : parent_dir,
'current_dir' : current_dir
}
def setProjectPath(dir=None):
''' Sets project path to current calling script location or provided dir and move to it '''
if not dir == None and os.path.exists(dir):
proj_path = os.path.abspath(dir)
os.chdir(proj_path)
sys.path.append(proj_path)
print("Project path set to: " + proj_path)
else:
if os.path.exists(os.path.dirname(__file__)):
proj_path = os.path.abspath(os.path.dirname(__file__))
os.chdir(proj_path)
sys.path.append(proj_path)
else:
proj_path = os.path.abspath(os.getcwd())
os.chdir(proj_path)
sys.path.append(proj_path)
def get_current_ip():
''' Returns current ip of system '''
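    # Connecting a UDP socket to a public address sends no packets; it only makes the OS pick the outgoing interface, whose address is then read back.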
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
def get_hostname():
''' Returns hostname of current host '''
if socket.gethostname().find('.') >= 0:
return socket.gethostname()
else:
return socket.gethostbyaddr(socket.gethostname())[0]
def script_info():
'''
    Returns a dictionary with information about the running top level Python script
---------------------------------------------------------------------------
dir: directory containing script or compiled executable
name: name of script or executable<|fim▁hole|> source: name of source code file
---------------------------------------------------------------------------
"name" and "source" are identical if and only if running interpreted code.
When running code compiled by py2exe or cx_freeze, "source" contains
the name of the originating Python script.
If compiled by PyInstaller, "source" contains no meaningful information.
'''
#---------------------------------------------------------------------------#
# scan through call stack for caller information #
#---------------------------------------------------------------------------#
for teil in inspect.stack():
# skip system calls
if teil[1].startswith("<"):
continue
if teil[1].upper().startswith(sys.exec_prefix.upper()):
continue
trc = teil[1]
# trc contains highest level calling script name, check if we have been compiled
if getattr(sys, 'frozen', False):
scriptdir, scriptname = os.path.split(sys.executable)
return {
"dir" : scriptdir,
"name" : scriptname,
"source" : trc
}
# from here on, we are in the interpreted case
scriptdir, trc = os.path.split(trc)
# if trc did not contain directory information,
# the current working directory is what we need
if not scriptdir:
scriptdir = os.getcwd()
scr_dict = {
"name" : trc,
"source" : trc,
"dir" : scriptdir
}
return scr_dict<|fim▁end|> | |
<|file_name|>TwissTrain9.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 20 13:37:16 2017
Author: Peiyong Jiang : [email protected]
Function:
    Rotation to achieve the transformation.
"""
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
plt.close('all')
emitX=12
alphaX=-10.
betaX=13.
gammaX=(1.+alphaX**2)/betaX
sigmaX=np.array([[betaX,-alphaX],[-alphaX,gammaX]])*emitX;
numPart=np.int32(1e5);
X=np.random.multivariate_normal([0.,0.],sigmaX,numPart).T
plt.figure(1)
plt.plot(X[0,:],X[1,:],'.')
##
w=tf.Variable(tf.random_normal([1,1]))
w1=tf.cos(w)<|fim▁hole|>
P_Row_1=tf.concat([w1,-w2],0)
P_Row_2=tf.concat([w2,w1],0)
P=tf.concat([P_Row_1,P_Row_2],1)
xI=tf.placeholder(tf.float32,[2,None])
xO=tf.matmul(P,xI)
xxp=tf.reduce_mean(xO[0]*xO[1])
lossAlpha=xxp**2
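# The loss is the squared <x*x'> correlation of the rotated distribution; driving it to zero finds the rotation angle that uprights the phase-space ellipse (alpha -> 0).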
rateLearn=1e-4
optTotal=tf.train.AdamOptimizer(rateLearn)
trainAlpha=optTotal.minimize(lossAlpha)
sess = tf.InteractiveSession(config=tf.ConfigProto(log_device_placement=True))
sess.run(tf.global_variables_initializer())
sizeBatch=64
for _ in xrange(8000):
startBatch=np.random.randint(0,high=numPart-sizeBatch-1)
xFeed=X[:,startBatch:startBatch+sizeBatch:]
sess.run(trainAlpha,feed_dict={xI:xFeed})
#print(sess.run(LambdaR))
#print('---------------------------')
print(sess.run(lossAlpha,feed_dict={xI:X}),_)
print('_______________________________________________')
zReal=sess.run(xO,feed_dict={xI:X})
plt.figure(2)
plt.plot(zReal[0,:],zReal[1,:],'r.')
plt.axis('equal')
plt.figure(10)
plt.hold
plt.plot(zReal[0,:],zReal[1,:],'r.')
plt.plot(X[0,:],X[1,:],'b.')
#plt.plot(zReal[0,:],zReal[1,:],'r.')
plt.axis('equal')
plt.figure(11)
plt.hold
#plt.plot(zReal[0,:],zReal[1,:],'r.')
plt.plot(X[0,:],X[1,:],'b.')
plt.plot(zReal[0,:],zReal[1,:],'r.')
plt.axis('equal')
zRealCov=np.cov(zReal)
emitXReal=np.sqrt(np.linalg.det(zRealCov))
print(emitXReal)<|fim▁end|> | w2=tf.sin(w) |
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPhilentropy(RPackage):
"""Computes 46 optimized distance and similarity measures for comparing
probability functions (Drost (2018) <doi:10.21105/joss.00765>). These
comparisons between probability functions have their foundations in a broad
range of scientific disciplines from mathematics to ecology. The aim of
this package is to provide a core framework for clustering, classification,
statistical inference, goodness-of-fit, non-parametric statistics,
information theory, and machine learning tasks that are based on comparing
univariate or multivariate probability functions."""
homepage = "https://github.com/HajkD/philentropy"<|fim▁hole|> url = "https://cloud.r-project.org/src/contrib/philentropy_0.4.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/philentropy"
version('0.4.0', sha256='bfd30bf5635aab6a82716299a87d44cf96c7ab7f4ee069843869bcc85c357127')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('r-rcpp', type=('build', 'run'))
depends_on('r-dplyr', type=('build', 'run'))
depends_on('r-kernsmooth', type=('build', 'run'))<|fim▁end|> | |
<|file_name|>LarmorClipping.py<|end_file_name|><|fim▁begin|>#!/sw/bin/python2.7
import sys
sys.path.append("..")
from ucnacore.PyxUtils import *
from math import *
from ucnacore.LinFitter import *
#from UCNAUtils import *
from bisect import bisect
from calib.FieldMapGen import *
def clip_function(y,rho,h,R):
sqd = sqrt(rho**2-y**2)
if sqd==0:
sqd = 1e-10
return h*rho**2/R*atan(y/sqd)+2*sqd/(3*R)*(3*h*y/2+rho**2-y**2)
def survival_fraction(h,rho,R):
d = R-h
if d < -rho:
return 1
if h <= -rho:
return 0
c1 = 0
if d < rho:
sqd = sqrt(rho**2-d**2)
c1 = pi/2*rho**2-d*sqd-rho**2*atan(d/sqd)
return ( c1 + clip_function(min(h,rho),rho,h,R)
- clip_function(max(h-R,-rho),rho,h,R))/(pi*rho**2)
def radial_clip_function(r,rho,h,R):
return r**2*(3*h-2*r)/(6*R**2)
def radial_survival_fraction(h,rho,R):
d = h-R
if d > rho:
return 1
if h <= 0:
return 0
c1 = 0
if d > 0:
c1 = (h-R)**2
return ( c1 + radial_clip_function(min(h,rho),rho,h,R) - radial_clip_function(max(d,0),rho,h,R) )/(rho**2)
class rot3:
def __init__(self,t1,t2,t3,s=1.0):
self.c1,self.s1 = cos(t1),sin(t1)
self.c2,self.s2 = cos(t2),sin(t2)
self.c3,self.s3 = cos(t3),sin(t3)
self.s = s
def __call__(self,(x,y,z)):
x,y = self.c1*x+self.s1*y,self.c1*y-self.s1*x
y,z = self.c2*y+self.s2*z,self.c2*z-self.s2*y
z,x = self.c3*z+self.s3*x,self.c3*x-self.s3*z
return self.s*x,self.s*y,self.s*z
class path3d:
def __init__(self):
self.pts = []
self.sty = []
self.endsty = []
self.breakunder = False
self.nopatch = False
def addpt(self,(x,y,z),s=1):
self.pts.append((x*s,y*s,z*s))
def apply(self,transf):
self.pts = [transf(xyz) for xyz in self.pts]
def finish(self):
self.p = path.path()
self.p.append(path.moveto(self.pts[0][0],self.pts[0][1]))
for g in self.pts[1:]:
self.p.append(path.lineto(g[0],g[1]))
self.patchpts = []
self.underpts = []
def nearestpt(self,(x,y)):
d0 = 1e20
n = None
for i in range(len(self.pts)):
d1 = (self.pts[i][0]-x)**2+(self.pts[i][1]-y)**2
if d1 < d0:
d0 = d1
n = i
return n
def znear(self,(x,y)):
return self.pts[self.nearestpt((x,y))][2]
def znearc(self,c):
x,y = self.p.at(c)
x,y = 100*x.t,100*y.t
return self.znear((x,y))
def addPatch(self,c,z):
self.patchpts.append((c,z))
def drawto(self,cnvs):
cnvs.stroke(self.p,self.sty)
def interleave(p3d1,p3d2):
print "Finding intersection points..."
is1,is2 = p3d1.p.intersect(p3d2.p)
print "determining patch z..."
assert len(is1)==len(is2)
for i in range(len(is1)):
z1 = p3d1.znearc(is1[i])
z2 = p3d2.znearc(is2[i])
if z1>z2:
p3d1.addPatch(is1[i],z1)
p3d2.underpts.append(is2[i])
else:
p3d2.addPatch(is2[i],z2)
p3d1.underpts.append(is1[i])
print "done."
def drawInterleaved(c,ps):
print "Drawing base curves..."
for p in ps:
p.p = p.p.normpath()
if p.breakunder:
splits = []
for s in p.underpts:
splits += [s-p.breakunder*0.5,s+p.breakunder*0.5]
psplit = p.p.split(splits)<|fim▁hole|> print "Preparing patches..."
patches = []
for (pn,p) in enumerate(ps):
if p.nopatch:
continue
p.patchpts.sort()
splits = []
for s in p.patchpts:
splits += [s[0]-0.05,s[0]+0.05]
psplit = p.p.split(splits)
patches += [ (patch[1],pn,psplit[2*n+1]) for n,patch in enumerate(p.patchpts) ]
patches.sort()
print "Patching intersections..."
for p in patches:
c.stroke(p[2],ps[p[1]].sty)
print "Done."
def fieldPath(fmap,z0,z1,c,cmax,npts=50):
pfield = path3d()
for z in unifrange(z0,z1,npts):
Bdens = c/sqrt(fmap(z)+0.0001)
if abs(Bdens) < cmax:
pfield.addpt((0,Bdens,z))
return pfield
def larmor_unif(fT,theta,KE,t):
b = electron_beta(KE)
z = t*b*cos(theta)*3e8 # m
r = 3.3e-6*b*(KE+511)*sin(theta)/fT # m
f = 2.8e10*fT # Hz
return r*cos(2*pi*f*t),r*sin(2*pi*f*t),z
def larmor_step(p,pt2_per_B,fT):
nu = 2.8e10*fT*2*pi # angular frequency, Hz
pt = sqrt(fT*pt2_per_B) # transverse momentum component, keV
if p<=pt:
return 0,nu
pl = sqrt(p**2-pt**2) # longitudinal momentum, keV
vz = pl/sqrt(p*p+511*511)*3e8; # z velocity, m/s
return vz,nu
def larmorPath(fmap,p,pt2_per_B,z0,z1,dt,theta=0):
lpath = path3d()
z = z0
vz = 1
while z0 <= z <= z1 and vz>0:
fT = fmap(z) # magnetic field, T
r = 3.3e-6*sqrt(pt2_per_B/fT) # larmor radius, m
lpath.addpt((r*cos(theta),r*sin(theta),z))
# step to next point
vz,nu = larmor_step(p,pt2_per_B,fmap(z))
theta += nu*dt
z += vz*dt
return lpath
def plot_larmor_trajectory():
fmap = fieldMap()
fmap.addFlat(-1.0,0.01,1.0)
fmap.addFlat(0.015,1.0,0.6)
#fmap.addFlat(-1.0,0.01,0.6)
#fmap.addFlat(0.08,1.0,1.0)
fT = fmap(0)
theta = 1.4
KE = 511.
#rot = rot3(0,0.0,-pi/2-0.2,500)
rot = rot3(0,0.0,-pi/2+0.2,500)
tm = 1e-9
doFinal = True
plarmor = larmorPath(fmap,500,495**2/fmap(0),0,0.02,5e-13,3*pi/4)
plarmor.apply(rot)
#plarmor.sty = [style.linewidth.thick,rgb.red]
plarmor.sty = [style.linewidth.thick]
plarmor.endsty = [deco.earrow()]
plarmor.finish()
x0,y0 = plarmor.p.at(plarmor.p.begin())
fieldlines = []
w = 0.0025
cmagf = canvas.canvas()
for o in unifrange(-w,w,20):
pf = fieldPath(fmap,-0.002,0.022,o,1.02*w)
if len(pf.pts) < 10:
continue
pf.apply(rot)
pf.finish()
pf.breakunder = 0.07
pf.nopatch = True
#pf.sty=[style.linewidth.thin,rgb.blue]
pf.sty=[style.linewidth.thin] # field line color/style
fieldlines.append(pf)
pf.drawto(cmagf)
if doFinal:
interleave(plarmor,pf)
#cmagf.stroke(path.circle(x0,y0,0.07),[deco.filled([rgb.green])])
cmagf.stroke(path.circle(x0,y0,0.07),[deco.filled([rgb.white]),style.linewidth.Thick])
cmagf.writetofile("/Users/michael/Desktop/Bfield.pdf")
c = canvas.canvas()
if doFinal:
drawInterleaved(c,[plarmor,]+fieldlines)
else:
plarmor.drawto(c)
for pf in fieldlines:
pf.drawto(c)
#c.stroke(path.circle(x0,y0,0.07),[deco.filled([rgb.green])])
c.stroke(path.circle(x0,y0,0.07),[deco.filled([rgb.white]),style.linewidth.Thick])
c.writetofile("/Users/michael/Desktop/larmor_spiral.pdf")
def plot_spectrometer_field():
fmap = fieldMap()
fmap.addFlat(-3,-2.8,0.01)
fmap.addFlat(-2.3,-2.1,0.6)
fmap.addFlat(-1.6,1.6,1.0)
fmap.addFlat(2.1,2.3,0.6)
fmap.addFlat(2.8,3,0.01)
rot = rot3(0.0,0.0,-pi/2.,10.)
w = 0.25
cmagf = canvas.canvas()
for o in unifrange(-w,w,20):
pf = fieldPath(fmap,-2.6,2.6,o,w,400)
pf.apply(rot)
#if len(pf.pts) < 10:
# continue
pf.finish()
#pf.sty=[style.linewidth.thin,rgb.blue]
pf.sty=[style.linewidth.thin] # field line color/style
pf.drawto(cmagf)
cmagf.writetofile("/Users/michael/Desktop/Bfield.pdf")
def larmor_clipping_plot():
gSurv=graph.graphxy(width=20,height=10,
x=graph.axis.lin(title="Source offset [mm]"),
y=graph.axis.lin(title="",min=0,max=1),
key = graph.key.key(pos="bl"))
gSurv.texrunner.set(lfs='foils17pt')
rho = 1.5
h0 = 9.5
gdat = [ [h0-h,survival_fraction(h,rho,2*3.3),survival_fraction(h,rho,2*3.3/2)] for h in unifrange(h0-10,h0,100) ]
gdat = [ g+[0.5*(g[2]<=1e-3)+(g[2]>1e-3)*(g[1]/(g[2]+1e-6)),] for g in gdat]
gSurv.plot(graph.data.points(gdat,x=1,y=3,title="500keV line survival"),[graph.style.line([style.linewidth.Thick,rgb.blue])])
gSurv.plot(graph.data.points(gdat,x=1,y=2,title="1MeV line survival"),[graph.style.line([style.linewidth.Thick,rgb.red])])
gSurv.plot(graph.data.points(gdat,x=1,y=4,title="1MeV:500keV survival ratio"),[graph.style.line([style.linewidth.Thick])])
gSurv.writetofile("/Users/michael/Desktop/survival_%g.pdf"%rho)
def radial_clipping_plot():
gSurv=graph.graphxy(width=20,height=10,
x=graph.axis.lin(title="Source spot radius [mm]",min=0,max=9.5),
y=graph.axis.lin(title="",min=0,max=1),
key = graph.key.key(pos="bl"))
gSurv.texrunner.set(lfs='foils17pt')
h = 9.5
gdat = [ [rho,radial_survival_fraction(h,rho,3.3),radial_survival_fraction(h,rho,3.3/2.0)] for rho in unifrange(0.,9.5,200) ]
gdat = [ g+[0.5*(g[2]<=1e-3)+(g[2]>1e-3)*(g[1]/(g[2]+1e-6)),] for g in gdat]
gSurv.plot(graph.data.points(gdat,x=1,y=3,title="500keV line survival"),[graph.style.line([style.linewidth.Thick,rgb.blue])])
gSurv.plot(graph.data.points(gdat,x=1,y=2,title="1MeV line survival"),[graph.style.line([style.linewidth.Thick,rgb.red])])
gSurv.plot(graph.data.points(gdat,x=1,y=4,title="1MeV:500keV survival ratio"),[graph.style.line([style.linewidth.Thick])])
gSurv.writetofile("/Users/michael/Desktop/survival_radial.pdf")
if __name__ == "__main__":
#larmor_clipping_plot()
#radial_clipping_plot()
#plot_larmor_trajectory()
plot_spectrometer_field()<|fim▁end|> | for seg in psplit[0::2]:
c.stroke(seg,p.sty)
else:
c.stroke(p.p,p.sty+p.endsty) |
<|file_name|>dynamo.py<|end_file_name|><|fim▁begin|>import logging
import pendulum
from pynamodb.attributes import (UnicodeAttribute, UTCDateTimeAttribute)
from pynamodb.exceptions import DoesNotExist
from pynamodb.models import Model
from . import BaseLocker, Lock, LockAccessDenied
log = logging.getLogger(__name__)
class DynamoLock(Model):
class Meta:
table_name = "GlobalLocking"
region = "us-east-1"
write_capacity_units = 1
read_capacity_units = 1
key = UnicodeAttribute(hash_key=True)
owner_name = UnicodeAttribute()
expires = UTCDateTimeAttribute()
class DynamoLocker(BaseLocker):
"""<|fim▁hole|> """
def __init__(self, table=None, url=None, region='us-east-1'):
DynamoLock.region = region
# if url:
# log.warning(f"Using DynamoDB url: {url}")
# if table:
# log.warning(f"Using DynamoDB table: {table}")
DynamoLock.create_table()
def acquire_lock(self, key, owner_name: str='unknown', lease_time: int=3600) -> Lock:
try:
old = DynamoLock.get(key)
# If the lock is not yet expired, and you aren't the owner, you can't have it
if (pendulum.now('UTC') < old.expires) and old.owner_name != owner_name:
log.debug(f"Lock {key} denied")
raise LockAccessDenied()
# delete the old lock
old.delete()
except DoesNotExist:
pass
# create the new lock
rec = DynamoLock(
key = key,
expires = pendulum.now('UTC').add(seconds = lease_time),
owner_name = owner_name,
)
rec.save()
log.debug(f"Lock {rec.key} acquired, expires {rec.expires}")
def release():
self.release_lock(key, owner_name)
# return lock
return Lock(release, expires=rec.expires)
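    # Minimal usage sketch (an assumption, not part of this module: it presumes the
    # Lock wrapper imported above simply exposes the release callback it is given,
    # e.g. as lock.release(); key/owner names below are made-up examples):
    #
    #   locker = DynamoLocker(region="us-east-1")
    #   lock = locker.acquire_lock("nightly-report", owner_name="worker-1", lease_time=600)
    #   try:
    #       ...  # critical section
    #   finally:
    #       lock.release()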
def release_lock(self, key, owner_name='unknown'):
try:
lock = DynamoLock.get(key)
if lock.owner_name != owner_name:
log.debug(f"found lock: {lock.key} owned by {lock.owner_name}")
raise LockAccessDenied()
lock.delete()
log.debug(f"Lock {key} released")
# if it doesn't exist, just do nothing
except DoesNotExist:
return<|fim▁end|> | Use DynamoDB for locking. |
<|file_name|>marketData.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import numpy as np
from pandas import read_csv as importDB
import pandas as pd
database = r'\\UBSPROD.MSAD.UBS.NET\UserData\ozsanos\RF\Desktop\Black\stockData.csv'
tickers = ['AAPL','ADBE','ADI','AMD','AXP','BRCM','C','GLD','GOOG','GS','HNZ','HPQ','IBM','MSFT','TXN','XOM']
dateRange = [("2010-01-01","2010-12-31"),("2011-01-01","2011-12-31")]
# dateRange = pd.date_range(startDate, endDate)
'''
Pre-weightings permutations
'''
schemes = []
points = range(0, 11, 1)
for i in points:
for j in points:
for k in points:
z = i + j + k
if z <= 10:
schemes.append((round(i/10.0,1), round(j/10.0,1), round(k/10.0,1), round(1.0 - z/10.0,1)))
schemes = tuple(schemes)
'''
*** Code Body ***
'''
def getData(startDate, endDate, symbolSet):
return importDB(database, usecols = ['Close'] + symbolSet, index_col = 'Close').loc[startDate : endDate]
def simulate(startDate, endDate, symbolSet, weights):
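    # Normalise each price series to its first close, apply the portfolio weights, sum into a portfolio value series,
    # then report [Sharpe ratio scaled by sqrt(days), mean daily return, std of daily returns, cumulative return].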
marketData = getData(startDate, endDate, symbolSet).values
days = len(marketData)
portfolio = np.zeros(days)
returns = portfolio.copy()
for e in range(len(marketData[0])):
marketData[:,e] = weights[e] * marketData[:,e] / marketData[0,e]
portfolio += marketData[:,e]
for e in range(days):
if e > 0: returns[e] = (portfolio[e]/portfolio[e-1]) - 1
meanDailyReturn = np.average(returns)
stdDailyReturn = np.std(returns)
cummDailyReturn = portfolio[-1]
SharpeRatio = (days**0.5) * (meanDailyReturn / stdDailyReturn)
return [round(SharpeRatio,6), round(meanDailyReturn,6), round(stdDailyReturn,6), round(cummDailyReturn,6)]
def optimise(symbolSet, dateFlag):
maxSharpe = 0.0
metrics = []
for e in schemes:
#print e,
s = simulate(dateRange[dateFlag][0], dateRange[dateFlag][1], symbolSet, e)
#print s
if s[0] > maxSharpe:
maxSharpe = s[0]
metrics = [s, e]
print('\n+ - + - +')
print "\nPortfolio:"
print tuple(symbolSet)
print "\nOptimal Weights:"
print metrics[1]
print "\nPerformance Metrics:"
print tuple(metrics[0])
print('\n+ - + - +\n\n\n\n')
'''
Portfolios
'''
'''
# Test 1
optimise(['AAPL', 'GLD', 'GOOG', 'XOM'], True)
# Test 2
optimise(['AXP', 'HPQ', 'IBM', 'HNZ'], False)
'''
# Quiz 1
optimise(['AAPL', 'GOOG', 'IBM', 'MSFT'], True)
# Quiz 2
optimise(['BRCM', 'ADBE', 'AMD', 'ADI'], False)
# Quiz 3
optimise(['BRCM', 'TXN', 'AMD', 'ADI'], True)
# Quiz 4
optimise(['BRCM', 'TXN', 'IBM', 'HNZ'], False)
# Quiz 5
optimise(['C', 'GS', 'IBM', 'HNZ'], False)
'''
# Test 1
is2011 = True
symbolSet = ['AAPL', 'GLD', 'GOOG', 'XOM']
weights = [0.4,0.4,0.0,0.2]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
# Test 2
is2011 = False
symbolSet = ['AXP', 'HPQ', 'IBM', 'HNZ']
weights = [0.0,0.0,0.0,1.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
'''
# Quiz 1
is2011 = True
symbolSet = ['AAPL', 'GOOG', 'IBM', 'MSFT']
weights = [0.5,0.0,0.5,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.2,0.0,0.8,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.2,0.2,0.2,0.4]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.1,0.1,0.8,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
# Quiz 2
is2011 = False
symbolSet = ['BRCM', 'ADBE', 'AMD', 'ADI']
weights = [0.0,0.2,0.8,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.0,0.0,0.0,1.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [1.0,0.0,0.0,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.0,0.0,0.1,0.9]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
# Quiz 3
is2011 = True
symbolSet = ['BRCM', 'TXN', 'AMD', 'ADI']
weights = [0.0,0.0,0.8,0.2]
<|fim▁hole|>weights = [0.0,0.2,0.0,0.8]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.0,0.0,0.1,0.9]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.0,0.0,0.0,1.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
# Quiz 4
is2011 = False
symbolSet = ['BRCM', 'TXN', 'IBM', 'HNZ']
weights = [0.1,0.1,0.6,0.2]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.3,0.0,0.7,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.1,0.1,0.0,0.8]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.4,0.4,0.0,0.2]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')
# Quiz 5
is2011 = False
symbolSet = ['C', 'GS', 'IBM', 'HNZ']
weights = [0.0,0.0,1.0,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.2,0.0,0.0,0.8]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.4,0.6,0.0,0.0]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
weights = [0.2,0.2,0.4,0.2]
print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
print('\n')<|fim▁end|> | print simulate(dateRange[is2011][0], dateRange[is2011][1], symbolSet, weights)
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn hello() -> u8 {
return 0;
}
fn main() {
hello();
println!("Hello, world!");<|fim▁hole|>fn test_hello() {
assert_eq!(0, hello());
}<|fim▁end|> | }
#[test] |
<|file_name|>gh-path.js<|end_file_name|><|fim▁begin|>import {helper} from 'ember-helper';
import {htmlSafe} from 'ember-string';
import ghostPaths from 'ghost-admin/utils/ghost-paths';
// Handlebars Helper {{gh-path}}
// Usage: Assume 'http://www.myghostblog.org/myblog/'
// {{gh-path}} or {{gh-path 'blog'}} for Ghost's root (/myblog/)
// {{gh-path 'admin'}} for Ghost's admin root (/myblog/ghost/)
// {{gh-path 'api'}} for Ghost's api root (/myblog/ghost/api/v0.1/)
// {{gh-path 'admin' '/assets/hi.png'}} for resolved url (/myblog/ghost/assets/hi.png)
export default helper(function (params) {
let paths = ghostPaths();
let [path, url] = params;
let base;
if (!path) {
path = 'blog';
}
if (!/^(blog|admin|api)$/.test(path)) {
url = path;
path = 'blog';
}
<|fim▁hole|> break;
case 'admin':
base = paths.adminRoot;
break;
case 'api':
base = paths.apiRoot;
break;
default:
base = paths.blogRoot;
break;
}
// handle leading and trailing slashes
base = base[base.length - 1] !== '/' ? `${base}/` : base;
if (url && url.length > 0) {
if (url[0] === '/') {
url = url.substr(1);
}
base = base + url;
}
return htmlSafe(base);
});<|fim▁end|> | switch (path.toString()) {
case 'blog':
base = paths.blogRoot; |
<|file_name|>contact.py<|end_file_name|><|fim▁begin|>from sys import maxsize
class Contact:
def __init__(self, Firstname=None, Middlename=None, Lastname=None, Nickname=None, Title=None, Company=None, Address=None, Home=None, Mobile=None, Work=None,
Fax=None, Email=None, Email2=None, Email3=None, Homepage=None, Bday=None, Bmonth=None, Byear=None, Aday=None, Amonth=None, Ayear=None, Address2=None, Phone2=None,
Notes=None, id=None, all_phones_from_home_page=None, all_address_from_home_page=None, all_emails=None):
self.Firstname = Firstname
self.Middlename = Middlename
self.Lastname = Lastname
self.Nickname = Nickname
self.Title = Title
self.Company = Company
self.Address = Address
self.Home = Home
self.Mobile = Mobile
self.Work = Work
self.Fax = Fax
self.Email = Email
self.Email2 = Email2
self.Email3 = Email3
self.Homepage = Homepage
self.Bday = Bday
self.Bmonth = Bmonth
self.Byear = Byear
self.Aday = Aday
self.Amonth = Amonth
self.Ayear = Ayear
self.Address2 = Address2
self.Phone2 = Phone2
self.Notes = Notes
self.id = id
self.all_phones_from_home_page = all_phones_from_home_page
self.all_address_from_home_page = all_address_from_home_page
self.all_emails=all_emails
def __eq__(self, other):
return (self.id is None or other.id is None or self.id == other.id) and self.Firstname == other.Firstname and self.Lastname == other.Lastname
def __repr__(self):
return "%s:%s;%s" % (self.Firstname, self.Lastname, self.Middlename)<|fim▁hole|> def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize<|fim▁end|> | |
<|file_name|>qmodelindex.rs<|end_file_name|><|fim▁begin|>use types::*;
extern "C" {
fn dos_qmodelindex_create() -> DosQModelIndex;
// DOS_API DosQModelIndex *DOS_CALL dos_qmodelindex_create_qmodelindex(DosQModelIndex *index);
// DOS_API void DOS_CALL dos_qmodelindex_delete (DosQModelIndex *vptr);
fn dos_qmodelindex_row(vptr: DosQModelIndex) -> i32;
fn dos_qmodelindex_column(vptr: DosQModelIndex) -> i32;
// DOS_API bool DOS_CALL dos_qmodelindex_isValid(const DosQModelIndex *vptr);
// DOS_API DosQVariant *DOS_CALL dos_qmodelindex_data (const DosQModelIndex *vptr, int role);
// DOS_API DosQModelIndex *DOS_CALL dos_qmodelindex_parent (const DosQModelIndex *vptr);
// DOS_API DosQModelIndex *DOS_CALL dos_qmodelindex_child (const DosQModelIndex *vptr, int row, int column);
// DOS_API DosQModelIndex *DOS_CALL dos_qmodelindex_sibling(const DosQModelIndex *vptr, int row, int column);<|fim▁hole|>pub struct QModelIndex(DosQModelIndex);
pub fn get_model_ptr(o: &QModelIndex) -> DosQModelIndex {
o.0
}
impl QModelIndex {
pub fn new() -> Self {
unsafe { QModelIndex(dos_qmodelindex_create()) }
}
pub fn row(&self) -> i32 {
unsafe { dos_qmodelindex_row(self.0) }
}
pub fn column(&self) -> i32 {
unsafe { dos_qmodelindex_column(self.0) }
}
}
impl From<DosQModelIndex> for QModelIndex {
fn from(i: DosQModelIndex) -> Self {
QModelIndex(i)
}
}<|fim▁end|> | // DOS_API void DOS_CALL dos_qmodelindex_assign (DosQModelIndex *l, const DosQModelIndex *r);
}
|
<|file_name|>inctest_runme.py<|end_file_name|><|fim▁begin|><|fim▁hole|>try:
a = inctest.A()
except:
print "didn't find A"
print "therefore, I didn't include 'testdir/subdir1/hello.i'"
error = 1
pass
try:
b = inctest.B()
except:
print "didn't find B"
print "therefore, I didn't include 'testdir/subdir2/hello.i'"
error = 1
pass
if error == 1:
raise RuntimeError
# Check the import in subdirectory worked
if inctest.importtest1(5) != 15:
print "import test 1 failed"
raise RuntimeError
if inctest.importtest2("black") != "white":
print "import test 2 failed"
raise RuntimeError<|fim▁end|> | import inctest
error = 0 |
<|file_name|>UptoboxCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
import urlparse
from ..internal.misc import json
from ..internal.XFSAccount import XFSAccount
class UptoboxCom(XFSAccount):<|fim▁hole|> __status__ = "testing"
__description__ = """Uptobox.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("benbox69", "[email protected]")]
PLUGIN_DOMAIN = "uptobox.com"
PLUGIN_URL = "http://uptobox.com/"
LOGIN_URL = "https://login.uptobox.com/"
def signin(self, user, password, data):
html = self.load(self.LOGIN_URL, cookies=self.COOKIES)
if re.search(self.LOGIN_SKIP_PATTERN, html):
self.skip_login()
html = self.load(urlparse.urljoin(self.LOGIN_URL, "logarithme"),
post={'op': "login",
'redirect': self.PLUGIN_URL,
'login': user,
'password': password},
cookies=self.COOKIES)
if json.loads(html).get('error'):
self.fail_login()<|fim▁end|> | __name__ = "UptoboxCom"
__type__ = "account"
__version__ = "0.21" |
<|file_name|>issue-3389.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct trie_node {
content: ~[~str],
children: ~[trie_node],
}
fn print_str_vector(vector: ~[~str]) {<|fim▁hole|> println(*string);
}
}
pub fn main() {
let mut node: trie_node = trie_node {
content: ~[],
children: ~[]
};
let v = ~[~"123", ~"abc"];
node.content = ~[~"123", ~"abc"];
print_str_vector(v);
print_str_vector(node.content.clone());
}<|fim▁end|> | for vector.iter().advance |string| { |
<|file_name|>stages.js<|end_file_name|><|fim▁begin|>/*
* The MIT License (MIT)
*
* Copyright (c) 2015 maldicion069
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
'use strict';
exports.init = function(req, res){
//res.write("joder\n");
//res.write(req.csrfToken());
//res.end();
req.app.db.models.Experiment.find({})
    .select("tableList name uploadedBy dateDevelopment id name neuronType") // We also need to pull the name and type here
.exec(function(err, exps) {
if(err) {
res.send("Error");
} else {
console.log(exps);
res.render('account/experiments/compare/stage1', {
code: req.csrfToken(),
experiments: exps
});
}
});
};
/**
 * Given the list of experiment ids selected in stage 1, this loads those
 * experiments and renders stage 2 so one table can be chosen per experiment.
*/
exports.second = function(req, res) {
console.log("BODY: " + req.body.fcbklist_value);
if(req.body.fcbklist_value.length < 2) {
res.redirect("/account/experiments/compare/");
} else {
//res.send(req.body);
var orArray = [];
req.body.fcbklist_value.forEach(function(idExp) {
orArray.push({_id: idExp});
});
console.log(orArray);
req.app.db.models.Experiment.find({$or: orArray})
.select("tableList name createdBy dateDevelopment id")
//.populate("tableList", "id")
.populate("createdBy", "username")
.exec(function(err, exps) {
console.log(err);
console.log(JSON.stringify(exps));
res.render("account/experiments/compare/stage2", {
experiments: exps,
code: req.csrfToken()
});
});
}
//["54514c6a4650426d2a9bf056","5451853c745acc0a412abf68"]
    // With this list of ids, what we do is extract:
    // 1. Each experiment with:
    //    name uploadedBy dateDevelopment id
    //    and the identifier of each of the experiment's tables.
    // 2. With that data, we build something nice with radio buttons.
};
/**
 * In this method we receive the experiment ids
 * and the selected table for each one.
 * For each table we extract the columns they have in common and
 * render all the charts in one go.
*/
exports.third = function(req, res) {
var peticiones = req.body.tab;
console.log(peticiones);
var arr = [];
/*var mapMatch = {
"a": null,
"b": null,
"c": null,
"Datos2": null
}*/
var params = req.app.utility.variables;
var tabs = [];
var headersPaths = [];
var data = {}; // To save the return to view
var Set = req.app.utility.Set;
var commons = new Set();
var async = require('async');
async.series([
// First read data from database and save
function(callback){
console.log("ejecuto 1");
async.each(peticiones, function(tabid, call) {
console.log(tabid);
req.app.db.models.TableExperiment.findById(tabid).populate("assignedTo", "name neuronType createdBy dateDevelopment description").exec(function(err, tab) {
var username = "";
async.series({
first: function(call2) {
console.log("Paso 1");
console.log("Jodeeer, " + tab.assignedTo.createdBy);
/*console.log(tab);
async.each(tab, function(tab_, call3) {
console.log("Buscamos username " + tab_);
req.app.db.models.User.findById(tab_.createdBy._id).select("username").exec(function(err, user) {
console.log("USER : " + user);
tab.createdBy = user.username;
call3();
});
});*/
req.app.db.models.User.findById(tab.assignedTo.createdBy).select("username").exec(function(err, user) {
console.log("USER : " + user);
tab["username"] = user.username;
username = user.username;
//tab.username = user.username;
call2();
});
},
second: function(call2) {
console.log("Paso 2");
// TODO: Controlar "err"
var tab_ = {};
tab_.assignedTo = {
_id: tab.assignedTo._id,
username: username,
createdBy: tab.assignedTo.createdBy,
name: tab.assignedTo.name,
neuronType: tab.assignedTo.neuronType,
dateDevelopment: tab.assignedTo.dateDevelopment,
description: tab.assignedTo.description
};
tab_.id = tab._id;
tab_.matrix = tab.matrix;
tab_.headers = tab.headers;
/*
assignedTo:
{ _id: 545bde715e4a5d4a4089ad21,
createdBy: 545bde535e4a5d4a4089ad1f,
name: 'Mi physiological',
neuronType: 'PHYSIOLOGICAL' },
_id: 545bde715e4a5d4a4089ad52,
__v: 0,
matrix: */
console.log("SECOND : " + tab_);
                            // Add the table to the system
//console.log("Añado " + tab);
tabs.push(tab_);
                            // Iterate over the headers and build a map of k(header) -> v(position)
var mapMatch = {};
console.log(tab.headers);
tab.headers.forEach(function(header, pos) {
if(params.contains(header)) {
mapMatch[header] = pos;
}
});
//console.log("Añado " + JSON.stringify(mapMatch));
                            headersPaths.push(mapMatch); // Save the mapping
call2();
}
}, function(err, results) {
call();
});
});
}, function(err) {
callback();
});
},
// Filter columns that I use to compare table's experiment
function(callback) {
console.log("Tengo : " + tabs.length);
console.log("Tengo : " + headersPaths.length);
            // Store all the "params" values in "data"
data.headers = {};
params.get().forEach(function(value) {
data.headers[value] = [];//undefined;
});
console.log(JSON.stringify(data));
            // Create the "exps" attribute inside "data"
data.exps = [];
            // Now, for each experiment, load the corresponding data
headersPaths.forEach(function(headerPath, index) {
console.log("--------- Empezamos a recorrer con header " + headerPath + " ---------");
var posHeader = 0;
Object.keys(headerPath).forEach(function(key) {
console.log(key + " <=> " + headerPath[key]);
//tabs.forEach(function(tab, ii) {
tabs[index].matrix.forEach(function(matrix, area) {
//console.log("Header: " + key + "\tNº Tab: " + ii + "\tArea: " + area);
                        data.headers[key][area] = data.headers[key][area] || [0]; // If it already exists, keep it; otherwise seed it with a new array
data.headers[key][area].push(matrix.data[posHeader]);
});
//});
<|fim▁hole|>
infoData.push(matrix.data[posHeader]);
});
//console.log(infoData);
console.log("Inserta del index " + posHeader);
data.headers[key].push(infoData);*/
posHeader++;
});
// Volcamos la información del experimento asociado a cada tabla
data.exps.push(tabs[index].assignedTo);
});
console.log("----------------------");
console.log("----------------------");
//console.log(data);
console.log("----------------------");
console.log("----------------------");
/*async.each(arr, function(tab, call) {
tab.headers.forEach(function(header, position) {
//if(mapMatch[header] != undefined) {
if(header in tab.mapMatch) {
tab.mapMatch[header] = position;
}
});
// Remove all columns that not contains in mapMatch
//tab.mapMatch.forEach(function(position) {
//});
call();
});*/
callback();
}
],
// finish callback
function(err, results){
console.log("FIN: " + arr.length);
console.log(params.intersect(commons));
//res.send(data);
/*var ret = {
data: data
};
res.send(ret);*/
console.log("----------------------");
console.log("----------------------");
console.log("----------------------");
console.log("----------------------");
console.log(JSON.stringify(data));
res.render("account/experiments/compare/stage3", {
data: data,
code: req.csrfToken(),
id: 0
});
});
};
// TODO: No funciona bien T.T
/*exports.downloadHTML = function(req, res) {
phantom = require('phantom')
phantom.create(function(ph){
ph.createPage(function(page) {
page.open("http://www.google.com", function(status) {
page.render('google.pdf', function(){
console.log('Page Rendered');
ph.exit();
});
});
});
});
};*/<|fim▁end|> | /*var infoData = [];
tabs[index].matrix.forEach(function(matrix, area) { |
<|file_name|>chunk_test.go<|end_file_name|><|fim▁begin|>package minecraft
import (
"testing"
"vimagination.zapto.org/minecraft/nbt"
)
func TestNew(t *testing.T) {
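	// Build a minimal chunk NBT fixture: biome bytes, a height map, four populated block sections, two tile entities, two entities and two tile ticks.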
biomes := make(nbt.ByteArray, 256)
biome := int8(-1)
blocks := make(nbt.ByteArray, 4096)
add := make(nbt.ByteArray, 2048)
data := make(nbt.ByteArray, 2048)
for i := 0; i < 256; i++ {
biomes[i] = biome
//if biome++; biome >= 23 {
// biome = -1
//}
}
dataTag := nbt.NewTag("", nbt.Compound{
nbt.NewTag("Level", nbt.Compound{
nbt.NewTag("Biomes", biomes),
nbt.NewTag("HeightMap", make(nbt.IntArray, 256)),
nbt.NewTag("InhabitedTime", nbt.Long(0)),
nbt.NewTag("LastUpdate", nbt.Long(0)),
nbt.NewTag("Sections", &nbt.ListCompound{
nbt.Compound{
nbt.NewTag("Blocks", blocks),
nbt.NewTag("Add", add),
nbt.NewTag("Data", data),
nbt.NewTag("BlockLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("SkyLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("Y", nbt.Byte(0)),
},
nbt.Compound{
nbt.NewTag("Blocks", blocks),
nbt.NewTag("Add", add),
nbt.NewTag("Data", data),
nbt.NewTag("BlockLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("SkyLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("Y", nbt.Byte(1)),
},
nbt.Compound{
nbt.NewTag("Blocks", blocks),
nbt.NewTag("Add", add),
nbt.NewTag("Data", data),
nbt.NewTag("BlockLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("SkyLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("Y", nbt.Byte(3)),
},
nbt.Compound{
nbt.NewTag("Blocks", blocks),
nbt.NewTag("Add", add),
nbt.NewTag("Data", data),
nbt.NewTag("BlockLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("SkyLight", make(nbt.ByteArray, 2048)),
nbt.NewTag("Y", nbt.Byte(10)),
},
}),
nbt.NewTag("TileEntities", &nbt.ListCompound{
nbt.Compound{
nbt.NewTag("id", nbt.String("test1")),
nbt.NewTag("x", nbt.Int(-191)),
nbt.NewTag("y", nbt.Int(13)),
nbt.NewTag("z", nbt.Int(379)),
nbt.NewTag("testTag", nbt.Byte(1)),
},
nbt.Compound{
nbt.NewTag("id", nbt.String("test2")),
nbt.NewTag("x", nbt.Int(-191)),
nbt.NewTag("y", nbt.Int(17)),
nbt.NewTag("z", nbt.Int(372)),
nbt.NewTag("testTag", nbt.Long(8)),
},
}),
nbt.NewTag("Entities", &nbt.ListCompound{
nbt.Compound{
nbt.NewTag("id", nbt.String("testEntity1")),
nbt.NewTag("Pos", &nbt.ListDouble{
nbt.Double(-190),
nbt.Double(13),
nbt.Double(375),
}),
nbt.NewTag("Motion", &nbt.ListDouble{
nbt.Double(1),
nbt.Double(13),
nbt.Double(11),
}),
nbt.NewTag("Rotation", &nbt.ListFloat{
nbt.Float(13),
nbt.Float(11),
}),
nbt.NewTag("FallDistance", nbt.Float(0)),
nbt.NewTag("Fire", nbt.Short(-1)),
nbt.NewTag("Air", nbt.Short(300)),
nbt.NewTag("OnGround", nbt.Byte(1)),
nbt.NewTag("Dimension", nbt.Int(0)),
nbt.NewTag("Invulnerable", nbt.Byte(0)),
nbt.NewTag("PortalCooldown", nbt.Int(0)),
nbt.NewTag("UUIDMost", nbt.Long(0)),
nbt.NewTag("UUIDLease", nbt.Long(0)),
nbt.NewTag("Riding", nbt.Compound{}),
},
nbt.Compound{
nbt.NewTag("id", nbt.String("testEntity2")),
nbt.NewTag("Pos", &nbt.ListDouble{
nbt.Double(-186),
nbt.Double(2),
nbt.Double(378),
}),
nbt.NewTag("Motion", &nbt.ListDouble{
nbt.Double(17.5),
nbt.Double(1000),
nbt.Double(54),
}),
nbt.NewTag("Rotation", &nbt.ListFloat{
nbt.Float(11),
nbt.Float(13),
}),
nbt.NewTag("FallDistance", nbt.Float(30)),
nbt.NewTag("Fire", nbt.Short(4)),
nbt.NewTag("Air", nbt.Short(30)),
nbt.NewTag("OnGround", nbt.Byte(0)),
nbt.NewTag("Dimension", nbt.Int(0)),
nbt.NewTag("Invulnerable", nbt.Byte(1)),
nbt.NewTag("PortalCooldown", nbt.Int(10)),
nbt.NewTag("UUIDMost", nbt.Long(1450)),
nbt.NewTag("UUIDLease", nbt.Long(6435)),
nbt.NewTag("Riding", nbt.Compound{}),
},
}),
nbt.NewTag("TileTicks", &nbt.ListCompound{
nbt.Compound{
nbt.NewTag("i", nbt.Int(0)),
nbt.NewTag("t", nbt.Int(0)),
nbt.NewTag("p", nbt.Int(0)),
nbt.NewTag("x", nbt.Int(-192)),
nbt.NewTag("y", nbt.Int(0)),
nbt.NewTag("z", nbt.Int(368)),
},
nbt.Compound{
nbt.NewTag("i", nbt.Int(1)),
nbt.NewTag("t", nbt.Int(34)),
nbt.NewTag("p", nbt.Int(12)),
nbt.NewTag("x", nbt.Int(-186)),
nbt.NewTag("y", nbt.Int(11)),
nbt.NewTag("z", nbt.Int(381)),
},
}),<|fim▁hole|> nbt.NewTag("xPos", nbt.Int(-12)),
nbt.NewTag("zPos", nbt.Int(23)),
}),
})
if _, err := newChunk(-12, 23, dataTag); err != nil {
t.Fatalf("reveived unexpected error during testing, %q", err.Error())
}
}
func TestBiomes(t *testing.T) {
chunk, _ := newChunk(0, 0, nbt.Tag{})
for b := Biome(0); b < 23; b++ {
biome := b
for x := int32(0); x < 16; x++ {
for z := int32(0); z < 16; z++ {
chunk.SetBiome(x, z, biome)
if newB := chunk.GetBiome(x, z); newB != biome {
t.Errorf("error setting biome at co-ordinates, expecting %q, got %q", biome.String(), newB.String())
}
}
}
}
}
func TestBlock(t *testing.T) {
chunk, _ := newChunk(0, 0, nbt.Tag{})
testBlocks := []struct {
Block
x, y, z int32
recheck bool
}{
//Test simple set
{
Block{
ID: 12,
},
0, 0, 0,
true,
},
//Test higher ids
{
Block{
ID: 853,
},
1, 0, 0,
true,
},
{
Block{
ID: 463,
},
2, 0, 0,
true,
},
{
Block{
ID: 1001,
},
3, 0, 0,
true,
},
//Test data set
{
Block{
ID: 143,
Data: 12,
},
0, 1, 0,
true,
},
{
Block{
ID: 153,
Data: 4,
},
1, 1, 0,
true,
},
{
Block{
ID: 163,
Data: 5,
},
2, 1, 0,
true,
},
//Test metadata [un]set
{
Block{
metadata: nbt.Compound{
nbt.NewTag("testInt2", nbt.Int(1743)),
nbt.NewTag("testString2", nbt.String("world")),
},
},
0, 0, 1,
true,
},
{
Block{
metadata: nbt.Compound{
nbt.NewTag("testInt", nbt.Int(15)),
nbt.NewTag("testString", nbt.String("hello")),
},
},
1, 0, 1,
false,
},
{
Block{},
1, 0, 1,
true,
},
//Test tick [un]set
{
Block{
ticks: []Tick{{123, 1, 4}, {123, 7, -1}},
},
0, 1, 1,
true,
},
{
Block{
ticks: []Tick{{654, 4, 6}, {4, 63, 5}, {4, 5, 9}},
},
1, 1, 1,
false,
},
{
Block{},
1, 1, 1,
true,
},
}
for _, tB := range testBlocks {
chunk.SetBlock(tB.x, tB.y, tB.z, tB.Block)
if block := chunk.GetBlock(tB.x, tB.y, tB.z); !tB.Block.EqualBlock(block) {
t.Errorf("blocks do not match, expecting %s, got %s", tB.Block.String(), block.String())
}
}
for _, tB := range testBlocks {
if tB.recheck {
if block := chunk.GetBlock(tB.x, tB.y, tB.z); !tB.Block.EqualBlock(block) {
t.Errorf("blocks do not match, expecting:-\n%s\ngot:-\n%s", tB.Block.String(), block.String())
}
}
}
}
func TestHeightMap(t *testing.T) {
tests := []struct {
x, y, z int32
Block
height int32
}{
{0, 0, 0, Block{}, 0},
{1, 0, 0, Block{ID: 1}, 1},
{1, 1, 0, Block{ID: 1}, 2},
{1, 0, 0, Block{}, 2},
{1, 1, 0, Block{}, 0},
{2, 10, 0, Block{ID: 1}, 11},
{2, 12, 0, Block{ID: 1}, 13},
{2, 12, 0, Block{}, 11},
{2, 10, 0, Block{}, 0},
{3, 15, 0, Block{ID: 1}, 16},
{3, 16, 0, Block{ID: 1}, 17},
{3, 16, 0, Block{}, 16},
{3, 15, 0, Block{}, 0},
{4, 31, 0, Block{ID: 1}, 32},
{4, 32, 0, Block{ID: 1}, 33},
{4, 32, 0, Block{}, 32},
{4, 31, 0, Block{}, 0},
{5, 16, 0, Block{ID: 1}, 17},
{5, 32, 0, Block{ID: 1}, 33},
{5, 32, 0, Block{}, 17},
{5, 16, 0, Block{}, 0},
}
chunk, _ := newChunk(0, 0, nbt.Tag{})
for n, test := range tests {
chunk.SetBlock(test.x, test.y, test.z, test.Block)
if h := chunk.GetHeight(test.x, test.z); h != test.height {
t.Errorf("test %d: expecting height %d, got %d", n+1, test.height, h)
}
}
}<|fim▁end|> | nbt.NewTag("TerrainPopulated", nbt.Byte(1)), |
<|file_name|>DeleteNodeBST.py<|end_file_name|><|fim▁begin|>class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
node = parent = None
def deleteNode(self, root: TreeNode, key: int) -> TreeNode:
# search for the node and its parent
self.findNodeAndParent(root, key)
if self.node == root and not root.left and not root.right:
return None
if self.node:
self.deleteNodeHelper(self.node, self.parent)
return root
def deleteNodeHelper(self, node, parent):
# if node is a leaf
if not node.left and not node.right:
if parent:
if parent.left == node:
parent.left = None
else:
parent.right = None
return
# if node has only one child
if not node.left or not node.right:
child = node.left if not node.right else node.right
node.val = child.val
node.left = child.left
node.right = child.right
return
# node has two children
successor, succesorParent = self.getNodeSuccessor(node)
node.val = successor.val
self.deleteNodeHelper(successor, succesorParent)
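        # Note (added): the in-order successor is the leftmost node of the right subtree,
        # so it has no left child, and the recursive call above ends in one of the simpler
        # cases handled earlier (leaf or single child).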
def getNodeSuccessor(self, node):
succesorParent = node
successor = node.right
while successor.left:
succesorParent = successor
successor = successor.left
return successor, succesorParent
def findNodeAndParent(self, root, key):
if not root:
return
if root.val == key:
self.node = root
return
self.parent = root
if key < root.val:
self.findNodeAndParent(root.left, key)
else:
self.findNodeAndParent(root.right, key)
root = TreeNode(10)
root.left = TreeNode(3)<|fim▁hole|>root.left.right.right = TreeNode(9)
root.right = TreeNode(15)
root.right.left = TreeNode(13)
root.right.right = TreeNode(17)
root.right.right.right = TreeNode(19)
ob = Solution()
root = TreeNode(50)
root = ob.deleteNode(root, 50)
print(root)<|fim▁end|> | root.left.left = TreeNode(2)
root.left.right = TreeNode(8)
root.left.right.left = TreeNode(7) |
<|file_name|>Electric dipole field lines.py<|end_file_name|><|fim▁begin|>from __future__ import division #So that integer division is not truncated
from visual import * #Module with VPython's graphics functions
from math import *
scene_range = 15
scene.width = 1920
scene.height = 1080
scene.fullscreen = True
scene.autoscale = False
scene.range = (scene_range, scene_range, scene_range)
scene.center = (0,0,0)
scene.forward = (-1,-0.7,-1)
dt = 10
rate_emf = 1000
numero_planos_linhas_campo = 24
carga_particula = 1
massa_particula = 1.673*10**-27
carga_polo_pos = 5*10**7
pos_polo_pos = vector(0,2,0)
carga_polo_neg = -5*10**7
pos_polo_neg = vector(0,-2,0)
def criacao_emf():
    #positive and negative poles
global pos_polo_pos
global pos_polo_neg
polo_pos = sphere(pos=pos_polo_pos, radius=1, material = materials.marble, opacity=0.25)
polo_neg = sphere(pos=pos_polo_neg, radius=1, material = materials.marble, opacity=0.25)
    #create a reference frame inside the positive sphere (vec_y_polo_pos is parallel to the vector joining the two sphere centres)
    #these vectors will be used as the rotation axes
norm_vec_conect_center_spheres = norm(polo_pos.pos - polo_neg.pos)
vec_norm_polo_pos = vector(norm_vec_conect_center_spheres.y, norm_vec_conect_center_spheres.x, 0)
vec_x_polo_pos = arrow(pos=polo_pos.pos, axis=vec_norm_polo_pos, opacity=0.25, color = color.red)
vec_y_polo_pos = arrow(pos=polo_pos.pos, axis=norm_vec_conect_center_spheres, opacity=0.25, color = color.green)
vec_z_polo_pos = arrow(pos=polo_pos.pos, axis=cross(vec_y_polo_pos.axis, vec_x_polo_pos.axis), opacity=0.25, color = color.cyan)
    #lists holding the data
lista_particulas_emf = []
lista_trajectos = []
    #rotation angles
latitude = 0
longitude = 0
    #particle creation
while (longitude < 180):
dir_longitude = vec_x_polo_pos.axis.rotate(angle=radians(longitude), axis=vec_y_polo_pos.axis)
latitude_axis = vec_z_polo_pos.axis.rotate(angle=radians(longitude), axis=vec_y_polo_pos.axis)
while (latitude < 360):
dir_particula = dir_longitude.rotate(angle=radians(latitude), axis=latitude_axis)
pos_particula = polo_pos.pos + dir_particula
particula = sphere(pos=pos_particula, radius=0.05, opacity=0.25)
trajecto = curve(pos=pos_particula, color=color.yellow)
lista_particulas_emf.append(particula)
lista_trajectos.append(trajecto)
latitude += 360 / numero_planos_linhas_campo
latitude = 0
longitude += 360 / numero_planos_linhas_campo
    #create arrays from the lists
array_particulas_emf = array(lista_particulas_emf)
array_trajectos = array(lista_trajectos)
    #compute the electric field lines
continuar = True
picked_pole = None
while continuar:
rate(rate_emf)
        #If the user changes the position of one of the poles, rebuild the field lines
if scene.mouse.events:
m = scene.mouse.getevent()
if m.drag:
if (m.pick == polo_pos or m.pick == polo_neg):
picked_pole = m.pick
elif m.drop:
if picked_pole:
continuar = False
pos_polo_pos = polo_pos.pos
pos_polo_neg = polo_neg.pos
                    #Clear the current objects and field lines
while(len(scene.objects) > 0):
scene.objects[0].visible = False
if picked_pole:
current_pos = scene.mouse.pos
offset = current_pos - picked_pole.pos
if (offset != 0):
picked_pole.pos += offset
<|fim▁hole|>
                #Fe = k |q1|*|q2| / K r^2 -> Coulomb's law
                #E = Fe / q
                #E = k * q1 / K r^2
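                # Note (added sketch, not from the original author): the pole charges appear
                # to be counts of elementary charges, so the lines below compute, per pole,
                # |E| = 9e9 * (charge * 1.602e-19) / r**2 and then superpose E = Eqp + Eqn.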
dist_particulas_pos = array_particulas_emf[i].pos - polo_pos.pos
dist_particulas_neg = array_particulas_emf[i].pos - polo_neg.pos
Eqp = ((9*10**9 * carga_polo_pos * 1.602*10**-19) / mag(dist_particulas_pos)**2) * norm(dist_particulas_pos)
Eqn = ((9*10**9 * carga_polo_neg * 1.602*10**-19) / mag(dist_particulas_neg)**2) * norm(dist_particulas_neg)
E = Eqp + Eqn
#x = x0 + v*t
                #Since we are drawing field lines, we move through space using E as the direction vector (analogous to a particle's velocity)
pos = array_particulas_emf[i].pos + E * dt
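                # Added note (sketch): the assignment above is one forward-Euler step along the
                # local field; E is not normalised, so the step length grows with field strength,
                # and dt (10 here) acts as a tracing step size rather than a physical time step.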
                #update the data
#array_campo_mag_emf[i] = E
array_particulas_emf[i].pos = pos
array_trajectos[i].append(pos)
while True:
criacao_emf()<|fim▁end|> | for i in range(array_particulas_emf.size):
#Se as particulas se afastarem consideravelmento do centro dos polos ou quando entrarem dentro do polo neg, são imobilizadas
if ((mag(array_particulas_emf[i].pos) < scene_range) and (mag(array_particulas_emf[i].pos - polo_neg.pos) > polo_neg.radius)):
#cálculo dos dados |
<|file_name|>paper.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on May 12, 2014
Model Paper
fields:
conf
year
passcode
paper id
status
title
category1
category1
keywords
@author: riccardo
'''
class Paper(object):
__conf=""
__year=""
__passcode=""
__pid=""
__status=""
__title=""
__category1=""
__category2=""
__keywords=""
def __init__(self, conf,year,pid, status, title, category1, category2, keywords):
self.__conf = conf
self.__year = year
self.__pid = pid
self.__status = status
self.__title = title
self.__category1 = category1
self.__category2 = category2
self.__keywords = keywords
def get_passcode(self):
return self.__passcode
def get_pid(self):
return self.__pid
def get_status(self):
return self.__status
def get_title(self):
return self.__title
def get_category_1(self):
return self.__category1
def get_category_2(self):
return self.__category2
def get_keywords(self):
return self.__keywords
def set_passcode(self, value):
self.__passcode = value
def set_pid(self, value):
self.__pid = value
def set_status(self, value):
self.__status = value
def set_title(self, value):
self.__title = value
def set_category_1(self, value):
self.__category1 = value
def set_category_2(self, value):
self.__category2 = value
<|fim▁hole|>
def del_passcode(self):
del self.__passcode
def del_pid(self):
del self.__pid
def del_status(self):
del self.__status
def del_title(self):
del self.__title
def del_category_1(self):
del self.__category1
def del_category_2(self):
del self.__category2
def del_keywords(self):
del self.__keywords
passcode = property(get_passcode, set_passcode, del_passcode, "passcode's docstring")
pid = property(get_pid, set_pid, del_pid, "pid's docstring")
status = property(get_status, set_status, del_status, "status's docstring")
title = property(get_title, set_title, del_title, "title's docstring")
category1 = property(get_category_1, set_category_1, del_category_1, "category1's docstring")
category2 = property(get_category_2, set_category_2, del_category_2, "category2's docstring")
keywords = property(get_keywords, set_keywords, del_keywords, "keywords's docstring")
def get_conf(self):
return self.__conf
def get_year(self):
return self.__year
def set_conf(self, value):
self.__conf = value
def set_year(self, value):
self.__year = value
def del_conf(self):
del self.__conf
def del_year(self):
del self.__year
conf = property(get_conf, set_conf, del_conf, "conf's docstring")
year = property(get_year, set_year, del_year, "year's docstring")<|fim▁end|> | def set_keywords(self, value):
self.__keywords = value
|
<|file_name|>GAPopulation.java<|end_file_name|><|fim▁begin|>package ga;
import engine.*;
import java.util.*;
public class GAPopulation {
/* Evolutionary parameters: */
public int size; // size of the population
public int ngens; // total number of generations
public int currgen; // current generation
/* Crossover parameters */
int tournamentK; // size of tournament
int elite; // size of elite
int immigrant; // number of new random individuals
double mutrate; // chance that a mutation will occur
double xoverrate; // chance that the xover will occur
/* Containers */
public ArrayList<Genome> individual;
Genome parent;
Trainer T;
/* Progress data */
public double[] max_fitness;
public double[] avg_fitness;
public double[] terminals; // average total number of terminals
public double[] bigterminals; // average total number of sig. terminals
/**
* Initialize and load parameters.
* Parameter comp is a node from a previous
* scenario, which is used for distance calculations.
*/
public GAPopulation(Genome comp)
{
individual = new ArrayList<Genome>();
parent = comp;
// reading parameters
Parameter param = Parameter.getInstance();
String paramval;
paramval = param.getParam("population size");
if (paramval != null)
size = Integer.valueOf(paramval);
else
{
System.err.println("\"population size\" not defined on parameter file.");
size = 10;
}
paramval = param.getParam("generation number");
if (paramval != null)
ngens = Integer.valueOf(paramval);
else
{
System.err.println("\"generation number\" not defined on parameter file.");
ngens = 10;
}
paramval = param.getParam("tournament K");
if (paramval != null)
tournamentK = Integer.valueOf(paramval);
else
{
System.err.println("\"tournament K\" not defined on parameter file.");
tournamentK = 5;
}
paramval = param.getParam("elite size");
if (paramval != null)
elite = Integer.valueOf(paramval);
else
{
System.err.println("\"elite size\" not defined on parameter file.");
elite = 1;
}
paramval = param.getParam("immigrant size");
if (paramval != null)
immigrant = Integer.valueOf(paramval);
else
{
System.err.println("\"immigrant size\" not defined on parameter file.");
            immigrant = 0;
}
paramval = param.getParam("mutation rate");
if (paramval != null)
mutrate = Double.valueOf(paramval);
else
{
System.err.println("\"mutation rate\" not defined on parameter file.");
mutrate = 0.01;
}
paramval = param.getParam("crossover rate");
if (paramval != null)
xoverrate = Double.valueOf(paramval);
else
{
System.err.println("\"crossover rate\" not defined on parameter file.");
xoverrate = 0.9;
}
}
/**
* Initialize the new population and the local
* variables. Startd is the target date for the
* @param startd
*/
public void initPopulation(Date startd)
{
T = new Trainer(startd);
currgen = 0;
for (int i = 0; i < size; i++)
{
Genome n = new Genome();
n.init();
individual.add(n);
}
<|fim▁hole|>
}
/**
* Runs one generation loop
*
*/
public void runGeneration()
{
eval();
breed();
currgen++;
}
/**
* update the values of the maxfitness/avg fitness/etc
* public arrays;
*/
public void updateStatus()
{
Parameter p = Parameter.getInstance();
String param = p.getParam("asset treshold");
double tresh = Double.valueOf(param);
avg_fitness[currgen-1] = 0;
terminals[currgen-1] = 0;
bigterminals[currgen-1] = 0;
for (int i = 0; i < individual.size(); i++)
{
avg_fitness[currgen-1] += individual.get(i).fitness;
terminals[currgen-1] += individual.get(i).countAsset(0.0);
bigterminals[currgen-1] += individual.get(i).countAsset(tresh);
}
max_fitness[currgen-1] = individual.get(0).fitness;
avg_fitness[currgen-1] /= size;
terminals[currgen-1] /= size;
bigterminals[currgen-1] /= size;
}
/**
* Calculates the fitness value for each individual
* in the population.
*/
public void eval()
{
for (int i = 0; i < size; i++)
{
individual.get(i).eval(T);
}
Collections.sort(individual);
}
/**
* Perform selection, crossover, mutation in
* order to create a new population.
*
* Assumes the eval function has already been
* performed.
*
*/
public void breed()
{
RNG d = RNG.getInstance();
ArrayList<Genome> nextGen = new ArrayList<Genome>();
Genome p1,p2;
// elite: (few copied individuals)
for (int i = 0; i < elite; i++)
{
nextGen.add(individual.get(i).copy());
}
// immigrant: (usually 0)
for (int i = 0; i < immigrant; i++)
{
Genome n = new Genome();
n.init();
nextGen.add(n);
}
// crossover:
for (int i = 0; i < size - (immigrant + elite); i+=2)
{
// selection - the selection function should
// return copies already.
p1 = Tournament();
p2 = Tournament();
// rolls for xover
if (d.nextDouble() < xoverrate)
{
p1.crossover(p2);
}
// rolls for mutation
if (d.nextDouble() < mutrate)
p1.mutation();
if (d.nextDouble() < mutrate)
p2.mutation();
nextGen.add(p1);
nextGen.add(p2);
}
individual = nextGen;
}
/**
* Select one parent from the population by using
* fitness-proportional tournament selection
     * (each candidate has a chance proportional to its
* fitness of being chosen).
*
     * The function copies the chosen candidate and returns
     * the copy.
* @return
*/
public Genome Tournament()
{
RNG d = RNG.getInstance();
Genome[] list = new Genome[tournamentK];
double[] rank = new double[tournamentK];
double sum = 0.0;
double ticket = 0.0;
double min = 0.0;
/* Selects individuals and removes negative fitness */
for (int i = 0; i < tournamentK; i++)
{
list[i] = individual.get(d.nextInt(size));
if (list[i].fitness < min)
min = list[i].fitness;
}
        /* I'm not sure if this is the best way to
         * weight the candidates in proportion to their fitnesses.
         * Some sort of scaling factor should be put here
         * to avoid high fitnesses from superdominating.
         *
         * But maybe the tournament process already guarantees this?
*/
for (int i = 0; i < tournamentK; i++)
{
sum += list[i].fitness - min;
rank[i] = sum;
}
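        /* Worked example (added sketch, not part of the original code): with tournamentK = 3
         * and fitnesses {2, 5, 13} (all positive, so min stays 0), rank becomes {2, 7, 20}
         * and sum = 20; the ticket drawn below then picks the candidates with probabilities
         * 2/20, 5/20 and 13/20 respectively. */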
ticket = d.nextDouble()*sum;
for (int i = 0; i < tournamentK; i++)
{
if ((ticket) <= rank[i])
return list[i].copy();
}
// should never get here
System.err.println("x" + ticket + " + " + sum);
System.err.println("Warning: MemeTournament - reached unreachable line");
return list[0].copy();
}
}<|fim▁end|> | max_fitness = new double[ngens];
avg_fitness = new double[ngens];
terminals = new double[ngens];
bigterminals = new double[ngens]; |
<|file_name|>ContextTest.py<|end_file_name|><|fim▁begin|>import rdflib
from rdflib.term import URIRef, Variable
from PyOpenWorm.dataObject import DataObject, InverseProperty
from PyOpenWorm.context import Context
from PyOpenWorm.context_store import ContextStore
from .DataTestTemplate import _DataTest
try:
from unittest.mock import MagicMock, Mock
except ImportError:
from mock import MagicMock, Mock
class ContextTest(_DataTest):
def test_inverse_property_context(self):
class A(DataObject):
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.a = A.ObjectProperty(value_type=B)
class B(DataObject):
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.b = B.ObjectProperty(value_type=A)
InverseProperty(B, 'b', A, 'a')
ctx1 = Context(ident='http://example.org/context_1')
ctx2 = Context(ident='http://example.org/context_2')
a = ctx1(A)(ident='a')
b = ctx2(B)(ident='b')
a.a(b)
expected = (URIRef('b'), URIRef('http://openworm.org/entities/B/b'), URIRef('a'))
self.assertIn(expected, list(ctx1.contents_triples()))
def test_defined(self):
class A(DataObject):
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.a = A.ObjectProperty(value_type=B)
def defined_augment(self):
return self.a.has_defined_value()
def identifier_augment(self):
return self.make_identifier(self.a.onedef().identifier.n3())
class B(DataObject):
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.b = B.ObjectProperty(value_type=A)
InverseProperty(B, 'b', A, 'a')
ctx1 = Context(ident='http://example.org/context_1')
ctx2 = Context(ident='http://example.org/context_2')
a = ctx1(A)()
b = ctx2(B)(ident='b')
a.a(b)
self.assertTrue(a.defined)
def test_save_context_no_graph(self):
ctx = Context()
del ctx.conf['rdf.graph']
with self.assertRaisesRegexp(Exception, r'graph'):
ctx.save_context()
def test_context_store(self):
class A(DataObject):
pass
ctx = Context(ident='http://example.com/context_1')
ctx(A)(ident='anA')
self.assertIn(URIRef('anA'),
tuple(x.identifier for x in ctx.query(A)().load()))
def test_decontextualize(self):
class A(DataObject):
pass
ctx = Context(ident='http://example.com/context_1')
ctxda = ctx(A)(ident='anA')
self.assertIsNone(ctxda.decontextualize().context)
def test_init_imports(self):
ctx = Context(ident='http://example.com/context_1')
self.assertEqual(len(list(ctx.imports)), 0)
def test_zero_imports(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 0)
def test_save_import(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
new_ctx = Context(ident='http://example.com/context_1')
ctx.add_import(new_ctx)
ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 1)
def test_add_import(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
ctx2 = Context(ident='http://example.com/context_2')
ctx2_1 = Context(ident='http://example.com/context_2_1')
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident='http://example.com/context_3')
ctx3.add_import(ctx)
final_ctx = Context(ident='http://example.com/context_1', imported=(ctx3,))
final_ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 4)
def test_init_len(self):
ctx = Context(ident='http://example.com/context_1')
self.assertEqual(len(ctx), 0)
def test_len(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
self.assertEqual(len(ctx), 5)
def test_add_remove_statement(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
stmt_to_remove = create_mock_statement(ident_uri, 42)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.add_statement(stmt_to_remove)
ctx.remove_statement(stmt_to_remove)
self.assertEqual(len(ctx), 5)
def test_add_statement_with_different_context(self):
ctx = Context(ident='http://example.com/context_1')
stmt1 = create_mock_statement('http://example.com/context_2', 1)
with self.assertRaises(ValueError):
ctx.add_statement(stmt1)
def test_contents_triples(self):
res_wanted = []
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
stmt = create_mock_statement(ident_uri, i)
ctx.add_statement(stmt)
res_wanted.append(stmt.to_triple())
for triples in ctx.contents_triples():
self.assertTrue(triples in res_wanted)
def test_clear(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.clear()
self.assertEqual(len(ctx), 0)
def test_save_context(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.save_context(graph)
self.assertEqual(len(graph), 5)
def test_save_context_with_inline_imports(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri2 = 'http://example.com/context_2'
ident_uri2_1 = 'http://example.com/context_2_1'
ident_uri3 = 'http://example.com/context_3'
ident_uri4 = 'http://example.com/context_4'
ctx = Context(ident=ident_uri)
ctx2 = Context(ident=ident_uri2)
ctx2_1 = Context(ident=ident_uri2_1)
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident=ident_uri3)
ctx3.add_import(ctx)
last_ctx = Context(ident=ident_uri4)
last_ctx.add_import(ctx3)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1))
ctx3.add_statement(create_mock_statement(ident_uri3, 3))
last_ctx.add_statement(create_mock_statement(ident_uri4, 4))
last_ctx.save_context(graph, True)
self.assertEqual(len(graph), 5)
def test_triples_saved(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri2 = 'http://example.com/context_2'
ident_uri2_1 = 'http://example.com/context_2_1'
ident_uri3 = 'http://example.com/context_3'
ident_uri4 = 'http://example.com/context_4'
ctx = Context(ident=ident_uri)
ctx2 = Context(ident=ident_uri2)
ctx2_1 = Context(ident=ident_uri2_1)
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident=ident_uri3)
ctx3.add_import(ctx)
last_ctx = Context(ident=ident_uri4)
last_ctx.add_import(ctx3)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1))
ctx3.add_statement(create_mock_statement(ident_uri3, 3))
last_ctx.add_statement(create_mock_statement(ident_uri4, 4))
last_ctx.save_context(graph, True)
self.assertEqual(last_ctx.triples_saved, 5)
def test_triples_saved_noundef_triples_counted(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
statement = MagicMock()
statement.context.identifier = rdflib.term.URIRef(ident_uri)
statement.to_triple.return_value = (Variable('var'), 1, 2)
ctx.add_statement(statement)
ctx.save_context(graph)
self.assertEqual(ctx.triples_saved, 0)
def test_triples_saved_multi(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri1 = 'http://example.com/context_11'
ident_uri2 = 'http://example.com/context_12'
ctx = Context(ident=ident_uri)
ctx1 = Context(ident=ident_uri1)
ctx2 = Context(ident=ident_uri2)
ctx2.add_import(ctx)
ctx1.add_import(ctx2)
ctx1.add_import(ctx)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx1.add_statement(create_mock_statement(ident_uri1, 3))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx1.save_context(graph, inline_imports=True)
self.assertEqual(ctx1.triples_saved, 3)
def test_context_getter(self):
ctx = Context(ident='http://example.com/context_1')
self.assertIsNone(ctx.context)
def test_context_setter(self):
ctx = Context(ident='http://example.com/context_1')
ctx.context = 42
self.assertEqual(ctx.context, 42)
class ContextStoreTest(_DataTest):
def test_query(self):
rdf_type = 'http://example.org/A'
ctxid = URIRef('http://example.com/context_1')
ctx = Mock()
graph = Mock()
graph.store.triples.side_effect = ([], [((URIRef('anA0'), rdflib.RDF.type, rdf_type), (ctxid,))],)
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = [(URIRef('anA'), rdflib.RDF.type, rdf_type)]
ctx.identifier = ctxid
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
self.assertEqual(set([URIRef('anA'), URIRef('anA0')]),
set(x[0][0] for x in store.triples((None, rdflib.RDF.type, rdf_type))))
def test_contexts_staged_ignores_stored(self):
ctxid0 = URIRef('http://example.com/context_0')
ctxid1 = URIRef('http://example.com/context_1')
ctx = Mock()<|fim▁hole|> ctx.contents_triples.return_value = ()
ctx.identifier = ctxid1
ctx.imports = []
store = ContextStore(ctx)
self.assertNotIn(ctxid0, set(store.contexts()))
def test_contexts_combined(self):
ctxid0 = URIRef('http://example.com/context_0')
ctxid1 = URIRef('http://example.com/context_1')
ctx = Mock()
graph = Mock()
graph.store.triples.side_effect = [[((None, None, ctxid0), ())], []]
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = ()
ctx.identifier = ctxid1
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
self.assertEqual(set([ctxid0, ctxid1]),
set(store.contexts()))
def test_len_fail(self):
ctx = Mock()
graph = Mock()
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = ()
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
with self.assertRaises(NotImplementedError):
len(store)
def create_mock_statement(ident_uri, stmt_id):
statement = MagicMock()
statement.context.identifier = rdflib.term.URIRef(ident_uri)
statement.to_triple.return_value = (True, stmt_id, -stmt_id)
return statement<|fim▁end|> | graph = Mock()
graph.store.triples.side_effect = [[((None, None, ctxid0), ())], []]
ctx.conf = {'rdf.graph': graph} |
<|file_name|>header.component.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
import { HeaderComponent } from './header.component';
describe('HeaderComponent', () => {
let component: HeaderComponent;
let fixture: ComponentFixture<HeaderComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ HeaderComponent ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(HeaderComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
<|fim▁hole|>});<|fim▁end|> | |
<|file_name|>primitives_OBSERVED.py<|end_file_name|><|fim▁begin|>from astrodata.ReductionObjects import PrimitiveSet
class OBSERVEDPrimitives(PrimitiveSet):
astrotype = "OBSERVED"
def init(self, rc):
print "OBSERVEDPrimitives.init(rc)"
return
def typeSpecificPrimitive(self, rc):
print "OBSERVEDPrimitives::typeSpecificPrimitive()"
def mark(self, rc):
for ad in rc.get_inputs_as_astrodata():
if ad.is_type("MARKED"):
print "OBSERVEDPrimitives::mark(%s) already marked" % ad.filename
else:
ad.phu_set_key_value("S_MARKED", "TRUE")
rc.report_output(ad)
yield rc
<|fim▁hole|> def unmark(self, rc):
for ad in rc.get_inputs_as_astrodata():
if ad.is_type("UNMARKED"):
print "OBSERVEDPrimitives::unmark(%s) not marked" % ad.filename
else:
ad.phu_set_key_value("S_MARKED", None)
rc.report_output(ad)
yield rc<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|>
#[macro_use] extern crate quote;
#[macro_use] extern crate syn;
#[macro_use] extern crate synstructure;
decl_derive!([JSTraceable] => js_traceable_derive);
fn js_traceable_derive(s: synstructure::Structure) -> quote::Tokens {
let match_body = s.each(|binding| {
Some(quote!(#binding.trace(tracer);))
});
let ast = s.ast();
let name = ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
let mut where_clause = where_clause.unwrap_or(&parse_quote!(where)).clone();
for param in ast.generics.type_params() {
let ident = param.ident;
where_clause.predicates.push(parse_quote!(#ident: ::dom::bindings::trace::JSTraceable))
}
let tokens = quote! {
#[allow(unsafe_code)]
unsafe impl #impl_generics ::dom::bindings::trace::JSTraceable for #name #ty_generics #where_clause {
#[inline]
#[allow(unused_variables, unused_imports)]
unsafe fn trace(&self, tracer: *mut ::js::jsapi::JSTracer) {
use ::dom::bindings::trace::JSTraceable;
match *self {
#match_body
}
}
}
};
tokens
}<|fim▁end|> | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
<|file_name|>ModuleImpl.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.module.impl;
import com.intellij.configurationStore.RenameableStateStorageManager;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.ide.plugins.ContainerDescriptor;
import com.intellij.ide.plugins.IdeaPluginDescriptorImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.components.impl.stores.IComponentStore;
import com.intellij.openapi.components.impl.stores.ModuleStore;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleComponent;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.impl.scopes.ModuleScopeProviderImpl;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ex.ProjectEx;
import com.intellij.openapi.roots.ExternalProjectSystemRegistry;
import com.intellij.openapi.roots.ProjectModelElement;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.serviceContainer.ComponentManagerImpl;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import com.intellij.util.xmlb.annotations.Property;
import kotlin.Unit;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
public class ModuleImpl extends ComponentManagerImpl implements ModuleEx {
private static final Logger LOG = Logger.getInstance(ModuleImpl.class);
@NotNull private final Project myProject;
@Nullable protected VirtualFilePointer myImlFilePointer;
private volatile boolean isModuleAdded;
private String myName;
private final ModuleScopeProvider myModuleScopeProvider;
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project, @NotNull String filePath) {
this(name, project);
myImlFilePointer = VirtualFilePointerManager.getInstance().create(
VfsUtilCore.pathToUrl(filePath), this,
new VirtualFilePointerListener() {
@Override
public void validityChanged(@NotNull VirtualFilePointer @NotNull [] pointers) {
if (myImlFilePointer == null) return;
VirtualFile virtualFile = myImlFilePointer.getFile();
if (virtualFile != null) {
((ModuleStore)getStore()).setPath(virtualFile.toNioPath(), virtualFile, false);
ModuleManager.getInstance(myProject).incModificationCount();
}
}
});
}
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project, @Nullable VirtualFilePointer virtualFilePointer) {
this(name, project);
myImlFilePointer = virtualFilePointer;
}
@ApiStatus.Internal
public ModuleImpl(@NotNull String name, @NotNull Project project) {
super((ComponentManagerImpl)project);
registerServiceInstance(Module.class, this, ComponentManagerImpl.fakeCorePluginDescriptor);
myProject = project;
myModuleScopeProvider = new ModuleScopeProviderImpl(this);
myName = name;
}
@Override
public void init(@Nullable Runnable beforeComponentCreation) {
// do not measure (activityNamePrefix method not overridden by this class)
// because there are a lot of modules and no need to measure each one
registerComponents();
if (!isPersistent()) {
registerService(IComponentStore.class,
NonPersistentModuleStore.class,
ComponentManagerImpl.fakeCorePluginDescriptor,
true, ServiceDescriptor.PreloadMode.FALSE);
}
if (beforeComponentCreation != null) {
beforeComponentCreation.run();
}
createComponents(null);
}
private boolean isPersistent() {
return myImlFilePointer != null;
}
@Override
protected void setProgressDuringInit(@NotNull ProgressIndicator indicator) {
    // Component loading progress is not reported for a module because, at this stage, the minimal reporting unit is the module itself.
    // The "Loading modules" stage reports progress per loaded module, so the module's component count doesn't matter.
}
@Override
public final boolean isDisposed() {
// in case of light project in tests when it's temporarily disposed, the module should be treated as disposed too.
//noinspection TestOnlyProblems
return super.isDisposed() || ((ProjectEx)myProject).isLight() && myProject.isDisposed();
}
@Override
protected boolean isComponentSuitable(@NotNull ComponentConfig componentConfig) {
if (!super.isComponentSuitable(componentConfig)) {
return false;
}
Map<String, String> options = componentConfig.options;
if (options == null || options.isEmpty()) {
return true;
}
for (String optionName : options.keySet()) {
if ("workspace".equals(optionName) || "overrides".equals(optionName)) {
continue;
}
      // we cannot filter using module options because at this moment the module file data might not be loaded yet
String message = "Don't specify " + optionName + " in the component registration, transform component to service and implement your logic in your getInstance() method";
if (ApplicationManager.getApplication().isUnitTestMode()) {
LOG.error(message);
}
else {
LOG.warn(message);
}
}
return true;
}
@Override
@Nullable
public VirtualFile getModuleFile() {
if (myImlFilePointer == null) {
return null;
}
return myImlFilePointer.getFile();
}
@Override
public void rename(@NotNull String newName, boolean notifyStorage) {
myName = newName;
if (notifyStorage) {
((RenameableStateStorageManager)getStore().getStorageManager()).rename(newName + ModuleFileType.DOT_DEFAULT_EXTENSION);
}
}
protected @NotNull IComponentStore getStore() {
return Objects.requireNonNull(getService(IComponentStore.class));
}
@Override
public boolean canStoreSettings() {
return !(getStore() instanceof NonPersistentModuleStore);
}
@Override
@NotNull
public Path getModuleNioFile() {
if (!isPersistent()) {
return Paths.get("");
}
return getStore().getStorageManager().expandMacro(StoragePathMacros.MODULE_FILE);
}
@Override
public synchronized void dispose() {
isModuleAdded = false;
super.dispose();
}
@NotNull
@Override
protected ContainerDescriptor getContainerDescriptor(@NotNull IdeaPluginDescriptorImpl pluginDescriptor) {
return pluginDescriptor.moduleContainerDescriptor;
}
@Override
public void projectOpened() {
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
try {
//noinspection deprecation
component.projectOpened();
}
catch (Exception e) {
LOG.error(e);
}
return Unit.INSTANCE;
});
}
@Override
public void projectClosed() {
//noinspection deprecation
List<ModuleComponent> components = new ArrayList<>();
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
components.add(component);
return Unit.INSTANCE;
});
for (int i = components.size() - 1; i >= 0; i--) {
try {
//noinspection deprecation
components.get(i).projectClosed();
}
catch (Throwable e) {
LOG.error(e);
}
}
}
@Override
@NotNull
public Project getProject() {<|fim▁hole|> return myProject;
}
@Override
@NotNull
public String getName() {
return myName;
}
@Override
public boolean isLoaded() {
return isModuleAdded;
}
@Override
public void moduleAdded() {
isModuleAdded = true;
//noinspection deprecation
processInitializedComponents(ModuleComponent.class, (component, __) -> {
//noinspection deprecation
component.moduleAdded();
return Unit.INSTANCE;
});
}
@Override
public void setOption(@NotNull String key, @Nullable String value) {
DeprecatedModuleOptionManager manager = getOptionManager();
if (value == null) {
if (manager.state.options.remove(key) != null) {
manager.incModificationCount();
}
}
else if (!value.equals(manager.state.options.put(key, value))) {
manager.incModificationCount();
}
}
@NotNull
private DeprecatedModuleOptionManager getOptionManager() {
//noinspection ConstantConditions
return ((Module)this).getService(DeprecatedModuleOptionManager.class);
}
@Override
public String getOptionValue(@NotNull String key) {
return getOptionManager().state.options.get(key);
}
@NotNull
@Override
public GlobalSearchScope getModuleScope() {
return myModuleScopeProvider.getModuleScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleScope(boolean includeTests) {
return myModuleScopeProvider.getModuleScope(includeTests);
}
@NotNull
@Override
public GlobalSearchScope getModuleWithLibrariesScope() {
return myModuleScopeProvider.getModuleWithLibrariesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependenciesScope() {
return myModuleScopeProvider.getModuleWithDependenciesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleContentScope() {
return myModuleScopeProvider.getModuleContentScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleContentWithDependenciesScope() {
return myModuleScopeProvider.getModuleContentWithDependenciesScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependenciesAndLibrariesScope(boolean includeTests) {
return myModuleScopeProvider.getModuleWithDependenciesAndLibrariesScope(includeTests);
}
@NotNull
@Override
public GlobalSearchScope getModuleWithDependentsScope() {
return myModuleScopeProvider.getModuleWithDependentsScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleTestsWithDependentsScope() {
return myModuleScopeProvider.getModuleTestsWithDependentsScope();
}
@NotNull
@Override
public GlobalSearchScope getModuleRuntimeScope(boolean includeTests) {
return myModuleScopeProvider.getModuleRuntimeScope(includeTests);
}
@Override
public void clearScopesCache() {
myModuleScopeProvider.clearCache();
}
@Override
public String toString() {
if (myName == null) return "Module (not initialized)";
return "Module: '" + getName() + "'" + (isDisposed() ? " (disposed)" : "");
}
@Override
public long getOptionsModificationCount() {
return getOptionManager().getModificationCount();
}
@ApiStatus.Internal
@State(name = "DeprecatedModuleOptionManager", useLoadedStateAsExisting = false /* doesn't make sense to check it */)
public static class DeprecatedModuleOptionManager extends SimpleModificationTracker implements PersistentStateComponent<DeprecatedModuleOptionManager.State>,
ProjectModelElement {
private final Module module;
DeprecatedModuleOptionManager(@NotNull Module module) {
this.module = module;
}
@Override
@Nullable
public ProjectModelExternalSource getExternalSource() {
if (state.options.size() > 1 || state.options.size() == 1 && !state.options.containsKey(Module.ELEMENT_TYPE) /* unrealistic case, but just to be sure */) {
return null;
}
return ExternalProjectSystemRegistry.getInstance().getExternalSource(module);
}
static final class State {
@Property(surroundWithTag = false)
@MapAnnotation(surroundKeyWithTag = false, surroundValueWithTag = false, surroundWithTag = false, entryTagName = "option")
public final Map<String, String> options = new HashMap<>();
}
private State state = new State();
@Nullable
@Override
public State getState() {
return state;
}
@Override
public void loadState(@NotNull State state) {
this.state = state;
}
}
}<|fim▁end|> | |
<|file_name|>editor-spec.js<|end_file_name|><|fim▁begin|>import React from "react";
import { expect } from "chai";
import { mount } from "enzyme";
import { Provider } from "react-redux";
import Editor from "../../../src/notebook/providers/editor";
import { dummyStore } from "../../utils";
import {
UPDATE_CELL_SOURCE,
FOCUS_CELL_EDITOR
} from "../../../src/notebook/constants";
describe("EditorProvider", () => {
const store = dummyStore();
const setup = (id, cellFocused = true) =>
mount(
<Provider store={store}>
<Editor id={id} cellFocused={cellFocused} />
</Provider>
);
it("can be constructed", () => {
const component = setup("test");
expect(component).to.not.be.null;
});
it("onChange updates cell source", () =>
new Promise(resolve => {
const dispatch = action => {
expect(action.id).to.equal("test");
expect(action.source).to.equal("i love nteract");
expect(action.type).to.equal(UPDATE_CELL_SOURCE);
resolve();
};
store.dispatch = dispatch;
const wrapper = setup("test");
const onChange = wrapper
.findWhere(n => n.prop("onChange") !== undefined)
.first()
.prop("onChange");
onChange("i love nteract");
}));
it("onFocusChange can update editor focus", () =>
new Promise(resolve => {
const dispatch = action => {
expect(action.id).to.equal("test");
expect(action.type).to.equal(FOCUS_CELL_EDITOR);<|fim▁hole|> };
store.dispatch = dispatch;
const wrapper = setup("test");
const onFocusChange = wrapper
.findWhere(n => n.prop("onFocusChange") !== undefined)
.first()
.prop("onFocusChange");
onFocusChange(true);
}));
});<|fim▁end|> | resolve(); |
<|file_name|>game.cpp<|end_file_name|><|fim▁begin|>/**
* The Forgotten Server - a free and open-source MMORPG server emulator
* Copyright (C) 2014 Mark Samman <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "otpch.h"
#include "pugicast.h"
#include "items.h"
#include "commands.h"
#include "creature.h"
#include "monster.h"
#include "game.h"
#include "tile.h"
#include "house.h"
#include "actions.h"
#include "combat.h"
#include "iologindata.h"
#include "iomarket.h"
#include "chat.h"
#include "talkaction.h"
#include "spells.h"
#include "configmanager.h"
#include "ban.h"
#include "raids.h"
#include "database.h"
#include "server.h"
#include "ioguild.h"
#include "quests.h"
#include "globalevent.h"
#include "mounts.h"
#include "bed.h"
#include "scheduler.h"
#include "monster.h"
#include "spawn.h"
#include "connection.h"
#include "events.h"
extern ConfigManager g_config;
extern Actions* g_actions;
extern Chat g_chat;
extern TalkActions* g_talkActions;
extern Spells* g_spells;
extern Vocations g_vocations;
extern GlobalEvents* g_globalEvents;
extern Events* g_events;
Game::Game() :
wildcardTree(false),
offlineTrainingWindow(std::numeric_limits<uint32_t>::max(), "Choose a Skill", "Please choose a skill:")
{
gameState = GAME_STATE_NORMAL;
worldType = WORLD_TYPE_PVP;
services = nullptr;
lastStageLevel = 0;
playersRecord = 0;
motdNum = 0;
useLastStageLevel = false;
stagesEnabled = false;
lastBucket = 0;
//(1440 minutes/day)/(3600 seconds/day)*10 seconds event interval
int32_t dayCycle = 3600;
lightHourDelta = 1440 * 10 / dayCycle;
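	// Added note (sketch): with dayCycle = 3600 real seconds per in-game day this gives
	// lightHourDelta = 1440 * 10 / 3600 = 4, i.e. roughly four in-game minutes of light
	// time per 10-second light check (per the interval noted in the comment above).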
lightHour = SUNRISE + (SUNSET - SUNRISE) / 2;
lightLevel = LIGHT_LEVEL_DAY;
lightState = LIGHT_STATE_DAY;
offlineTrainingWindow.choices.emplace_back("Sword Fighting and Shielding", SKILL_SWORD);
offlineTrainingWindow.choices.emplace_back("Axe Fighting and Shielding", SKILL_AXE);
offlineTrainingWindow.choices.emplace_back("Club Fighting and Shielding", SKILL_CLUB);
offlineTrainingWindow.choices.emplace_back("Distance Fighting and Shielding", SKILL_DISTANCE);
offlineTrainingWindow.choices.emplace_back("Magic Level and Shielding", SKILL_MAGLEVEL);
offlineTrainingWindow.buttons.emplace_back("Okay", 1);
offlineTrainingWindow.buttons.emplace_back("Cancel", 0);
offlineTrainingWindow.defaultEnterButton = 1;
offlineTrainingWindow.defaultEscapeButton = 0;
offlineTrainingWindow.priority = true;
}
Game::~Game()
{
for (const auto& it : guilds) {
delete it.second;
}
}
void Game::start(ServiceManager* servicer)
{
services = servicer;
g_scheduler.addEvent(createSchedulerTask(EVENT_LIGHTINTERVAL, std::bind(&Game::checkLight, this)));
g_scheduler.addEvent(createSchedulerTask(EVENT_CREATURE_THINK_INTERVAL, std::bind(&Game::checkCreatures, this, 0)));
g_scheduler.addEvent(createSchedulerTask(EVENT_DECAYINTERVAL, std::bind(&Game::checkDecay, this)));
}
GameState_t Game::getGameState() const
{
return gameState;
}
void Game::setWorldType(WorldType_t type)
{
worldType = type;
}
void Game::setGameState(GameState_t newState)
{
if (gameState == GAME_STATE_SHUTDOWN) {
return; //this cannot be stopped
}
if (gameState == newState) {
return;
}
gameState = newState;
switch (newState) {
case GAME_STATE_INIT: {
commands.loadFromXml();
loadExperienceStages();
groups.load();
g_chat.load();
Spawns::getInstance()->startup();
Raids::getInstance()->loadFromXml();
Raids::getInstance()->startup();
Quests::getInstance()->loadFromXml();
Mounts::getInstance()->loadFromXml();
loadMotdNum();
loadPlayersRecord();
g_globalEvents->startup();
break;
}
case GAME_STATE_SHUTDOWN: {
g_globalEvents->execute(GLOBALEVENT_SHUTDOWN);
//kick all players that are still online
auto it = players.begin();
while (it != players.end()) {
it->second->kickPlayer(true);
it = players.begin();
}
saveMotdNum();
saveGameState();
g_dispatcher.addTask(
createTask(std::bind(&Game::shutdown, this)));
g_scheduler.stop();
g_dispatcher.stop();
break;
}
case GAME_STATE_CLOSED: {
/* kick all players without the CanAlwaysLogin flag */
auto it = players.begin();
while (it != players.end()) {
if (!it->second->hasFlag(PlayerFlag_CanAlwaysLogin)) {
it->second->kickPlayer(true);
it = players.begin();
} else {
++it;
}
}
saveGameState();
break;
}
default:
break;
}
}
void Game::saveGameState()
{
if (gameState == GAME_STATE_NORMAL) {
setGameState(GAME_STATE_MAINTAIN);
}
std::cout << "Saving server..." << std::endl;
for (const auto& it : players) {
it.second->loginPosition = it.second->getPosition();
IOLoginData::savePlayer(it.second);
}
map.saveMap();
if (gameState == GAME_STATE_MAINTAIN) {
setGameState(GAME_STATE_NORMAL);
}
}
int32_t Game::loadMainMap(const std::string& filename)
{
Monster::despawnRange = g_config.getNumber(ConfigManager::DEFAULT_DESPAWNRANGE);
Monster::despawnRadius = g_config.getNumber(ConfigManager::DEFAULT_DESPAWNRADIUS);
return map.loadMap("data/world/" + filename + ".otbm");
}
void Game::loadMap(const std::string& path)
{
map.loadMap(path);
}
Cylinder* Game::internalGetCylinder(Player* player, const Position& pos)
{
if (pos.x != 0xFFFF) {
return getTile(pos.x, pos.y, pos.z);
}
//container
if (pos.y & 0x40) {
uint8_t from_cid = pos.y & 0x0F;
return player->getContainerByID(from_cid);
}
//inventory
return player;
}
Thing* Game::internalGetThing(Player* player, const Position& pos, int32_t index, uint32_t spriteId /*= 0*/, stackPosType_t type /*= STACKPOS_NORMAL*/)
{
if (pos.x != 0xFFFF) {
Tile* tile = getTile(pos.x, pos.y, pos.z);
if (tile) {
/*look at*/
if (type == STACKPOS_LOOK) {
return tile->getTopVisibleThing(player);
}
Thing* thing;
/*for move operations*/
if (type == STACKPOS_MOVE) {
Item* item = tile->getTopDownItem();
if (item && item->isMoveable()) {
thing = item;
} else {
thing = tile->getTopVisibleCreature(player);
}
} else if (type == STACKPOS_USEITEM) {
//First check items with topOrder 2 (ladders, signs, splashes)
Item* item = tile->getItemByTopOrder(2);
if (item && g_actions->hasAction(item)) {
thing = item;
} else {
//then down items
thing = tile->getTopDownItem();
if (!thing) {
thing = tile->getTopTopItem(); //then last we check items with topOrder 3 (doors etc)
if (!thing) {
thing = tile->ground;
}
}
}
} else if (type == STACKPOS_USE) {
thing = tile->getTopDownItem();<|fim▁hole|> } else {
thing = tile->__getThing(index);
}
if (player && tile->hasFlag(TILESTATE_SUPPORTS_HANGABLE)) {
//do extra checks here if the thing is accessable
if (thing && thing->getItem()) {
if (tile->hasProperty(CONST_PROP_ISVERTICAL)) {
if (player->getPosition().x + 1 == tile->getPosition().x) {
thing = nullptr;
}
} else { // horizontal
if (player->getPosition().y + 1 == tile->getPosition().y) {
thing = nullptr;
}
}
}
}
return thing;
}
} else {
//container
if (pos.y & 0x40) {
uint8_t fromCid = pos.y & 0x0F;
uint8_t slot = pos.z;
Container* parentContainer = player->getContainerByID(fromCid);
if (!parentContainer) {
return nullptr;
}
if (parentContainer->getID() == ITEM_BROWSEFIELD) {
Tile* tile = parentContainer->getTile();
if (tile && tile->hasFlag(TILESTATE_SUPPORTS_HANGABLE)) {
if (tile->hasProperty(CONST_PROP_ISVERTICAL)) {
if (player->getPosition().x + 1 == tile->getPosition().x) {
return nullptr;
}
} else { // horizontal
if (player->getPosition().y + 1 == tile->getPosition().y) {
return nullptr;
}
}
}
}
return parentContainer->getItemByIndex(player->getContainerIndex(fromCid) + slot);
} else if (pos.y == 0 && pos.z == 0) {
const ItemType& it = Item::items.getItemIdByClientId(spriteId);
if (it.id == 0) {
return nullptr;
}
int32_t subType;
if (it.isFluidContainer() && index < int32_t(sizeof(reverseFluidMap) / sizeof(int8_t))) {
subType = reverseFluidMap[index];
} else {
subType = -1;
}
return findItemOfType(player, it.id, true, subType);
} else { //inventory
slots_t slot = static_cast<slots_t>(pos.y);
return player->getInventoryItem(slot);
}
}
return nullptr;
}
void Game::internalGetPosition(Item* item, Position& pos, uint8_t& stackpos)
{
pos.x = 0;
pos.y = 0;
pos.z = 0;
stackpos = 0;
Cylinder* topParent = item->getTopParent();
if (topParent) {
if (Player* player = dynamic_cast<Player*>(topParent)) {
pos.x = 0xFFFF;
Container* container = dynamic_cast<Container*>(item->getParent());
if (container) {
pos.y = (uint16_t)0x40 | (uint16_t)player->getContainerID(container);
pos.z = container->__getIndexOfThing(item);
stackpos = pos.z;
} else {
pos.y = player->__getIndexOfThing(item);
stackpos = pos.y;
}
} else if (Tile* tile = topParent->getTile()) {
pos = tile->getPosition();
stackpos = tile->__getIndexOfThing(item);
}
}
}
void Game::setTile(Tile* newTile)
{
return map.setTile(newTile->getPosition(), newTile);
}
Tile* Game::getTile(int32_t x, int32_t y, int32_t z)
{
return map.getTile(x, y, z);
}
Tile* Game::getTile(const Position& pos)
{
return map.getTile(pos.x, pos.y, pos.z);
}
QTreeLeafNode* Game::getLeaf(uint32_t x, uint32_t y)
{
return map.getLeaf(x, y);
}
Creature* Game::getCreatureByID(uint32_t id)
{
if (id <= Player::playerAutoID) {
return getPlayerByID(id);
} else if (id <= Monster::monsterAutoID) {
return getMonsterByID(id);
} else if (id <= Npc::npcAutoID) {
return getNpcByID(id);
}
return nullptr;
}
Monster* Game::getMonsterByID(uint32_t id)
{
if (id == 0) {
return nullptr;
}
auto it = monsters.find(id);
if (it == monsters.end()) {
return nullptr;
}
return it->second;
}
Npc* Game::getNpcByID(uint32_t id)
{
if (id == 0) {
return nullptr;
}
auto it = npcs.find(id);
if (it == npcs.end()) {
return nullptr;
}
return it->second;
}
Player* Game::getPlayerByID(uint32_t id)
{
if (id == 0) {
return nullptr;
}
auto it = players.find(id);
if (it == players.end()) {
return nullptr;
}
return it->second;
}
Creature* Game::getCreatureByName(const std::string& s)
{
if (s.empty()) {
return nullptr;
}
const std::string& lowerCaseName = asLowerCaseString(s);
auto m_it = mappedPlayerNames.find(lowerCaseName);
if (m_it != mappedPlayerNames.end()) {
return m_it->second;
}
for (const auto& it : npcs) {
if (lowerCaseName == asLowerCaseString(it.second->getName())) {
return it.second;
}
}
for (const auto& it : monsters) {
if (lowerCaseName == asLowerCaseString(it.second->getName())) {
return it.second;
}
}
return nullptr;
}
Npc* Game::getNpcByName(const std::string& s)
{
if (s.empty()) {
return nullptr;
}
const char* npcName = s.c_str();
for (const auto& it : npcs) {
if (strcasecmp(npcName, it.second->getName().c_str()) == 0) {
return it.second;
}
}
return nullptr;
}
Player* Game::getPlayerByName(const std::string& s)
{
if (s.empty()) {
return nullptr;
}
auto it = mappedPlayerNames.find(asLowerCaseString(s));
if (it == mappedPlayerNames.end()) {
return nullptr;
}
return it->second;
}
Player* Game::getPlayerByGUID(const uint32_t& guid)
{
if (guid == 0) {
return nullptr;
}
for (const auto& it : players) {
if (guid == it.second->getGUID()) {
return it.second;
}
}
return nullptr;
}
ReturnValue Game::getPlayerByNameWildcard(const std::string& s, Player*& player)
{
size_t strlen = s.length();
if (strlen == 0 || strlen > 20) {
return RET_PLAYERWITHTHISNAMEISNOTONLINE;
}
if (s.back() == '~') {
const std::string& query = asLowerCaseString(s.substr(0, strlen - 1));
std::string result;
ReturnValue ret = wildcardTree.findOne(query, result);
if (ret != RET_NOERROR) {
return ret;
}
player = getPlayerByName(result);
} else {
player = getPlayerByName(s);
}
if (!player) {
return RET_PLAYERWITHTHISNAMEISNOTONLINE;
}
return RET_NOERROR;
}
Player* Game::getPlayerByAccount(uint32_t acc)
{
for (const auto& it : players) {
if (it.second->getAccount() == acc) {
return it.second;
}
}
return nullptr;
}
bool Game::internalPlaceCreature(Creature* creature, const Position& pos, bool extendedPos /*=false*/, bool forced /*= false*/)
{
if (creature->getParent() != nullptr) {
return false;
}
if (!map.placeCreature(pos, creature, extendedPos, forced)) {
return false;
}
creature->useThing2();
creature->setID();
creature->addList();
if (!creature->getPlayer()) {
g_events->eventMonsterOnAppear(creature);
}
return true;
}
bool Game::placeCreature(Creature* creature, const Position& pos, bool extendedPos /*=false*/, bool forced /*= false*/)
{
if (!internalPlaceCreature(creature, pos, extendedPos, forced)) {
return false;
}
SpectatorVec list;
getSpectators(list, creature->getPosition(), true);
for (Creature* spectator : list) {
if (Player* tmpPlayer = spectator->getPlayer()) {
tmpPlayer->sendCreatureAppear(creature, creature->getPosition(), true);
}
}
for (Creature* spectator : list) {
spectator->onCreatureAppear(creature, true);
}
Cylinder* creatureParent = creature->getParent();
int32_t newIndex = creatureParent->__getIndexOfThing(creature);
creatureParent->postAddNotification(creature, nullptr, newIndex);
// TODO: Move this code to Player::onCreatureAppear where creature == this.
Player* player = creature->getPlayer();
if (player) {
int32_t offlineTime;
if (player->getLastLogout() != 0) {
// Not counting more than 21 days to prevent overflow when multiplying with 1000 (for milliseconds).
offlineTime = std::min<int32_t>(time(nullptr) - player->getLastLogout(), 86400 * 21);
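			// Added note (sketch of the bound): 86400 * 21 = 1,814,400 s, and * 1000 = 1,814,400,000 ms,
			// which still fits in a signed 32-bit int (max 2,147,483,647), so the later millisecond
			// conversions cannot overflow.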
} else {
offlineTime = 0;
}
Condition* conditionMuted = player->getCondition(CONDITION_MUTED, CONDITIONID_DEFAULT);
if (conditionMuted && conditionMuted->getTicks() > 0) {
conditionMuted->setTicks(conditionMuted->getTicks() - (offlineTime * 1000));
if (conditionMuted->getTicks() <= 0) {
player->removeCondition(conditionMuted);
} else {
player->addCondition(conditionMuted->clone());
}
}
Condition* conditionTrade = player->getCondition(CONDITION_CHANNELMUTEDTICKS, CONDITIONID_DEFAULT, CHANNEL_ADVERTISING);
if (conditionTrade && conditionTrade->getTicks() > 0) {
conditionTrade->setTicks(conditionTrade->getTicks() - (offlineTime * 1000));
if (conditionTrade->getTicks() <= 0) {
player->removeCondition(conditionTrade);
} else {
player->addCondition(conditionTrade->clone());
}
}
Condition* conditionTradeRook = player->getCondition(CONDITION_CHANNELMUTEDTICKS, CONDITIONID_DEFAULT, CHANNEL_ADVERTISINGROOKGAARD);
if (conditionTradeRook && conditionTradeRook->getTicks() > 0) {
conditionTradeRook->setTicks(conditionTradeRook->getTicks() - (offlineTime * 1000));
if (conditionTradeRook->getTicks() <= 0) {
player->removeCondition(conditionTradeRook);
} else {
player->addCondition(conditionTradeRook->clone());
}
}
Condition* conditionHelp = player->getCondition(CONDITION_CHANNELMUTEDTICKS, CONDITIONID_DEFAULT, CHANNEL_HELP);
if (conditionHelp && conditionHelp->getTicks() > 0) {
conditionHelp->setTicks(conditionHelp->getTicks() - (offlineTime * 1000));
if (conditionHelp->getTicks() <= 0) {
player->removeCondition(conditionHelp);
} else {
player->addCondition(conditionHelp->clone());
}
}
Condition* conditionYell = player->getCondition(CONDITION_YELLTICKS, CONDITIONID_DEFAULT);
if (conditionYell && conditionYell->getTicks() > 0) {
conditionYell->setTicks(conditionYell->getTicks() - (offlineTime * 1000));
if (conditionYell->getTicks() <= 0) {
player->removeCondition(conditionYell);
} else {
player->addCondition(conditionYell->clone());
}
}
if (player->isPremium()) {
int32_t value;
player->getStorageValue(STORAGEVALUE_PROMOTION, value);
if (player->isPromoted() && value != 1) {
player->addStorageValue(STORAGEVALUE_PROMOTION, 1);
} else if (!player->isPromoted() && value == 1) {
player->setVocation(g_vocations.getPromotedVocation(player->getVocationId()));
}
} else if (player->isPromoted()) {
player->setVocation(player->vocation->getFromVocation());
}
bool sentStats = false;
int16_t oldStaminaMinutes = player->getStaminaMinutes();
player->regenerateStamina(offlineTime);
int32_t offlineTrainingSkill = player->getOfflineTrainingSkill();
if (offlineTrainingSkill != -1) {
player->setOfflineTrainingSkill(-1);
uint32_t offlineTrainingTime = std::max<int32_t>(0, std::min<int32_t>(offlineTime, std::min<int32_t>(43200, player->getOfflineTrainingTime() / 1000)));
if (offlineTime >= 600) {
player->removeOfflineTrainingTime(offlineTrainingTime * 1000);
int32_t remainder = offlineTime - offlineTrainingTime;
if (remainder > 0) {
player->addOfflineTrainingTime(remainder * 1000);
}
if (offlineTrainingTime >= 60) {
std::ostringstream ss;
ss << "During your absence you trained for ";
int32_t hours = offlineTrainingTime / 3600;
if (hours > 1) {
ss << hours << " hours";
} else if (hours == 1) {
ss << "1 hour";
}
int32_t minutes = (offlineTrainingTime % 3600) / 60;
if (minutes != 0) {
if (hours != 0) {
ss << " and ";
}
if (minutes > 1) {
ss << minutes << " minutes";
} else {
ss << "1 minute";
}
}
ss << '.';
player->sendTextMessage(MESSAGE_EVENT_ADVANCE, ss.str());
Vocation* vocation;
if (player->isPromoted()) {
vocation = player->getVocation();
} else {
int32_t promotedVocationId = g_vocations.getPromotedVocation(player->getVocationId());
vocation = g_vocations.getVocation(promotedVocationId);
if (!vocation) {
vocation = player->getVocation();
}
}
bool sendUpdateSkills = false;
if (offlineTrainingSkill == SKILL_CLUB || offlineTrainingSkill == SKILL_SWORD || offlineTrainingSkill == SKILL_AXE) {
float modifier = vocation->getAttackSpeed() / 1000.f;
sendUpdateSkills = player->addOfflineTrainingTries((skills_t)offlineTrainingSkill, (offlineTrainingTime / modifier) / 2);
} else if (offlineTrainingSkill == SKILL_DISTANCE) {
float modifier = vocation->getAttackSpeed() / 1000.f;
sendUpdateSkills = player->addOfflineTrainingTries((skills_t)offlineTrainingSkill, (offlineTrainingTime / modifier) / 4);
} else if (offlineTrainingSkill == SKILL_MAGLEVEL) {
int32_t gainTicks = vocation->getManaGainTicks() * 2;
if (gainTicks == 0) {
gainTicks = 1;
}
player->addOfflineTrainingTries(SKILL_MAGLEVEL, offlineTrainingTime * (vocation->getManaGainAmount() / gainTicks));
}
if (player->addOfflineTrainingTries(SKILL_SHIELD, offlineTrainingTime / 4) || sendUpdateSkills) {
player->sendSkills();
}
}
player->sendStats();
sentStats = true;
} else {
player->sendTextMessage(MESSAGE_EVENT_ADVANCE, "You must be logged out for more than 10 minutes to start offline training.");
}
} else {
uint16_t oldMinutes = player->getOfflineTrainingTime() / 60 / 1000;
player->addOfflineTrainingTime(offlineTime * 1000);
uint16_t newMinutes = player->getOfflineTrainingTime() / 60 / 1000;
if (oldMinutes != newMinutes) {
player->sendStats();
sentStats = true;
}
}
if (!sentStats && player->getStaminaMinutes() != oldStaminaMinutes) {
player->sendStats();
}
}
addCreatureCheck(creature);
creature->onPlacedCreature();
return true;
}
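//removes a creature from the map, notifies spectators and event handlers, and recursively removes its summons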
bool Game::removeCreature(Creature* creature, bool isLogout /*= true*/)
{
if (creature->isRemoved()) {
return false;
}
Tile* tile = creature->getTile();
std::vector<int32_t> oldStackPosVector;
SpectatorVec list;
getSpectators(list, tile->getPosition(), true);
for (Creature* spectator : list) {
if (Player* player = spectator->getPlayer()) {
oldStackPosVector.push_back(player->canSeeCreature(creature) ? tile->getStackposOfCreature(player, creature) : -1);
}
}
int32_t index = tile->__getIndexOfThing(creature);
if (!Map::removeCreature(creature)) {
return false;
}
const Position& tilePosition = tile->getPosition();
//send to client
size_t i = 0;
for (Creature* spectator : list) {
if (Player* player = spectator->getPlayer()) {
player->sendRemoveTileThing(tilePosition, oldStackPosVector[i++]);
}
}
//event method
for (Creature* spectator : list) {
spectator->onCreatureDisappear(creature, index, isLogout);
}
creature->getParent()->postRemoveNotification(creature, nullptr, index, true);
creature->removeList();
creature->setRemoved();
ReleaseCreature(creature);
removeCreatureCheck(creature);
for (Creature* summon : creature->summons) {
summon->setLossSkill(false);
removeCreature(summon);
}
creature->onRemovedCreature();
return true;
}
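//client move request: resolves the selected thing and dispatches to playerMoveCreature or playerMoveItem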
void Game::playerMoveThing(uint32_t playerId, const Position& fromPos,
uint16_t spriteId, uint8_t fromStackPos, const Position& toPos, uint8_t count)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
uint8_t fromIndex = 0;
if (fromPos.x == 0xFFFF) {
if (fromPos.y & 0x40) {
fromIndex = fromPos.z;
} else {
fromIndex = fromPos.y;
}
} else {
fromIndex = fromStackPos;
}
Thing* thing = internalGetThing(player, fromPos, fromIndex, spriteId, STACKPOS_MOVE);
if (!thing) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Cylinder* toCylinder = internalGetCylinder(player, toPos);
if (!toCylinder) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
if (Creature* movingCreature = thing->getCreature()) {
if (Position::areInRange<1, 1, 0>(movingCreature->getPosition(), player->getPosition())) {
SchedulerTask* task = createSchedulerTask(1000,
std::bind(&Game::playerMoveCreature, this, player->getID(),
movingCreature->getID(), movingCreature->getPosition(), toCylinder->getPosition()));
player->setNextActionTask(task);
} else {
playerMoveCreature(playerId, movingCreature->getID(), movingCreature->getPosition(), toCylinder->getPosition());
}
} else if (thing->getItem()) {
playerMoveItem(playerId, fromPos, spriteId, fromStackPos, toPos, count);
}
}
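//player pushes a creature: walk into range if necessary, then validate pushability, throw distance and the destination tile before moving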
void Game::playerMoveCreature(uint32_t playerId, uint32_t movingCreatureId,
const Position& movingCreatureOrigPos, const Position& toPos)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Tile* toTile = getTile(toPos);
if (!toTile) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Creature* movingCreature = getCreatureByID(movingCreatureId);
if (!movingCreature) {
return;
}
if (!player->canDoAction()) {
uint32_t delay = player->getNextActionTime();
SchedulerTask* task = createSchedulerTask(delay, std::bind(&Game::playerMoveCreature,
this, playerId, movingCreatureId, movingCreatureOrigPos, toPos));
player->setNextActionTask(task);
return;
}
player->setNextActionTask(nullptr);
if (!Position::areInRange<1, 1, 0>(movingCreatureOrigPos, player->getPosition())) {
//need to walk to the creature first before moving it
std::list<Direction> listDir;
if (player->getPathTo(movingCreatureOrigPos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(1500, std::bind(&Game::playerMoveCreature, this,
playerId, movingCreatureId, movingCreatureOrigPos, toPos));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
if ((!movingCreature->isPushable() && !player->hasFlag(PlayerFlag_CanPushAllCreatures)) ||
(movingCreature->isInGhostMode() && !player->isAccessPlayer())) {
player->sendCancelMessage(RET_NOTMOVEABLE);
return;
}
//check throw distance
const Position& movingCreaturePos = movingCreature->getPosition();
if ((Position::getDistanceX(movingCreaturePos, toPos) > movingCreature->getThrowRange()) ||
(Position::getDistanceY(movingCreaturePos, toPos) > movingCreature->getThrowRange()) ||
(Position::getDistanceZ(movingCreaturePos, toPos) * 4 > movingCreature->getThrowRange())) {
player->sendCancelMessage(RET_DESTINATIONOUTOFREACH);
return;
}
Tile* movingCreatureTile = movingCreature->getTile();
if (!movingCreatureTile) {
player->sendCancelMessage(RET_NOTMOVEABLE);
return;
}
if (player != movingCreature) {
if (toTile->hasProperty(CONST_PROP_BLOCKPATH)) {
player->sendCancelMessage(RET_NOTENOUGHROOM);
return;
} else if ((movingCreature->getZone() == ZONE_PROTECTION && !toTile->hasFlag(TILESTATE_PROTECTIONZONE)) ||
(movingCreature->getZone() == ZONE_NOPVP && !toTile->hasFlag(TILESTATE_NOPVPZONE))) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
} else {
if (CreatureVector* tileCreatures = toTile->getCreatures()) {
for (Creature* tileCreature : *tileCreatures) {
if (!tileCreature->isInGhostMode()) {
player->sendCancelMessage(RET_NOTENOUGHROOM);
return;
}
}
}
Npc* movingNpc = movingCreature->getNpc();
if (movingNpc && !Spawns::getInstance()->isInZone(movingNpc->getMasterPos(), movingNpc->getMasterRadius(), toPos)) {
player->sendCancelMessage(RET_NOTENOUGHROOM);
return;
}
}
}
if (!g_events->eventPlayerOnMoveCreature(player, movingCreature, movingCreaturePos, toPos)) {
return;
}
ReturnValue ret = internalMoveCreature(movingCreature, movingCreatureTile, toTile);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
}
}
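//moves a creature one step in the given direction; for players on straight moves this also handles automatic floor change (going up or down via tile height)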
ReturnValue Game::internalMoveCreature(Creature* creature, Direction direction, uint32_t flags /*= 0*/)
{
Cylinder* fromTile = creature->getTile();
Cylinder* toTile = nullptr;
creature->setLastPosition(creature->getPosition());
const Position& currentPos = creature->getPosition();
Position destPos = currentPos;
bool diagonalMovement;
switch (direction) {
case NORTHWEST:
case NORTHEAST:
case SOUTHWEST:
case SOUTHEAST:
diagonalMovement = true;
break;
default:
diagonalMovement = false;
break;
}
destPos = getNextPosition(direction, destPos);
if (creature->getPlayer() && !diagonalMovement) {
//try go up
if (currentPos.z != 8 && creature->getTile()->hasHeight(3)) {
Tile* tmpTile = getTile(currentPos.x, currentPos.y, currentPos.getZ() - 1);
if (tmpTile == nullptr || (tmpTile->ground == nullptr && !tmpTile->hasProperty(CONST_PROP_BLOCKSOLID))) {
tmpTile = getTile(destPos.x, destPos.y, destPos.getZ() - 1);
if (tmpTile && tmpTile->ground && !tmpTile->hasProperty(CONST_PROP_BLOCKSOLID)) {
flags |= FLAG_IGNOREBLOCKITEM | FLAG_IGNOREBLOCKCREATURE;
if (!tmpTile->floorChange()) {
destPos.z--;
}
}
}
} else {
//try go down
Tile* tmpTile = getTile(destPos);
if (currentPos.z != 7 && (tmpTile == nullptr || (tmpTile->ground == nullptr && !tmpTile->hasProperty(CONST_PROP_BLOCKSOLID)))) {
tmpTile = getTile(destPos.x, destPos.y, destPos.z + 1);
if (tmpTile && tmpTile->hasHeight(3)) {
flags |= FLAG_IGNOREBLOCKITEM | FLAG_IGNOREBLOCKCREATURE;
destPos.z++;
}
}
}
}
toTile = getTile(destPos);
ReturnValue ret = RET_NOTPOSSIBLE;
if (toTile != nullptr) {
ret = internalMoveCreature(creature, fromTile, toTile, flags);
}
return ret;
}
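//moves a creature between cylinders, following the destination chain returned by __queryDestination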
ReturnValue Game::internalMoveCreature(Creature* creature, Cylinder* fromCylinder, Cylinder* toCylinder, uint32_t flags /*= 0*/)
{
//check if we can move the creature to the destination
ReturnValue ret = toCylinder->__queryAdd(0, creature, 1, flags);
if (ret != RET_NOERROR) {
return ret;
}
fromCylinder->getTile()->moveCreature(creature, toCylinder);
int32_t index = 0;
Item* toItem = nullptr;
Cylinder* subCylinder = nullptr;
uint32_t n = 0;
while ((subCylinder = toCylinder->__queryDestination(index, creature, &toItem, flags)) != toCylinder) {
toCylinder->getTile()->moveCreature(creature, subCylinder);
if (creature->getParent() != subCylinder) {
//could happen if a script moves the creature
break;
}
toCylinder = subCylinder;
flags = 0;
//to prevent infinite loop
if (++n >= MAP_MAX_LAYERS) {
break;
}
}
return RET_NOERROR;
}
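//player moves an item: validates distance, throw range and hangable placement, walking the player closer when required, then calls internalMoveItem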
void Game::playerMoveItem(uint32_t playerId, const Position& fromPos,
uint16_t spriteId, uint8_t fromStackPos, const Position& toPos, uint8_t count)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->canDoAction()) {
uint32_t delay = player->getNextActionTime();
SchedulerTask* task = createSchedulerTask(delay, std::bind(&Game::playerMoveItem, this,
playerId, fromPos, spriteId, fromStackPos, toPos, count));
player->setNextActionTask(task);
return;
}
player->setNextActionTask(nullptr);
Cylinder* fromCylinder = internalGetCylinder(player, fromPos);
uint8_t fromIndex = 0;
if (fromPos.x == 0xFFFF) {
if (fromPos.y & 0x40) {
fromIndex = fromPos.z;
} else {
fromIndex = static_cast<uint8_t>(fromPos.y);
}
} else {
fromIndex = fromStackPos;
}
Thing* thing = internalGetThing(player, fromPos, fromIndex, spriteId, STACKPOS_MOVE);
if (!thing || !thing->getItem()) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Item* item = thing->getItem();
Cylinder* toCylinder = internalGetCylinder(player, toPos);
uint8_t toIndex = 0;
if (toPos.x == 0xFFFF) {
if (toPos.y & 0x40) {
toIndex = toPos.z;
} else {
toIndex = toPos.y;
}
}
if (fromCylinder == nullptr || toCylinder == nullptr || item == nullptr || item->getClientID() != spriteId) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
if (!item->isPushable() || item->hasAttribute(ITEM_ATTRIBUTE_UNIQUEID)) {
player->sendCancelMessage(RET_NOTMOVEABLE);
return;
}
const Position& playerPos = player->getPosition();
const Position& mapFromPos = fromCylinder->getTile()->getPosition();
if (playerPos.z != mapFromPos.z) {
player->sendCancelMessage(playerPos.z > mapFromPos.z ? RET_FIRSTGOUPSTAIRS : RET_FIRSTGODOWNSTAIRS);
return;
}
if (!Position::areInRange<1, 1>(playerPos, mapFromPos)) {
//need to walk to the item first before using it
std::list<Direction> listDir;
if (player->getPathTo(item->getPosition(), listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerMoveItem, this,
playerId, fromPos, spriteId, fromStackPos, toPos, count));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
const Tile* toCylinderTile = toCylinder->getTile();
const Position& mapToPos = toCylinderTile->getPosition();
//hangable item specific code
if (item->isHangable() && toCylinderTile->hasFlag(TILESTATE_SUPPORTS_HANGABLE)) {
//destination supports hangable objects, so the player may need to walk to the correct side first
bool vertical = toCylinderTile->hasProperty(CONST_PROP_ISVERTICAL);
if (vertical) {
if (playerPos.x + 1 == mapToPos.x) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
} else { // horizontal
if (playerPos.y + 1 == mapToPos.y) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
}
if (!Position::areInRange<1, 1, 0>(playerPos, mapToPos)) {
Position walkPos = mapToPos;
if (vertical) {
walkPos.x++;
} else {
walkPos.y++;
}
Position itemPos = fromPos;
uint8_t itemStackPos = fromStackPos;
if (fromPos.x != 0xFFFF && Position::areInRange<1, 1>(mapFromPos, playerPos)
&& !Position::areInRange<1, 1, 0>(mapFromPos, walkPos)) {
//need to pickup the item first
Item* moveItem = nullptr;
ReturnValue ret = internalMoveItem(fromCylinder, player, INDEX_WHEREEVER, item, count, &moveItem);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
return;
}
//changing the position since it's now in the inventory of the player
internalGetPosition(moveItem, itemPos, itemStackPos);
}
std::list<Direction> listDir;
if (player->getPathTo(walkPos, listDir, 0, 0, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerMoveItem, this,
playerId, itemPos, spriteId, itemStackPos, toPos, count));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
}
if ((Position::getDistanceX(playerPos, mapToPos) > item->getThrowRange()) ||
(Position::getDistanceY(playerPos, mapToPos) > item->getThrowRange()) ||
(Position::getDistanceZ(mapFromPos, mapToPos) * 4 > item->getThrowRange())) {
player->sendCancelMessage(RET_DESTINATIONOUTOFREACH);
return;
}
if (!canThrowObjectTo(mapFromPos, mapToPos)) {
player->sendCancelMessage(RET_CANNOTTHROW);
return;
}
if (!g_events->eventPlayerOnMoveItem(player, item, count, fromPos, toPos)) {
return;
}
ReturnValue ret = internalMoveItem(fromCylinder, toCylinder, toIndex, item, count, nullptr, 0, player);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
}
}
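//core item move: resolves the final destination cylinder, handles item exchange, stack merging and partial moves, and posts add/remove notifications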
ReturnValue Game::internalMoveItem(Cylinder* fromCylinder, Cylinder* toCylinder, int32_t index,
Item* item, uint32_t count, Item** _moveItem, uint32_t flags /*= 0*/, Creature* actor/* = nullptr*/, Item* tradeItem/* = nullptr*/)
{
if (!toCylinder) {
return RET_NOTPOSSIBLE;
}
Tile* fromTile = fromCylinder->getTile();
if (fromTile) {
auto it = browseFields.find(fromTile);
if (it != browseFields.end() && it->second == fromCylinder) {
fromCylinder = fromTile;
}
}
Item* toItem = nullptr;
Cylinder* subCylinder;
int floorN = 0;
while ((subCylinder = toCylinder->__queryDestination(index, item, &toItem, flags)) != toCylinder) {
toCylinder = subCylinder;
flags = 0;
//to prevent infinite loop
if (++floorN >= MAP_MAX_LAYERS) {
break;
}
}
//destination is the same as the source?
if (item == toItem) {
return RET_NOERROR; //silently ignore move
}
//check if we can add this item
ReturnValue ret = toCylinder->__queryAdd(index, item, count, flags, actor);
if (ret == RET_NEEDEXCHANGE) {
//check if we can add it to source cylinder
ret = fromCylinder->__queryAdd(fromCylinder->__getIndexOfThing(item), toItem, toItem->getItemCount(), 0);
if (ret == RET_NOERROR) {
//check how much we can move
uint32_t maxExchangeQueryCount = 0;
ReturnValue retExchangeMaxCount = fromCylinder->__queryMaxCount(INDEX_WHEREEVER, toItem, toItem->getItemCount(), maxExchangeQueryCount, 0);
if (retExchangeMaxCount != RET_NOERROR && maxExchangeQueryCount == 0) {
return retExchangeMaxCount;
}
if (toCylinder->__queryRemove(toItem, toItem->getItemCount(), flags) == RET_NOERROR) {
int32_t oldToItemIndex = toCylinder->__getIndexOfThing(toItem);
toCylinder->__removeThing(toItem, toItem->getItemCount());
fromCylinder->__addThing(toItem);
if (oldToItemIndex != -1) {
toCylinder->postRemoveNotification(toItem, fromCylinder, oldToItemIndex, true);
}
int32_t newToItemIndex = fromCylinder->__getIndexOfThing(toItem);
if (newToItemIndex != -1) {
fromCylinder->postAddNotification(toItem, toCylinder, newToItemIndex);
}
ret = toCylinder->__queryAdd(index, item, count, flags);
toItem = nullptr;
}
}
}
if (ret != RET_NOERROR) {
return ret;
}
//check how much we can move
uint32_t maxQueryCount = 0;
ReturnValue retMaxCount = toCylinder->__queryMaxCount(index, item, count, maxQueryCount, flags);
if (retMaxCount != RET_NOERROR && maxQueryCount == 0) {
return retMaxCount;
}
uint32_t m;
if (item->isStackable()) {
m = std::min<uint32_t>(count, maxQueryCount);
} else {
m = maxQueryCount;
}
Item* moveItem = item;
//check if we can remove this item
ret = fromCylinder->__queryRemove(item, m, flags);
if (ret != RET_NOERROR) {
return ret;
}
if (tradeItem) {
if (toCylinder->getItem() == tradeItem) {
return RET_NOTENOUGHROOM;
}
Cylinder* tmpCylinder = toCylinder->getParent();
while (tmpCylinder) {
if (tmpCylinder->getItem() == tradeItem) {
return RET_NOTENOUGHROOM;
}
tmpCylinder = tmpCylinder->getParent();
}
}
//remove the item
int32_t itemIndex = fromCylinder->__getIndexOfThing(item);
Item* updateItem = nullptr;
fromCylinder->__removeThing(item, m);
bool isCompleteRemoval = item->isRemoved();
//update item(s)
if (item->isStackable()) {
uint32_t n;
if (toItem && toItem->getID() == item->getID()) {
n = std::min<uint32_t>(100 - toItem->getItemCount(), m);
toCylinder->__updateThing(toItem, toItem->getID(), toItem->getItemCount() + n);
updateItem = toItem;
} else {
n = 0;
}
int32_t newCount = m - n;
if (newCount > 0) {
moveItem = Item::CreateItem(item->getID(), newCount);
} else {
moveItem = nullptr;
}
if (item->isRemoved()) {
ReleaseItem(item);
}
}
//add item
if (moveItem /*m - n > 0*/) {
toCylinder->__addThing(index, moveItem);
}
if (itemIndex != -1) {
fromCylinder->postRemoveNotification(item, toCylinder, itemIndex, isCompleteRemoval);
}
if (moveItem) {
int32_t moveItemIndex = toCylinder->__getIndexOfThing(moveItem);
if (moveItemIndex != -1) {
toCylinder->postAddNotification(moveItem, fromCylinder, moveItemIndex);
}
}
if (updateItem) {
int32_t updateItemIndex = toCylinder->__getIndexOfThing(updateItem);
if (updateItemIndex != -1) {
toCylinder->postAddNotification(updateItem, fromCylinder, updateItemIndex);
}
}
if (_moveItem) {
if (moveItem) {
*_moveItem = moveItem;
} else {
*_moveItem = item;
}
}
//we could not move all, inform the player
if (item->isStackable() && maxQueryCount < count) {
return retMaxCount;
}
return ret;
}
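//convenience overload of internalAddItem that discards the remainder count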
ReturnValue Game::internalAddItem(Cylinder* toCylinder, Item* item, int32_t index /*= INDEX_WHEREEVER*/,
uint32_t flags/* = 0*/, bool test/* = false*/)
{
uint32_t remainderCount = 0;
return internalAddItem(toCylinder, item, index, flags, test, remainderCount);
}
ReturnValue Game::internalAddItem(Cylinder* toCylinder, Item* item, int32_t index,
uint32_t flags, bool test, uint32_t& remainderCount)
{
remainderCount = 0;
if (toCylinder == nullptr || item == nullptr) {
return RET_NOTPOSSIBLE;
}
Cylinder* destCylinder = toCylinder;
Item* toItem = nullptr;
toCylinder = toCylinder->__queryDestination(index, item, &toItem, flags);
//check if we can add this item
ReturnValue ret = toCylinder->__queryAdd(index, item, item->getItemCount(), flags);
if (ret != RET_NOERROR) {
return ret;
}
/*
Check if we can add the whole amount; we do this by checking against the original cylinder,
since queryDestination can return a cylinder that might only hold part of the full amount.
*/
uint32_t maxQueryCount = 0;
ret = destCylinder->__queryMaxCount(INDEX_WHEREEVER, item, item->getItemCount(), maxQueryCount, flags);
if (ret != RET_NOERROR) {
return ret;
}
if (test) {
return RET_NOERROR;
}
if (item->isStackable() && toItem && toItem->getID() == item->getID()) {
uint32_t m = std::min<uint32_t>(item->getItemCount(), maxQueryCount);
uint32_t n = 0;
if (toItem->getID() == item->getID()) {
n = std::min<uint32_t>(100 - toItem->getItemCount(), m);
toCylinder->__updateThing(toItem, toItem->getID(), toItem->getItemCount() + n);
}
int32_t count = m - n;
if (count > 0) {
if (item->getItemCount() != count) {
Item* remainderItem = Item::CreateItem(item->getID(), count);
if (internalAddItem(destCylinder, remainderItem, INDEX_WHEREEVER, flags, false) != RET_NOERROR) {
ReleaseItem(remainderItem);
remainderCount = count;
}
} else {
toCylinder->__addThing(index, item);
int32_t itemIndex = toCylinder->__getIndexOfThing(item);
if (itemIndex != -1) {
toCylinder->postAddNotification(item, nullptr, itemIndex);
}
}
} else {
//fully merged with toItem, item will be destroyed
item->onRemoved();
ReleaseItem(item);
int32_t itemIndex = toCylinder->__getIndexOfThing(toItem);
if (itemIndex != -1) {
toCylinder->postAddNotification(toItem, nullptr, itemIndex);
}
}
} else {
toCylinder->__addThing(index, item);
int32_t itemIndex = toCylinder->__getIndexOfThing(item);
if (itemIndex != -1) {
toCylinder->postAddNotification(item, nullptr, itemIndex);
}
}
return RET_NOERROR;
}
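//removes count of an item (all of it when count == -1) from its parent cylinder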
ReturnValue Game::internalRemoveItem(Item* item, int32_t count /*= -1*/, bool test /*= false*/, uint32_t flags /*= 0*/)
{
Cylinder* cylinder = item->getParent();
if (cylinder == nullptr) {
return RET_NOTPOSSIBLE;
}
Tile* fromTile = cylinder->getTile();
if (fromTile) {
auto it = browseFields.find(fromTile);
if (it != browseFields.end() && it->second == cylinder) {
cylinder = fromTile;
}
}
if (count == -1) {
count = item->getItemCount();
}
//check if we can remove this item
ReturnValue ret = cylinder->__queryRemove(item, count, flags | FLAG_IGNORENOTMOVEABLE);
if (ret != RET_NOERROR) {
return ret;
}
if (!item->canRemove()) {
return RET_NOTPOSSIBLE;
}
if (!test) {
int32_t index = cylinder->__getIndexOfThing(item);
//remove the item
cylinder->__removeThing(item, count);
bool isCompleteRemoval = false;
if (item->isRemoved()) {
isCompleteRemoval = true;
ReleaseItem(item);
}
cylinder->postRemoveNotification(item, nullptr, index, isCompleteRemoval);
}
item->onRemoved();
return RET_NOERROR;
}
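//adds an item to the player's inventory; any remainder that does not fit is placed on the player's tile, and with dropOnMap the whole item falls back to the tile on failure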
ReturnValue Game::internalPlayerAddItem(Player* player, Item* item, bool dropOnMap /*= true*/, slots_t slot /*= CONST_SLOT_WHEREEVER*/)
{
uint32_t remainderCount = 0;
ReturnValue ret = internalAddItem(player, item, (int32_t)slot, 0, false, remainderCount);
if (remainderCount > 0) {
Item* remainderItem = Item::CreateItem(item->getID(), remainderCount);
ReturnValue remaindRet = internalAddItem(player->getTile(), remainderItem, INDEX_WHEREEVER, FLAG_NOLIMIT);
if (remaindRet != RET_NOERROR) {
ReleaseItem(remainderItem);
}
}
if (ret != RET_NOERROR && dropOnMap) {
ret = internalAddItem(player->getTile(), item, INDEX_WHEREEVER, FLAG_NOLIMIT);
}
return ret;
}
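//returns the first item with the given id (and subtype, if specified) inside a cylinder, optionally searching nested containers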
Item* Game::findItemOfType(Cylinder* cylinder, uint16_t itemId,
bool depthSearch /*= true*/, int32_t subType /*= -1*/)
{
if (cylinder == nullptr) {
return nullptr;
}
std::vector<Container*> containers;
for (int32_t i = cylinder->__getFirstIndex(), j = cylinder->__getLastIndex(); i < j; ++i) {
Thing* thing = cylinder->__getThing(i);
if (!thing) {
continue;
}
Item* item = thing->getItem();
if (!item) {
continue;
}
if (item->getID() == itemId && (subType == -1 || subType == item->getSubType())) {
return item;
}
if (depthSearch) {
Container* container = item->getContainer();
if (container) {
containers.push_back(container);
}
}
}
size_t i = 0;
while (i < containers.size()) {
Container* container = containers[i++];
for (Item* item : container->getItemList()) {
if (item->getID() == itemId && (subType == -1 || subType == item->getSubType())) {
return item;
}
Container* tmpContainer = item->getContainer();
if (tmpContainer) {
containers.push_back(tmpContainer);
}
}
}
return nullptr;
}
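//gathers all coins inside the cylinder (including nested containers) and removes the requested amount, paying back change when a coin is worth more than what is owed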
bool Game::removeMoney(Cylinder* cylinder, uint64_t money, uint32_t flags /*= 0*/)
{
if (cylinder == nullptr) {
return false;
}
if (money == 0) {
return true;
}
std::vector<Container*> containers;
typedef std::multimap<uint64_t, Item*, std::less<uint64_t>> MoneyMap;
typedef MoneyMap::value_type moneymap_pair;
MoneyMap moneyMap;
uint64_t moneyCount = 0;
for (int32_t i = cylinder->__getFirstIndex(), j = cylinder->__getLastIndex(); i < j; ++i) {
Thing* thing = cylinder->__getThing(i);
if (!thing) {
continue;
}
Item* item = thing->getItem();
if (!item) {
continue;
}
Container* container = item->getContainer();
if (container) {
containers.push_back(container);
} else if (item->getWorth() != 0) {
moneyCount += item->getWorth();
moneyMap.insert(moneymap_pair(item->getWorth(), item));
}
}
size_t i = 0;
while (i < containers.size()) {
Container* container = containers[i++];
for (Item* item : container->getItemList()) {
Container* tmpContainer = item->getContainer();
if (tmpContainer) {
containers.push_back(tmpContainer);
} else if (item->getWorth() != 0) {
moneyCount += item->getWorth();
moneyMap.insert(moneymap_pair(item->getWorth(), item));
}
}
}
/*not enough money*/
if (moneyCount < money) {
return false;
}
for (MoneyMap::const_iterator mit = moneyMap.begin(), mend = moneyMap.end(); mit != mend && money > 0; ++mit) {
Item* item = mit->second;
internalRemoveItem(item);
if (mit->first > money) {
/* this coin is worth more than what is left to remove; give the change back */
uint64_t remaind = item->getWorth() - money;
addMoney(cylinder, remaind, flags);
money = 0;
} else {
money -= mit->first;
}
}
return money == 0;
}
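//pays out the given amount in crystal, platinum and gold coins; coins that do not fit are dropped on the cylinder's tile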
void Game::addMoney(Cylinder* cylinder, uint64_t money, uint32_t flags /*= 0*/)
{
uint32_t crys = money / 10000;
money -= crys * 10000;
while (crys > 0) {
Item* remaindItem = Item::CreateItem(ITEM_CRYSTAL_COIN, std::min<int32_t>(100, crys));
ReturnValue ret = internalAddItem(cylinder, remaindItem, INDEX_WHEREEVER, flags);
if (ret != RET_NOERROR) {
internalAddItem(cylinder->getTile(), remaindItem, INDEX_WHEREEVER, FLAG_NOLIMIT);
}
crys -= std::min<int32_t>(100, crys);
}
uint16_t plat = money / 100;
if (plat != 0) {
Item* remaindItem = Item::CreateItem(ITEM_PLATINUM_COIN, plat);
ReturnValue ret = internalAddItem(cylinder, remaindItem, INDEX_WHEREEVER, flags);
if (ret != RET_NOERROR) {
internalAddItem(cylinder->getTile(), remaindItem, INDEX_WHEREEVER, FLAG_NOLIMIT);
}
money -= plat * 100;
}
if (money != 0) {
Item* remaindItem = Item::CreateItem(ITEM_GOLD_COIN, money);
ReturnValue ret = internalAddItem(cylinder, remaindItem, INDEX_WHEREEVER, flags);
if (ret != RET_NOERROR) {
internalAddItem(cylinder->getTile(), remaindItem, INDEX_WHEREEVER, FLAG_NOLIMIT);
}
}
}
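//changes an item's id/count in place; when the new type is not compatible with the old one (different alwaysOnTop or item type) the item is replaced at the same position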
Item* Game::transformItem(Item* item, uint16_t newId, int32_t newCount /*= -1*/)
{
if (item->getID() == newId && (newCount == -1 || (newCount == item->getSubType() && newCount != 0))) { //chargeless item placed on map = infinite
return item;
}
Cylinder* cylinder = item->getParent();
if (cylinder == nullptr) {
return nullptr;
}
Tile* fromTile = cylinder->getTile();
if (fromTile) {
auto it = browseFields.find(fromTile);
if (it != browseFields.end() && it->second == cylinder) {
cylinder = fromTile;
}
}
int32_t itemIndex = cylinder->__getIndexOfThing(item);
if (itemIndex == -1) {
return item;
}
if (!item->canTransform()) {
return item;
}
const ItemType& curType = Item::items[item->getID()];
const ItemType& newType = Item::items[newId];
if (curType.alwaysOnTop != newType.alwaysOnTop) {
//This only occurs when you transform items on tiles from a downItem to a topItem (or vice versa)
//Remove the old, and add the new
ReturnValue ret = internalRemoveItem(item);
if (ret != RET_NOERROR) {
return item;
}
Item* newItem;
if (newCount == -1) {
newItem = Item::CreateItem(newId);
} else {
newItem = Item::CreateItem(newId, newCount);
}
if (!newItem) {
return nullptr;
}
newItem->stealAttributes(item);
ret = internalAddItem(cylinder, newItem, INDEX_WHEREEVER, FLAG_NOLIMIT);
if (ret != RET_NOERROR) {
delete newItem;
return nullptr;
}
return newItem;
}
if (curType.type == newType.type) {
//Both items have the same type so we can safely change id/subtype
if (newCount == 0 && (item->isStackable() || item->hasAttribute(ITEM_ATTRIBUTE_CHARGES))) {
if (item->isStackable()) {
internalRemoveItem(item);
return nullptr;
} else {
int32_t newItemId = newId;
if (curType.id == newType.id) {
newItemId = curType.decayTo;
}
if (newItemId == -1) {
internalRemoveItem(item);
return nullptr;
} else if (newItemId != newId) {
//Replacing the old item with the new one while maintaining the old position
Item* newItem = Item::CreateItem(newItemId, 1);
if (newItem == nullptr) {
return nullptr;
}
cylinder->__replaceThing(itemIndex, newItem);
cylinder->postAddNotification(newItem, cylinder, itemIndex);
item->setParent(nullptr);
cylinder->postRemoveNotification(item, cylinder, itemIndex, true);
ReleaseItem(item);
return newItem;
} else {
return transformItem(item, newItemId);
}
}
} else {
cylinder->postRemoveNotification(item, cylinder, itemIndex, false);
uint16_t itemId = item->getID();
int32_t count = item->getSubType();
if (curType.id != newType.id) {
if (newType.group != curType.group) {
item->setDefaultSubtype();
}
itemId = newId;
}
if (newCount != -1 && newType.hasSubType()) {
count = newCount;
}
cylinder->__updateThing(item, itemId, count);
cylinder->postAddNotification(item, cylinder, itemIndex);
return item;
}
}
//Replacing the old item with the new one while maintaining the old position
Item* newItem;
if (newCount == -1) {
newItem = Item::CreateItem(newId);
} else {
newItem = Item::CreateItem(newId, newCount);
}
if (newItem == nullptr) {
return nullptr;
}
cylinder->__replaceThing(itemIndex, newItem);
cylinder->postAddNotification(newItem, cylinder, itemIndex);
item->setParent(nullptr);
cylinder->postRemoveNotification(item, cylinder, itemIndex, true);
ReleaseItem(item);
return newItem;
}
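//teleports a creature or item straight to the given position without any distance checks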
ReturnValue Game::internalTeleport(Thing* thing, const Position& newPos, bool pushMove/* = true*/, uint32_t flags /*= 0*/)
{
if (newPos == thing->getPosition()) {
return RET_NOERROR;
} else if (thing->isRemoved()) {
return RET_NOTPOSSIBLE;
}
Tile* toTile = getTile(newPos.x, newPos.y, newPos.z);
if (toTile) {
if (Creature* creature = thing->getCreature()) {
ReturnValue ret = toTile->__queryAdd(0, creature, 1, FLAG_NOLIMIT);
if (ret != RET_NOERROR) {
return ret;
}
creature->getTile()->moveCreature(creature, toTile, !pushMove);
return RET_NOERROR;
} else if (Item* item = thing->getItem()) {
return internalMoveItem(item->getParent(), toTile, INDEX_WHEREEVER, item, item->getItemCount(), nullptr, flags);
}
}
return RET_NOTPOSSIBLE;
}
//Implementation of player invoked events
void Game::playerMove(uint32_t playerId, Direction direction)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->resetIdleTime();
player->setNextWalkActionTask(nullptr);
player->startAutoWalk(std::list<Direction> { direction });
}
bool Game::playerBroadcastMessage(Player* player, const std::string& text) const
{
if (!player->hasFlag(PlayerFlag_CanBroadcast)) {
return false;
}
std::cout << "> " << player->getName() << " broadcasted: \"" << text << "\"." << std::endl;
for (const auto& it : players) {
it.second->sendPrivateMessage(player, TALKTYPE_BROADCAST, text);
}
return true;
}
void Game::playerCreatePrivateChannel(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player || !player->isPremium()) {
return;
}
ChatChannel* channel = g_chat.createChannel(*player, CHANNEL_PRIVATE);
if (!channel || !channel->addUser(*player)) {
return;
}
player->sendCreatePrivateChannel(channel->getId(), channel->getName());
}
void Game::playerChannelInvite(uint32_t playerId, const std::string& name)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
PrivateChatChannel* channel = g_chat.getPrivateChannel(*player);
if (!channel) {
return;
}
Player* invitePlayer = getPlayerByName(name);
if (!invitePlayer) {
return;
}
if (player == invitePlayer) {
return;
}
channel->invitePlayer(*player, *invitePlayer);
}
void Game::playerChannelExclude(uint32_t playerId, const std::string& name)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
PrivateChatChannel* channel = g_chat.getPrivateChannel(*player);
if (!channel) {
return;
}
Player* excludePlayer = getPlayerByName(name);
if (!excludePlayer) {
return;
}
if (player == excludePlayer) {
return;
}
channel->excludePlayer(*player, *excludePlayer);
}
void Game::playerRequestChannels(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->sendChannelsDialog();
}
void Game::playerOpenChannel(uint32_t playerId, uint16_t channelId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
ChatChannel* channel = g_chat.addUserToChannel(*player, channelId);
if (!channel) {
return;
}
const InvitedMap* invitedUsers = channel->getInvitedUsersPtr();
const UsersMap* users;
if (!channel->isPublicChannel()) {
users = &channel->getUsers();
} else {
users = nullptr;
}
player->sendChannel(channel->getId(), channel->getName(), users, invitedUsers);
}
void Game::playerCloseChannel(uint32_t playerId, uint16_t channelId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
g_chat.removeUserFromChannel(*player, channelId);
}
void Game::playerOpenPrivateChannel(uint32_t playerId, std::string& receiver)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!IOLoginData::formatPlayerName(receiver)) {
player->sendCancel("A player with this name does not exist.");
return;
}
player->sendOpenPrivateChannel(receiver);
}
void Game::playerCloseNpcChannel(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
SpectatorVec list;
getSpectators(list, player->getPosition());
for (Creature* spectator : list) {
if (Npc* npc = spectator->getNpc()) {
npc->onPlayerCloseChannel(player);
}
}
}
void Game::playerReceivePing(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->receivePing();
}
void Game::playerReceivePingBack(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->sendPingBack();
}
void Game::playerAutoWalk(uint32_t playerId, const std::list<Direction>& listDir)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->resetIdleTime();
player->setNextWalkTask(nullptr);
player->startAutoWalk(listDir);
}
void Game::playerStopAutoWalk(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->stopWalk();
}
void Game::playerUseItemEx(uint32_t playerId, const Position& fromPos, uint8_t fromStackPos, uint16_t fromSpriteId,
const Position& toPos, uint8_t toStackPos, uint16_t toSpriteId, bool isHotkey)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (isHotkey && !g_config.getBoolean(ConfigManager::AIMBOT_HOTKEY_ENABLED)) {
return;
}
Thing* thing = internalGetThing(player, fromPos, fromStackPos, fromSpriteId, STACKPOS_USEITEM);
if (!thing) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Item* item = thing->getItem();
if (!item || !item->isUseable() || item->getClientID() != fromSpriteId) {
player->sendCancelMessage(RET_CANNOTUSETHISOBJECT);
return;
}
Position walkToPos = fromPos;
ReturnValue ret = g_actions->canUse(player, fromPos);
if (ret == RET_NOERROR) {
ret = g_actions->canUse(player, toPos, item);
if (ret == RET_TOOFARAWAY) {
walkToPos = toPos;
}
}
if (ret != RET_NOERROR) {
if (ret == RET_TOOFARAWAY) {
Position itemPos = fromPos;
uint8_t itemStackPos = fromStackPos;
if (fromPos.x != 0xFFFF && toPos.x != 0xFFFF && Position::areInRange<1, 1, 0>(fromPos, player->getPosition()) &&
!Position::areInRange<1, 1, 0>(fromPos, toPos)) {
Item* moveItem = nullptr;
ret = internalMoveItem(item->getParent(), player, INDEX_WHEREEVER, item, item->getItemCount(), &moveItem);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
return;
}
//changing the position since it's now in the inventory of the player
internalGetPosition(moveItem, itemPos, itemStackPos);
}
std::list<Direction> listDir;
if (player->getPathTo(walkToPos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk, this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerUseItemEx, this,
playerId, itemPos, itemStackPos, fromSpriteId, toPos, toStackPos, toSpriteId, isHotkey));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
player->sendCancelMessage(ret);
return;
}
if (!player->canDoAction()) {
uint32_t delay = player->getNextActionTime();
SchedulerTask* task = createSchedulerTask(delay, std::bind(&Game::playerUseItemEx, this,
playerId, fromPos, fromStackPos, fromSpriteId, toPos, toStackPos, toSpriteId, isHotkey));
player->setNextActionTask(task);
return;
}
player->resetIdleTime();
player->setNextActionTask(nullptr);
g_actions->useItemEx(player, fromPos, toPos, toStackPos, item, isHotkey);
}
void Game::playerUseItem(uint32_t playerId, const Position& pos, uint8_t stackPos,
uint8_t index, uint16_t spriteId, bool isHotkey)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (isHotkey && !g_config.getBoolean(ConfigManager::AIMBOT_HOTKEY_ENABLED)) {
return;
}
Thing* thing = internalGetThing(player, pos, stackPos, spriteId, STACKPOS_USEITEM);
if (!thing) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Item* item = thing->getItem();
if (!item || item->isUseable() || item->getClientID() != spriteId) {
player->sendCancelMessage(RET_CANNOTUSETHISOBJECT);
return;
}
ReturnValue ret = g_actions->canUse(player, pos);
if (ret != RET_NOERROR) {
if (ret == RET_TOOFARAWAY) {
std::list<Direction> listDir;
if (player->getPathTo(pos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerUseItem, this,
playerId, pos, stackPos, index, spriteId, isHotkey));
player->setNextWalkActionTask(task);
return;
}
ret = RET_THEREISNOWAY;
}
player->sendCancelMessage(ret);
return;
}
if (!player->canDoAction()) {
uint32_t delay = player->getNextActionTime();
SchedulerTask* task = createSchedulerTask(delay, std::bind(&Game::playerUseItem, this,
playerId, pos, stackPos, index, spriteId, isHotkey));
player->setNextActionTask(task);
return;
}
player->resetIdleTime();
player->setNextActionTask(nullptr);
g_actions->useItem(player, pos, index, item, isHotkey);
}
void Game::playerUseWithCreature(uint32_t playerId, const Position& fromPos, uint8_t fromStackPos, uint32_t creatureId, uint16_t spriteId, bool isHotkey)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Creature* creature = getCreatureByID(creatureId);
if (!creature) {
return;
}
if (!Position::areInRange<7, 5, 0>(creature->getPosition(), player->getPosition())) {
return;
}
if (!g_config.getBoolean(ConfigManager::AIMBOT_HOTKEY_ENABLED)) {
if (creature->getPlayer() || isHotkey) {
player->sendCancelMessage(RET_DIRECTPLAYERSHOOT);
return;
}
}
Thing* thing = internalGetThing(player, fromPos, fromStackPos, spriteId, STACKPOS_USEITEM);
if (!thing) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Item* item = thing->getItem();
if (!item || !item->isUseable() || item->getClientID() != spriteId) {
player->sendCancelMessage(RET_CANNOTUSETHISOBJECT);
return;
}
Position toPos = creature->getPosition();
Position walkToPos = fromPos;
ReturnValue ret = g_actions->canUse(player, fromPos);
if (ret == RET_NOERROR) {
ret = g_actions->canUse(player, toPos, item);
if (ret == RET_TOOFARAWAY) {
walkToPos = toPos;
}
}
if (ret != RET_NOERROR) {
if (ret == RET_TOOFARAWAY) {
Position itemPos = fromPos;
uint8_t itemStackPos = fromStackPos;
if (fromPos.x != 0xFFFF && Position::areInRange<1, 1, 0>(fromPos, player->getPosition()) && !Position::areInRange<1, 1, 0>(fromPos, toPos)) {
Item* moveItem = nullptr;
ret = internalMoveItem(item->getParent(), player, INDEX_WHEREEVER, item, item->getItemCount(), &moveItem);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
return;
}
//changing the position since it's now in the inventory of the player
internalGetPosition(moveItem, itemPos, itemStackPos);
}
std::list<Direction> listDir;
if (player->getPathTo(walkToPos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerUseWithCreature, this,
playerId, itemPos, itemStackPos, creatureId, spriteId, isHotkey));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
player->sendCancelMessage(ret);
return;
}
if (!player->canDoAction()) {
uint32_t delay = player->getNextActionTime();
SchedulerTask* task = createSchedulerTask(delay, std::bind(&Game::playerUseWithCreature, this,
playerId, fromPos, fromStackPos, creatureId, spriteId, isHotkey));
player->setNextActionTask(task);
return;
}
player->resetIdleTime();
player->setNextActionTask(nullptr);
g_actions->useItemEx(player, fromPos, creature->getPosition(), creature->getParent()->__getIndexOfThing(creature), item, isHotkey, creatureId);
}
void Game::playerCloseContainer(uint32_t playerId, uint8_t cid)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->closeContainer(cid);
player->sendCloseContainer(cid);
}
void Game::playerMoveUpContainer(uint32_t playerId, uint8_t cid)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Container* container = player->getContainerByID(cid);
if (!container) {
return;
}
Container* parentContainer = dynamic_cast<Container*>(container->getRealParent());
if (!parentContainer) {
Tile* tile = container->getTile();
if (!tile) {
return;
}
auto it = browseFields.find(tile);
if (it == browseFields.end()) {
parentContainer = new Container(tile);
parentContainer->useThing2();
browseFields[tile] = parentContainer;
g_scheduler.addEvent(createSchedulerTask(30000, std::bind(&Game::decreaseBrowseFieldRef, this, tile->getPosition())));
} else {
parentContainer = it->second;
}
}
player->addContainer(cid, parentContainer);
player->sendContainer(cid, parentContainer, parentContainer->hasParent(), player->getContainerIndex(cid));
}
void Game::playerUpdateContainer(uint32_t playerId, uint8_t cid)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Container* container = player->getContainerByID(cid);
if (!container) {
return;
}
player->sendContainer(cid, container, container->hasParent(), player->getContainerIndex(cid));
}
void Game::playerRotateItem(uint32_t playerId, const Position& pos, uint8_t stackPos, const uint16_t spriteId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Thing* thing = internalGetThing(player, pos, stackPos);
if (!thing) {
return;
}
Item* item = thing->getItem();
if (!item || item->getClientID() != spriteId || !item->isRoteable() || item->hasAttribute(ITEM_ATTRIBUTE_UNIQUEID)) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
if (pos.x != 0xFFFF && !Position::areInRange<1, 1, 0>(pos, player->getPosition())) {
std::list<Direction> listDir;
if (player->getPathTo(pos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerRotateItem, this,
playerId, pos, stackPos, spriteId));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
uint16_t newId = Item::items[item->getID()].rotateTo;
if (newId != 0) {
transformItem(item, newId);
}
}
void Game::playerWriteItem(uint32_t playerId, uint32_t windowTextId, const std::string& text)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
uint16_t maxTextLength = 0;
uint32_t internalWindowTextId = 0;
Item* writeItem = player->getWriteItem(internalWindowTextId, maxTextLength);
if (text.length() > maxTextLength || windowTextId != internalWindowTextId) {
return;
}
if (!writeItem || writeItem->isRemoved()) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Cylinder* topParent = writeItem->getTopParent();
Player* owner = dynamic_cast<Player*>(topParent);
if (owner && owner != player) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
if (!Position::areInRange<1, 1, 0>(writeItem->getPosition(), player->getPosition())) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
if (!g_events->eventPlayerOnTextEdit(player, writeItem, text)) {
player->setWriteItem(nullptr);
return;
}
if (!text.empty()) {
if (writeItem->getText() != text) {
writeItem->setText(text);
writeItem->setWriter(player->getName());
writeItem->setDate(time(nullptr));
}
} else {
writeItem->resetText();
writeItem->resetWriter();
writeItem->resetDate();
}
uint16_t newId = Item::items[writeItem->getID()].writeOnceItemId;
if (newId != 0) {
transformItem(writeItem, newId);
}
player->setWriteItem(nullptr);
}
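//opens a virtual container showing the items on a tile; the container is cached in browseFields and released by a scheduled task after 30 seconds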
void Game::playerBrowseField(uint32_t playerId, const Position& pos)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
const Position& playerPos = player->getPosition();
if (playerPos.z != pos.z) {
player->sendCancelMessage(playerPos.z > pos.z ? RET_FIRSTGOUPSTAIRS : RET_FIRSTGODOWNSTAIRS);
return;
}
if (!Position::areInRange<1, 1>(playerPos, pos)) {
std::list<Direction> listDir;
if (player->getPathTo(pos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(
&Game::playerBrowseField, this, playerId, pos
));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
Tile* tile = getTile(pos);
if (!tile) {
return;
}
if (!g_events->eventPlayerOnBrowseField(player, pos)) {
return;
}
Container* container;
auto it = browseFields.find(tile);
if (it == browseFields.end()) {
container = new Container(tile);
container->useThing2();
browseFields[tile] = container;
g_scheduler.addEvent(createSchedulerTask(30000, std::bind(&Game::decreaseBrowseFieldRef, this, tile->getPosition())));
} else {
container = it->second;
}
uint8_t dummyContainerId = 0xF - ((pos.x % 3) * 3 + (pos.y % 3));
Container* openContainer = player->getContainerByID(dummyContainerId);
if (openContainer) {
player->onCloseContainer(openContainer);
player->closeContainer(dummyContainerId);
} else {
player->addContainer(dummyContainerId, container);
player->sendContainer(dummyContainerId, container, false, 0);
}
}
void Game::playerSeekInContainer(uint32_t playerId, uint8_t containerId, uint16_t index)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Container* container = player->getContainerByID(containerId);
if (!container || !container->hasPagination()) {
return;
}
if ((index % container->capacity()) != 0 || index >= container->size()) {
return;
}
player->setContainerIndex(containerId, index);
player->sendContainer(containerId, container, false, index);
}
void Game::playerUpdateHouseWindow(uint32_t playerId, uint8_t listId, uint32_t windowTextId, const std::string& text)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
uint32_t internalWindowTextId;
uint32_t internalListId;
House* house = player->getEditHouse(internalWindowTextId, internalListId);
if (house && internalWindowTextId == windowTextId && listId == 0) {
house->setAccessList(internalListId, text);
player->setEditHouse(nullptr);
}
}
void Game::playerRequestTrade(uint32_t playerId, const Position& pos, uint8_t stackPos,
uint32_t tradePlayerId, uint16_t spriteId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Player* tradePartner = getPlayerByID(tradePlayerId);
if (!tradePartner || tradePartner == player) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "Sorry, not possible.");
return;
}
if (!Position::areInRange<2, 2, 0>(tradePartner->getPosition(), player->getPosition())) {
std::ostringstream ss;
ss << tradePartner->getName() << " tells you to move closer.";
player->sendTextMessage(MESSAGE_INFO_DESCR, ss.str());
return;
}
if (!canThrowObjectTo(tradePartner->getPosition(), player->getPosition())) {
player->sendCancelMessage(RET_CREATUREISNOTREACHABLE);
return;
}
Item* tradeItem = dynamic_cast<Item*>(internalGetThing(player, pos, stackPos, spriteId, STACKPOS_USE));
if (!tradeItem || tradeItem->getClientID() != spriteId || !tradeItem->isPickupable() || tradeItem->hasAttribute(ITEM_ATTRIBUTE_UNIQUEID)) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
const Position& playerPosition = player->getPosition();
const Position& tradeItemPosition = tradeItem->getPosition();
if (playerPosition.z != tradeItemPosition.z) {
player->sendCancelMessage(playerPosition.z > tradeItemPosition.z ? RET_FIRSTGOUPSTAIRS : RET_FIRSTGODOWNSTAIRS);
return;
}
if (!Position::areInRange<1, 1>(tradeItemPosition, playerPosition)) {
std::list<Direction> listDir;
if (player->getPathTo(pos, listDir, 0, 1, true, true)) {
g_dispatcher.addTask(createTask(std::bind(&Game::playerAutoWalk,
this, player->getID(), listDir)));
SchedulerTask* task = createSchedulerTask(400, std::bind(&Game::playerRequestTrade, this,
playerId, pos, stackPos, tradePlayerId, spriteId));
player->setNextWalkActionTask(task);
} else {
player->sendCancelMessage(RET_THEREISNOWAY);
}
return;
}
Container* tradeItemContainer = tradeItem->getContainer();
if (tradeItemContainer) {
for (const auto& it : tradeItems) {
Item* item = it.first;
if (tradeItem == item) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "This item is already being traded.");
return;
}
if (tradeItemContainer->isHoldingItem(item)) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "This item is already being traded.");
return;
}
Container* container = item->getContainer();
if (container && container->isHoldingItem(tradeItem)) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "This item is already being traded.");
return;
}
}
} else {
for (const auto& it : tradeItems) {
Item* item = it.first;
if (tradeItem == item) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "This item is already being traded.");
return;
}
Container* container = item->getContainer();
if (container && container->isHoldingItem(tradeItem)) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "This item is already being traded.");
return;
}
}
}
Container* tradeContainer = tradeItem->getContainer();
if (tradeContainer && tradeContainer->getItemHoldingCount() + 1 > 100) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "You can not trade more than 100 items.");
return;
}
if (!g_events->eventPlayerOnTradeRequest(player, tradePartner, tradeItem)) {
return;
}
internalStartTrade(player, tradePartner, tradeItem);
}
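//links both players into a trade session, keeps a reference to the offered item and sends the trade windows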
bool Game::internalStartTrade(Player* player, Player* tradePartner, Item* tradeItem)
{
if (player->tradeState != TRADE_NONE && !(player->tradeState == TRADE_ACKNOWLEDGE && player->tradePartner == tradePartner)) {
player->sendCancelMessage(RET_YOUAREALREADYTRADING);
return false;
} else if (tradePartner->tradeState != TRADE_NONE && tradePartner->tradePartner != player) {
player->sendCancelMessage(RET_THISPLAYERISALREADYTRADING);
return false;
}
player->tradePartner = tradePartner;
player->tradeItem = tradeItem;
player->tradeState = TRADE_INITIATED;
tradeItem->useThing2();
tradeItems[tradeItem] = player->getID();
player->sendTradeItemRequest(player, tradeItem, true);
if (tradePartner->tradeState == TRADE_NONE) {
std::ostringstream ss;
ss << player->getName() << " wants to trade with you.";
tradePartner->sendTextMessage(MESSAGE_EVENT_ADVANCE, ss.str());
tradePartner->tradeState = TRADE_ACKNOWLEDGE;
tradePartner->tradePartner = player;
} else {
Item* counterOfferItem = tradePartner->tradeItem;
player->sendTradeItemRequest(tradePartner, counterOfferItem, false);
tradePartner->sendTradeItemRequest(player, tradeItem, false);
}
return true;
}
void Game::playerAcceptTrade(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!(player->getTradeState() == TRADE_ACKNOWLEDGE || player->getTradeState() == TRADE_INITIATED)) {
return;
}
Player* tradePartner = player->tradePartner;
if (!tradePartner) {
return;
}
if (!canThrowObjectTo(tradePartner->getPosition(), player->getPosition())) {
player->sendCancelMessage(RET_CREATUREISNOTREACHABLE);
return;
}
player->setTradeState(TRADE_ACCEPT);
if (tradePartner->getTradeState() == TRADE_ACCEPT) {
Item* tradeItem1 = player->tradeItem;
Item* tradeItem2 = tradePartner->tradeItem;
player->setTradeState(TRADE_TRANSFER);
tradePartner->setTradeState(TRADE_TRANSFER);
std::map<Item*, uint32_t>::iterator it = tradeItems.find(tradeItem1);
if (it != tradeItems.end()) {
ReleaseItem(it->first);
tradeItems.erase(it);
}
it = tradeItems.find(tradeItem2);
if (it != tradeItems.end()) {
ReleaseItem(it->first);
tradeItems.erase(it);
}
bool isSuccess = false;
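//dry-run the adds and removes first (test == true); only if all checks pass is the actual exchange performed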
ReturnValue ret1 = internalAddItem(tradePartner, tradeItem1, INDEX_WHEREEVER, 0, true);
ReturnValue ret2 = internalAddItem(player, tradeItem2, INDEX_WHEREEVER, 0, true);
if (ret1 == RET_NOERROR && ret2 == RET_NOERROR) {
ret1 = internalRemoveItem(tradeItem1, tradeItem1->getItemCount(), true);
ret2 = internalRemoveItem(tradeItem2, tradeItem2->getItemCount(), true);
if (ret1 == RET_NOERROR && ret2 == RET_NOERROR) {
Cylinder* cylinder1 = tradeItem1->getParent();
Cylinder* cylinder2 = tradeItem2->getParent();
uint32_t count1 = tradeItem1->getItemCount();
uint32_t count2 = tradeItem2->getItemCount();
ret1 = internalMoveItem(cylinder1, tradePartner, INDEX_WHEREEVER, tradeItem1, count1, nullptr, FLAG_IGNOREAUTOSTACK, nullptr, tradeItem2);
if (ret1 == RET_NOERROR) {
internalMoveItem(cylinder2, player, INDEX_WHEREEVER, tradeItem2, count2, nullptr, FLAG_IGNOREAUTOSTACK);
tradeItem1->onTradeEvent(ON_TRADE_TRANSFER, tradePartner);
tradeItem2->onTradeEvent(ON_TRADE_TRANSFER, player);
isSuccess = true;
}
}
}
if (!isSuccess) {
std::string errorDescription;
if (tradePartner->tradeItem) {
errorDescription = getTradeErrorDescription(ret1, tradeItem1);
tradePartner->sendTextMessage(MESSAGE_EVENT_ADVANCE, errorDescription);
tradePartner->tradeItem->onTradeEvent(ON_TRADE_CANCEL, tradePartner);
}
if (player->tradeItem) {
errorDescription = getTradeErrorDescription(ret2, tradeItem2);
player->sendTextMessage(MESSAGE_EVENT_ADVANCE, errorDescription);
player->tradeItem->onTradeEvent(ON_TRADE_CANCEL, player);
}
}
player->setTradeState(TRADE_NONE);
player->tradeItem = nullptr;
player->tradePartner = nullptr;
player->sendTradeClose();
tradePartner->setTradeState(TRADE_NONE);
tradePartner->tradeItem = nullptr;
tradePartner->tradePartner = nullptr;
tradePartner->sendTradeClose();
}
}
std::string Game::getTradeErrorDescription(ReturnValue ret, Item* item)
{
if (item) {
if (ret == RET_NOTENOUGHCAPACITY) {
std::ostringstream ss;
ss << "You do not have enough capacity to carry";
if (item->isStackable() && item->getItemCount() > 1) {
ss << " these objects.";
} else {
ss << " this object.";
}
ss << std::endl << ' ' << item->getWeightDescription();
return ss.str();
} else if (ret == RET_NOTENOUGHROOM || ret == RET_CONTAINERNOTENOUGHROOM) {
std::ostringstream ss;
ss << "You do not have enough room to carry";
if (item->isStackable() && item->getItemCount() > 1) {
ss << " these objects.";
} else {
ss << " this object.";
}
return ss.str();
}
}
return "Trade could not be completed.";
}
void Game::playerLookInTrade(uint32_t playerId, bool lookAtCounterOffer, uint8_t index)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Player* tradePartner = player->tradePartner;
if (!tradePartner) {
return;
}
Item* tradeItem;
if (lookAtCounterOffer) {
tradeItem = tradePartner->getTradeItem();
} else {
tradeItem = player->getTradeItem();
}
if (!tradeItem) {
return;
}
const Position& playerPosition = player->getPosition();
const Position& tradeItemPosition = tradeItem->getPosition();
int32_t lookDistance = std::max<int32_t>(Position::getDistanceX(playerPosition, tradeItemPosition),
Position::getDistanceY(playerPosition, tradeItemPosition));
if (index == 0) {
g_events->eventPlayerOnLookInTrade(player, tradePartner, tradeItem, lookDistance);
return;
}
Container* tradeContainer = tradeItem->getContainer();
if (!tradeContainer) {
return;
}
std::vector<const Container*> containers {tradeContainer};
size_t i = 0;
while (i < containers.size()) {
const Container* container = containers[i++];
for (Item* item : container->getItemList()) {
Container* tmpContainer = item->getContainer();
if (tmpContainer) {
containers.push_back(tmpContainer);
}
if (--index == 0) {
g_events->eventPlayerOnLookInTrade(player, tradePartner, item, lookDistance);
return;
}
}
}
}
void Game::playerCloseTrade(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
internalCloseTrade(player);
}
bool Game::internalCloseTrade(Player* player)
{
Player* tradePartner = player->tradePartner;
if ((tradePartner && tradePartner->getTradeState() == TRADE_TRANSFER) || player->getTradeState() == TRADE_TRANSFER) {
return true;
}
if (player->getTradeItem()) {
std::map<Item*, uint32_t>::iterator it = tradeItems.find(player->getTradeItem());
if (it != tradeItems.end()) {
ReleaseItem(it->first);
tradeItems.erase(it);
}
player->tradeItem->onTradeEvent(ON_TRADE_CANCEL, player);
player->tradeItem = nullptr;
}
player->setTradeState(TRADE_NONE);
player->tradePartner = nullptr;
player->sendTextMessage(MESSAGE_STATUS_SMALL, "Trade cancelled.");
player->sendTradeClose();
if (tradePartner) {
if (tradePartner->getTradeItem()) {
std::map<Item*, uint32_t>::iterator it = tradeItems.find(tradePartner->getTradeItem());
if (it != tradeItems.end()) {
ReleaseItem(it->first);
tradeItems.erase(it);
}
tradePartner->tradeItem->onTradeEvent(ON_TRADE_CANCEL, tradePartner);
tradePartner->tradeItem = nullptr;
}
tradePartner->setTradeState(TRADE_NONE);
tradePartner->tradePartner = nullptr;
tradePartner->sendTextMessage(MESSAGE_STATUS_SMALL, "Trade cancelled.");
tradePartner->sendTradeClose();
}
return true;
}
void Game::playerPurchaseItem(uint32_t playerId, uint16_t spriteId, uint8_t count, uint8_t amount,
bool ignoreCap/* = false*/, bool inBackpacks/* = false*/)
{
if (amount == 0 || amount > 100) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
int32_t onBuy, onSell;
Npc* merchant = player->getShopOwner(onBuy, onSell);
if (!merchant) {
return;
}
const ItemType& it = Item::items.getItemIdByClientId(spriteId);
if (it.id == 0) {
return;
}
uint8_t subType;
if (it.isSplash() || it.isFluidContainer()) {
subType = clientFluidToServer(count);
} else {
subType = count;
}
if (!player->hasShopItemForSale(it.id, subType)) {
return;
}
merchant->onPlayerTrade(player, onBuy, it.id, subType, amount, ignoreCap, inBackpacks);
}
void Game::playerSellItem(uint32_t playerId, uint16_t spriteId, uint8_t count, uint8_t amount, bool ignoreEquipped)
{
if (amount == 0 || amount > 100) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
int32_t onBuy, onSell;
Npc* merchant = player->getShopOwner(onBuy, onSell);
if (!merchant) {
return;
}
const ItemType& it = Item::items.getItemIdByClientId(spriteId);
if (it.id == 0) {
return;
}
uint8_t subType;
if (it.isSplash() || it.isFluidContainer()) {
subType = clientFluidToServer(count);
} else {
subType = count;
}
merchant->onPlayerTrade(player, onSell, it.id, subType, amount, ignoreEquipped);
}
void Game::playerCloseShop(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->closeShopWindow();
}
void Game::playerLookInShop(uint32_t playerId, uint16_t spriteId, uint8_t count)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
int32_t onBuy, onSell;
Npc* merchant = player->getShopOwner(onBuy, onSell);
if (!merchant) {
return;
}
const ItemType& it = Item::items.getItemIdByClientId(spriteId);
if (it.id == 0) {
return;
}
int32_t subType;
if (it.isFluidContainer() || it.isSplash()) {
subType = clientFluidToServer(count);
} else {
subType = count;
}
if (!player->hasShopItemForSale(it.id, subType)) {
return;
}
if (!g_events->eventPlayerOnLookInShop(player, &it, subType)) {
return;
}
std::ostringstream ss;
ss << "You see " << Item::getDescription(it, 1, nullptr, subType);
player->sendTextMessage(MESSAGE_INFO_DESCR, ss.str());
}
void Game::playerLookAt(uint32_t playerId, const Position& pos, uint16_t spriteId, uint8_t stackPos)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Thing* thing = internalGetThing(player, pos, stackPos, spriteId, STACKPOS_LOOK);
if (!thing) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Position thingPos = thing->getPosition();
if (!player->canSee(thingPos)) {
player->sendCancelMessage(RET_NOTPOSSIBLE);
return;
}
Position playerPos = player->getPosition();
int32_t lookDistance;
if (thing != player) {
lookDistance = std::max<int32_t>(Position::getDistanceX(playerPos, thingPos), Position::getDistanceY(playerPos, thingPos));
if (playerPos.z != thingPos.z) {
lookDistance += 15;
}
} else {
lookDistance = -1;
}
g_events->eventPlayerOnLook(player, pos, thing, stackPos, lookDistance);
}
void Game::playerLookInBattleList(uint32_t playerId, uint32_t creatureId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Creature* creature = getCreatureByID(creatureId);
if (!creature) {
return;
}
if (!player->canSeeCreature(creature)) {
return;
}
const Position& creaturePos = creature->getPosition();
if (!player->canSee(creaturePos)) {
return;
}
int32_t lookDistance;
if (creature != player) {
const Position& playerPos = player->getPosition();
lookDistance = std::max<int32_t>(Position::getDistanceX(playerPos, creaturePos), Position::getDistanceY(playerPos, creaturePos));
if (playerPos.z != creaturePos.z) {
lookDistance += 15;
}
} else {
lookDistance = -1;
}
g_events->eventPlayerOnLookInBattleList(player, creature, lookDistance);
}
void Game::playerCancelAttackAndFollow(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
playerSetAttackedCreature(playerId, 0);
playerFollowCreature(playerId, 0);
player->stopWalk();
}
void Game::playerSetAttackedCreature(uint32_t playerId, uint32_t creatureId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (player->getAttackedCreature() && creatureId == 0) {
player->setAttackedCreature(nullptr);
player->sendCancelTarget();
return;
}
Creature* attackCreature = getCreatureByID(creatureId);
if (!attackCreature) {
player->setAttackedCreature(nullptr);
player->sendCancelTarget();
return;
}
ReturnValue ret = Combat::canTargetCreature(player, attackCreature);
if (ret != RET_NOERROR) {
player->sendCancelMessage(ret);
player->sendCancelTarget();
player->setAttackedCreature(nullptr);
return;
}
player->setAttackedCreature(attackCreature);
g_dispatcher.addTask(createTask(std::bind(&Game::updateCreatureWalk, this, player->getID())));
}
void Game::playerFollowCreature(uint32_t playerId, uint32_t creatureId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->setAttackedCreature(nullptr);
g_dispatcher.addTask(createTask(std::bind(&Game::updateCreatureWalk, this, player->getID())));
player->setFollowCreature(getCreatureByID(creatureId));
}
void Game::playerSetFightModes(uint32_t playerId, fightMode_t fightMode, chaseMode_t chaseMode, secureMode_t secureMode)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->setFightMode(fightMode);
player->setChaseMode(chaseMode);
player->setSecureMode(secureMode);
}
void Game::playerRequestAddVip(uint32_t playerId, const std::string& name)
{
if (name.length() > 20) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Player* vipPlayer = getPlayerByName(name);
if (!vipPlayer) {
uint32_t guid;
bool specialVip;
std::string formattedName = name;
if (!IOLoginData::getGuidByNameEx(guid, specialVip, formattedName)) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "A player with this name does not exist.");
return;
}
if (specialVip && !player->hasFlag(PlayerFlag_SpecialVIP)) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "You can not add this player.");
return;
}
player->addVIP(guid, formattedName, VIPSTATUS_OFFLINE);
} else {
if (vipPlayer->hasFlag(PlayerFlag_SpecialVIP) && !player->hasFlag(PlayerFlag_SpecialVIP)) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "You can not add this player.");
return;
}
if (!vipPlayer->isInGhostMode() || player->isAccessPlayer()) {
player->addVIP(vipPlayer->getGUID(), vipPlayer->getName(), VIPSTATUS_ONLINE);
} else {
player->addVIP(vipPlayer->getGUID(), vipPlayer->getName(), VIPSTATUS_OFFLINE);
}
}
}
void Game::playerRequestRemoveVip(uint32_t playerId, uint32_t guid)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->removeVIP(guid);
}
void Game::playerRequestEditVip(uint32_t playerId, uint32_t guid, const std::string& description, uint32_t icon, bool notify)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->editVIP(guid, description, icon, notify);
}
void Game::playerTurn(uint32_t playerId, Direction dir)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!g_events->eventPlayerOnTurn(player, dir)) {
return;
}
player->resetIdleTime();
internalCreatureTurn(player, dir);
}
void Game::playerRequestOutfit(uint32_t playerId)
{
if (!g_config.getBoolean(ConfigManager::ALLOW_CHANGEOUTFIT)) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->sendOutfitWindow();
}
void Game::playerToggleMount(uint32_t playerId, bool mount)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->toggleMount(mount);
}
void Game::playerChangeOutfit(uint32_t playerId, Outfit_t outfit)
{
if (!g_config.getBoolean(ConfigManager::ALLOW_CHANGEOUTFIT)) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->hasRequestedOutfit()) {
return;
}
player->hasRequestedOutfit(false);
if (outfit.lookMount != 0) {
Mount* mount = Mounts::getInstance()->getMountByClientID(outfit.lookMount);
if (!mount) {
return;
}
if (!player->hasMount(mount)) {
return;
}
if (player->isMounted()) {
Mount* prevMount = Mounts::getInstance()->getMountByID(player->getCurrentMount());
if (prevMount) {
changeSpeed(player, mount->speed - prevMount->speed);
}
player->setCurrentMount(mount->id);
} else {
player->setCurrentMount(mount->id);
outfit.lookMount = 0;
}
} else if (player->isMounted()) {
player->dismount();
}
if (player->canWear(outfit.lookType, outfit.lookAddons)) {
player->defaultOutfit = outfit;
if (player->hasCondition(CONDITION_OUTFIT)) {
return;
}
internalCreatureChangeOutfit(player, outfit);
}
}
void Game::playerShowQuestLog(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->sendQuestLog();
}
void Game::playerShowQuestLine(uint32_t playerId, uint16_t questId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Quest* quest = Quests::getInstance()->getQuestByID(questId);
if (!quest) {
return;
}
player->sendQuestLine(quest);
}
void Game::playerSay(uint32_t playerId, uint16_t channelId, SpeakClasses type,
const std::string& receiver, const std::string& text)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->resetIdleTime();
uint32_t muteTime = player->isMuted();
if (muteTime > 0) {
std::ostringstream ss;
ss << "You are still muted for " << muteTime << " seconds.";
player->sendTextMessage(MESSAGE_STATUS_SMALL, ss.str());
return;
}
if (playerSayCommand(player, text)) {
return;
}
if (playerSaySpell(player, type, text)) {
return;
}
if (!text.empty() && text.front() == '/' && player->isAccessPlayer()) {
return;
}
if (type != TALKTYPE_PRIVATE_PN) {
player->removeMessageBuffer();
}
switch (type) {
case TALKTYPE_SAY:
internalCreatureSay(player, TALKTYPE_SAY, text, false);
break;
case TALKTYPE_WHISPER:
playerWhisper(player, text);
break;
case TALKTYPE_YELL:
playerYell(player, text);
break;
case TALKTYPE_PRIVATE_TO:
case TALKTYPE_PRIVATE_RED_TO:
playerSpeakTo(player, type, receiver, text);
break;
case TALKTYPE_CHANNEL_O:
case TALKTYPE_CHANNEL_Y:
case TALKTYPE_CHANNEL_R1:
g_chat.talkToChannel(*player, type, text, channelId);
break;
case TALKTYPE_PRIVATE_PN:
playerSpeakToNpc(player, text);
break;
case TALKTYPE_BROADCAST:
playerBroadcastMessage(player, text);
break;
default:
break;
}
}
bool Game::playerSayCommand(Player* player, const std::string& text)
{
if (text.empty()) {
return false;
}
char firstCharacter = text.front();
for (char commandTag : commandTags) {
if (commandTag == firstCharacter) {
if (commands.exeCommand(*player, text)) {
return true;
}
}
}
return false;
}
bool Game::playerSaySpell(Player* player, SpeakClasses type, const std::string& text)
{
std::string words = text;
TalkActionResult_t result = g_talkActions->playerSaySpell(player, type, words);
if (result == TALKACTION_BREAK) {
return true;
}
result = g_spells->playerSaySpell(player, words);
if (result == TALKACTION_BREAK) {
if (!g_config.getBoolean(ConfigManager::EMOTE_SPELLS)) {
return internalCreatureSay(player, TALKTYPE_SAY, words, false);
} else {
return internalCreatureSay(player, TALKTYPE_MONSTER_SAY, words, false);
}
} else if (result == TALKACTION_FAILED) {
return true;
}
return false;
}
bool Game::playerWhisper(Player* player, const std::string& text)
{
SpectatorVec list;
getSpectators(list, player->getPosition(), false, false,
Map::maxClientViewportX, Map::maxClientViewportX,
Map::maxClientViewportY, Map::maxClientViewportY);
//send to client
for (Creature* spectator : list) {
if (Player* spectatorPlayer = spectator->getPlayer()) {
if (!Position::areInRange<1, 1>(player->getPosition(), spectatorPlayer->getPosition())) {
spectatorPlayer->sendCreatureSay(player, TALKTYPE_WHISPER, "pspsps");
} else {
spectatorPlayer->sendCreatureSay(player, TALKTYPE_WHISPER, text);
}
}
}
//event method
for (Creature* spectator : list) {
spectator->onCreatureSay(player, TALKTYPE_WHISPER, text);
}
return true;
}
bool Game::playerYell(Player* player, const std::string& text)
{
if (player->getLevel() == 1) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "You may not yell as long as you are on level 1.");
return false;
}
if (player->hasCondition(CONDITION_YELLTICKS)) {
player->sendCancelMessage(RET_YOUAREEXHAUSTED);
return false;
}
if (player->getAccountType() < ACCOUNT_TYPE_GAMEMASTER) {
Condition* condition = Condition::createCondition(CONDITIONID_DEFAULT, CONDITION_YELLTICKS, 30000, 0);
player->addCondition(condition);
}
internalCreatureSay(player, TALKTYPE_YELL, asUpperCaseString(text), false);
return true;
}
bool Game::playerSpeakTo(Player* player, SpeakClasses type, const std::string& receiver,
const std::string& text)
{
Player* toPlayer = getPlayerByName(receiver);
if (!toPlayer) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "A player with this name is not online.");
return false;
}
if (type == TALKTYPE_PRIVATE_RED_TO && (player->hasFlag(PlayerFlag_CanTalkRedPrivate) || player->getAccountType() >= ACCOUNT_TYPE_GAMEMASTER)) {
type = TALKTYPE_PRIVATE_RED_FROM;
} else {
type = TALKTYPE_PRIVATE_FROM;
}
toPlayer->sendPrivateMessage(player, type, text);
toPlayer->onCreatureSay(player, type, text);
if (toPlayer->isInGhostMode() && !player->isAccessPlayer()) {
player->sendTextMessage(MESSAGE_STATUS_SMALL, "A player with this name is not online.");
} else {
std::ostringstream ss;
ss << "Message sent to " << toPlayer->getName() << '.';
player->sendTextMessage(MESSAGE_STATUS_SMALL, ss.str());
}
return true;
}
bool Game::playerSpeakToNpc(Player* player, const std::string& text)
{
SpectatorVec list;
getSpectators(list, player->getPosition());
for (Creature* spectator : list) {
if (spectator->getNpc()) {
spectator->onCreatureSay(player, TALKTYPE_PRIVATE_PN, text);
}
}
return true;
}
//--
bool Game::canThrowObjectTo(const Position& fromPos, const Position& toPos, bool checkLineOfSight /*= true*/,
int32_t rangex /*= Map::maxClientViewportX*/, int32_t rangey /*= Map::maxClientViewportY*/) const
{
return map.canThrowObjectTo(fromPos, toPos, checkLineOfSight, rangex, rangey);
}
bool Game::isSightClear(const Position& fromPos, const Position& toPos, bool floorCheck) const
{
return map.isSightClear(fromPos, toPos, floorCheck);
}
bool Game::internalCreatureTurn(Creature* creature, Direction dir)
{
if (creature->getDirection() == dir) {
return false;
}
creature->setDirection(dir);
//send to client
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureTurn(creature);
}
return true;
}
bool Game::internalCreatureSay(Creature* creature, SpeakClasses type, const std::string& text,
bool ghostMode, SpectatorVec* listPtr/* = nullptr*/, const Position* pos/* = nullptr*/)
{
if (text.empty()) {
return false;
}
if (!pos) {
pos = &creature->getPosition();
}
SpectatorVec list;
if (!listPtr || listPtr->empty()) {
// This somewhat complex construct ensures that the cached SpectatorVec
// is used if available and if it can be used, else a local vector is
// used (hopefully the compiler will optimize away the construction of
// the temporary when it's not used).
if (type != TALKTYPE_YELL && type != TALKTYPE_MONSTER_YELL) {
getSpectators(list, *pos, false, false,
Map::maxClientViewportX, Map::maxClientViewportX,
Map::maxClientViewportY, Map::maxClientViewportY);
} else {
getSpectators(list, *pos, true, false, 18, 18, 14, 14);
}
} else {
list = (*listPtr);
}
//send to client
for (Creature* spectator : list) {
if (Player* tmpPlayer = spectator->getPlayer()) {
if (!ghostMode || tmpPlayer->canSeeCreature(creature)) {
tmpPlayer->sendCreatureSay(creature, type, text, pos);
}
}
}
//event method
for (Creature* spectator : list) {
spectator->onCreatureSay(creature, type, text);
if (creature != spectator) {
g_events->eventCreatureOnHear(spectator, creature, text, type, creature->getPosition());
}
}
return true;
}
void Game::checkCreatureWalk(uint32_t creatureId)
{
Creature* creature = getCreatureByID(creatureId);
if (creature && creature->getHealth() > 0) {
creature->onWalk();
cleanup();
}
}
void Game::updateCreatureWalk(uint32_t creatureId)
{
Creature* creature = getCreatureByID(creatureId);
if (creature && creature->getHealth() > 0) {
creature->goToFollowCreature();
}
}
void Game::checkCreatureAttack(uint32_t creatureId)
{
Creature* creature = getCreatureByID(creatureId);
if (creature && creature->getHealth() > 0) {
creature->onAttacking(0);
}
}
void Game::addCreatureCheck(Creature* creature)
{
creature->creatureCheck = true;
if (creature->inCheckCreaturesVector) {
// already in a vector
return;
}
creature->inCheckCreaturesVector = true;
checkCreatureLists[uniform_random(0, EVENT_CREATURECOUNT - 1)].push_back(creature);
creature->useThing2();
}
void Game::removeCreatureCheck(Creature* creature)
{
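// only clear the flag here; checkCreatures() erases the creature from its list and releases the reference on the next pass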
if (creature->inCheckCreaturesVector) {
creature->creatureCheck = false;
}
}
void Game::checkCreatures(size_t index)
{
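// reschedule the next pass and process a single creature list per call, spreading the work over EVENT_CREATURECOUNT intervals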
g_scheduler.addEvent(createSchedulerTask(EVENT_CHECK_CREATURE_INTERVAL, std::bind(&Game::checkCreatures, this, (index + 1) % EVENT_CREATURECOUNT)));
auto& checkCreatureList = checkCreatureLists[index];
for (auto it = checkCreatureList.begin(), end = checkCreatureList.end(); it != end;) {
Creature* creature = *it;
if (creature->creatureCheck) {
if (creature->getHealth() > 0) {
creature->onThink(EVENT_CREATURE_THINK_INTERVAL);
creature->onAttacking(EVENT_CREATURE_THINK_INTERVAL);
creature->executeConditions(EVENT_CREATURE_THINK_INTERVAL);
} else {
creature->onDeath();
}
++it;
} else {
creature->inCheckCreaturesVector = false;
it = checkCreatureList.erase(it);
ReleaseCreature(creature);
}
}
cleanup();
}
void Game::changeSpeed(Creature* creature, int32_t varSpeedDelta)
{
int32_t varSpeed = creature->getSpeed() - creature->getBaseSpeed();
varSpeed += varSpeedDelta;
creature->setSpeed(varSpeed);
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), false, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendChangeSpeed(creature, creature->getStepSpeed());
}
}
void Game::internalCreatureChangeOutfit(Creature* creature, const Outfit_t& outfit)
{
if (!g_events->eventCreatureOnChangeOutfit(creature, outfit, creature->getCurrentOutfit())) {
return;
}
creature->setCurrentOutfit(outfit);
if (creature->isInvisible()) {
return;
}
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureChangeOutfit(creature, outfit);
}
}
void Game::internalCreatureChangeVisible(Creature* creature, bool visible)
{
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureChangeVisible(creature, visible);
}
}
void Game::changeLight(const Creature* creature)
{
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureLight(creature);
}
}
bool Game::combatBlockHit(CombatType_t combatType, Creature* attacker, Creature* target,
int32_t& healthChange, bool checkDefense, bool checkArmor, bool field)
{
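// returns true when the hit is fully absorbed (defense, armor, immunity or an invalid target) and only an effect is shown; false means the caller should apply the remaining damage left in healthChange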
if (combatType == COMBAT_NONE) {
return true;
}
if (target->getPlayer() && target->getPlayer()->isInGhostMode()) {
return true;
}
if (healthChange > 0) {
return false;
}
const Position& targetPos = target->getPosition();
SpectatorVec list;
getSpectators(list, targetPos, false, true);
if (!target->isAttackable() || Combat::canDoCombat(attacker, target) != RET_NOERROR) {
if (!target->isInGhostMode()) {
addMagicEffect(list, targetPos, CONST_ME_POFF);
}
return true;
}
int32_t damage = -healthChange;
BlockType_t blockType = target->blockHit(attacker, combatType, damage, checkDefense, checkArmor, field);
healthChange = -damage;
if (blockType == BLOCK_DEFENSE) {
addMagicEffect(list, targetPos, CONST_ME_POFF);
return true;
} else if (blockType == BLOCK_ARMOR) {
addMagicEffect(list, targetPos, CONST_ME_BLOCKHIT);
return true;
} else if (blockType == BLOCK_IMMUNITY) {
uint8_t hitEffect = 0;
switch (combatType) {
case COMBAT_UNDEFINEDDAMAGE:
break;
case COMBAT_ENERGYDAMAGE:
case COMBAT_FIREDAMAGE:
case COMBAT_PHYSICALDAMAGE:
case COMBAT_ICEDAMAGE:
case COMBAT_DEATHDAMAGE: {
hitEffect = CONST_ME_BLOCKHIT;
break;
}
case COMBAT_EARTHDAMAGE: {
hitEffect = CONST_ME_GREEN_RINGS;
break;
}
case COMBAT_HOLYDAMAGE: {
hitEffect = CONST_ME_HOLYDAMAGE;
break;
}
default: {
hitEffect = CONST_ME_POFF;
break;
}
}
addMagicEffect(list, targetPos, hitEffect);
return true;
}
return false;
}
void Game::combatGetTypeInfo(CombatType_t combatType, Creature* target, TextColor_t& color, uint8_t& effect)
{
switch (combatType) {
case COMBAT_PHYSICALDAMAGE: {
Item* splash = nullptr;
switch (target->getRace()) {
case RACE_VENOM:
color = TEXTCOLOR_LIGHTGREEN;
effect = CONST_ME_HITBYPOISON;
splash = Item::CreateItem(ITEM_SMALLSPLASH, FLUID_GREEN);
break;
case RACE_BLOOD:
color = TEXTCOLOR_RED;
effect = CONST_ME_DRAWBLOOD;
splash = Item::CreateItem(ITEM_SMALLSPLASH, FLUID_BLOOD);
break;
case RACE_UNDEAD:
color = TEXTCOLOR_LIGHTGREY;
effect = CONST_ME_HITAREA;
break;
case RACE_FIRE:
color = TEXTCOLOR_ORANGE;
effect = CONST_ME_DRAWBLOOD;
break;
case RACE_ENERGY:
color = TEXTCOLOR_PURPLE;
effect = CONST_ME_ENERGYHIT;
break;
default:
color = TEXTCOLOR_NONE;
effect = CONST_ME_NONE;
break;
}
if (splash) {
internalAddItem(target->getTile(), splash, INDEX_WHEREEVER, FLAG_NOLIMIT);
startDecay(splash);
}
break;
}
case COMBAT_ENERGYDAMAGE: {
color = TEXTCOLOR_PURPLE;
effect = CONST_ME_ENERGYHIT;
break;
}
case COMBAT_EARTHDAMAGE: {
color = TEXTCOLOR_LIGHTGREEN;
effect = CONST_ME_GREEN_RINGS;
break;
}
case COMBAT_DROWNDAMAGE: {
color = TEXTCOLOR_LIGHTBLUE;
effect = CONST_ME_LOSEENERGY;
break;
}
case COMBAT_FIREDAMAGE: {
color = TEXTCOLOR_ORANGE;
effect = CONST_ME_HITBYFIRE;
break;
}
case COMBAT_ICEDAMAGE: {
color = TEXTCOLOR_SKYBLUE;
effect = CONST_ME_ICEATTACK;
break;
}
case COMBAT_HOLYDAMAGE: {
color = TEXTCOLOR_YELLOW;
effect = CONST_ME_HOLYDAMAGE;
break;
}
case COMBAT_DEATHDAMAGE: {
color = TEXTCOLOR_DARKRED;
effect = CONST_ME_SMALLCLOUDS;
break;
}
case COMBAT_LIFEDRAIN: {
color = TEXTCOLOR_RED;
effect = CONST_ME_MAGIC_RED;
break;
}
default: {
color = TEXTCOLOR_NONE;
effect = CONST_ME_NONE;
break;
}
}
}
bool Game::combatChangeHealth(Creature* attacker, Creature* target, CombatDamage& damage)
{
const Position& targetPos = target->getPosition();
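// a positive primary value is a heal; the else branch below handles actual damage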
if (damage.primary.value > 0) {
if (target->getHealth() <= 0) {
return false;
}
Player* attackerPlayer;
if (attacker) {
attackerPlayer = attacker->getPlayer();
} else {
attackerPlayer = nullptr;
}
Player* targetPlayer = target->getPlayer();
if (attackerPlayer && targetPlayer) {
if (g_config.getBoolean(ConfigManager::CANNOT_ATTACK_SAME_LOOKFEET) && attackerPlayer->defaultOutfit.lookFeet == target->defaultOutfit.lookFeet && damage.primary.type != COMBAT_HEALING) {
return false;
}
if (attackerPlayer->getSkull() == SKULL_BLACK && attackerPlayer->getSkullClient(targetPlayer) == SKULL_NONE) {
return false;
}
}
if (damage.origin != ORIGIN_NONE) {
g_events->eventCreatureOnChangeHealth(target, attacker, damage);
damage.origin = ORIGIN_NONE;
return combatChangeHealth(attacker, target, damage);
}
int32_t realHealthChange = target->getHealth();
target->gainHealth(attacker, damage.primary.value);
realHealthChange = target->getHealth() - realHealthChange;
if (realHealthChange > 0 && !target->isInGhostMode()) {
std::string damageString = std::to_string(realHealthChange);
std::string pluralString = (realHealthChange != 1 ? "s." : ".");
std::string spectatorMessage;
if (!attacker) {
spectatorMessage = ucfirst(target->getNameDescription()) + " was healed for " + damageString + " hitpoint" + pluralString;
} else {
spectatorMessage = ucfirst(attacker->getNameDescription()) + " healed ";
if (attacker == target) {
spectatorMessage += (targetPlayer ? (targetPlayer->getSex() == PLAYERSEX_FEMALE ? "herself" : "himself") : "itself");
} else {
spectatorMessage += target->getNameDescription();
}
spectatorMessage += " for " + damageString + " hitpoint" + pluralString;
}
TextMessage message;
message.position = targetPos;
message.primary.value = realHealthChange;
message.primary.color = TEXTCOLOR_MAYABLUE;
SpectatorVec list;
getSpectators(list, targetPos, false, true);
for (Creature* spectator : list) {
Player* tmpPlayer = spectator->getPlayer();
if (tmpPlayer == attackerPlayer && attackerPlayer != targetPlayer) {
message.type = MESSAGE_HEALED;
message.text = "You heal " + target->getNameDescription() + " for " + damageString + " hitpoint" + pluralString;
} else if (tmpPlayer == targetPlayer) {
message.type = MESSAGE_HEALED;
if (!attacker) {
message.text = "You were healed for " + damageString + " hitpoint" + pluralString;
} else if (targetPlayer == attackerPlayer) {
message.text = "You heal yourself for " + damageString + " hitpoint" + pluralString;
} else {
message.text = "You were healed by " + attacker->getNameDescription() + " for " + damageString + " hitpoint" + pluralString;
}
} else {
message.type = MESSAGE_HEALED_OTHERS;
message.text = spectatorMessage;
}
tmpPlayer->sendTextMessage(message);
}
}
} else {
SpectatorVec list;
getSpectators(list, targetPos, true, true);
if (!target->isAttackable() || Combat::canDoCombat(attacker, target) != RET_NOERROR) {
addMagicEffect(list, targetPos, CONST_ME_POFF);
return true;
}
Player* attackerPlayer;
if (attacker) {
attackerPlayer = attacker->getPlayer();
} else {
attackerPlayer = nullptr;
}
Player* targetPlayer = target->getPlayer();
if (attackerPlayer && targetPlayer) {
if (g_config.getBoolean(ConfigManager::CANNOT_ATTACK_SAME_LOOKFEET) && attacker->defaultOutfit.lookFeet == target->defaultOutfit.lookFeet && damage.primary.type != COMBAT_HEALING) {
return false;
}
if (attackerPlayer->getSkull() == SKULL_BLACK && attackerPlayer->getSkullClient(targetPlayer) == SKULL_NONE) {
return false;
}
}
damage.primary.value = std::abs(damage.primary.value);
damage.secondary.value = std::abs(damage.secondary.value);
int32_t healthChange = damage.primary.value + damage.secondary.value;
if (healthChange == 0) {
return true;
}
TextMessage message;
message.position = targetPos;
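// with a mana shield active, damage is drained from mana first; only the remainder falls through as health damage below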
if (target->hasCondition(CONDITION_MANASHIELD) && damage.primary.type != COMBAT_UNDEFINEDDAMAGE) {
int32_t manaDamage = std::min<int32_t>(target->getMana(), healthChange);
if (manaDamage != 0) {
if (damage.origin != ORIGIN_NONE) {
g_events->eventCreatureOnChangeMana(target, attacker, healthChange, damage.origin);
if (healthChange == 0) {
return true;
}
manaDamage = std::min<int32_t>(target->getMana(), healthChange);
}
target->drainMana(attacker, manaDamage);
addMagicEffect(list, targetPos, CONST_ME_LOSEENERGY);
std::string damageString = std::to_string(manaDamage);
std::string spectatorMessage = ucfirst(target->getNameDescription()) + " loses " + damageString + " mana";
if (attacker) {
spectatorMessage += " blocking an attack by ";
if (attacker == target) {
spectatorMessage += (targetPlayer ? (targetPlayer->getSex() == PLAYERSEX_FEMALE ? "herself" : "himself") : "itself");
} else {
spectatorMessage += attacker->getNameDescription();
}
}
spectatorMessage += '.';
message.primary.value = manaDamage;
message.primary.color = TEXTCOLOR_BLUE;
for (Creature* spectator : list) {
Player* tmpPlayer = spectator->getPlayer();
if (tmpPlayer->getPosition().z == targetPos.z) {
if (tmpPlayer == attackerPlayer && attackerPlayer != targetPlayer) {
message.type = MESSAGE_DAMAGE_DEALT;
message.text = ucfirst(target->getNameDescription()) + " loses " + damageString + " mana blocking your attack.";
} else if (tmpPlayer == targetPlayer) {
message.type = MESSAGE_DAMAGE_RECEIVED;
if (!attacker) {
message.text = "You lose " + damageString + " mana.";
} else if (targetPlayer == attackerPlayer) {
message.text = "You lose " + damageString + " mana blocking an attack by yourself.";
} else {
message.text = "You lose " + damageString + " mana blocking an attack by " + attacker->getNameDescription() + '.';
}
} else {
message.type = MESSAGE_DAMAGE_OTHERS;
message.text = spectatorMessage;
}
tmpPlayer->sendTextMessage(message);
}
}
damage.primary.value -= manaDamage;
if (damage.primary.value < 0) {
damage.secondary.value = std::max<int32_t>(0, damage.secondary.value + damage.primary.value);
damage.primary.value = 0;
}
}
}
int32_t realDamage = damage.primary.value + damage.secondary.value;
if (realDamage == 0) {
return true;
}
if (damage.origin != ORIGIN_NONE) {
g_events->eventCreatureOnChangeHealth(target, attacker, damage);
damage.origin = ORIGIN_NONE;
return combatChangeHealth(attacker, target, damage);
}
int32_t targetHealth = target->getHealth();
if (damage.primary.value >= targetHealth) {
damage.primary.value = targetHealth;
damage.secondary.value = 0;
} else if (damage.secondary.value) {
damage.secondary.value = std::min<int32_t>(damage.secondary.value, targetHealth - damage.primary.value);
}
realDamage = damage.primary.value + damage.secondary.value;
if (realDamage == 0) {
return true;
} else if (realDamage >= targetHealth) {
if (!g_events->eventCreatureOnPrepareDeath(target, attacker)) {
return false;
}
}
target->drainHealth(attacker, realDamage);
addCreatureHealth(list, target);
message.primary.value = damage.primary.value;
message.secondary.value = damage.secondary.value;
uint8_t hitEffect;
if (message.primary.value) {
combatGetTypeInfo(damage.primary.type, target, message.primary.color, hitEffect);
if (hitEffect != CONST_ME_NONE) {
addMagicEffect(list, targetPos, hitEffect);
}
}
if (message.secondary.value) {
combatGetTypeInfo(damage.secondary.type, target, message.secondary.color, hitEffect);
if (hitEffect != CONST_ME_NONE) {
addMagicEffect(list, targetPos, hitEffect);
}
}
if (message.primary.color != TEXTCOLOR_NONE || message.secondary.color != TEXTCOLOR_NONE) {
std::string damageString = std::to_string(realDamage);
std::string pluralString = (realDamage != 1 ? "s" : "");
std::string spectatorMessage = ucfirst(target->getNameDescription()) + " loses " + damageString + " hitpoint" + pluralString;
if (attacker) {
spectatorMessage += " due to ";
if (attacker == target) {
spectatorMessage += (targetPlayer ? (targetPlayer->getSex() == PLAYERSEX_FEMALE ? "her" : "his") : "its");
spectatorMessage += " own attack";
} else {
spectatorMessage += "an attack by " + target->getNameDescription();
}
}
spectatorMessage += '.';
for (Creature* spectator : list) {
Player* tmpPlayer = spectator->getPlayer();
if (tmpPlayer->getPosition().z == targetPos.z) {
if (tmpPlayer == attackerPlayer && attackerPlayer != targetPlayer) {
message.type = MESSAGE_DAMAGE_DEALT;
message.text = ucfirst(target->getNameDescription()) + " loses " + damageString + " hitpoint" + pluralString + " due to your attack.";
} else if (tmpPlayer == targetPlayer) {
message.type = MESSAGE_DAMAGE_RECEIVED;
if (!attacker) {
message.text = "You lose " + damageString + " hitpoint" + pluralString + '.';
} else if (targetPlayer == attackerPlayer) {
message.text = "You lose " + damageString + " hitpoint" + pluralString + " due to your own attack.";
} else {
message.text = "You lose " + damageString + " hitpoint" + pluralString + " due to an attack by " + attacker->getNameDescription() + '.';
}
} else {
message.type = MESSAGE_DAMAGE_OTHERS;
message.text = spectatorMessage;
}
tmpPlayer->sendTextMessage(message);
}
}
}
}
return true;
}
bool Game::combatChangeMana(Creature* attacker, Creature* target, int32_t manaChange, CombatOrigin origin)
{
if (manaChange > 0) {
if (attacker) {
Player* attackerPlayer = attacker->getPlayer();
Player* targetPlayer = target->getPlayer();
if (attackerPlayer && targetPlayer) {
if (g_config.getBoolean(ConfigManager::CANNOT_ATTACK_SAME_LOOKFEET) && attacker->defaultOutfit.lookFeet == target->defaultOutfit.lookFeet) {
return false;
}
if (attackerPlayer->getSkull() == SKULL_BLACK && attackerPlayer->getSkullClient(targetPlayer) == SKULL_NONE) {
return false;
}
}
}
if (origin != ORIGIN_NONE) {
g_events->eventCreatureOnChangeMana(target, attacker, manaChange, origin);
return combatChangeMana(attacker, target, manaChange, ORIGIN_NONE);
}
target->changeMana(manaChange);
} else {
const Position& targetPos = target->getPosition();
if (!target->isAttackable() || Combat::canDoCombat(attacker, target) != RET_NOERROR) {
addMagicEffect(targetPos, CONST_ME_POFF);
return false;
}
Player* attackerPlayer;
if (attacker) {
attackerPlayer = attacker->getPlayer();
} else {
attackerPlayer = nullptr;
}
Player* targetPlayer = target->getPlayer();
if (attackerPlayer && targetPlayer) {
if (g_config.getBoolean(ConfigManager::CANNOT_ATTACK_SAME_LOOKFEET) && attacker->defaultOutfit.lookFeet == target->defaultOutfit.lookFeet) {
return false;
}
if (attackerPlayer->getSkull() == SKULL_BLACK && attackerPlayer->getSkullClient(targetPlayer) == SKULL_NONE) {
return false;
}
}
int32_t manaLoss = std::min<int32_t>(target->getMana(), -manaChange);
BlockType_t blockType = target->blockHit(attacker, COMBAT_MANADRAIN, manaLoss);
if (blockType != BLOCK_NONE) {
addMagicEffect(targetPos, CONST_ME_POFF);
return false;
}
if (manaLoss <= 0) {
return true;
}
if (origin != ORIGIN_NONE) {
g_events->eventCreatureOnChangeMana(target, attacker, manaChange, origin);
return combatChangeMana(attacker, target, manaChange, ORIGIN_NONE);
}
target->drainMana(attacker, manaLoss);
std::string damageString = std::to_string(manaLoss);
std::string spectatorMessage = ucfirst(target->getNameDescription()) + " loses " + damageString + " mana";
if (attacker) {
spectatorMessage += " blocking an attack by ";
if (attacker == target) {
spectatorMessage += (targetPlayer ? (targetPlayer->getSex() == PLAYERSEX_FEMALE ? "herself" : "himself") : "itself");
} else {
spectatorMessage += attacker->getNameDescription();
}
}
spectatorMessage += '.';
TextMessage message;
message.position = targetPos;
message.primary.value = manaLoss;
message.primary.color = TEXTCOLOR_BLUE;
SpectatorVec list;
getSpectators(list, targetPos, false, true);
for (Creature* spectator : list) {
Player* tmpPlayer = spectator->getPlayer();
if (tmpPlayer == attackerPlayer && attackerPlayer != targetPlayer) {
message.type = MESSAGE_DAMAGE_DEALT;
message.text = ucfirst(target->getNameDescription()) + " loses " + damageString + " mana blocking your attack.";
} else if (tmpPlayer == targetPlayer) {
message.type = MESSAGE_DAMAGE_RECEIVED;
if (!attacker) {
message.text = "You lose " + damageString + " mana.";
} else if (targetPlayer == attackerPlayer) {
message.text = "You lose " + damageString + " mana blocking an attack by yourself.";
} else {
message.text = "You lose " + damageString + " mana blocking an attack by " + attacker->getNameDescription() + '.';
}
} else {
message.type = MESSAGE_DAMAGE_OTHERS;
message.text = spectatorMessage;
}
tmpPlayer->sendTextMessage(message);
}
}
return true;
}
void Game::addCreatureHealth(const Creature* target)
{
SpectatorVec list;
getSpectators(list, target->getPosition(), true, true);
addCreatureHealth(list, target);
}
void Game::addCreatureHealth(const SpectatorVec& list, const Creature* target)
{
for (Creature* spectator : list) {
if (Player* tmpPlayer = spectator->getPlayer()) {
tmpPlayer->sendCreatureHealth(target);
}
}
}
void Game::addMagicEffect(const Position& pos, uint8_t effect)
{
SpectatorVec list;
getSpectators(list, pos, true, true);
addMagicEffect(list, pos, effect);
}
void Game::addMagicEffect(const SpectatorVec& list, const Position& pos, uint8_t effect)
{
for (Creature* spectator : list) {
if (Player* tmpPlayer = spectator->getPlayer()) {
tmpPlayer->sendMagicEffect(pos, effect);
}
}
}
void Game::addDistanceEffect(const Position& fromPos, const Position& toPos, uint8_t effect)
{
SpectatorVec list;
getSpectators(list, fromPos, false, true);
getSpectators(list, toPos, false, true);
addDistanceEffect(list, fromPos, toPos, effect);
}
void Game::addDistanceEffect(const SpectatorVec& list, const Position& fromPos, const Position& toPos, uint8_t effect)
{
for (Creature* spectator : list) {
if (Player* tmpPlayer = spectator->getPlayer()) {
tmpPlayer->sendDistanceShoot(fromPos, toPos, effect);
}
}
}
void Game::startDecay(Item* item)
{
if (!item || !item->canDecay()) {
return;
}
ItemDecayState_t decayState = item->getDecaying();
if (decayState == DECAYING_TRUE) {
return;
}
if (item->getDuration() > 0) {
item->useThing2();
item->setDecaying(DECAYING_TRUE);
toDecayItems.push_front(item);
} else {
internalDecayItem(item);
}
}
void Game::internalDecayItem(Item* item)
{
const ItemType& it = Item::items[item->getID()];
if (it.decayTo != 0) {
Item* newItem = transformItem(item, it.decayTo);
startDecay(newItem);
} else {
ReturnValue ret = internalRemoveItem(item);
if (ret != RET_NOERROR) {
std::cout << "DEBUG, internalDecayItem failed, error code: " << (int32_t) ret << "item id: " << item->getID() << std::endl;
}
}
}
void Game::checkDecay()
{
g_scheduler.addEvent(createSchedulerTask(EVENT_DECAYINTERVAL, std::bind(&Game::checkDecay, this)));
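// one bucket is processed per call, so every queued item is charged EVENT_DECAYINTERVAL * EVENT_DECAY_BUCKETS ms of duration per visit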
size_t bucket = (lastBucket + 1) % EVENT_DECAY_BUCKETS;
for (auto it = decayItems[bucket].begin(); it != decayItems[bucket].end();) {
Item* item = *it;
if (!item->canDecay()) {
item->setDecaying(DECAYING_FALSE);
ReleaseItem(item);
it = decayItems[bucket].erase(it);
continue;
}
int32_t decreaseTime = EVENT_DECAYINTERVAL * EVENT_DECAY_BUCKETS;
int32_t duration = item->getDuration();
if (duration - decreaseTime < 0) {
decreaseTime = duration;
}
duration -= decreaseTime;
item->decreaseDuration(decreaseTime);
if (duration <= 0) {
it = decayItems[bucket].erase(it);
internalDecayItem(item);
ReleaseItem(item);
} else if (duration < EVENT_DECAYINTERVAL * EVENT_DECAY_BUCKETS) {
it = decayItems[bucket].erase(it);
size_t newBucket = (bucket + ((duration + EVENT_DECAYINTERVAL / 2) / 1000)) % EVENT_DECAY_BUCKETS;
if (newBucket == bucket) {
internalDecayItem(item);
ReleaseItem(item);
} else {
decayItems[newBucket].push_back(item);
}
} else {
++it;
}
}
lastBucket = bucket;
cleanup();
}
void Game::checkLight()
{
g_scheduler.addEvent(createSchedulerTask(EVENT_LIGHTINTERVAL, std::bind(&Game::checkLight, this)));
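// advance the in-game time of day (1440 units per full day/night cycle) and shift the ambient light around sunrise and sunset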
lightHour += lightHourDelta;
if (lightHour > 1440) {
lightHour -= 1440;
}
if (std::abs(lightHour - SUNRISE) < 2 * lightHourDelta) {
lightState = LIGHT_STATE_SUNRISE;
} else if (std::abs(lightHour - SUNSET) < 2 * lightHourDelta) {
lightState = LIGHT_STATE_SUNSET;
}
int32_t newLightLevel = lightLevel;
bool lightChange = false;
switch (lightState) {
case LIGHT_STATE_SUNRISE: {
newLightLevel += (LIGHT_LEVEL_DAY - LIGHT_LEVEL_NIGHT) / 30;
lightChange = true;
break;
}
case LIGHT_STATE_SUNSET: {
newLightLevel -= (LIGHT_LEVEL_DAY - LIGHT_LEVEL_NIGHT) / 30;
lightChange = true;
break;
}
default:
break;
}
if (newLightLevel <= LIGHT_LEVEL_NIGHT) {
lightLevel = LIGHT_LEVEL_NIGHT;
lightState = LIGHT_STATE_NIGHT;
} else if (newLightLevel >= LIGHT_LEVEL_DAY) {
lightLevel = LIGHT_LEVEL_DAY;
lightState = LIGHT_STATE_DAY;
} else {
lightLevel = newLightLevel;
}
if (lightChange) {
LightInfo lightInfo;
getWorldLightInfo(lightInfo);
for (const auto& it : players) {
it.second->sendWorldLight(lightInfo);
}
}
}
void Game::getWorldLightInfo(LightInfo& lightInfo) const
{
lightInfo.level = lightLevel;
lightInfo.color = 0xD7;
}
void Game::addCommandTag(char tag)
{
for (char commandTag : commandTags) {
if (commandTag == tag) {
return;
}
}
commandTags.push_back(tag);
}
void Game::resetCommandTag()
{
commandTags.clear();
}
void Game::shutdown()
{
std::cout << "Shutting down server..." << std::flush;
g_scheduler.shutdown();
g_dispatcher.shutdown();
Spawns::getInstance()->clear();
Raids::getInstance()->clear();
cleanup();
if (services) {
services->stop();
}
ConnectionManager::getInstance()->closeAll();
std::cout << " done!" << std::endl;
}
void Game::cleanup()
{
//free memory
for (auto creature : ToReleaseCreatures) {
creature->releaseThing2();
}
ToReleaseCreatures.clear();
for (auto item : ToReleaseItems) {
item->releaseThing2();
}
ToReleaseItems.clear();
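// items queued by startDecay() are sorted into a decay bucket according to their remaining duration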
for (Item* item : toDecayItems) {
const uint32_t dur = item->getDuration();
if (dur >= EVENT_DECAYINTERVAL * EVENT_DECAY_BUCKETS) {
decayItems[lastBucket].push_back(item);
} else {
decayItems[(lastBucket + 1 + dur / 1000) % EVENT_DECAY_BUCKETS].push_back(item);
}
}
toDecayItems.clear();
}
void Game::ReleaseCreature(Creature* creature)
{
ToReleaseCreatures.push_back(creature);
}
void Game::ReleaseItem(Item* item)
{
ToReleaseItems.push_back(item);
}
void Game::broadcastMessage(const std::string& text, MessageClasses type) const
{
std::cout << "> Broadcasted message: \"" << text << "\"." << std::endl;
for (const auto& it : players) {
it.second->sendTextMessage(type, text);
}
}
void Game::updateCreatureWalkthrough(const Creature* creature)
{
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
for (Creature* spectator : list) {
Player* tmpPlayer = spectator->getPlayer();
tmpPlayer->sendCreatureWalkthrough(creature, tmpPlayer->canWalkthroughEx(creature));
}
}
void Game::updatePlayerSkull(Player* player)
{
if (getWorldType() != WORLD_TYPE_PVP) {
return;
}
SpectatorVec list;
getSpectators(list, player->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureSkull(player);
}
}
void Game::updatePlayerShield(Player* player)
{
SpectatorVec list;
getSpectators(list, player->getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureShield(player);
}
}
void Game::updatePlayerHelpers(const Player& player)
{
uint32_t creatureId = player.getID();
uint16_t helpers = player.getHelpers();
SpectatorVec list;
getSpectators(list, player.getPosition(), true, true);
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureHelpers(creatureId, helpers);
}
}
void Game::updateCreatureType(Creature* creature)
{
const Player* masterPlayer = nullptr;
uint32_t creatureId = creature->getID();
CreatureType_t creatureType = creature->getType();
if (creatureType == CREATURETYPE_MONSTER) {
const Creature* master = creature->getMaster();
if (master) {
masterPlayer = master->getPlayer();
if (masterPlayer) {
creatureType = CREATURETYPE_SUMMON_OTHERS;
}
}
}
//send to clients
SpectatorVec list;
getSpectators(list, creature->getPosition(), true, true);
if (creatureType == CREATURETYPE_SUMMON_OTHERS) {
for (Creature* spectator : list) {
Player* player = spectator->getPlayer();
if (masterPlayer == player) {
player->sendCreatureType(creatureId, CREATURETYPE_SUMMON_OWN);
} else {
player->sendCreatureType(creatureId, creatureType);
}
}
} else {
for (Creature* spectator : list) {
spectator->getPlayer()->sendCreatureType(creatureId, creatureType);
}
}
}
void Game::updatePremium(Account& account)
{
bool save = false;
time_t timeNow = time(nullptr);
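// deduct every full elapsed day from the premium counter; 0 (free account) and the uint16_t maximum (unlimited) are left untouched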
if (account.premiumDays != 0 && account.premiumDays != std::numeric_limits<uint16_t>::max()) {
if (account.lastDay == 0) {
account.lastDay = timeNow;
save = true;
} else {
uint32_t days = (timeNow - account.lastDay) / 86400;
if (days > 0) {
if (days >= account.premiumDays) {
account.premiumDays = 0;
account.lastDay = 0;
} else {
account.premiumDays -= days;
uint32_t remainder = (timeNow - account.lastDay) % 86400;
account.lastDay = timeNow - remainder;
}
save = true;
}
}
} else if (account.lastDay != 0) {
account.lastDay = 0;
save = true;
}
if (save && !IOLoginData::saveAccount(account)) {
std::cout << "> ERROR: Failed to save account: " << account.name << "!" << std::endl;
}
}
void Game::loadMotdNum()
{
Database* db = Database::getInstance();
DBResult_ptr result = db->storeQuery("SELECT `value` FROM `server_config` WHERE `config` = 'motd_num'");
if (result) {
motdNum = result->getDataInt("value");
} else {
db->executeQuery("INSERT INTO `server_config` (`config`, `value`) VALUES ('motd_num', '0')");
}
result = db->storeQuery("SELECT `value` FROM `server_config` WHERE `config` = 'motd_hash'");
if (result) {
motdHash = result->getDataString("value");
if (motdHash != transformToSHA1(g_config.getString(ConfigManager::MOTD))) {
++motdNum;
}
} else {
db->executeQuery("INSERT INTO `server_config` (`config`, `value`) VALUES ('motd_hash', '')");
}
}
void Game::saveMotdNum() const
{
Database* db = Database::getInstance();
std::ostringstream query;
query << "UPDATE `server_config` SET `value` = '" << motdNum << "' WHERE `config` = 'motd_num'";
db->executeQuery(query.str());
query.str("");
query << "UPDATE `server_config` SET `value` = '" << transformToSHA1(g_config.getString(ConfigManager::MOTD)) << "' WHERE `config` = 'motd_hash'";
db->executeQuery(query.str());
}
void Game::checkPlayersRecord()
{
const size_t playersOnline = getPlayersOnline();
if (playersOnline > playersRecord) {
uint32_t previousRecord = playersRecord;
playersRecord = playersOnline;
for (const auto& it : g_globalEvents->getEventMap(GLOBALEVENT_RECORD)) {
it.second->executeRecord(playersRecord, previousRecord);
}
updatePlayersRecord();
}
}
void Game::updatePlayersRecord() const
{
Database* db = Database::getInstance();
std::ostringstream query;
query << "UPDATE `server_config` SET `value` = '" << playersRecord << "' WHERE `config` = 'players_record'";
db->executeQuery(query.str());
}
void Game::loadPlayersRecord()
{
Database* db = Database::getInstance();
DBResult_ptr result = db->storeQuery("SELECT `value` FROM `server_config` WHERE `config` = 'players_record'");
if (result) {
playersRecord = result->getDataInt("value");
} else {
db->executeQuery("INSERT INTO `server_config` (`config`, `value`) VALUES ('players_record', '0')");
}
}
uint64_t Game::getExperienceStage(uint32_t level)
{
if (!stagesEnabled) {
return g_config.getNumber(ConfigManager::RATE_EXPERIENCE);
}
if (useLastStageLevel && level >= lastStageLevel) {
return stages[lastStageLevel];
}
return stages[level];
}
bool Game::loadExperienceStages()
{
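// stages.xml maps level ranges to experience multipliers; a stage without a maxlevel attribute becomes the open-ended final stage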
pugi::xml_document doc;
pugi::xml_parse_result result = doc.load_file("data/XML/stages.xml");
if (!result) {
std::cout << "[Error - Game::loadExperienceStages] Failed to load data/XML/stages.xml: " << result.description() << std::endl;
return false;
}
for (pugi::xml_node stageNode = doc.child("stages").first_child(); stageNode; stageNode = stageNode.next_sibling()) {
if (strcasecmp(stageNode.name(), "config") == 0) {
stagesEnabled = stageNode.attribute("enabled").as_bool();
} else {
uint32_t minLevel, maxLevel, multiplier;
pugi::xml_attribute minLevelAttribute = stageNode.attribute("minlevel");
if (minLevelAttribute) {
minLevel = pugi::cast<uint32_t>(minLevelAttribute.value());
} else {
minLevel = 1;
}
pugi::xml_attribute maxLevelAttribute = stageNode.attribute("maxlevel");
if (maxLevelAttribute) {
maxLevel = pugi::cast<uint32_t>(maxLevelAttribute.value());
} else {
maxLevel = 0;
lastStageLevel = minLevel;
useLastStageLevel = true;
}
pugi::xml_attribute multiplierAttribute = stageNode.attribute("multiplier");
if (multiplierAttribute) {
multiplier = pugi::cast<uint32_t>(multiplierAttribute.value());
} else {
multiplier = 1;
}
if (useLastStageLevel) {
stages[lastStageLevel] = multiplier;
} else {
for (uint32_t i = minLevel; i <= maxLevel; ++i) {
stages[i] = multiplier;
}
}
}
}
return true;
}
void Game::playerInviteToParty(uint32_t playerId, uint32_t invitedId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Player* invitedPlayer = getPlayerByID(invitedId);
if (!invitedPlayer || invitedPlayer->isInviting(player)) {
return;
}
if (invitedPlayer->getParty()) {
std::ostringstream ss;
ss << invitedPlayer->getName() << " is already in a party.";
player->sendTextMessage(MESSAGE_INFO_DESCR, ss.str());
return;
}
Party* party = player->getParty();
if (!party) {
party = new Party(player);
} else if (party->getLeader() != player) {
return;
}
party->invitePlayer(*invitedPlayer);
}
void Game::playerJoinParty(uint32_t playerId, uint32_t leaderId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Player* leader = getPlayerByID(leaderId);
if (!leader || !leader->isInviting(player)) {
return;
}
Party* party = leader->getParty();
if (!party || party->getLeader() != leader) {
return;
}
if (player->getParty()) {
player->sendTextMessage(MESSAGE_INFO_DESCR, "You are already in a party.");
return;
}
party->joinParty(*player);
}
void Game::playerRevokePartyInvitation(uint32_t playerId, uint32_t invitedId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Party* party = player->getParty();
if (!party || party->getLeader() != player) {
return;
}
Player* invitedPlayer = getPlayerByID(invitedId);
if (!invitedPlayer || !player->isInviting(invitedPlayer)) {
return;
}
party->revokeInvitation(*invitedPlayer);
}
void Game::playerPassPartyLeadership(uint32_t playerId, uint32_t newLeaderId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Party* party = player->getParty();
if (!party || party->getLeader() != player) {
return;
}
Player* newLeader = getPlayerByID(newLeaderId);
if (!newLeader || !player->isPartner(newLeader)) {
return;
}
party->passPartyLeadership(newLeader);
}
void Game::playerLeaveParty(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Party* party = player->getParty();
if (!party || player->hasCondition(CONDITION_INFIGHT)) {
return;
}
party->leaveParty(player);
}
void Game::playerEnableSharedPartyExperience(uint32_t playerId, bool sharedExpActive)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Party* party = player->getParty();
if (!party || player->hasCondition(CONDITION_INFIGHT)) {
return;
}
party->setSharedExperience(player, sharedExpActive);
}
void Game::sendGuildMotd(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
Guild* guild = player->getGuild();
if (guild) {
player->sendChannelMessage("Message of the Day", guild->getMotd(), TALKTYPE_CHANNEL_R1, CHANNEL_GUILD);
}
}
void Game::kickPlayer(uint32_t playerId, bool displayEffect)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->kickPlayer(displayEffect);
}
void Game::playerReportBug(uint32_t playerId, const std::string& bug)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (player->getAccountType() == ACCOUNT_TYPE_NORMAL) {
return;
}
std::string fileName = "data/reports/" + player->getName() + " report.txt";
FILE* file = fopen(fileName.c_str(), "a");
if (file) {
const Position& position = player->getPosition();
fprintf(file, "------------------------------\nName: %s [Position X: %u Y: %u Z: %u]\nBug Report: %s\n", player->getName().c_str(), position.x, position.y, position.z, bug.c_str());
fclose(file);
}
player->sendTextMessage(MESSAGE_EVENT_DEFAULT, "Your report has been sent to " + g_config.getString(ConfigManager::SERVER_NAME) + ".");
}
void Game::playerDebugAssert(uint32_t playerId, const std::string& assertLine, const std::string& date, const std::string& description, const std::string& comment)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
// TODO: move debug assertions to database
FILE* file = fopen("client_assertions.txt", "a");
if (file) {
fprintf(file, "----- %s - %s (%s) -----\n", formatDate(time(nullptr)).c_str(), player->getName().c_str(), convertIPToString(player->getIP()).c_str());
fprintf(file, "%s\n%s\n%s\n%s\n", assertLine.c_str(), date.c_str(), description.c_str(), comment.c_str());
fclose(file);
}
}
void Game::playerLeaveMarket(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
player->setInMarket(false);
}
void Game::playerBrowseMarket(uint32_t playerId, uint16_t spriteId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
const ItemType& it = Item::items.getItemIdByClientId(spriteId);
if (it.id == 0) {
return;
}
if (it.wareId == 0) {
return;
}
const MarketOfferList& buyOffers = IOMarket::getActiveOffers(MARKETACTION_BUY, it.id);
const MarketOfferList& sellOffers = IOMarket::getActiveOffers(MARKETACTION_SELL, it.id);
player->sendMarketBrowseItem(it.id, buyOffers, sellOffers);
player->sendMarketDetail(it.id);
}
void Game::playerBrowseMarketOwnOffers(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
const MarketOfferList& buyOffers = IOMarket::getOwnOffers(MARKETACTION_BUY, player->getGUID());
const MarketOfferList& sellOffers = IOMarket::getOwnOffers(MARKETACTION_SELL, player->getGUID());
player->sendMarketBrowseOwnOffers(buyOffers, sellOffers);
}
void Game::playerBrowseMarketOwnHistory(uint32_t playerId)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
const HistoryMarketOfferList& buyOffers = IOMarket::getOwnHistory(MARKETACTION_BUY, player->getGUID());
const HistoryMarketOfferList& sellOffers = IOMarket::getOwnHistory(MARKETACTION_SELL, player->getGUID());
player->sendMarketBrowseOwnHistory(buyOffers, sellOffers);
}
void Game::playerCreateMarketOffer(uint32_t playerId, uint8_t type, uint16_t spriteId, uint16_t amount, uint32_t price, bool anonymous)
{
if (amount == 0 || amount > 64000) {
return;
}
if (price == 0 || price > 999999999) {
return;
}
if (type != MARKETACTION_BUY && type != MARKETACTION_SELL) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
if (g_config.getBoolean(ConfigManager::MARKET_PREMIUM) && !player->isPremium()) {
player->sendMarketLeave();
return;
}
const ItemType& itt = Item::items.getItemIdByClientId(spriteId);
if (itt.id == 0 || itt.wareId == 0) {
return;
}
const ItemType& it = Item::items.getItemIdByClientId(itt.wareId);
if (it.id == 0 || it.wareId == 0) {
return;
}
if (!it.stackable && amount > 2000) {
return;
}
const int32_t maxOfferCount = g_config.getNumber(ConfigManager::MAX_MARKET_OFFERS_AT_A_TIME_PER_PLAYER);
if (maxOfferCount > 0) {
const int32_t offerCount = IOMarket::getPlayerOfferCount(player->getGUID());
if (offerCount == -1 || offerCount >= maxOfferCount) {
return;
}
}
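// the listing fee is 1% of the total price, clamped to the 20..1000 gold range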
uint64_t fee = (price / 100.) * amount;
if (fee < 20) {
fee = 20;
} else if (fee > 1000) {
fee = 1000;
}
if (type == MARKETACTION_SELL) {
if (fee > player->bankBalance) {
return;
}
DepotChest* depotChest = player->getDepotChest(player->getLastDepotId(), false);
if (!depotChest) {
return;
}
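// collect matching items from the depot chest and inbox, descending into nested containers, until the offered amount is gathered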
ItemList itemList;
uint32_t count = 0;
std::vector<Container*> containers {depotChest, player->getInbox()};
bool enough = false;
size_t i = 0;
do {
Container* container = containers[i++];
for (Item* item : container->getItemList()) {
Container* c = item->getContainer();
if (c && !c->empty()) {
containers.push_back(c);
continue;
}
const ItemType& itemType = Item::items[item->getID()];
if (itemType.wareId != it.wareId) {
continue;
}
if (item->hasAttributes()) {
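// only items with their default charges/duration and no other attributes may be listed on the market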
bool badAttribute = false;
ItemAttributes* attributes = item->getAttributes();
for (const auto& attr : attributes->getList()) {
if (attr.type == ITEM_ATTRIBUTE_CHARGES) {
uint16_t charges = static_cast<uint16_t>(0xFFFF & reinterpret_cast<ptrdiff_t>(attr.value));
if (charges != itemType.charges) {
badAttribute = true;
break;
}
} else if (attr.type == ITEM_ATTRIBUTE_DURATION) {
uint32_t duration = static_cast<uint32_t>(0xFFFFFFFF & reinterpret_cast<ptrdiff_t>(attr.value));
if (duration != itemType.decayTime) {
badAttribute = true;
break;
}
} else {
badAttribute = true;
break;
}
}
if (badAttribute) {
continue;
}
}
itemList.push_back(item);
count += Item::countByType(item, -1);
if (count >= amount) {
enough = true;
break;
}
}
if (enough) {
break;
}
} while (i < containers.size());
if (!enough) {
return;
}
if (it.stackable) {
uint16_t tmpAmount = amount;
for (Item* item : itemList) {
uint16_t removeCount = std::min<uint16_t>(tmpAmount, item->getItemCount());
tmpAmount -= removeCount;
internalRemoveItem(item, removeCount);
if (tmpAmount == 0) {
break;
}
}
} else {
for (Item* item : itemList) {
internalRemoveItem(item);
}
}
player->bankBalance -= fee;
} else {
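// Buy offer: reserve the total price plus the fee from the player's bank balance up front.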
uint64_t totalPrice = (uint64_t)price * amount;
totalPrice += fee;
if (totalPrice > player->bankBalance) {
return;
}
player->bankBalance -= totalPrice;
}
IOMarket::createOffer(player->getGUID(), (MarketAction_t)type, it.id, amount, price, anonymous);
player->sendMarketEnter(player->getLastDepotId());
const MarketOfferList& buyOffers = IOMarket::getActiveOffers(MARKETACTION_BUY, it.id);
const MarketOfferList& sellOffers = IOMarket::getActiveOffers(MARKETACTION_SELL, it.id);
player->sendMarketBrowseItem(it.id, buyOffers, sellOffers);
}
void Game::playerCancelMarketOffer(uint32_t playerId, uint32_t timestamp, uint16_t counter)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
MarketOfferEx offer = IOMarket::getOfferByCounter(timestamp, counter);
if (offer.id == 0 || offer.playerId != player->getGUID()) {
return;
}
if (offer.type == MARKETACTION_BUY) {
player->bankBalance += (uint64_t)offer.price * offer.amount;
player->sendMarketEnter(player->getLastDepotId());
} else {
const ItemType& it = Item::items[offer.itemId];
if (it.id == 0) {
return;
}
if (it.stackable) {
uint16_t tmpAmount = offer.amount;
while (tmpAmount > 0) {
int32_t stackCount = std::min<int32_t>(100, tmpAmount);
Item* item = Item::CreateItem(it.id, stackCount);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
tmpAmount -= stackCount;
}
} else {
int32_t subType;
if (it.charges != 0) {
subType = it.charges;
} else {
subType = -1;
}
for (uint16_t i = 0; i < offer.amount; ++i) {
Item* item = Item::CreateItem(it.id, subType);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
}
}
}
IOMarket::moveOfferToHistory(offer.id, OFFERSTATE_CANCELLED);
offer.amount = 0;
offer.timestamp += g_config.getNumber(ConfigManager::MARKET_OFFER_DURATION);
player->sendMarketCancelOffer(offer);
}
void Game::playerAcceptMarketOffer(uint32_t playerId, uint32_t timestamp, uint16_t counter, uint16_t amount)
{
if (amount == 0 || amount > 64000) {
return;
}
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->isInMarket()) {
return;
}
MarketOfferEx offer = IOMarket::getOfferByCounter(timestamp, counter);
if (offer.id == 0) {
return;
}
if (amount > offer.amount) {
return;
}
const ItemType& it = Item::items[offer.itemId];
if (it.id == 0) {
return;
}
uint64_t totalPrice = (uint64_t)offer.price * amount;
if (offer.type == MARKETACTION_BUY) {
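// Accepting a buy offer: the accepting player supplies matching items from his depot/inbox, is paid the offered price, and the items are delivered to the offer's creator.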
DepotChest* depotChest = player->getDepotChest(player->getLastDepotId(), false);
if (!depotChest) {
return;
}
ItemList itemList;
uint32_t count = 0;
std::vector<Container*> containers {depotChest, player->getInbox()};
bool enough = false;
size_t i = 0;
do {
Container* container = containers[i++];
for (Item* item : container->getItemList()) {
Container* c = item->getContainer();
if (c && !c->empty()) {
containers.push_back(c);
continue;
}
const ItemType& itemType = Item::items[item->getID()];
if (itemType.wareId != it.wareId) {
continue;
}
if (item->hasAttributes()) {
bool badAttribute = false;
ItemAttributes* attributes = item->getAttributes();
for (const auto& attr : attributes->getList()) {
if (attr.type == ITEM_ATTRIBUTE_CHARGES) {
uint16_t charges = static_cast<uint16_t>(0xFFFF & reinterpret_cast<ptrdiff_t>(attr.value));
if (charges != itemType.charges) {
badAttribute = true;
break;
}
} else if (attr.type == ITEM_ATTRIBUTE_DURATION) {
uint32_t duration = static_cast<uint32_t>(0xFFFFFFFF & reinterpret_cast<ptrdiff_t>(attr.value));
if (duration != itemType.decayTime) {
badAttribute = true;
break;
}
} else {
badAttribute = true;
break;
}
}
if (badAttribute) {
continue;
}
}
itemList.push_back(item);
count += Item::countByType(item, -1);
if (count >= amount) {
enough = true;
break;
}
}
if (enough) {
break;
}
} while (i < containers.size());
if (!enough) {
return;
}
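// Deliver the items to the buyer; if he is offline, load him from the database temporarily and save him afterwards.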
Player* buyerPlayer = getPlayerByGUID(offer.playerId);
if (!buyerPlayer) {
buyerPlayer = new Player(nullptr);
if (!IOLoginData::loadPlayerById(buyerPlayer, offer.playerId)) {
delete buyerPlayer;
return;
}
}
if (it.stackable) {
uint16_t tmpAmount = amount;
for (Item* item : itemList) {
uint16_t removeCount = std::min<uint16_t>(tmpAmount, item->getItemCount());
tmpAmount -= removeCount;
internalRemoveItem(item, removeCount);
if (tmpAmount == 0) {
break;
}
}
} else {
for (Item* item : itemList) {
internalRemoveItem(item);
}
}
player->bankBalance += totalPrice;
if (it.stackable) {
uint16_t tmpAmount = amount;
while (tmpAmount > 0) {
uint16_t stackCount = std::min<uint16_t>(100, tmpAmount);
Item* item = Item::CreateItem(it.id, stackCount);
if (internalAddItem(buyerPlayer->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
tmpAmount -= stackCount;
}
} else {
int32_t subType;
if (it.charges != 0) {
subType = it.charges;
} else {
subType = -1;
}
for (uint16_t i = 0; i < amount; ++i) {
Item* item = Item::CreateItem(it.id, subType);
if (internalAddItem(buyerPlayer->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
}
}
if (buyerPlayer->isOffline()) {
IOLoginData::savePlayer(buyerPlayer);
delete buyerPlayer;
} else {
buyerPlayer->onReceiveMail();
}
} else {
if (totalPrice > player->bankBalance) {
return;
}
player->bankBalance -= totalPrice;
if (it.stackable) {
uint16_t tmpAmount = amount;
while (tmpAmount > 0) {
uint16_t stackCount = std::min<uint16_t>(100, tmpAmount);
Item* item = Item::CreateItem(it.id, stackCount);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
tmpAmount -= stackCount;
}
} else {
int32_t subType;
if (it.charges != 0) {
subType = it.charges;
} else {
subType = -1;
}
for (uint16_t i = 0; i < amount; ++i) {
Item* item = Item::CreateItem(it.id, subType);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
}
}
Player* sellerPlayer = getPlayerByGUID(offer.playerId);
if (sellerPlayer) {
sellerPlayer->bankBalance += totalPrice;
} else {
IOLoginData::increaseBankBalance(offer.playerId, totalPrice);
}
player->onReceiveMail();
}
const int32_t marketOfferDuration = g_config.getNumber(ConfigManager::MARKET_OFFER_DURATION);
IOMarket::appendHistory(player->getGUID(), (offer.type == MARKETACTION_BUY ? MARKETACTION_SELL : MARKETACTION_BUY), offer.itemId, amount, offer.price, offer.timestamp + marketOfferDuration, OFFERSTATE_ACCEPTEDEX);
IOMarket::appendHistory(offer.playerId, offer.type, offer.itemId, amount, offer.price, offer.timestamp + marketOfferDuration, OFFERSTATE_ACCEPTED);
offer.amount -= amount;
if (offer.amount == 0) {
IOMarket::deleteOffer(offer.id);
} else {
IOMarket::acceptOffer(offer.id, amount);
}
player->sendMarketEnter(player->getLastDepotId());
offer.timestamp += marketOfferDuration;
player->sendMarketAcceptOffer(offer);
}
void Game::checkExpiredMarketOffers()
{
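// Refund the reserved money of expired buy offers and return the items of expired sell offers to their owners' inboxes.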
const ExpiredMarketOfferList& expiredBuyOffers = IOMarket::getExpiredOffers(MARKETACTION_BUY);
for (const ExpiredMarketOffer& offer : expiredBuyOffers) {
uint64_t totalPrice = (uint64_t)offer.price * offer.amount;
Player* player = getPlayerByGUID(offer.playerId);
if (player) {
player->bankBalance += totalPrice;
} else {
IOLoginData::increaseBankBalance(offer.playerId, totalPrice);
}
IOMarket::moveOfferToHistory(offer.id, OFFERSTATE_EXPIRED);
}
const ExpiredMarketOfferList& expiredSellOffers = IOMarket::getExpiredOffers(MARKETACTION_SELL);
for (const ExpiredMarketOffer& offer : expiredSellOffers) {
Player* player = getPlayerByGUID(offer.playerId);
if (!player) {
player = new Player(nullptr);
if (!IOLoginData::loadPlayerById(player, offer.playerId)) {
delete player;
continue;
}
}
const ItemType& itemType = Item::items[offer.itemId];
if (itemType.id == 0) {
continue;
}
if (itemType.stackable) {
uint16_t tmpAmount = offer.amount;
while (tmpAmount > 0) {
uint16_t stackCount = std::min<uint16_t>(100, tmpAmount);
Item* item = Item::CreateItem(itemType.id, stackCount);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
tmpAmount -= stackCount;
}
} else {
int32_t subType;
if (itemType.charges != 0) {
subType = itemType.charges;
} else {
subType = -1;
}
for (uint16_t i = 0; i < offer.amount; ++i) {
Item* item = Item::CreateItem(itemType.id, subType);
if (internalAddItem(player->getInbox(), item, INDEX_WHEREEVER, FLAG_NOLIMIT) != RET_NOERROR) {
delete item;
break;
}
}
}
if (player->isOffline()) {
IOLoginData::savePlayer(player);
delete player;
}
IOMarket::moveOfferToHistory(offer.id, OFFERSTATE_EXPIRED);
}
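// Reschedule the periodic check unless the interval has been disabled in the config.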
int32_t checkExpiredMarketOffersEachMinutes = g_config.getNumber(ConfigManager::CHECK_EXPIRED_MARKET_OFFERS_EACH_MINUTES);
if (checkExpiredMarketOffersEachMinutes <= 0) {
return;
}
g_scheduler.addEvent(createSchedulerTask(checkExpiredMarketOffersEachMinutes * 60 * 1000, std::bind(&Game::checkExpiredMarketOffers, this)));
}
void Game::parsePlayerExtendedOpcode(uint32_t playerId, uint8_t opcode, const std::string& buffer)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
g_events->eventPlayerOnExtendedOpcode(player, opcode, buffer);
}
void Game::forceAddCondition(uint32_t creatureId, Condition* condition)
{
Creature* creature = getCreatureByID(creatureId);
if (!creature) {
delete condition;
return;
}
creature->addCondition(condition, true);
}
void Game::forceRemoveCondition(uint32_t creatureId, ConditionType_t type)
{
Creature* creature = getCreatureByID(creatureId);
if (!creature) {
return;
}
creature->removeCondition(type, true);
}
void Game::sendOfflineTrainingDialog(Player* player)
{
if (!player) {
return;
}
if (!player->hasModalWindowOpen(offlineTrainingWindow.id)) {
player->sendModalWindow(offlineTrainingWindow);
}
}
void Game::playerAnswerModalWindow(uint32_t playerId, uint32_t modalWindowId, uint8_t button, uint8_t choice)
{
Player* player = getPlayerByID(playerId);
if (!player) {
return;
}
if (!player->hasModalWindowOpen(modalWindowId)) {
return;
}
player->onModalWindowHandled(modalWindowId);
// offline training, hardcoded
if (modalWindowId == std::numeric_limits<uint32_t>::max()) {
if (button == 1) {
if (choice == SKILL_SWORD || choice == SKILL_AXE || choice == SKILL_CLUB || choice == SKILL_DISTANCE || choice == SKILL_MAGLEVEL) {
BedItem* bedItem = player->getBedItem();
if (bedItem && bedItem->sleep(player)) {
player->setOfflineTrainingSkill(choice);
return;
}
}
} else {
player->sendTextMessage(MESSAGE_EVENT_ADVANCE, "Offline training aborted.");
}
player->setBedItem(nullptr);
} else {
g_events->eventPlayerOnModalWindow(player, modalWindowId, button, choice);
}
}
void Game::addPlayer(Player* player)
{
const std::string& lowercase_name = asLowerCaseString(player->getName());
mappedPlayerNames[lowercase_name] = player;
wildcardTree.insert(lowercase_name);
players[player->getID()] = player;
}
void Game::removePlayer(Player* player)
{
const std::string& lowercase_name = asLowerCaseString(player->getName());
mappedPlayerNames.erase(lowercase_name);
wildcardTree.remove(lowercase_name);
players.erase(player->getID());
}
void Game::addNpc(Npc* npc)
{
npcs[npc->getID()] = npc;
}
void Game::removeNpc(Npc* npc)
{
npcs.erase(npc->getID());
}
void Game::addMonster(Monster* monster)
{
monsters[monster->getID()] = monster;
}
void Game::removeMonster(Monster* monster)
{
monsters.erase(monster->getID());
}
Guild* Game::getGuild(uint32_t id) const
{
auto it = guilds.find(id);
if (it == guilds.end()) {
return nullptr;
}
return it->second;
}
void Game::addGuild(Guild* guild)
{
guilds[guild->getId()] = guild;
}
void Game::decreaseBrowseFieldRef(const Position& pos)
{
Tile* tile = getTile(pos);
if (!tile) {
return;
}
auto it = browseFields.find(tile);
if (it != browseFields.end()) {
it->second->releaseThing2();
}
}
Group* Game::getGroup(uint32_t id)
{
return groups.getGroup(id);
}
void Game::internalRemoveItems(std::vector<Item*> itemList, uint32_t amount, bool stackable)
{
if (stackable) {
for (Item* item : itemList) {
if (item->getItemCount() > amount) {
internalRemoveItem(item, amount);
break;
} else {
amount -= item->getItemCount();
internalRemoveItem(item);
}
}
} else {
for (Item* item : itemList) {
internalRemoveItem(item);
}
}
}
BedItem* Game::getBedBySleeper(uint32_t guid) const
{
auto it = bedSleepersMap.find(guid);
if (it == bedSleepersMap.end()) {
return nullptr;
}
return it->second;
}
void Game::setBedSleeper(BedItem* bed, uint32_t guid)
{
bedSleepersMap[guid] = bed;
}
void Game::removeBedSleeper(uint32_t guid)
{
auto it = bedSleepersMap.find(guid);
if (it != bedSleepersMap.end()) {
bedSleepersMap.erase(it);
}
}<|fim▁end|> | |
<|file_name|>pyramid.cpp<|end_file_name|><|fim▁begin|>/*
contourlet - Implementation of the contourlet transform for image coding
Copyright (C) 2005 Vivien Chappelier - IRISA/University of Rennes 1
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free
Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include "it/mat.h"
#include "it/io.h"
#include "it/distance.h"
#include <stdio.h>
#include <math.h>
/* X = x + d with proper bound checking and mirroring */
#define mirror_add(X, x, d, min, max) do { \<|fim▁hole|> if((X) < (min) || (X) >= (max)) \
(X) = (x) - (d); \
} while(0)
/* split an image into two laplacian pyramid bands */
void laplacian_pyramid_split(mat image, /* input band */
mat low, /* low-frequency band */
mat high, /* high-frequency band */
vec H0, /* symmetric analysis filter */
vec G0) /* symmetric synthesis filter */
{
int x, y;
int w, h;
idx_t dx, dy;
idx_t wf, hf;
idx_t wl, hl;
int px, py;
int X, Y;
w = mat_width(image);
h = mat_height(image);
wl = (w + 1) / 2;
hl = (h + 1) / 2;
assert(mat_width(high) == w);
assert(mat_height(high) == h);
assert(mat_width(low) == wl);
assert(mat_height(low) == hl);
wf = hf = vec_length(H0);
px = wf / 2;
py = hf / 2;
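/* low band: filter with the analysis filter H0 and decimate by two in each direction */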
for(x = 0; x < wl; x++) {
for(y = 0; y < hl; y++) {
/* filter around position x,y */
low[y][x] = 0;
for(dx = -px; dx < wf-px; dx++) {
for(dy = -py; dy < hf-py; dy++) {
/* compute the input sample location */
mirror_add(X, 2*x, dx, 0, w);
mirror_add(Y, 2*y, dy, 0, h);
low[y][x] += H0[dy+py] * H0[dx+px] * image[Y][X];
}
}
}
}
wf = hf = vec_length(G0);
px = wf / 2;
py = hf / 2;
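/* high band: residual left after re-expanding the low band through the synthesis filter G0 */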
for(x = 0; x < w; x++) {
for(y = 0; y < h; y++) {
/* filter around position x,y */
high[y][x] = image[y][x];
for(dx = -px; dx < wf-px; dx++) {
for(dy = -py; dy < hf-py; dy++) {
/* compute the input sample location */
mirror_add(X, x, dx, 0, w);
mirror_add(Y, y, dy, 0, h);
if(!(X & 1) && !(Y & 1))
high[y][x] -= G0[dy+py] * G0[dx+px] * low[Y/2][X/2];
}
}
}
}
}
/* merge two laplacian pyramid bands into an image */
void laplacian_pyramid_merge(mat image, /* output band */
mat low, /* low-frequency band */
mat high, /* high-frequency band */
vec H0, /* symmetric analysis filter */
vec G0) /* symmetric synthesis filter */
{
int x, y;
int w, h;
idx_t dx, dy;
idx_t wf, hf;
idx_t wl, hl;
int px, py;
int X, Y;
w = mat_width(image);
h = mat_height(image);
wl = (w + 1) / 2;
hl = (h + 1) / 2;
assert(mat_width(high) == w);
assert(mat_height(high) == h);
assert(mat_width(low) == wl);
assert(mat_height(low) == hl);
/* use pseudo inverse reconstruction */
/* this assumes the filters are orthogonal */
/* the 9/7 are close enough to orthogonality for this to work quite well */
#define DUAL
#ifdef DUAL
wf = hf = vec_length(H0);
px = wf / 2;
py = hf / 2;
for(x = 0; x < wl; x++) {
for(y = 0; y < hl; y++) {
/* filter around position x,y */
for(dx = -px; dx < wf-px; dx++) {
for(dy = -py; dy < hf-py; dy++) {
/* compute the input sample location */
mirror_add(X, 2*x, dx, 0, w);
mirror_add(Y, 2*y, dy, 0, h);
low[y][x] -= H0[dy+py] * H0[dx+px] * high[Y][X];
}
}
}
}
#endif
wf = hf = vec_length(G0);
px = wf / 2;
py = hf / 2;
for(x = 0; x < w; x++) {
for(y = 0; y < h; y++) {
/* filter around position x,y */
image[y][x] = high[y][x];
for(dx = -px; dx < wf-px; dx++) {
for(dy = -py; dy < hf-py; dy++) {
/* compute the input sample location */
mirror_add(X, x, dx, 0, w);
mirror_add(Y, y, dy, 0, h);
if(!(X & 1) && !(Y & 1))
image[y][x] += G0[dy+py] * G0[dx+px] * low[Y/2][X/2];
}
}
}
}
}<|fim▁end|> | (X) = (x) + (d); \ |
<|file_name|>mirroring.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import flask
import functools
import logging
import requests
from .. import storage
from .. import toolkit
from . import cache
from . import config
DEFAULT_CACHE_TAGS_TTL = 48 * 3600
logger = logging.getLogger(__name__)
def is_mirror():
cfg = config.load()
return bool(cfg.get('mirroring', False))
def _response_headers(base):
headers = {}
if not base:
return headers
for k, v in base.iteritems():
if k.lower() == 'content-encoding':
continue
headers[k.lower()] = v
logger.warn(headers)
return headers
def lookup_source(path, stream=False, source=None):
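"""Forward the current request to the mirror source and return the response, or None if it is not a 200."""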
if not source:
cfg = config.load()
mirroring_cfg = cfg.mirroring
if not mirroring_cfg:
return
source = cfg.mirroring['source']
source_url = '{0}{1}'.format(source, path)
headers = {}
for k, v in flask.request.headers.iteritems():
if k.lower() != 'location' and k.lower() != 'host':
headers[k] = v
logger.debug('Request: GET {0}\nHeaders: {1}'.format(
source_url, headers
))
source_resp = requests.get(
source_url,
headers=headers,
cookies=flask.request.cookies,
stream=stream
)
if source_resp.status_code != 200:
logger.debug('Source responded to request with non-200'
' status')<|fim▁hole|> source_resp.status_code, source_resp.text
))
return None
return source_resp
def source_lookup_tag(f):
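"""Decorator: on a local 404, look up tag data on the mirror source, caching it in Redis when a cache is configured."""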
@functools.wraps(f)
def wrapper(namespace, repository, *args, **kwargs):
cfg = config.load()
mirroring_cfg = cfg.mirroring
resp = f(namespace, repository, *args, **kwargs)
if not mirroring_cfg:
return resp
source = mirroring_cfg['source']
tags_cache_ttl = mirroring_cfg.get('tags_cache_ttl',
DEFAULT_CACHE_TAGS_TTL)
if resp.status_code != 404:
logger.debug('Status code is not 404, no source '
'lookup required')
return resp
if not cache.redis_conn:
# No tags cache available, fall back to a plain source lookup
logger.warning('mirroring: Tags cache is disabled, please set a '
'valid `cache\' directive in the config.')
source_resp = lookup_source(
flask.request.path, stream=False, source=source
)
if not source_resp:
return resp
headers = _response_headers(source_resp.headers)
return toolkit.response(data=source_resp.content, headers=headers,
raw=True)
store = storage.load()
request_path = flask.request.path
if request_path.endswith('/tags'):
# client GETs a list of tags
tag_path = store.tag_path(namespace, repository)
else:
# client GETs a single tag
tag_path = store.tag_path(namespace, repository, kwargs['tag'])
data = cache.redis_conn.get('{0}:{1}'.format(
cache.cache_prefix, tag_path
))
if data is not None:
return toolkit.response(data=data, raw=True)
source_resp = lookup_source(
flask.request.path, stream=False, source=source
)
if not source_resp:
return resp
data = source_resp.content
headers = _response_headers(source_resp.headers)
cache.redis_conn.setex('{0}:{1}'.format(
cache.cache_prefix, tag_path
), tags_cache_ttl, data)
return toolkit.response(data=data, headers=headers,
raw=True)
return wrapper
def source_lookup(cache=False, stream=False, index_route=False):
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
cfg = config.load()
mirroring_cfg = cfg.mirroring
resp = f(*args, **kwargs)
if not mirroring_cfg:
return resp
source = mirroring_cfg['source']
if index_route:
source = mirroring_cfg.get('source_index', source)
logger.debug('Source provided, registry acts as mirror')
if resp.status_code != 404:
logger.debug('Status code is not 404, no source '
'lookup required')
return resp
source_resp = lookup_source(
flask.request.path, stream=stream, source=source
)
if not source_resp:
return resp
store = storage.load()
headers = _response_headers(source_resp.headers)
if index_route and 'x-docker-endpoints' in headers:
headers['x-docker-endpoints'] = toolkit.get_endpoints()
if not stream:
logger.debug('JSON data found on source, writing response')
resp_data = source_resp.content
if cache:
store_mirrored_data(
resp_data, flask.request.url_rule.rule, kwargs,
store
)
return toolkit.response(
data=resp_data,
headers=headers,
raw=True
)
logger.debug('Layer data found on source, preparing to '
'stream response...')
layer_path = store.image_layer_path(kwargs['image_id'])
return _handle_mirrored_layer(source_resp, layer_path, store,
headers)
return wrapper
return decorator
def _handle_mirrored_layer(source_resp, layer_path, store, headers):
sr = toolkit.SocketReader(source_resp)
tmp, hndlr = storage.temp_store_handler()
sr.add_handler(hndlr)
def generate():
for chunk in sr.iterate(store.buffer_size):
yield chunk
# FIXME: this could be done outside of the request context
tmp.seek(0)
store.stream_write(layer_path, tmp)
tmp.close()
return flask.Response(generate(), headers=headers)
def store_mirrored_data(data, endpoint, args, store):
logger.debug('Endpoint: {0}'.format(endpoint))
path_method, arglist = ({
'/v1/images/<image_id>/json': ('image_json_path', ('image_id',)),
'/v1/images/<image_id>/ancestry': (
'image_ancestry_path', ('image_id',)
),
'/v1/repositories/<path:repository>/json': (
'registry_json_path', ('namespace', 'repository')
),
}).get(endpoint, (None, None))
if not path_method:
return
logger.debug('Path method: {0}'.format(path_method))
pm_args = {}
for arg in arglist:
pm_args[arg] = args[arg]
logger.debug('Path method args: {0}'.format(pm_args))
storage_path = getattr(store, path_method)(**pm_args)
logger.debug('Storage path: {0}'.format(storage_path))
store.put_content(storage_path, data)<|fim▁end|> | logger.debug('Response: {0}\n{1}\n'.format( |
<|file_name|>higher.rs<|end_file_name|><|fim▁begin|>//! This module contains functions that retrieve specific elements.
#![deny(clippy::missing_docs_in_private_items)]
use crate::{is_expn_of, match_def_path, paths};
use if_chain::if_chain;
use rustc_ast::ast::{self, LitKind};
use rustc_hir as hir;
use rustc_hir::{Block, BorrowKind, Expr, ExprKind, LoopSource, Node, Pat, StmtKind, UnOp};
use rustc_lint::LateContext;
use rustc_span::{sym, ExpnKind, Span, Symbol};
/// The essential nodes of a desugared for loop as well as the entire span:
/// `for pat in arg { body }` becomes `(pat, arg, body, span)`.
pub struct ForLoop<'tcx> {
pub pat: &'tcx hir::Pat<'tcx>,
pub arg: &'tcx hir::Expr<'tcx>,
pub body: &'tcx hir::Expr<'tcx>,
pub span: Span,
}
impl<'tcx> ForLoop<'tcx> {
#[inline]
pub fn hir(expr: &Expr<'tcx>) -> Option<Self> {
if_chain! {
if let hir::ExprKind::Match(ref iterexpr, ref arms, hir::MatchSource::ForLoopDesugar) = expr.kind;
if let Some(first_arm) = arms.get(0);
if let hir::ExprKind::Call(_, ref iterargs) = iterexpr.kind;
if let Some(first_arg) = iterargs.get(0);
if iterargs.len() == 1 && arms.len() == 1 && first_arm.guard.is_none();
if let hir::ExprKind::Loop(ref block, ..) = first_arm.body.kind;
if block.expr.is_none();
if let [ _, _, ref let_stmt, ref body ] = *block.stmts;
if let hir::StmtKind::Local(ref local) = let_stmt.kind;
if let hir::StmtKind::Expr(ref body_expr) = body.kind;
then {
return Some(Self {
pat: &*local.pat,
arg: first_arg,
body: body_expr,
span: first_arm.span
});
}
}
None
}
}
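/// An `if` expression destructured into its condition, then-branch and optional else-branch.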
pub struct If<'hir> {
pub cond: &'hir Expr<'hir>,
pub r#else: Option<&'hir Expr<'hir>>,
pub then: &'hir Expr<'hir>,
}
impl<'hir> If<'hir> {
#[inline]
pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
if let ExprKind::If(
Expr {
kind: ExprKind::DropTemps(cond),
..
},
then,
r#else,
) = expr.kind
{
Some(Self { cond, r#else, then })
} else {
None
}
}
}
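/// An `if let` expression destructured into its pattern, scrutinee and branches.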
pub struct IfLet<'hir> {
pub let_pat: &'hir Pat<'hir>,
pub let_expr: &'hir Expr<'hir>,
pub if_then: &'hir Expr<'hir>,
pub if_else: Option<&'hir Expr<'hir>>,
}
impl<'hir> IfLet<'hir> {
pub fn hir(cx: &LateContext<'_>, expr: &Expr<'hir>) -> Option<Self> {
if let ExprKind::If(
Expr {
kind: ExprKind::Let(let_pat, let_expr, _),
..
},
if_then,
if_else,
) = expr.kind
{
let hir = cx.tcx.hir();
let mut iter = hir.parent_iter(expr.hir_id);
if let Some((_, Node::Block(Block { stmts: [], .. }))) = iter.next() {
if let Some((_, Node::Expr(Expr { kind: ExprKind::Loop(_, _, LoopSource::While, _), .. }))) = iter.next() {
// while loop desugar
return None;
}
}
return Some(Self {
let_pat,
let_expr,
if_then,
if_else,
});
}
None
}
}
pub struct IfOrIfLet<'hir> {
pub cond: &'hir Expr<'hir>,
pub r#else: Option<&'hir Expr<'hir>>,
pub then: &'hir Expr<'hir>,
}
impl<'hir> IfOrIfLet<'hir> {
#[inline]
pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
if let ExprKind::If(cond, then, r#else) = expr.kind {
if let ExprKind::DropTemps(new_cond) = cond.kind {
return Some(Self {
cond: new_cond,
r#else,
then,
});
}
if let ExprKind::Let(..) = cond.kind {
return Some(Self { cond, r#else, then });
}
}
None
}
}
/// Represent a range akin to `ast::ExprKind::Range`.
#[derive(Debug, Copy, Clone)]
pub struct Range<'a> {
/// The lower bound of the range, or `None` for ranges such as `..X`.
pub start: Option<&'a hir::Expr<'a>>,
/// The upper bound of the range, or `None` for ranges such as `X..`.
pub end: Option<&'a hir::Expr<'a>>,
/// Whether the interval is open or closed.
pub limits: ast::RangeLimits,
}
impl<'a> Range<'a> {
/// Higher a `hir` range to something similar to `ast::ExprKind::Range`.
pub fn hir(expr: &'a hir::Expr<'_>) -> Option<Range<'a>> {
/// Finds the field named `name` among `fields`, returning `None` if it is
/// not present.
fn get_field<'c>(name: &str, fields: &'c [hir::ExprField<'_>]) -> Option<&'c hir::Expr<'c>> {
let expr = &fields.iter().find(|field| field.ident.name.as_str() == name)?.expr;
Some(expr)
}
match expr.kind {
hir::ExprKind::Call(ref path, ref args)
if matches!(
path.kind,
hir::ExprKind::Path(hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, _))
) =>
{
Some(Range {
start: Some(&args[0]),
end: Some(&args[1]),
limits: ast::RangeLimits::Closed,
})
},
hir::ExprKind::Struct(ref path, ref fields, None) => match path {
hir::QPath::LangItem(hir::LangItem::RangeFull, _) => Some(Range {
start: None,
end: None,
limits: ast::RangeLimits::HalfOpen,
}),
hir::QPath::LangItem(hir::LangItem::RangeFrom, _) => Some(Range {
start: Some(get_field("start", fields)?),
end: None,
limits: ast::RangeLimits::HalfOpen,
}),
hir::QPath::LangItem(hir::LangItem::Range, _) => Some(Range {
start: Some(get_field("start", fields)?),
end: Some(get_field("end", fields)?),
limits: ast::RangeLimits::HalfOpen,
}),
hir::QPath::LangItem(hir::LangItem::RangeToInclusive, _) => Some(Range {
start: None,
end: Some(get_field("end", fields)?),
limits: ast::RangeLimits::Closed,
}),
hir::QPath::LangItem(hir::LangItem::RangeTo, _) => Some(Range {
start: None,
end: Some(get_field("end", fields)?),
limits: ast::RangeLimits::HalfOpen,
}),
_ => None,
},
_ => None,
}
}
}
/// Represent the pre-expansion arguments of a `vec!` invocation.
pub enum VecArgs<'a> {
/// `vec![elem; len]`
Repeat(&'a hir::Expr<'a>, &'a hir::Expr<'a>),
/// `vec![a, b, c]`
Vec(&'a [hir::Expr<'a>]),
}
impl<'a> VecArgs<'a> {
/// Returns the arguments of the `vec!` macro if this expression was expanded
/// from `vec!`.
pub fn hir(cx: &LateContext<'_>, expr: &'a hir::Expr<'_>) -> Option<VecArgs<'a>> {
if_chain! {
if let hir::ExprKind::Call(ref fun, ref args) = expr.kind;
if let hir::ExprKind::Path(ref qpath) = fun.kind;<|fim▁hole|> // `vec![elem; size]` case
Some(VecArgs::Repeat(&args[0], &args[1]))
}
else if match_def_path(cx, fun_def_id, &paths::SLICE_INTO_VEC) && args.len() == 1 {
// `vec![a, b, c]` case
if_chain! {
if let hir::ExprKind::Box(ref boxed) = args[0].kind;
if let hir::ExprKind::Array(ref args) = boxed.kind;
then {
return Some(VecArgs::Vec(&*args));
}
}
None
}
else if match_def_path(cx, fun_def_id, &paths::VEC_NEW) && args.is_empty() {
Some(VecArgs::Vec(&[]))
}
else {
None
};
}
}
None
}
}
pub struct While<'hir> {
pub if_cond: &'hir Expr<'hir>,
pub if_then: &'hir Expr<'hir>,
pub if_else: Option<&'hir Expr<'hir>>,
}
impl<'hir> While<'hir> {
#[inline]
pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
if let ExprKind::Loop(
Block {
expr:
Some(Expr {
kind:
ExprKind::If(
Expr {
kind: ExprKind::DropTemps(if_cond),
..
},
if_then,
if_else_ref,
),
..
}),
..
},
_,
LoopSource::While,
_,
) = expr.kind
{
let if_else = *if_else_ref;
return Some(Self {
if_cond,
if_then,
if_else,
});
}
None
}
}
pub struct WhileLet<'hir> {
pub if_expr: &'hir Expr<'hir>,
pub let_pat: &'hir Pat<'hir>,
pub let_expr: &'hir Expr<'hir>,
pub if_then: &'hir Expr<'hir>,
pub if_else: Option<&'hir Expr<'hir>>,
}
impl<'hir> WhileLet<'hir> {
#[inline]
pub const fn hir(expr: &Expr<'hir>) -> Option<Self> {
if let ExprKind::Loop(
Block {
expr: Some(if_expr), ..
},
_,
LoopSource::While,
_,
) = expr.kind
{
if let Expr {
kind:
ExprKind::If(
Expr {
kind: ExprKind::Let(let_pat, let_expr, _),
..
},
if_then,
if_else_ref,
),
..
} = if_expr
{
let if_else = *if_else_ref;
return Some(Self {
if_expr,
let_pat,
let_expr,
if_then,
if_else,
});
}
}
None
}
}
/// Converts a hir binary operator to the corresponding `ast` type.
#[must_use]
pub fn binop(op: hir::BinOpKind) -> ast::BinOpKind {
match op {
hir::BinOpKind::Eq => ast::BinOpKind::Eq,
hir::BinOpKind::Ge => ast::BinOpKind::Ge,
hir::BinOpKind::Gt => ast::BinOpKind::Gt,
hir::BinOpKind::Le => ast::BinOpKind::Le,
hir::BinOpKind::Lt => ast::BinOpKind::Lt,
hir::BinOpKind::Ne => ast::BinOpKind::Ne,
hir::BinOpKind::Or => ast::BinOpKind::Or,
hir::BinOpKind::Add => ast::BinOpKind::Add,
hir::BinOpKind::And => ast::BinOpKind::And,
hir::BinOpKind::BitAnd => ast::BinOpKind::BitAnd,
hir::BinOpKind::BitOr => ast::BinOpKind::BitOr,
hir::BinOpKind::BitXor => ast::BinOpKind::BitXor,
hir::BinOpKind::Div => ast::BinOpKind::Div,
hir::BinOpKind::Mul => ast::BinOpKind::Mul,
hir::BinOpKind::Rem => ast::BinOpKind::Rem,
hir::BinOpKind::Shl => ast::BinOpKind::Shl,
hir::BinOpKind::Shr => ast::BinOpKind::Shr,
hir::BinOpKind::Sub => ast::BinOpKind::Sub,
}
}
/// Extract args from an assert-like macro.
/// Currently working with:
/// - `assert!`, `assert_eq!` and `assert_ne!`
/// - `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!`
/// For example:
/// `assert!(expr)` will return `Some([expr])`
/// `debug_assert_eq!(a, b)` will return `Some([a, b])`
pub fn extract_assert_macro_args<'tcx>(e: &'tcx Expr<'tcx>) -> Option<Vec<&'tcx Expr<'tcx>>> {
/// Try to match the AST for a pattern that contains a match, for example when two args are
/// compared
fn ast_matchblock(matchblock_expr: &'tcx Expr<'tcx>) -> Option<Vec<&Expr<'_>>> {
if_chain! {
if let ExprKind::Match(headerexpr, _, _) = &matchblock_expr.kind;
if let ExprKind::Tup([lhs, rhs]) = &headerexpr.kind;
if let ExprKind::AddrOf(BorrowKind::Ref, _, lhs) = lhs.kind;
if let ExprKind::AddrOf(BorrowKind::Ref, _, rhs) = rhs.kind;
then {
return Some(vec![lhs, rhs]);
}
}
None
}
if let ExprKind::Block(block, _) = e.kind {
if block.stmts.len() == 1 {
if let StmtKind::Semi(matchexpr) = block.stmts.get(0)?.kind {
// macros with unique arg: `{debug_}assert!` (e.g., `debug_assert!(some_condition)`)
if_chain! {
if let Some(If { cond, .. }) = If::hir(matchexpr);
if let ExprKind::Unary(UnOp::Not, condition) = cond.kind;
then {
return Some(vec![condition]);
}
}
// debug macros with two args: `debug_assert_{ne, eq}` (e.g., `assert_ne!(a, b)`)
if_chain! {
if let ExprKind::Block(matchblock,_) = matchexpr.kind;
if let Some(matchblock_expr) = matchblock.expr;
then {
return ast_matchblock(matchblock_expr);
}
}
}
} else if let Some(matchblock_expr) = block.expr {
// macros with two args: `assert_{ne, eq}` (e.g., `assert_ne!(a, b)`)
return ast_matchblock(matchblock_expr);
}
}
None
}
/// A parsed `format!` expansion
pub struct FormatExpn<'tcx> {
/// Span of `format!(..)`
pub call_site: Span,
/// Inner `format_args!` expansion
pub format_args: FormatArgsExpn<'tcx>,
}
impl FormatExpn<'tcx> {
/// Parses an expanded `format!` invocation
pub fn parse(expr: &'tcx Expr<'tcx>) -> Option<Self> {
if_chain! {
if let ExprKind::Block(block, _) = expr.kind;
if let [stmt] = block.stmts;
if let StmtKind::Local(local) = stmt.kind;
if let Some(init) = local.init;
if let ExprKind::Call(_, [format_args]) = init.kind;
let expn_data = expr.span.ctxt().outer_expn_data();
if let ExpnKind::Macro(_, sym::format) = expn_data.kind;
if let Some(format_args) = FormatArgsExpn::parse(format_args);
then {
Some(FormatExpn {
call_site: expn_data.call_site,
format_args,
})
} else {
None
}
}
}
}
/// A parsed `format_args!` expansion
pub struct FormatArgsExpn<'tcx> {
/// Span of the first argument, the format string
pub format_string_span: Span,
/// Values passed after the format string
pub value_args: Vec<&'tcx Expr<'tcx>>,
/// String literal expressions which represent the format string split by "{}"
pub format_string_parts: &'tcx [Expr<'tcx>],
/// Symbols corresponding to [`Self::format_string_parts`]
pub format_string_symbols: Vec<Symbol>,
/// Expressions like `ArgumentV1::new(arg0, Debug::fmt)`
pub args: &'tcx [Expr<'tcx>],
/// The final argument passed to `Arguments::new_v1_formatted`, if applicable
pub fmt_expr: Option<&'tcx Expr<'tcx>>,
}
impl FormatArgsExpn<'tcx> {
/// Parses an expanded `format_args!` or `format_args_nl!` invocation
pub fn parse(expr: &'tcx Expr<'tcx>) -> Option<Self> {
if_chain! {
if let ExpnKind::Macro(_, name) = expr.span.ctxt().outer_expn_data().kind;
let name = name.as_str();
if name.ends_with("format_args") || name.ends_with("format_args_nl");
if let ExprKind::Match(inner_match, [arm], _) = expr.kind;
// `match match`, if you will
if let ExprKind::Match(args, [inner_arm], _) = inner_match.kind;
if let ExprKind::Tup(value_args) = args.kind;
if let Some(value_args) = value_args
.iter()
.map(|e| match e.kind {
ExprKind::AddrOf(_, _, e) => Some(e),
_ => None,
})
.collect();
if let ExprKind::Array(args) = inner_arm.body.kind;
if let ExprKind::Block(Block { stmts: [], expr: Some(expr), .. }, _) = arm.body.kind;
if let ExprKind::Call(_, call_args) = expr.kind;
if let Some((strs_ref, fmt_expr)) = match call_args {
// Arguments::new_v1
[strs_ref, _] => Some((strs_ref, None)),
// Arguments::new_v1_formatted
[strs_ref, _, fmt_expr] => Some((strs_ref, Some(fmt_expr))),
_ => None,
};
if let ExprKind::AddrOf(BorrowKind::Ref, _, strs_arr) = strs_ref.kind;
if let ExprKind::Array(format_string_parts) = strs_arr.kind;
if let Some(format_string_symbols) = format_string_parts
.iter()
.map(|e| {
if let ExprKind::Lit(lit) = &e.kind {
if let LitKind::Str(symbol, _style) = lit.node {
return Some(symbol);
}
}
None
})
.collect();
then {
Some(FormatArgsExpn {
format_string_span: strs_ref.span,
value_args,
format_string_parts,
format_string_symbols,
args,
fmt_expr,
})
} else {
None
}
}
}
}
/// Checks if a `let` statement is from a `for` loop desugaring.
pub fn is_from_for_desugar(local: &hir::Local<'_>) -> bool {
// This will detect plain for-loops without an actual variable binding:
//
// ```
// for x in some_vec {
// // do stuff
// }
// ```
if_chain! {
if let Some(ref expr) = local.init;
if let hir::ExprKind::Match(_, _, hir::MatchSource::ForLoopDesugar) = expr.kind;
then {
return true;
}
}
// This detects a variable binding in for loop to avoid `let_unit_value`
// lint (see issue #1964).
//
// ```
// for _ in vec![()] {
// // anything
// }
// ```
if let hir::LocalSource::ForLoopDesugar = local.source {
return true;
}
false
}<|fim▁end|> | if is_expn_of(fun.span, "vec").is_some();
if let Some(fun_def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
then {
return if match_def_path(cx, fun_def_id, &paths::VEC_FROM_ELEM) && args.len() == 2 { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>namespace adoneTests.text {
const {
text
} = adone;
let str: string;
let regExp: RegExp;
let bool: boolean;
let num: number;
namespace escape {
const {
escape
} = text;
str = escape.regExpPattern("asd");
str = escape.regExpReplacement("asd");
str = escape.format("asd");
str = escape.shellArg("asd");
str = escape.control("sad");<|fim▁hole|> const {
regexp
} = text;
str = regexp.array2alternatives(["a"]);
}
str = text.escapeStringRegexp("asd");
str = text.toCamelCase("asd");
str = text.camelCaseToDashed("asd");
regExp = text.endLineRegExp;
str = text.splitLines("a")[0];
str = text.regExpIndexOf("a", /ads/);
str = text.regExpIndexOf("a", /ads/, 1);
str = text.stripAnsi("ad");
bool = text.hasAnsi("asd");
str = text.random(100);
str = text.detectNewLine("asd");
str = text.wordwrap("ads", 10)[0];
str = text.wordwrap("ads", 10, { join: true });
str = text.wordwrap("ads", 10, { countAnsiEscapeCodes: true });
str = text.wordwrap("ads", 10, { mode: "hard" });
num = text.stringDistance("as", "ds");
num = text.stringDistance("as", "ds", [[1]]);
num = text.stringDistanceCapped("a", "b", 10);
str = text.capitalize("asd");
str = text.capitalizeWords("asd");
num = text.width("asd");
str = text.indent("asd", 10);
str = text.stripEof("asd");
str = text.stripLastCRLF("asd");
str = text.stripBOM("ads");
num = text.toUTF8Array("asd")[0];
}<|fim▁end|> | str = escape.htmlSpecialChars("asd");
}
namespace regexp { |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.forms.models import modelformset_factory
from django.forms.formsets import formset_factory
from formsettesthelpers import *
from formsettesthelpers.test_app.forms import (<|fim▁hole|> UserFormSet,
PersonFormSet,
PersonForm,
)
class UsageTest(TestCase):
def test_demonstration(self):
from django.forms.models import modelformset_factory
# The following formset is something one could use in a view.
FormSet = modelformset_factory(User, fields=('username', 'email'))
# To test such view, we'd need to generate a formset data dict
# to POST to that view.
formset_helper = ModelFormSetHelper(FormSet)
data = formset_helper.generate([
{'username': 'admin', 'email': '[email protected]'},
{'username': 'user1', 'email': '[email protected]'},
], total_forms=2)
# `data` now contains the formset data, something like
# """{u'form-INITIAL_FORMS': 0, u'form-MAX_NUM_FORMS': 1000,
# u'form-1-username': 'user1', u'form-1-email':
# '[email protected]',...}"""
self.assertEquals(data['form-1-username'], 'user1')
# The `test_app` application just happens to have such view, so lets
# use that.
self.client.post(reverse('modelformset'), data)
self.assertEquals(User.objects.count(), 2)
self.assertEquals(User.objects.get(username='admin').email,
'[email protected]')
self.assertEquals(User.objects.get(username='user1').email,
'[email protected]')
class BasicFormsetTestSkeleton(object):
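"""Shared test body for formset helpers; subclasses provide the helper class, formset classes, sample data and view name."""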
def setUp(self):
self.fh = self.helper_class(self.formset_class)
def test_valid(self):
data = self.fh.generate(self.two_forms_data, total_forms=2)
response = self.client.post(reverse(self.view_name), data)
self.assertEquals(response.content, 'Is valid')
def test_to_dict(self):
data = self.fh.generate(self.single_list_data, total_forms=1)
response = self.client.post(reverse(self.view_name), data)
self.assertEquals(response.content, 'Is valid')
def test_prefixed(self):
fh = self.helper_class(self.formset_class, prefix='humans')
data = fh.generate(self.two_forms_data, total_forms=2)
response = self.client.post(
reverse('prefixed_%s' % self.view_name), data)
self.assertEquals(response.content, 'Is valid')
def test_extra_is_zero(self):
fh = self.helper_class(self.formset_class_zero_extra)
data = fh.generate(self.two_forms_data, total_forms=2)
response = self.client.post(reverse(self.view_name), data)
self.assertEquals(response.content, 'Is valid')
class TestModelFormSet(BasicFormsetTestSkeleton, TestCase):
helper_class = ModelFormSetHelper
formset_class = UserFormSet
formset_class_zero_extra = modelformset_factory(
User, fields=('username', 'email', 'is_staff'), extra=0)
two_forms_data = [
{'username': 'user1', 'email': '[email protected]'},
{'username': 'user2', 'email': '[email protected]'},
]
single_list_data = [['justin', '[email protected]']]
view_name = 'modelformset'
def test_valid(self):
super(TestModelFormSet, self).test_valid()
self.assertEquals(User.objects.count(), 2)
def test_to_dict(self):
super(TestModelFormSet, self).test_to_dict()
self.assertEquals(User.objects.count(), 1)
def test_prefixed(self):
super(TestModelFormSet, self).test_prefixed()
self.assertEquals(User.objects.count(), 2)
def test_extra_is_zero(self):
super(TestModelFormSet, self).test_extra_is_zero()
self.assertEquals(User.objects.count(), 2)
class TestFormSet(BasicFormsetTestSkeleton, TestCase):
helper_class = FormSetHelper
formset_class = PersonFormSet
formset_class_zero_extra = formset_factory(PersonForm, extra=0)
two_forms_data = [
{'name': 'Janelle', 'slug': 'j1', 'age': 24},
{'name': 'Joe', 'slug': 'j2', 'age': 25},
]
single_list_data = [['Max', 'max', 42]]
view_name = 'formset'<|fim▁end|> | |
<|file_name|>hello.js<|end_file_name|><|fim▁begin|>var task = function(request, callback, configuration){
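// Returns the template name and render parameters through the callback; note that the S3 client created below is never used.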
var template = "helloParamterized.ejs";
var AWS = configuration.aws;
var S3 = new AWS.S3();
callback(null, {template: template, params:{info:"Hello World from code!"}});<|fim▁hole|>exports.action = task;<|fim▁end|> | }
|
<|file_name|>GlobalObjects.go<|end_file_name|><|fim▁begin|>package config
import (
"github.com/griesbacher/nagflux/data"
"sync"
)
//PauseMap is a map to store if a target requested pause or not
type PauseMap map[data.Target]bool
//pauseNagflux tracks, per target, whether that target has requested Nagflux to pause
var pauseNagflux = PauseMap{}
var objMutex = &sync.Mutex{}
//IsAnyTargetOnPause will return true if any target requested pause, false otherwise
func IsAnyTargetOnPause() bool {
objMutex.Lock()
result := false
for _, v := range pauseNagflux {
if v {<|fim▁hole|> break
}
}
objMutex.Unlock()
return result
}
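//StoreValue records whether the given target has requested a pause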
func StoreValue(target data.Target, value bool) {
objMutex.Lock()
pauseNagflux[target] = value
objMutex.Unlock()
}<|fim▁end|> | result = true |
<|file_name|>heroes.routes.ts<|end_file_name|><|fim▁begin|>import { RouterConfig } from '@angular/router';
import { HeroesComponent } from '../components/hero/heroes.component';<|fim▁hole|>import { HeroDetailComponent } from "../components/hero/./hero-detail.component";
import { NewHeroDetailComponent } from "../components/hero/./new-hero-detail.component";
export const HeroesRoutes: RouterConfig = [
{path: 'heroes', component: HeroesComponent},
{path: 'hero/:id', component: HeroDetailComponent},
{path: 'newHero', component: NewHeroDetailComponent}
];<|fim▁end|> | |
<|file_name|>historystore.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::ops::Deref;
use std::path::PathBuf;
use anyhow::Result;
use edenapi_types::HistoryEntry;
use types::Key;
use types::NodeInfo;
use crate::localstore::LocalStore;
use crate::types::StoreKey;
pub trait HgIdHistoryStore: LocalStore + Send + Sync {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>>;
fn refresh(&self) -> Result<()>;
}
pub trait HgIdMutableHistoryStore: HgIdHistoryStore + Send + Sync {
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()>;
fn flush(&self) -> Result<Option<Vec<PathBuf>>>;
fn add_entry(&self, entry: &HistoryEntry) -> Result<()> {
self.add(&entry.key, &entry.nodeinfo)
}
}
/// The `RemoteHistoryStore` trait indicates that data can fetched over the network. Care must be
/// taken to avoid serially fetching data and instead data should be fetched in bulk via the<|fim▁hole|> /// When implemented on a pure remote store, like the `EdenApi`, the method will always fetch
/// everything that was asked. On a higher level store, such as the `MetadataStore`, this will
/// avoid fetching data that is already present locally.
fn prefetch(&self, keys: &[StoreKey]) -> Result<()>;
}
/// Implement `HgIdHistoryStore` for all types that can be `Deref` into a `HgIdHistoryStore`.
impl<T: HgIdHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> HgIdHistoryStore for U {
fn get_node_info(&self, key: &Key) -> Result<Option<NodeInfo>> {
T::get_node_info(self, key)
}
fn refresh(&self) -> Result<()> {
T::refresh(self)
}
}
impl<T: HgIdMutableHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync>
HgIdMutableHistoryStore for U
{
fn add(&self, key: &Key, info: &NodeInfo) -> Result<()> {
T::add(self, key, info)
}
fn flush(&self) -> Result<Option<Vec<PathBuf>>> {
T::flush(self)
}
}
impl<T: RemoteHistoryStore + ?Sized, U: Deref<Target = T> + Send + Sync> RemoteHistoryStore for U {
fn prefetch(&self, keys: &[StoreKey]) -> Result<()> {
T::prefetch(self, keys)
}
}<|fim▁end|> | /// `prefetch` API.
pub trait RemoteHistoryStore: HgIdHistoryStore + Send + Sync {
/// Attempt to bring the data corresponding to the passed in keys to a local store.
/// |
<|file_name|>limits.py<|end_file_name|><|fim▁begin|># Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at<|fim▁hole|># Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from nova.openstack.common import timeutils
class ViewBuilder(object):
"""OpenStack API base limits view builder."""
def build(self, rate_limits, absolute_limits):
rate_limits = self._build_rate_limits(rate_limits)
absolute_limits = self._build_absolute_limits(absolute_limits)
output = {
"limits": {
"rate": rate_limits,
"absolute": absolute_limits,
},
}
return output
def _build_absolute_limits(self, absolute_limits):
"""Builder for absolute limits
absolute_limits should be given as a dict of limits.
For example: {"ram": 512, "gigabytes": 1024}.
"""
limit_names = {
"ram": ["maxTotalRAMSize"],
"instances": ["maxTotalInstances"],
"cores": ["maxTotalCores"],
"key_pairs": ["maxTotalKeypairs"],
"floating_ips": ["maxTotalFloatingIps"],
"metadata_items": ["maxServerMeta", "maxImageMeta"],
"injected_files": ["maxPersonality"],
"injected_file_content_bytes": ["maxPersonalitySize"],
"security_groups": ["maxSecurityGroups"],
"security_group_rules": ["maxSecurityGroupRules"],
}
limits = {}
for name, value in absolute_limits.iteritems():
if name in limit_names and value is not None:
for name in limit_names[name]:
limits[name] = value
return limits
def _build_rate_limits(self, rate_limits):
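"""Group rate limits by (uri, regex), collecting each verb's limit under the matching entry."""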
limits = []
for rate_limit in rate_limits:
_rate_limit_key = None
_rate_limit = self._build_rate_limit(rate_limit)
# check for existing key
for limit in limits:
if (limit["uri"] == rate_limit["URI"] and
limit["regex"] == rate_limit["regex"]):
_rate_limit_key = limit
break
# ensure we have a key if we didn't find one
if not _rate_limit_key:
_rate_limit_key = {
"uri": rate_limit["URI"],
"regex": rate_limit["regex"],
"limit": [],
}
limits.append(_rate_limit_key)
_rate_limit_key["limit"].append(_rate_limit)
return limits
def _build_rate_limit(self, rate_limit):
_get_utc = datetime.datetime.utcfromtimestamp
next_avail = _get_utc(rate_limit["resetTime"])
return {
"verb": rate_limit["verb"],
"value": rate_limit["value"],
"remaining": int(rate_limit["remaining"]),
"unit": rate_limit["unit"],
"next-available": timeutils.isotime(at=next_avail),
}
class ViewBuilderV3(ViewBuilder):
def build(self, rate_limits):
rate_limits = self._build_rate_limits(rate_limits)
return {"limits": {"rate": rate_limits}}<|fim▁end|> | #
# http://www.apache.org/licenses/LICENSE-2.0
# |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
class DonateAppConfig(AppConfig):
name = 'readthedocs.donate'<|fim▁hole|> def ready(self):
import readthedocs.donate.signals # noqa<|fim▁end|> | verbose_name = 'Donate'
|
<|file_name|>unique-object-move.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #5192
<|fim▁hole|>
#![allow(unknown_features)]
#![feature(box_syntax)]
pub trait EventLoop { fn foo(&self) {} }
pub struct UvEventLoop {
uvio: isize
}
impl EventLoop for UvEventLoop { }
pub fn main() {
let loop_: Box<EventLoop> = box UvEventLoop { uvio: 0 } as Box<EventLoop>;
let _loop2_ = loop_;
}<|fim▁end|> | // pretty-expanded FIXME #23616 |
<|file_name|>Hasher.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the LICENSE file in
* the root directory of this source tree.
*
* @flow
*/
/**
* Get a hash for the provider object. Hashes are unique per-hasher, so if you have two different
* hashers, there is no guarantee that they will give the same hash for the same object.
*
* One use case for this is with lists of React elements. Just create a hasher and use the hash as
* a key:
*
* class MyComponent extends React.Component {
* constructor(props) {
* super(props);
* this._hasher = new Hasher();
* }
* render() {
* return this.props.items.map(item => (
* <ItemView key={this._hasher.getHash(item)} model={item} />
* ));
* }
* }
*/
export default class Hasher<K> {
_hashes: WeakMap<K, string>;
_objectCount: number;
constructor() {
this._hashes = new WeakMap();
this._objectCount = 0;
}
getHash(item: K): string | number {
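// Objects get a stable per-hasher id; primitives hash to a type-prefixed string (plain numbers are returned as-is).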
if (item === null) {
return 'null';
}
const type = typeof item;
switch (typeof item) {
case 'object': {
let hash = this._hashes.get(item);
if (hash == null) {
hash = `${type}:${this._objectCount}`;
this._hashes.set(item, hash);
this._objectCount = this._objectCount + 1 === Number.MAX_SAFE_INTEGER
? Number.MIN_SAFE_INTEGER
: this._objectCount + 1;
}
return hash;
}
case 'undefined':
return 'undefined';
case 'string':
case 'boolean':
return `${type}:${item.toString()}`;<|fim▁hole|> }
}
}<|fim▁end|> | case 'number':
return item;
default:
throw new Error('Unhashable object'); |
<|file_name|>TemporarySummon.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2011 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/><|fim▁hole|> *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "Log.h"
#include "ObjectAccessor.h"
#include "CreatureAI.h"
#include "ObjectMgr.h"
#include "TemporarySummon.h"
TempSummon::TempSummon(SummonPropertiesEntry const *properties, Unit *owner) :
Creature(), m_Properties(properties), m_type(TEMPSUMMON_MANUAL_DESPAWN),
m_timer(0), m_lifetime(0)
{
m_summonerGUID = owner ? owner->GetGUID() : 0;
m_unitTypeMask |= UNIT_MASK_SUMMON;
}
Unit* TempSummon::GetSummoner() const
{
return m_summonerGUID ? ObjectAccessor::GetUnit(*this, m_summonerGUID) : NULL;
}
void TempSummon::Update(uint32 diff)
{
Creature::Update(diff);
if (m_deathState == DEAD)
{
UnSummon();
return;
}
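// Despawn behaviour depends on the summon type chosen when the creature was created.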
switch(m_type)
{
case TEMPSUMMON_MANUAL_DESPAWN:
break;
case TEMPSUMMON_TIMED_DESPAWN:
{
if (m_timer <= diff)
{
UnSummon();
return;
}
m_timer -= diff;
break;
}
case TEMPSUMMON_TIMED_DESPAWN_OUT_OF_COMBAT:
{
if (!isInCombat())
{
if (m_timer <= diff)
{
UnSummon();
return;
}
m_timer -= diff;
}
else if (m_timer != m_lifetime)
m_timer = m_lifetime;
break;
}
case TEMPSUMMON_CORPSE_TIMED_DESPAWN:
{
if (m_deathState == CORPSE)
{
if (m_timer <= diff)
{
UnSummon();
return;
}
m_timer -= diff;
}
break;
}
case TEMPSUMMON_CORPSE_DESPAWN:
{
// if m_deathState is DEAD, CORPSE was skipped
if (m_deathState == CORPSE || m_deathState == DEAD)
{
UnSummon();
return;
}
break;
}
case TEMPSUMMON_DEAD_DESPAWN:
{
if (m_deathState == DEAD)
{
UnSummon();
return;
}
break;
}
case TEMPSUMMON_TIMED_OR_CORPSE_DESPAWN:
{
// if m_deathState is DEAD, CORPSE was skipped
if (m_deathState == CORPSE || m_deathState == DEAD)
{
UnSummon();
return;
}
if (!isInCombat())
{
if (m_timer <= diff)
{
UnSummon();
return;
}
else
m_timer -= diff;
}
else if (m_timer != m_lifetime)
m_timer = m_lifetime;
break;
}
case TEMPSUMMON_TIMED_OR_DEAD_DESPAWN:
{
// if m_deathState is DEAD, CORPSE was skipped
if (m_deathState == DEAD)
{
UnSummon();
return;
}
if (!isInCombat() && isAlive())
{
if (m_timer <= diff)
{
UnSummon();
return;
}
else
m_timer -= diff;
}
else if (m_timer != m_lifetime)
m_timer = m_lifetime;
break;
}
default:
UnSummon();
sLog->outError("Temporary summoned creature (entry: %u) has unknown despawn type %u", GetEntry(), m_type);
break;
}
}
void TempSummon::InitStats(uint32 duration)
{
ASSERT(!isPet());
m_timer = duration;
m_lifetime = duration;
if (m_type == TEMPSUMMON_MANUAL_DESPAWN)
m_type = (duration == 0) ? TEMPSUMMON_DEAD_DESPAWN : TEMPSUMMON_TIMED_DESPAWN;
Unit *owner = GetSummoner();
if (owner && isTrigger() && m_spells[0])
{
setFaction(owner->getFaction());
SetLevel(owner->getLevel());
if (owner->GetTypeId() == TYPEID_PLAYER)
m_ControlledByPlayer = true;
}
if (!m_Properties)
return;
if (owner)
{
if (uint32 slot = m_Properties->Slot)
{
if (owner->m_SummonSlot[slot] && owner->m_SummonSlot[slot] != GetGUID())
{
Creature *oldSummon = GetMap()->GetCreature(owner->m_SummonSlot[slot]);
if (oldSummon && oldSummon->isSummon())
oldSummon->ToTempSummon()->UnSummon();
}
owner->m_SummonSlot[slot] = GetGUID();
}
}
if (m_Properties->Faction)
setFaction(m_Properties->Faction);
else if (IsVehicle()) // properties should be vehicle
setFaction(owner->getFaction());
}
void TempSummon::InitSummon()
{
Unit* owner = GetSummoner();
if (owner)
{
if (owner->GetTypeId() == TYPEID_UNIT && owner->ToCreature()->IsAIEnabled)
owner->ToCreature()->AI()->JustSummoned(this);
if (IsAIEnabled)
AI()->IsSummonedBy(owner);
}
}
void TempSummon::SetTempSummonType(TempSummonType type)
{
m_type = type;
}
void TempSummon::UnSummon(uint32 msTime)
{
if (msTime)
{
ForcedUnsummonDelayEvent *pEvent = new ForcedUnsummonDelayEvent(*this);
m_Events.AddEvent(pEvent, m_Events.CalculateTime(msTime));
return;
}
//ASSERT(!isPet());
if (isPet())
{
((Pet*)this)->Remove(PET_SAVE_NOT_IN_SLOT);
ASSERT(!IsInWorld());
return;
}
Unit* owner = GetSummoner();
if (owner && owner->GetTypeId() == TYPEID_UNIT && owner->ToCreature()->IsAIEnabled)
owner->ToCreature()->AI()->SummonedCreatureDespawn(this);
if (owner &&
owner->GetTypeId() == TYPEID_PLAYER &&
((Player*)owner)->HaveBot() &&
((Player*)owner)->GetBot()->GetGUID() == this->GetGUID() &&
this->isDead()) { // don't unsummon the corpse if it belongs to a bot
return;
}
AddObjectToRemoveList();
}
bool ForcedUnsummonDelayEvent::Execute(uint64 /*e_time*/, uint32 /*p_time*/)
{
m_owner.UnSummon();
return true;
}
void TempSummon::RemoveFromWorld()
{
if (!IsInWorld())
return;
if (m_Properties)
if (uint32 slot = m_Properties->Slot)
if (Unit* owner = GetSummoner())
if (owner->m_SummonSlot[slot] == GetGUID())
owner->m_SummonSlot[slot] = 0;
//if (GetOwnerGUID())
// sLog->outError("Unit %u has owner guid when removed from world", GetEntry());
Creature::RemoveFromWorld();
}
Minion::Minion(SummonPropertiesEntry const *properties, Unit *owner) : TempSummon(properties, owner)
, m_owner(owner)
{
ASSERT(m_owner);
m_unitTypeMask |= UNIT_MASK_MINION;
m_followAngle = PET_FOLLOW_ANGLE;
}
void Minion::InitStats(uint32 duration)
{
TempSummon::InitStats(duration);
SetReactState(REACT_PASSIVE);
SetCreatorGUID(m_owner->GetGUID());
setFaction(m_owner->getFaction());
m_owner->SetMinion(this, true);
}
void Minion::RemoveFromWorld()
{
if (!IsInWorld())
return;
m_owner->SetMinion(this, false);
TempSummon::RemoveFromWorld();
}
bool Minion::IsGuardianPet() const
{
return isPet() || (m_Properties && m_Properties->Category == SUMMON_CATEGORY_PET);
}
Guardian::Guardian(SummonPropertiesEntry const *properties, Unit *owner) : Minion(properties, owner)
, m_bonusSpellDamage(0)
{
memset(m_statFromOwner, 0, sizeof(float)*MAX_STATS);
m_unitTypeMask |= UNIT_MASK_GUARDIAN;
if (properties && properties->Type == SUMMON_TYPE_PET)
{
m_unitTypeMask |= UNIT_MASK_CONTROLABLE_GUARDIAN;
InitCharmInfo();
}
}
void Guardian::InitStats(uint32 duration)
{
Minion::InitStats(duration);
InitStatsForLevel(m_owner->getLevel());
if (m_owner->GetTypeId() == TYPEID_PLAYER && HasUnitTypeMask(UNIT_MASK_CONTROLABLE_GUARDIAN))
m_charmInfo->InitCharmCreateSpells();
SetReactState(REACT_AGGRESSIVE);
}
void Guardian::InitSummon()
{
TempSummon::InitSummon();
if (m_owner->GetTypeId() == TYPEID_PLAYER
&& m_owner->GetMinionGUID() == GetGUID()
&& !m_owner->GetCharmGUID())
m_owner->ToPlayer()->CharmSpellInitialize();
}
Puppet::Puppet(SummonPropertiesEntry const *properties, Unit *owner) : Minion(properties, owner)
{
ASSERT(owner->GetTypeId() == TYPEID_PLAYER);
m_owner = (Player*)owner;
m_unitTypeMask |= UNIT_MASK_PUPPET;
}
void Puppet::InitStats(uint32 duration)
{
Minion::InitStats(duration);
SetLevel(m_owner->getLevel());
SetReactState(REACT_PASSIVE);
}
void Puppet::InitSummon()
{
Minion::InitSummon();
if (!SetCharmedBy(m_owner, CHARM_TYPE_POSSESS))
ASSERT(false);
}
void Puppet::Update(uint32 time)
{
Minion::Update(time);
//check if caster is channelling?
if (IsInWorld())
{
if (!isAlive())
{
UnSummon();
// TODO: why long distance .die does not remove it
}
}
}
void Puppet::RemoveFromWorld()
{
if (!IsInWorld())
return;
RemoveCharmedBy(NULL);
Minion::RemoveFromWorld();
}<|fim▁end|> | |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from reportlab.lib.colors import Color
from unittest import TestCase
from xhtml2pdf.util import getCoords, getColor, getSize, getFrameDimensions, \
getPos, getBox
from xhtml2pdf.tags import int_to_roman
class UtilsCoordTestCase(TestCase):
def test_getCoords_simple(self):
res = getCoords(1, 1, 10, 10, (10,10))
self.assertEqual(res, (1, -1, 10, 10))
# A second time - it's memoized!
res = getCoords(1, 1, 10, 10, (10,10))
self.assertEqual(res, (1, -1, 10, 10))
def test_getCoords_x_lt_0(self):
res = getCoords(-1, 1, 10, 10, (10,10))
self.assertEqual(res, (9, -1, 10, 10))
def test_getCoords_y_lt_0(self):
res = getCoords(1, -1, 10, 10, (10,10))
self.assertEqual(res, (1, -9, 10, 10))
def test_getCoords_w_and_h_none(self):
res = getCoords(1, 1, None, None, (10,10))
self.assertEqual(res, (1, 9))
def test_getCoords_w_lt_0(self):
res = getCoords(1, 1, -1, 10, (10,10))
self.assertEqual(res, (1, -1, 8, 10))
def test_getCoords_h_lt_0(self):
res = getCoords(1, 1, 10, -1, (10,10))
self.assertEqual(res, (1, 1, 10, 8))
class UtilsColorTestCase(TestCase):
def test_get_color_simple(self):
res = getColor('red')
self.assertEqual(res, Color(1,0,0,1))
# Testing it being memoized properly
res = getColor('red')
self.assertEqual(res, Color(1,0,0,1))
def test_get_color_from_color(self):
# Noop if argument is already a color
res = getColor(Color(1,0,0,1))
self.assertEqual(res, Color(1,0,0,1))
def test_get_transparent_color(self):
res = getColor('transparent', default='TOKEN')
self.assertEqual(res, 'TOKEN')
res = getColor('none', default='TOKEN')
self.assertEqual(res, 'TOKEN')
def test_get_color_for_none(self):
res = getColor(None, default='TOKEN')
self.assertEqual(res, 'TOKEN')
def test_get_color_for_RGB(self):
res = getColor('#FF0000')
self.assertEqual(res, Color(1,0,0,1))
def test_get_color_for_RGB_with_len_4(self):
res = getColor('#F00')
self.assertEqual(res, Color(1,0,0,1))
def test_get_color_for_CSS_RGB_function(self):
# It's regexp based, let's try common cases.
res = getColor('rgb(255,0,0)')
self.assertEqual(res, Color(1,0,0,1))
res = getColor('<css function: rgb(255,0,0)>')
self.assertEqual(res, Color(1,0,0,1))
class UtilsGetSizeTestCase(TestCase):
def test_get_size_simple(self):
res = getSize('12pt')
self.assertEqual(res, 12.00)
# Memoized...
res = getSize('12pt')
self.assertEqual(res, 12.00)
def test_get_size_for_none(self):
res = getSize(None, relative='TOKEN')
self.assertEqual(res, 'TOKEN')
def test_get_size_for_float(self):
res = getSize(12.00)
self.assertEqual(res, 12.00)
def test_get_size_for_tuple(self):
# TODO: This is a really strange case. Probably should not work this way.
res = getSize(("12", ".12"))
self.assertEqual(res, 12.12)
def test_get_size_for_cm(self):
res = getSize("1cm")
self.assertEqual(res, 28.346456692913385)
def test_get_size_for_mm(self):
res = getSize("1mm")
self.assertEqual(res, 2.8346456692913385)
def test_get_size_for_i(self):
res = getSize("1i")
self.assertEqual(res, 72.00)
def test_get_size_for_in(self):
res = getSize("1in")
self.assertEqual(res, 72.00)
def test_get_size_for_inch(self):
res = getSize("1in")
self.assertEqual(res, 72.00)
def test_get_size_for_pc(self):
res = getSize("1pc")
self.assertEqual(res, 12.00)
def test_get_size_for_none_str(self):
res = getSize("none")
self.assertEqual(res, 0.0)
res = getSize("0")
self.assertEqual(res, 0.0)
res = getSize("auto") # Really?
self.assertEqual(res, 0.0)
class PisaDimensionTestCase(TestCase):
def test_FrameDimensions_left_top_width_height(self):
#builder = pisaCSSBuilder(mediumSet=['all'])
dims = {
'left': '10pt',
'top': '20pt',
'width': '30pt',
'height': '40pt',
}
expected = (10.0, 20.0, 30.0, 40.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_FrameDimensions_left_top_bottom_right(self):
dims = {
'left': '10pt',<|fim▁hole|> 'top': '20pt',
'bottom': '30pt',
'right': '40pt',
}
expected = (10.0, 20.0, 50.0, 150.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_FrameDimensions_bottom_right_width_height(self):
dims = {
'bottom': '10pt',
'right': '20pt',
'width': '70pt',
'height': '80pt',
}
expected = (10.0, 110.0, 70.0, 80.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_FrameDimensions_left_top_width_height_with_margin(self):
dims = {
'left': '10pt',
'top': '20pt',
'width': '70pt',
'height': '80pt',
'margin-top': '10pt',
'margin-left': '15pt',
'margin-bottom': '20pt',
'margin-right': '25pt',
}
expected = (25.0, 30.0, 30.0, 50.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_FrameDimensions_bottom_right_width_height_with_margin(self):
dims = {
'bottom': '10pt',
'right': '20pt',
'width': '70pt',
'height': '80pt',
'margin-top': '10pt',
'margin-left': '15pt',
'margin-bottom': '20pt',
'margin-right': '25pt',
}
expected = (25.0, 120.0, 30.0, 50.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_frame_dimensions_for_box_len_eq_4(self):
dims = {
'-pdf-frame-box': ['12pt','12,pt','12pt','12pt']
}
expected = [12.0, 12.0, 12.0, 12.0]
result = getFrameDimensions(dims, 100, 200)
self.assertEqual(result, expected)
def test_frame_dimensions_for_height_without_top_or_bottom(self):
dims = {
'left': '10pt',
#'top': '20pt',
'width': '30pt',
'height': '40pt',
}
expected = (10.0, 0.0, 30.0, 200.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
def test_frame_dimensions_for_width_without_left_or_right(self):
dims = {
#'left': '10pt',
'top': '20pt',
'width': '30pt',
'height': '40pt',
}
expected = (0.0, 20.0, 100.0, 40.0)
result = getFrameDimensions(dims, 100, 200)
self.assertEquals(expected, result)
class GetPosTestCase(TestCase):
def test_get_pos_simple(self):
res = getBox("1pt 1pt 10pt 10pt", (10,10))
self.assertEqual(res,(1.0, -1.0, 10, 10))
def test_get_pos_raising(self):
raised = False
try:
getBox("1pt 1pt 10pt", (10,10))
except Exception:
raised = True
self.assertTrue(raised)
class TestTagUtils(TestCase):
def test_roman_numeral_conversion(self):
self.assertEqual("I", int_to_roman(1))
self.assertEqual("L", int_to_roman(50))
self.assertEqual("XLII", int_to_roman(42))
self.assertEqual("XXVI", int_to_roman(26))<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import CoreClient from './CoreClient';<|fim▁hole|><|fim▁end|> |
export default CoreClient; |
<|file_name|>test_memory.py<|end_file_name|><|fim▁begin|># @Author: dileep
# @Last Modified by: dileep
import random
import pytest
from microbial_ai.regulation import Event, Action, Memory
@pytest.fixture
def random_action():
return Action(type='fixed', phi={'rxn1': (random.random(), '+')})
@pytest.fixture
def random_event(random_action):
return Event(state=random.randint(0, 100), action=random_action,
next_state=random.randint(0, 100), reward=random.random())
@pytest.mark.usefixtures("random_event")
class TestMemory:
"""
Tests for the Memory class
"""
def test_initialization(self):
memory = Memory(1000)
assert memory.capacity == 1000
assert memory.idx == 0
def test_add_event(self, random_event):
memory = Memory(1000)
memory.add_event(random_event)
assert len(memory.memory) == 1
assert memory.idx == 1
for _ in range(1500):<|fim▁hole|> assert memory.idx == (1000 - 500 + 1)
def test_sample(self, random_event):
memory = Memory(1000)
with pytest.raises(ValueError):
memory.sample(100)
for _ in range(400):
memory.add_event(random_event)
assert len(memory.sample(200)) == 200<|fim▁end|> | memory.add_event(random_event)
assert len(memory.memory) == memory.capacity |
<|file_name|>hash-to-url.js<|end_file_name|><|fim▁begin|>const _parseHash = function (hash) {
let name = '';
let urlType = '';
let hashParts = hash.split('_');
if (hashParts && hashParts.length === 2) {
name = hashParts[1];
let type = hashParts[0];
// take off the "#"
let finalType = type.slice(1, type.length);<|fim▁hole|> case 'method':
urlType = 'methods';
break;
case 'property':
urlType = 'properties';
break;
case 'event':
urlType = 'events';
break;
default:
urlType = '';
}
return {
urlType,
name,
};
}
return null;
};
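// Illustrative example (derived from _parseHash above, hypothetical hash value): a location
// hash such as "#method_fooBar" is parsed into { urlType: 'methods', name: 'fooBar' }, so
// hashToUrl() below would yield "<pathname>/methods/fooBar?anchor=fooBar".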
function hashToUrl(window) {
if (window && window.location && window.location.hash) {
let hashInfo = _parseHash(window.location.hash);
if (hashInfo) {
return `${window.location.pathname}/${hashInfo.urlType}/${hashInfo.name}?anchor=${hashInfo.name}`;
}
}
return null;
}
function hasRedirectableHash(window) {
let canRedirect = false;
if (window && window.location && window.location.hash) {
let hashParts = window.location.hash.split('_');
if (hashParts && hashParts.length === 2) {
canRedirect = true;
}
}
return canRedirect;
}
export { hashToUrl, hasRedirectableHash };<|fim▁end|> | switch (finalType) { |
<|file_name|>ns_css_value.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Little helpers for `nsCSSValue`.
use gecko_bindings::bindings;
use gecko_bindings::structs;
use gecko_bindings::structs::{nsCSSValue, nsCSSUnit};
use gecko_bindings::structs::{nsCSSValue_Array, nsCSSValueList, nscolor};
use gecko_string_cache::Atom;
use std::marker::PhantomData;
use std::mem;
use std::ops::{Index, IndexMut};
use std::slice;
use values::computed::{Angle, LengthOrPercentage, Percentage};
use values::specified::url::SpecifiedUrl;
impl nsCSSValue {
/// Create a CSSValue with null unit, useful to be used as a return value.
#[inline]
pub fn null() -> Self {
unsafe { mem::zeroed() }
}
/// Returns this nsCSSValue value as an integer, unchecked in release
/// builds.
pub fn integer_unchecked(&self) -> i32 {
debug_assert!(self.mUnit == nsCSSUnit::eCSSUnit_Integer ||
self.mUnit == nsCSSUnit::eCSSUnit_Enumerated ||
self.mUnit == nsCSSUnit::eCSSUnit_EnumColor);
unsafe { *self.mValue.mInt.as_ref() }
}
/// Checks if it is an integer and returns it if so
pub fn integer(&self) -> Option<i32> {
if self.mUnit == nsCSSUnit::eCSSUnit_Integer ||
self.mUnit == nsCSSUnit::eCSSUnit_Enumerated ||
self.mUnit == nsCSSUnit::eCSSUnit_EnumColor {
Some(unsafe { *self.mValue.mInt.as_ref() })
} else {
None
}
}
/// Checks if it is an RGBA color, returning it if so
/// Only use it with colors set by SetColorValue(),
/// which always sets RGBA colors
pub fn color_value(&self) -> Option<nscolor> {
if self.mUnit == nsCSSUnit::eCSSUnit_RGBAColor {
Some(unsafe { *self.mValue.mColor.as_ref() })
} else {
None
}
}
/// Returns this nsCSSValue value as a floating point value, unchecked in
/// release builds.
pub fn float_unchecked(&self) -> f32 {
debug_assert!(nsCSSUnit::eCSSUnit_Number as u32 <= self.mUnit as u32);
unsafe { *self.mValue.mFloat.as_ref() }
}
/// Returns this nsCSSValue as a nsCSSValue::Array, unchecked in release
/// builds.
pub unsafe fn array_unchecked(&self) -> &nsCSSValue_Array {
debug_assert!(nsCSSUnit::eCSSUnit_Array as u32 <= self.mUnit as u32 &&
self.mUnit as u32 <= nsCSSUnit::eCSSUnit_Calc_Divided as u32);
let array = *self.mValue.mArray.as_ref();
debug_assert!(!array.is_null());<|fim▁hole|> pub unsafe fn set_lop(&mut self, lop: LengthOrPercentage) {
match lop {
LengthOrPercentage::Length(px) => {
bindings::Gecko_CSSValue_SetPixelLength(self, px.px())
}
LengthOrPercentage::Percentage(pc) => {
bindings::Gecko_CSSValue_SetPercentage(self, pc.0)
}
LengthOrPercentage::Calc(calc) => {
bindings::Gecko_CSSValue_SetCalc(self, calc.into())
}
}
}
/// Returns LengthOrPercentage value.
pub unsafe fn get_lop(&self) -> LengthOrPercentage {
use values::computed::Length;
match self.mUnit {
nsCSSUnit::eCSSUnit_Pixel => {
LengthOrPercentage::Length(Length::new(bindings::Gecko_CSSValue_GetNumber(self)))
},
nsCSSUnit::eCSSUnit_Percent => {
LengthOrPercentage::Percentage(Percentage(bindings::Gecko_CSSValue_GetPercentage(self)))
},
nsCSSUnit::eCSSUnit_Calc => {
LengthOrPercentage::Calc(bindings::Gecko_CSSValue_GetCalc(self).into())
},
x => panic!("The unit should not be {:?}", x),
}
}
fn set_valueless_unit(&mut self, unit: nsCSSUnit) {
debug_assert_eq!(self.mUnit, nsCSSUnit::eCSSUnit_Null);
debug_assert!(unit as u32 <= nsCSSUnit::eCSSUnit_DummyInherit as u32, "Not a valueless unit");
self.mUnit = unit;
}
/// Set to an auto value
///
/// This method requires the current value to be null.
pub fn set_auto(&mut self) {
self.set_valueless_unit(nsCSSUnit::eCSSUnit_Auto);
}
/// Set to a normal value
///
/// This method requires the current value to be null.
pub fn set_normal(&mut self) {
self.set_valueless_unit(nsCSSUnit::eCSSUnit_Normal);
}
fn set_string_internal(&mut self, s: &str, unit: nsCSSUnit) {
unsafe { bindings::Gecko_CSSValue_SetString(self, s.as_ptr(), s.len() as u32, unit) }
}
fn set_string_from_atom_internal(&mut self, s: &Atom, unit: nsCSSUnit) {
unsafe { bindings::Gecko_CSSValue_SetStringFromAtom(self, s.as_ptr(), unit) }
}
/// Set to a string value
pub fn set_string(&mut self, s: &str) {
self.set_string_internal(s, nsCSSUnit::eCSSUnit_String)
}
/// Set to a string value from the given atom
pub fn set_string_from_atom(&mut self, s: &Atom) {
self.set_string_from_atom_internal(s, nsCSSUnit::eCSSUnit_String)
}
/// Set to a ident value from the given atom
pub fn set_ident_from_atom(&mut self, s: &Atom) {
self.set_string_from_atom_internal(s, nsCSSUnit::eCSSUnit_Ident)
}
/// Set to an identifier value
pub fn set_ident(&mut self, s: &str) {
self.set_string_internal(s, nsCSSUnit::eCSSUnit_Ident)
}
/// Set to an atom identifier value
pub fn set_atom_ident(&mut self, s: Atom) {
unsafe { bindings::Gecko_CSSValue_SetAtomIdent(self, s.into_addrefed()) }
}
/// Set to a font format
pub fn set_font_format(&mut self, s: &str) {
self.set_string_internal(s, nsCSSUnit::eCSSUnit_Font_Format);
}
/// Set to a local font value
pub fn set_local_font(&mut self, s: &Atom) {
self.set_string_from_atom_internal(s, nsCSSUnit::eCSSUnit_Local_Font);
}
fn set_int_internal(&mut self, value: i32, unit: nsCSSUnit) {
unsafe { bindings::Gecko_CSSValue_SetInt(self, value, unit) }
}
/// Set to an integer value
pub fn set_integer(&mut self, value: i32) {
self.set_int_internal(value, nsCSSUnit::eCSSUnit_Integer)
}
/// Set to an enumerated value
pub fn set_enum<T: Into<i32>>(&mut self, value: T) {
self.set_int_internal(value.into(), nsCSSUnit::eCSSUnit_Enumerated);
}
/// Set to a url value
pub fn set_url(&mut self, url: &SpecifiedUrl) {
unsafe { bindings::Gecko_CSSValue_SetURL(self, url.for_ffi()) }
}
/// Set to an array of given length
pub fn set_array(&mut self, len: i32) -> &mut nsCSSValue_Array {
unsafe { bindings::Gecko_CSSValue_SetArray(self, len) }
unsafe { self.mValue.mArray.as_mut().as_mut() }.unwrap()
}
/// Generic set from any value that implements the ToNsCssValue trait.
pub fn set_from<T: ToNsCssValue>(&mut self, value: T) {
value.convert(self)
}
/// Returns an `Angle` value from this `nsCSSValue`.
///
/// Panics if the unit is not `eCSSUnit_Degree` `eCSSUnit_Grad`, `eCSSUnit_Turn`
/// or `eCSSUnit_Radian`.
pub fn get_angle(&self) -> Angle {
unsafe {
Angle::from_gecko_values(self.float_unchecked(), self.mUnit)
}
}
/// Sets Angle value to this nsCSSValue.
pub fn set_angle(&mut self, angle: Angle) {
debug_assert_eq!(self.mUnit, nsCSSUnit::eCSSUnit_Null);
let (value, unit) = angle.to_gecko_values();
self.mUnit = unit;
unsafe {
*self.mValue.mFloat.as_mut() = value;
}
}
/// Set to a pair value
///
/// This is only supported on the main thread.
pub fn set_pair(&mut self, x: &nsCSSValue, y: &nsCSSValue) {
unsafe { bindings::Gecko_CSSValue_SetPair(self, x, y) }
}
/// Set to a list value
///
/// This is only supported on the main thread.
pub fn set_list<I>(&mut self, values: I) where I: ExactSizeIterator<Item=nsCSSValue> {
debug_assert!(values.len() > 0, "Empty list is not supported");
unsafe { bindings::Gecko_CSSValue_SetList(self, values.len() as u32); }
debug_assert_eq!(self.mUnit, nsCSSUnit::eCSSUnit_List);
let list: &mut structs::nsCSSValueList = &mut unsafe {
self.mValue.mList.as_ref() // &*nsCSSValueList_heap
.as_mut().expect("List pointer should be non-null")
}._base;
for (item, new_value) in list.into_iter().zip(values) {
*item = new_value;
}
}
/// Set to a pair list value
///
/// This is only supported on the main thread.
pub fn set_pair_list<I>(&mut self, mut values: I)
where I: ExactSizeIterator<Item=(nsCSSValue, nsCSSValue)> {
debug_assert!(values.len() > 0, "Empty list is not supported");
unsafe { bindings::Gecko_CSSValue_SetPairList(self, values.len() as u32); }
debug_assert_eq!(self.mUnit, nsCSSUnit::eCSSUnit_PairList);
let mut item_ptr = &mut unsafe {
self.mValue.mPairList.as_ref() // &*nsCSSValuePairList_heap
.as_mut().expect("List pointer should be non-null")
}._base as *mut structs::nsCSSValuePairList;
while let Some(item) = unsafe { item_ptr.as_mut() } {
let value = values.next().expect("Values shouldn't have been exhausted");
item.mXValue = value.0;
item.mYValue = value.1;
item_ptr = item.mNext;
}
debug_assert!(values.next().is_none(), "Values should have been exhausted");
}
/// Set a shared list
pub fn set_shared_list<I>(&mut self, values: I) where I: ExactSizeIterator<Item=nsCSSValue> {
debug_assert!(values.len() > 0, "Empty list is not supported");
unsafe { bindings::Gecko_CSSValue_InitSharedList(self, values.len() as u32) };
debug_assert_eq!(self.mUnit, nsCSSUnit::eCSSUnit_SharedList);
let list = unsafe {
self.mValue.mSharedList.as_ref()
.as_mut().expect("List pointer should be non-null").mHead.as_mut()
};
debug_assert!(list.is_some(), "New created shared list shouldn't be null");
for (item, new_value) in list.unwrap().into_iter().zip(values) {
*item = new_value;
}
}
}
impl Drop for nsCSSValue {
fn drop(&mut self) {
unsafe { bindings::Gecko_CSSValue_Drop(self) };
}
}
/// Iterator of nsCSSValueList.
#[allow(non_camel_case_types)]
pub struct nsCSSValueListIterator<'a> {
current: Option<&'a nsCSSValueList>,
}
impl<'a> Iterator for nsCSSValueListIterator<'a> {
type Item = &'a nsCSSValue;
fn next(&mut self) -> Option<Self::Item> {
match self.current {
Some(item) => {
self.current = unsafe { item.mNext.as_ref() };
Some(&item.mValue)
},
None => None
}
}
}
impl<'a> IntoIterator for &'a nsCSSValueList {
type Item = &'a nsCSSValue;
type IntoIter = nsCSSValueListIterator<'a>;
fn into_iter(self) -> Self::IntoIter {
nsCSSValueListIterator { current: Some(self) }
}
}
/// Mutable Iterator of nsCSSValueList.
#[allow(non_camel_case_types)]
pub struct nsCSSValueListMutIterator<'a> {
current: *mut nsCSSValueList,
phantom: PhantomData<&'a mut nsCSSValue>,
}
impl<'a> Iterator for nsCSSValueListMutIterator<'a> {
type Item = &'a mut nsCSSValue;
fn next(&mut self) -> Option<Self::Item> {
match unsafe { self.current.as_mut() } {
Some(item) => {
self.current = item.mNext;
Some(&mut item.mValue)
},
None => None
}
}
}
impl<'a> IntoIterator for &'a mut nsCSSValueList {
type Item = &'a mut nsCSSValue;
type IntoIter = nsCSSValueListMutIterator<'a>;
fn into_iter(self) -> Self::IntoIter {
nsCSSValueListMutIterator { current: self as *mut nsCSSValueList,
phantom: PhantomData }
}
}
impl nsCSSValue_Array {
/// Return the length of this `nsCSSValue::Array`
#[inline]
pub fn len(&self) -> usize {
self.mCount
}
#[inline]
fn buffer(&self) -> *const nsCSSValue {
self.mArray.as_ptr()
}
/// Get the array as a slice of nsCSSValues.
#[inline]
pub fn as_slice(&self) -> &[nsCSSValue] {
unsafe { slice::from_raw_parts(self.buffer(), self.len()) }
}
/// Get the array as a mutable slice of nsCSSValues.
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [nsCSSValue] {
unsafe { slice::from_raw_parts_mut(self.buffer() as *mut _, self.len()) }
}
}
impl Index<usize> for nsCSSValue_Array {
type Output = nsCSSValue;
#[inline]
fn index(&self, i: usize) -> &nsCSSValue {
&self.as_slice()[i]
}
}
impl IndexMut<usize> for nsCSSValue_Array {
#[inline]
fn index_mut(&mut self, i: usize) -> &mut nsCSSValue {
&mut self.as_mut_slice()[i]
}
}
/// Generic conversion to nsCSSValue
pub trait ToNsCssValue {
/// Convert
fn convert(self, nscssvalue: &mut nsCSSValue);
}<|fim▁end|> | &*array
}
/// Sets LengthOrPercentage value to this nsCSSValue. |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from copy import deepcopy
from manager_utils import upsert
class BaseSmartManager(object):
def __init__(self, template):
self._template = deepcopy(template)
self._built_objs = set()
@property
def built_objs(self):
return self._built_objs
def build_obj(self, model_class, is_deletable=True, updates=None, defaults=None, **kwargs):
"""<|fim▁hole|> by the smart manager, it is added to the internal _built_objs list and returned.
"""
built_obj = upsert(model_class.objects, updates=updates, defaults=defaults, **kwargs)[0]
if is_deletable:
self._built_objs |= set([built_obj])
return built_obj
def build_using(self, smart_manager_class, template):
"""
Builds objects using another builder and a template. Adds the resulting built objects
from that builder to the built objects of this builder.
"""
smart_manager = smart_manager_class(template)
built_objs = smart_manager.build()
self._built_objs |= smart_manager.built_objs
# make sure build objs is a list or tuple
if type(built_objs) not in (list, tuple,):
built_objs = [built_objs]
return built_objs
def build(self):
"""
All builders must implement the build function, which returns the built object. All build
functions must also maintain an interal list of built objects, which are accessed by
self.built_objs.
"""
raise NotImplementedError<|fim▁end|> | Builds an object using the upsert function in manager utils. If the object can be deleted |
<|file_name|>Halos.js<|end_file_name|><|fim▁begin|>Clazz.declarePackage ("J.shape");
Clazz.load (["J.shape.AtomShape"], "J.shape.Halos", ["JU.BSUtil", "$.C"], function () {
c$ = Clazz.decorateAsClass (function () {
this.colixSelection = 2;
this.bsHighlight = null;
this.colixHighlight = 10;
Clazz.instantialize (this, arguments);
}, J.shape, "Halos", J.shape.AtomShape);
Clazz.defineMethod (c$, "initState",
function () {
this.translucentAllowed = false;
});
Clazz.overrideMethod (c$, "setProperty",
function (propertyName, value, bs) {
if ("translucency" === propertyName) return;
if ("argbSelection" === propertyName) {
this.colixSelection = JU.C.getColix ((value).intValue ());
return;
}if ("argbHighlight" === propertyName) {
this.colixHighlight = JU.C.getColix ((value).intValue ());
return;
}if ("highlight" === propertyName) {
this.bsHighlight = value;
return;
}if (propertyName === "deleteModelAtoms") {
JU.BSUtil.deleteBits (this.bsHighlight, bs);
}this.setPropAS (propertyName, value, bs);
}, "~S,~O,JU.BS");
Clazz.overrideMethod (c$, "setModelVisibilityFlags",
function (bs) {
var bsSelected = (this.vwr.getSelectionHaloEnabled (false) ? this.vwr.bsA () : null);
for (var i = this.ac; --i >= 0; ) {
var isVisible = bsSelected != null && bsSelected.get (i) || (this.mads != null && this.mads[i] != 0);<|fim▁hole|>function () {
return this.vwr.getShapeState (this);
});
});<|fim▁end|> | this.setShapeVisibility (this.atoms[i], isVisible);
}
}, "JU.BS");
Clazz.overrideMethod (c$, "getShapeState", |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###########################################################
#
# Copyright (c) 2005-2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,<|fim▁hole|>
from ui_playground_panel_wdg import *
from font_palettes_example_wdg import *
from panning_scroll_example_wdg import *
from menu_examples_wdg import *
from event_examples_wdg import *
from misc_examples_wdg import *
from fx_anim_examples_wdg import *
from keyboard_handler_examples_wdg import *
from search_class_tag_examples_wdg import *
from efficient_table_example_wdg import *
from dev_sandbox_01_wdg import *
from dev_sandbox_02_wdg import *
from dev_sandbox_03_wdg import *<|fim▁end|> | # or disclosed in any way without written permission.
#
#
# |
<|file_name|>camera.rs<|end_file_name|><|fim▁begin|>extern crate mithril;
use std::f64;
use std::num::Float;
use self::mithril::math::{ Vector, Quaternion };
pub struct Camera {
position: Vector,
focus_point: Vector,
up: Vector,
field_of_view: f64,
aspect_ratio: f64,
far: f64,
near: f64,
anchor_point: Option<[f64; 2]>,
control_point: [f64; 2],
}
impl Camera {
pub fn new(position: Vector, focus_point: Vector, up: Vector) -> Camera {
Camera{
position: position,
focus_point: focus_point,
up: up.normalize(),
field_of_view: (90.0 * f64::consts::PI / 180.0),
aspect_ratio: 640.0/480.0,
far: 100.0,
near: 1.0,
anchor_point: None,
control_point: [0.0; 2],
}
}
pub fn position(&self) -> Vector {
self.position
}
pub fn focus_point(&self) -> Vector {
self.focus_point
}
pub fn go_to(&mut self, position: Vector) {
self.position = position;
}
pub fn update(&mut self) {
}
pub fn start_control(&mut self, x: f64, y: f64) {
self.anchor_point = Some([x, y]);
self.control_point[0] = x;
self.control_point[1] = y;
}
pub fn set_control_point(&mut self, x: f64, y: f64) {
self.control_point[0] = x;
self.control_point[1] = y;
}
pub fn release_controls(&mut self) {
self.anchor_point = None;
}
pub fn is_controlled(&self) -> bool {
self.anchor_point != None
}<|fim▁hole|> let mut y_view = z_view.cross(x_view).normalize();
let x_trans = -self.position.dot(x_view);
let y_trans = -self.position.dot(y_view);
let z_trans = -self.position.dot(z_view);
match self.anchor_point {
Some(anchor_point) => {
let diff = [
(self.control_point[1] - anchor_point[1]) as f32,
(anchor_point[0] - self.control_point[0]) as f32,
];
let diff_sq = diff[0] * diff[0] + diff[1] * diff[1]; // squared length; the square root is taken once below
if diff_sq > 0.0001 {
let diff_length = diff_sq.sqrt();
let rot_axis = (x_view * diff[0] + y_view * diff[1]) / diff_length;
let rot_in_radians = diff_length * 2.0;
let u_quat = Quaternion::new(0.0, x_view[0], x_view[1], x_view[2]);
let v_quat = Quaternion::new(0.0, y_view[0], y_view[1], y_view[2]);
let w_quat = Quaternion::new(0.0, z_view[0], z_view[1], z_view[2]);
let rot_quat = Quaternion::new_from_rotation(rot_in_radians, rot_axis[0], rot_axis[1], rot_axis[2]);
let new_u_quat = rot_quat * u_quat * rot_quat.inverse();
let new_v_quat = rot_quat * v_quat * rot_quat.inverse();
let new_w_quat = rot_quat * w_quat * rot_quat.inverse();
x_view[0] = new_u_quat[1];
x_view[1] = new_u_quat[2];
x_view[2] = new_u_quat[3];
y_view[0] = new_v_quat[1];
y_view[1] = new_v_quat[2];
y_view[2] = new_v_quat[3];
z_view[0] = new_w_quat[1];
z_view[1] = new_w_quat[2];
z_view[2] = new_w_quat[3];
}
}
None => {
// do nothing
}
}
[
x_view[0], x_view[1], x_view[2], x_trans,
y_view[0], y_view[1], y_view[2], y_trans,
z_view[0], z_view[1], z_view[2], z_trans,
0.0, 0.0, 0.0, 1.0,
]
}
pub fn projection_matrix(&self) -> [f32; 16] {
let m_11 = (1.0 / (self.field_of_view / 2.0).tan()) as f32;
let m_22 = m_11 * (self.aspect_ratio as f32);
let m_33 = -((self.far + self.near) / (self.far - self.near)) as f32;
let m_43 = -((2.0 * self.far * self.near) / (self.far - self.near)) as f32;
[
m_11, 0.0, 0.0, 0.0,
0.0, m_22, 0.0, 0.0,
0.0, 0.0, m_33, m_43,
0.0, 0.0, -1.0, 0.0,
]
}
}<|fim▁end|> |
pub fn view_matrix(&self) -> [f32; 16] {
let mut z_view = (self.position - self.focus_point).normalize();
let mut x_view = self.up.cross(z_view).normalize(); |
<|file_name|>PersistedElement.tsx<|end_file_name|><|fim▁begin|>/*
Copyright 2018 New Vector Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import React from 'react';
import ReactDOM from 'react-dom';
import { throttle } from "lodash";
import { isNullOrUndefined } from "matrix-js-sdk/src/utils";
import dis from '../../../dispatcher/dispatcher';
import MatrixClientContext from "../../../contexts/MatrixClientContext";
import { MatrixClientPeg } from "../../../MatrixClientPeg";
import { replaceableComponent } from "../../../utils/replaceableComponent";
import { ActionPayload } from "../../../dispatcher/payloads";
export const getPersistKey = (appId: string) => 'widget_' + appId;
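// For illustration (hypothetical appId): getPersistKey('jitsi') yields 'widget_jitsi', which
// maps to the container element id 'mx_persistedElement_widget_jitsi' created by the helpers below.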
// Shamelessly ripped off Modal.js. There's probably a better way
// of doing reusable widgets like dialog boxes & menus where we go and
// pass in a custom control as the actual body.
function getContainer(containerId: string): HTMLDivElement {
return document.getElementById(containerId) as HTMLDivElement;
}
function getOrCreateContainer(containerId: string): HTMLDivElement {
let container = getContainer(containerId);
if (!container) {
container = document.createElement("div");
container.id = containerId;
document.body.appendChild(container);
}
return container;
}
interface IProps {
// Unique identifier for this PersistedElement instance
// Any PersistedElements with the same persistKey will use
// the same DOM container.
persistKey: string;
// z-index for the element. Defaults to 9.
zIndex?: number;
style?: React.StyleHTMLAttributes<HTMLDivElement>;
}
/**
* Class of component that renders its children in a separate ReactDOM virtual tree
* in a container element appended to document.body.
*
* This prevents the children from being unmounted when the parent of PersistedElement
* unmounts, allowing them to persist.
*
* When PE is unmounted, it hides the children using CSS. When mounted or updated, the
* children are made visible and are positioned into a div that is given the same
* bounding rect as the parent of PE.
*/
@replaceableComponent("views.elements.PersistedElement")
export default class PersistedElement extends React.Component<IProps> {
private resizeObserver: ResizeObserver;
private dispatcherRef: string;
private childContainer: HTMLDivElement;
private child: HTMLDivElement;
constructor(props: IProps) {
super(props);
this.resizeObserver = new ResizeObserver(this.repositionChild);
// Annoyingly, a resize observer is insufficient, since we also care
// about when the element moves on the screen without changing its
// dimensions. Doesn't look like there's a ResizeObserver equivalent
// for this, so we bodge it by listening for document resize and
// the timeline_resize action.
window.addEventListener('resize', this.repositionChild);
this.dispatcherRef = dis.register(this.onAction);
}
/**
* Removes the DOM elements created when a PersistedElement with the given
* persistKey was mounted. The DOM elements will be re-added if another
* PersistedElement is mounted in the future.
*
* @param {string} persistKey Key used to uniquely identify this PersistedElement
*/
public static destroyElement(persistKey: string): void {
const container = getContainer('mx_persistedElement_' + persistKey);
if (container) {
container.remove();
}
}
static isMounted(persistKey) {
return Boolean(getContainer('mx_persistedElement_' + persistKey));
}
private collectChildContainer = (ref: HTMLDivElement): void => {
if (this.childContainer) {
this.resizeObserver.unobserve(this.childContainer);
}
this.childContainer = ref;
if (ref) {
this.resizeObserver.observe(ref);
}
};
private collectChild = (ref: HTMLDivElement): void => {
this.child = ref;
this.updateChild();
};
public componentDidMount(): void {
this.updateChild();
this.renderApp();
}
public componentDidUpdate(): void {
this.updateChild();
this.renderApp();
}
public componentWillUnmount(): void {
this.updateChildVisibility(this.child, false);
this.resizeObserver.disconnect();
window.removeEventListener('resize', this.repositionChild);
dis.unregister(this.dispatcherRef);
}
private onAction = (payload: ActionPayload): void => {
if (payload.action === 'timeline_resize') {
this.repositionChild();
} else if (payload.action === 'logout') {
PersistedElement.destroyElement(this.props.persistKey);
}<|fim▁hole|> private repositionChild = (): void => {
this.updateChildPosition(this.child, this.childContainer);
};
private updateChild(): void {
this.updateChildPosition(this.child, this.childContainer);
this.updateChildVisibility(this.child, true);
}
private renderApp(): void {
const content = <MatrixClientContext.Provider value={MatrixClientPeg.get()}>
<div ref={this.collectChild} style={this.props.style}>
{ this.props.children }
</div>
</MatrixClientContext.Provider>;
ReactDOM.render(content, getOrCreateContainer('mx_persistedElement_'+this.props.persistKey));
}
private updateChildVisibility(child: HTMLDivElement, visible: boolean): void {
if (!child) return;
child.style.display = visible ? 'block' : 'none';
}
private updateChildPosition = throttle((child: HTMLDivElement, parent: HTMLDivElement): void => {
if (!child || !parent) return;
const parentRect = parent.getBoundingClientRect();
Object.assign(child.style, {
zIndex: isNullOrUndefined(this.props.zIndex) ? 9 : this.props.zIndex,
position: 'absolute',
top: parentRect.top + 'px',
left: parentRect.left + 'px',
width: parentRect.width + 'px',
height: parentRect.height + 'px',
});
}, 16, { trailing: true, leading: true });
public render(): JSX.Element {
return <div ref={this.collectChildContainer} />;
}
}<|fim▁end|> | };
|
<|file_name|>GameCharacter.java<|end_file_name|><|fim▁begin|>package zyx.game.components.world.characters;
import java.util.ArrayList;
import org.lwjgl.util.vector.Matrix4f;
import zyx.game.behavior.characters.CharacterAnimationBehavior;
import zyx.game.behavior.player.OnlinePositionInterpolator;
import zyx.game.components.AnimatedMesh;
import zyx.game.components.GameObject;
import zyx.game.components.IAnimatedMesh;
import zyx.game.components.world.IItemHolder;
import zyx.game.components.world.interactable.InteractionAction;
import zyx.game.components.world.items.GameItem;
import zyx.game.vo.CharacterType;
import zyx.opengl.models.implementations.physics.PhysBox;
public class GameCharacter extends GameObject implements IItemHolder
{
private static final ArrayList<InteractionAction> EMPTY_LIST = new ArrayList<>();
private static final ArrayList<InteractionAction> GUEST_LIST = new ArrayList<>();
static
{
GUEST_LIST.add(InteractionAction.TAKE_ORDER);
}
private AnimatedMesh mesh;
public final CharacterInfo info;
public GameCharacter()
{
info = new CharacterInfo();
mesh = new AnimatedMesh();
addChild(mesh);
}
public IAnimatedMesh getAnimatedMesh()
{
return mesh;
}
@Override
public int getUniqueId()
{
return info.uniqueId;
}
public void load(CharacterSetupVo vo)
{
mesh.load("mesh.character");
setPosition(false, vo.pos);
lookAt(vo.look);
addBehavior(new OnlinePositionInterpolator(info));
addBehavior(new CharacterAnimationBehavior());
info.uniqueId = vo.id;<|fim▁hole|>
@Override
public void hold(GameItem item)
{
info.heldItem = item;
mesh.addChildAsAttachment(item, "bone_carry");
}
@Override
public void removeItem(GameItem item)
{
if (info.heldItem != null)
{
mesh.removeChildAsAttachment(item);
info.heldItem = null;
}
}
@Override
public boolean isInteractable()
{
if (info.type == CharacterType.GUEST)
{
return true;
}
return false;
}
@Override
public PhysBox getPhysbox()
{
return mesh.getPhysbox();
}
@Override
public Matrix4f getMatrix()
{
return mesh.getMatrix();
}
@Override
public Matrix4f getBoneMatrix(int boneId)
{
return mesh.getBoneMatrix(boneId);
}
@Override
public GameObject getWorldObject()
{
return this;
}
@Override
public ArrayList<InteractionAction> getInteractions()
{
if (info.type == CharacterType.GUEST)
{
return GUEST_LIST;
}
else
{
return EMPTY_LIST;
}
}
}<|fim▁end|> | info.name = vo.name;
info.gender = vo.gender;
info.type = vo.type;
} |
<|file_name|>config_unspecified.go<|end_file_name|><|fim▁begin|>// +build !suse,!ubuntu,!al2
// Copyright 2017-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
// http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either<|fim▁hole|>
const (
cgroupMountpoint = "/cgroup"
hostCertsDirPath = "/etc/pki/tls/certs"
hostPKIDirPath = "/etc/pki"
)<|fim▁end|> | // express or implied. See the License for the specific language governing
// permissions and limitations under the License.
package config |
<|file_name|>clothes_shop.py<|end_file_name|><|fim▁begin|>clouthes = ["T-Shirt","Sweater"]
print("Hello, welcome to my shop\n")
while (True):
comment = input("Welcome to our shop, what do you want (C, R, U, D)? ")
if comment.upper()=="C":
new_item = input("Enter new item: ")
clouthes.append(new_item.capitalize())
elif comment.upper()=="R":
print(end='')
elif comment.upper()=="U":
pos = int(input("Update position? "))
if pos <= len(clouthes):
new_item = input("Enter new item: ")
clouthes[pos-1] = new_item.capitalize()
else:
print("Sorry, your item is out of sale!")
elif comment.upper()=="D":
pos = int(input("Delete position? "))
if pos <= len(clouthes):
clouthes.pop(pos-1)
else:
print("Sorry, your item is out of sale!")
else:
print("Allahu akbar! We're in reconstructing and can't serve you. See you again!")
# items =[", "+clouthe for clouthe in clouthes if clouthes.index(clouthe)>0]
# items.insert(0,clouthes[0])
# print("Our items: {0}".format(items))
# print("\n")
print("Our items: ",end='')<|fim▁hole|> print(item+"\n")<|fim▁end|> | for item in clouthes:
if clouthes.index(item)<len(clouthes)-1:
print(item,end=', ')
else: |
<|file_name|>MaterialeCorso_View.java<|end_file_name|><|fim▁begin|>/** ======================================================================== */
/** */
/** @copyright Copyright (c) 2010-2015, S2S s.r.l. */
/** @license http://www.gnu.org/licenses/gpl-2.0.html GNU Public License v.2 */
/** @version 6.0 */
/** This file is part of SdS - Sistema della Sicurezza . */
/** SdS - Sistema della Sicurezza is free software: you can redistribute it and/or modify */
/** it under the terms of the GNU General Public License as published by */
/** the Free Software Foundation, either version 3 of the License, or */
/** (at your option) any later version. */<|fim▁hole|>
/** SdS - Sistema della Sicurezza is distributed in the hope that it will be useful, */
/** but WITHOUT ANY WARRANTY; without even the implied warranty of */
/** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/** GNU General Public License for more details. */
/** You should have received a copy of the GNU General Public License */
/** along with SdS - Sistema della Sicurezza . If not, see <http://www.gnu.org/licenses/gpl-2.0.html> GNU Public License v.2 */
/** */
/** ======================================================================== */
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.apconsulting.luna.ejb.Corsi;
/**
*
* @author Dario
*/
public class MaterialeCorso_View implements java.io.Serializable {
public long COD_DOC;
public String TIT_DOC;
public java.sql.Date DAT_REV_DOC;
public String RSP_DOC;
public String NOME_FILE;
}<|fim▁end|> | |
<|file_name|>plex.py<|end_file_name|><|fim▁begin|>"""
homeassistant.components.media_player.plex
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides an interface to the Plex API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.plex/
"""
import os
import json
import logging
from datetime import timedelta
from urllib.parse import urlparse
from homeassistant.loader import get_component
import homeassistant.util as util
from homeassistant.components.media_player import (
MediaPlayerDevice, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_NEXT_TRACK, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO)
from homeassistant.const import (
DEVICE_DEFAULT_NAME, STATE_IDLE, STATE_PLAYING,
STATE_PAUSED, STATE_OFF, STATE_UNKNOWN)
REQUIREMENTS = ['plexapi==1.1.0']
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
PLEX_CONFIG_FILE = 'plex.conf'
# Map ip to request id for configuring
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
SUPPORT_PLEX = SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK
def config_from_file(filename, config=None):
''' Small configuration file management function'''
if config:
# We're writing configuration
try:
with open(filename, 'w') as fdesc:
fdesc.write(json.dumps(config))
except IOError as error:
_LOGGER.error('Saving config file failed: %s', error)
return False
return True
else:
# We're reading config
if os.path.isfile(filename):
try:
with open(filename, 'r') as fdesc:
return json.loads(fdesc.read())
except IOError as error:<|fim▁hole|> else:
return {}
# pylint: disable=abstract-method, unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Sets up the plex platform. """
config = config_from_file(hass.config.path(PLEX_CONFIG_FILE))
if len(config):
# Setup a configured PlexServer
host, token = config.popitem()
token = token['token']
# Via discovery
elif discovery_info is not None:
# Parse discovery data
host = urlparse(discovery_info[1]).netloc
_LOGGER.info('Discovered PLEX server: %s', host)
if host in _CONFIGURING:
return
token = None
else:
return
setup_plexserver(host, token, hass, add_devices_callback)
# pylint: disable=too-many-branches
def setup_plexserver(host, token, hass, add_devices_callback):
''' Setup a plexserver based on host parameter'''
import plexapi.server
import plexapi.exceptions
try:
plexserver = plexapi.server.PlexServer('http://%s' % host, token)
except (plexapi.exceptions.BadRequest,
plexapi.exceptions.Unauthorized,
plexapi.exceptions.NotFound) as error:
_LOGGER.info(error)
# No token or wrong token
request_configuration(host, hass, add_devices_callback)
return
# If we came here and configuring this host, mark as done
if host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
_LOGGER.info('Discovery configuration done!')
# Save config
if not config_from_file(
hass.config.path(PLEX_CONFIG_FILE),
{host: {'token': token}}):
_LOGGER.error('failed to save config file')
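# For illustration, the saved plex.conf is a small JSON document of the form
# {"<host>:<port>": {"token": "<X-Plex-Token>"}} (hypothetical values).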
_LOGGER.info('Connected to: http://%s', host)
plex_clients = {}
plex_sessions = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update_devices():
""" Updates the devices objects. """
try:
devices = plexserver.clients()
except plexapi.exceptions.BadRequest:
_LOGGER.exception("Error listing plex devices")
return
new_plex_clients = []
for device in devices:
# For now, let's allow all deviceClass types
if device.deviceClass in ['badClient']:
continue
if device.machineIdentifier not in plex_clients:
new_client = PlexClient(device, plex_sessions, update_devices,
update_sessions)
plex_clients[device.machineIdentifier] = new_client
new_plex_clients.append(new_client)
else:
plex_clients[device.machineIdentifier].set_device(device)
if new_plex_clients:
add_devices_callback(new_plex_clients)
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update_sessions():
""" Updates the sessions objects. """
try:
sessions = plexserver.sessions()
except plexapi.exceptions.BadRequest:
_LOGGER.exception("Error listing plex sessions")
return
plex_sessions.clear()
for session in sessions:
plex_sessions[session.player.machineIdentifier] = session
update_devices()
update_sessions()
def request_configuration(host, hass, add_devices_callback):
""" Request configuration steps from the user. """
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], "Failed to register, please try again.")
return
def plex_configuration_callback(data):
""" Actions to do when our configuration callback is called. """
setup_plexserver(host, data.get('token'), hass, add_devices_callback)
_CONFIGURING[host] = configurator.request_config(
hass, "Plex Media Server", plex_configuration_callback,
description=('Enter the X-Plex-Token'),
description_image="/static/images/config_plex_mediaserver.png",
submit_caption="Confirm",
fields=[{'id': 'token', 'name': 'X-Plex-Token', 'type': ''}]
)
class PlexClient(MediaPlayerDevice):
""" Represents a Plex device. """
# pylint: disable=too-many-public-methods, attribute-defined-outside-init
def __init__(self, device, plex_sessions, update_devices, update_sessions):
self.plex_sessions = plex_sessions
self.update_devices = update_devices
self.update_sessions = update_sessions
self.set_device(device)
def set_device(self, device):
""" Sets the device property. """
self.device = device
@property
def unique_id(self):
""" Returns the id of this plex client """
return "{}.{}".format(
self.__class__, self.device.machineIdentifier or self.device.name)
@property
def name(self):
""" Returns the name of the device. """
return self.device.name or DEVICE_DEFAULT_NAME
@property
def session(self):
""" Returns the session, if any. """
if self.device.machineIdentifier not in self.plex_sessions:
return None
return self.plex_sessions[self.device.machineIdentifier]
@property
def state(self):
""" Returns the state of the device. """
if self.session:
state = self.session.player.state
if state == 'playing':
return STATE_PLAYING
elif state == 'paused':
return STATE_PAUSED
# This is nasty. Need to find a way to determine alive
elif self.device:
return STATE_IDLE
else:
return STATE_OFF
return STATE_UNKNOWN
def update(self):
self.update_devices(no_throttle=True)
self.update_sessions(no_throttle=True)
@property
def media_content_id(self):
""" Content ID of current playing media. """
if self.session is not None:
return self.session.ratingKey
@property
def media_content_type(self):
""" Content type of current playing media. """
if self.session is None:
return None
media_type = self.session.type
if media_type == 'episode':
return MEDIA_TYPE_TVSHOW
elif media_type == 'movie':
return MEDIA_TYPE_VIDEO
return None
@property
def media_duration(self):
""" Duration of current playing media in seconds. """
if self.session is not None:
return self.session.duration
@property
def media_image_url(self):
""" Image url of current playing media. """
if self.session is not None:
return self.session.thumbUrl
@property
def media_title(self):
""" Title of current playing media. """
# find a string we can use as a title
if self.session is not None:
return self.session.title
@property
def media_season(self):
""" Season of curent playing media (TV Show only). """
from plexapi.video import Show
if isinstance(self.session, Show):
return self.session.seasons()[0].index
@property
def media_series_title(self):
""" Series title of current playing media (TV Show only). """
from plexapi.video import Show
if isinstance(self.session, Show):
return self.session.grandparentTitle
@property
def media_episode(self):
""" Episode of current playing media (TV Show only). """
from plexapi.video import Show
if isinstance(self.session, Show):
return self.session.index
@property
def supported_media_commands(self):
""" Flags of media commands that are supported. """
return SUPPORT_PLEX
def media_play(self):
""" media_play media player. """
self.device.play()
def media_pause(self):
""" media_pause media player. """
self.device.pause()
def media_next_track(self):
""" Send next track command. """
self.device.skipNext()
def media_previous_track(self):
""" Send previous track command. """
self.device.skipPrevious()<|fim▁end|> | _LOGGER.error('Reading config file failed: %s', error)
# This won't work yet
return False |
<|file_name|>_logger.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Custom module logger
"""
import logging
module_name = 'moflow'
logger = logging.getLogger(module_name)
logger.addHandler(logging.NullHandler()) # best practice to not show anything
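# Example (illustrative): callers who do want console output can opt in with
#   use_basic_config()                  # INFO level, logging.BASIC_FORMAT
#   use_basic_config(logging.DEBUG)     # more verbose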
def use_basic_config(level=logging.INFO, format=logging.BASIC_FORMAT):
"""Add basic configuration and formatting to the logger
By default, the logger should not be configured in any way. However
users and developers may prefer to see the logger messages.
"""<|fim▁hole|> handler.name = module_name
handler.setFormatter(formatter)
logger.addHandler(handler)<|fim▁end|> | logger.level = level
if module_name not in [_.name for _ in logger.handlers]:
formatter = logging.Formatter(format)
handler = logging.StreamHandler() |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate util;
pub mod constants;
use std::i64;
use std::io;
use std::str::from_utf8;
use std::u32;
#[cfg(test)]
use std::usize;
use util::htonl;
use constants::*;
#[derive(Debug)]
pub enum EncodeError {
IOError(io::Error),
OverflowError,
}
impl From<io::Error> for EncodeError {
fn from(err: io::Error) -> Self {
EncodeError::IOError(err)
}
}
impl From<EncodeError> for io::Error {
fn from(err: EncodeError) -> Self {
match err {
EncodeError::IOError(err) => err,
EncodeError::OverflowError => io::Error::from(io::ErrorKind::InvalidData),
}
}
}
// I wish I didn't have to specify i64 and it can be a generic type instead
// But... I don't know how it can be specified that a generic can be compared
// with hardcoded numbers (e.g.: -(1 << 7)) or what would happen with unsigned
// numbers.
// For now, the caller will have to explicitly cast or implement a wrapper function
pub fn encode_i64<W: io::Write>(value: i64, enc: &mut W) -> Result<(), EncodeError> {
Ok(if value >= -(1 << 7) && value < 1 << 7 {
enc.write_all(&[(ENCVAL << 6) | ENC_INT8, (value & 0xFF) as u8])
} else if value >= -(1 << 15) && value < 1 << 15 {
enc.write_all(&[
(ENCVAL << 6) | ENC_INT16,
(value & 0xFF) as u8,
((value >> 8) & 0xFF) as u8,
])
} else if value >= -(1 << 31) && value < 1 << 31 {
enc.write_all(&[
(ENCVAL << 6) | ENC_INT32,
(value & 0xFF) as u8,
((value >> 8) & 0xFF) as u8,
((value >> 16) & 0xFF) as u8,
((value >> 24) & 0xFF) as u8,
])
} else {
return Err(EncodeError::OverflowError);
}?)
}
pub fn encode_u8<W: io::Write>(value: u8, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_u16<W: io::Write>(value: u16, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_u32<W: io::Write>(value: u32, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_i8<W: io::Write>(value: i8, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_i16<W: io::Write>(value: i16, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_i32<W: io::Write>(value: i32, enc: &mut W) -> Result<(), EncodeError> {
encode_i64(value as i64, enc)
}
pub fn encode_usize<W: io::Write>(value: usize, enc: &mut W) -> Result<(), EncodeError> {
if value > i64::MAX as usize {
Err(EncodeError::OverflowError)
} else {
encode_i64(value as i64, enc)
}
}
pub fn encode_u16_to_slice_u8<W: io::Write>(value: u16, enc: &mut W) -> Result<(), EncodeError> {
Ok(enc.write_all(&[(value & 0xFF) as u8, ((value >> 8) & 0xFF) as u8])?)
}
pub fn encode_u32_to_slice_u8<W: io::Write>(value: u32, enc: &mut W) -> Result<(), EncodeError> {
Ok(enc.write_all(&[
(value & 0xFF) as u8,
((value >> 8) & 0xFF) as u8,
((value >> 16) & 0xFF) as u8,
((value >> 24) & 0xFF) as u8,
])?)
}
pub fn encode_u64_to_slice_u8<W: io::Write>(value: u64, enc: &mut W) -> Result<(), EncodeError> {
Ok(enc.write_all(&[
(value & 0xFF) as u8,
((value >> 8) & 0xFF) as u8,
((value >> 16) & 0xFF) as u8,
((value >> 24) & 0xFF) as u8,
((value >> 32) & 0xFF) as u8,
((value >> 40) & 0xFF) as u8,
((value >> 48) & 0xFF) as u8,
((value >> 56) & 0xFF) as u8,
])?)
}
pub fn encode_len<W: io::Write>(len: usize, enc: &mut W) -> Result<(), EncodeError> {
if len > u32::MAX as usize {
panic!("Length does not fit in four bytes");
}
if len < (1 << 6) {
enc.write_all(&[((len & 0xFF) as u8) | (BITLEN6 << 6)])?;
} else if len < (1 << 14) {
enc.write_all(&[((len >> 8) as u8) | (BITLEN14 << 6), (len & 0xFF) as u8])?;
} else {
enc.write_all(&[BITLEN32 << 6])?;
enc.write_all(&htonl(len as u32))?;
}
Ok(())
}
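// Illustrative byte layouts produced by encode_len (assuming the usual RDB-style
// constants BITLEN6 = 0, BITLEN14 = 1 and BITLEN32 = 2 in the `constants` module):
//   len < 64     -> 1 byte : 00xxxxxx
//   len < 16384  -> 2 bytes: 01xxxxxx xxxxxxxx (high bits first)
//   otherwise    -> 5 bytes: 0b10000000 followed by the 4-byte big-endian length (htonl)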
pub fn encode_slice_u8<W: io::Write>(
data: &[u8],
enc: &mut W,
as_int: bool,
) -> Result<(), EncodeError> {
if as_int && data.len() <= 11 {
if let Some(()) = from_utf8(data)
.ok()
.and_then(|s| s.parse().ok())
.and_then(|i| encode_i64(i, enc).ok())
{
return Ok(());
}
}
// TODO: lzf compression
encode_len(data.len(), enc)?;
enc.write_all(data)?;
Ok(())
}
#[test]<|fim▁hole|> let mut v = vec![];
encode_i64(1, &mut v).unwrap();
assert_eq!(v, vec![192, 1]);
}
#[test]
fn test_encode_i64_2bytes() {
let mut v = vec![];
encode_i64(260, &mut v).unwrap();
assert_eq!(v, b"\xc1\x04\x01");
}
#[test]
fn test_encode_i64_4bytes() {
let mut v = vec![];
encode_i64(70000, &mut v).unwrap();
assert_eq!(v, b"\xc2p\x11\x01\x00");
}
#[test]
fn test_encode_i64_overflow() {
let mut v = vec![];
match encode_i64(i64::MAX, &mut v).unwrap_err() {
EncodeError::OverflowError => (),
_ => panic!("Unexpected error"),
}
}
#[test]
fn test_encode_usize() {
let mut v = vec![];
encode_usize(123, &mut v).unwrap();
assert_eq!(v, vec![192, 123]);
}
#[test]
fn test_encode_usize_overflow() {
let mut v = vec![];
match encode_usize(usize::MAX, &mut v).unwrap_err() {
EncodeError::OverflowError => (),
_ => panic!("Unexpected error"),
}
}
#[test]
fn test_encode_slice_u8_integer() {
let mut v = vec![];
assert_eq!(encode_slice_u8(b"1", &mut v, true).unwrap(), 2);
assert_eq!(v, vec![192, 1]);
}
#[test]
fn test_encode_slice_u8_data() {
let mut v = vec![];
assert_eq!(encode_slice_u8(b"hello world", &mut v, true).unwrap(), 12);
assert_eq!(v, b"\x0bhello world");
}<|fim▁end|> | fn test_encode_i64() { |
<|file_name|>issue-28927-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct Baz;<|fim▁end|> | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
<|file_name|>types.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from beets.plugins import BeetsPlugin
from beets.dbcore import types
from beets.util.confit import ConfigValueError
from beets import library
class TypesPlugin(BeetsPlugin):
@property
def item_types(self):
return self._types()
@property
def album_types(self):
return self._types()
def _types(self):
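        # Build a mapping from configured field names to beets type objects.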
if not self.config.exists():
return {}
mytypes = {}
for key, value in self.config.items():
if value.get() == 'int':
mytypes[key] = types.INTEGER
elif value.get() == 'float':
mytypes[key] = types.FLOAT
elif value.get() == 'bool':
mytypes[key] = types.BOOLEAN
elif value.get() == 'date':<|fim▁hole|> else:
raise ConfigValueError(
u"unknown type '{0}' for the '{1}' field"
.format(value, key))
return mytypes<|fim▁end|> | mytypes[key] = library.DateType() |
<|file_name|>proc_modis_qc.py<|end_file_name|><|fim▁begin|>###############################################################################
# $Id$
#
# Project: Sub1 project of IRRI
# Purpose: Quality Assessment extraction from MODIS
# Author: Yann Chemin, <[email protected]>
#
###############################################################################
# Copyright (c) 2008, Yann Chemin <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
#!/usr/bin/python
import wx
import wx.lib.filebrowsebutton as filebrowse
import os
import sys
# For Image Processing
import numpy as N
from osgeo import gdalnumeric
from osgeo import gdal
from osgeo import gdal_array
from osgeo.gdalconst import *
# For icons, pngs, etc coming from images.py
from wx import ImageFromStream, BitmapFromImage, EmptyIcon
import cStringIO
import images
# Define satellite bands
# Based on Landsat channels
qc = ''
# Define output file name
output = ''
# Define list of MODIS types
NameMOD = ['250','500']
# Define list of QA types
NameQC = ['modland_qa_bits','cloud','data_quality','atcorr','adjcorr','diff_orbit_from_500m']
# Define band number
bandno = ['1','2','3','4','5','6','7']
# Define Info Message
overview = """MODIS Quality Assessment Extractor
Makes Human-readable images of Quality Assessment binary bits from MOD09 products.
500m does not have "cloud" and "diff_orbit_from_500m" options.
# MODLAND QA Bits 250m Unsigned Int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
#01 -> class 1: Corrected product produced at less than ideal quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
#11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands may be fill value (Note that a value of [11] overrides a value of [01])
# Cloud State 250m Unsigned Int bits[2-3]
#00 -> class 0: Clear -- No clouds
#01 -> class 1: Cloudy
#10 -> class 2: Mixed
#11 -> class 3: Not Set ; Assumed Clear
# Band-wise Data Quality 250m Unsigned Int bits[4-7][8-11]
# Band-wise Data Quality 500m long Int bits[2-5][6-9][10-13][14-17][18-21][22-25][26-29]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
# Atmospheric correction 250m Unsigned Int bit[12]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
# Adjacency correction 250m Unsigned Int bit[13]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
# Different orbit from 500m product, 250m Unsigned Int bit[14]
#0 -> class 0: same orbit as 500m
#1 -> class 1: different orbit from 500m
"""
class MyFrame(wx.Frame):
def __init__(self,parent, id=-1, title='MODIS Quality Bits Extractor',
pos=(0,0),
size=(400,650),
style=wx.DEFAULT_FRAME_STYLE):
wx.Frame.__init__(self, parent, id, title, pos, size, style)
ico = images.getPngGipeIcon()
self.SetIcon(ico)
self.lognull = wx.LogNull()
# Input Filenames
self.qc = qc
self.qc_type = 'modland_qa_bits'
self.pixelres = '250'
self.band_no = '1'
self.NameMOD = NameMOD
self.NameQC = NameQC
self.bandno = bandno
self.output = output
# Construct Interface
self.make_text()
self.make_buttons()
self.make_radiobuttons1()
self.make_radiobuttons2()
self.make_radiobuttons3()
self.make_fb()
self.mbox = wx.BoxSizer(wx.VERTICAL)
self.mbox.Add((10,10))
self.mbox.Add(self.text, 1, wx.EXPAND|wx.CENTER, 10)
self.mbox.Add(self.cc2, 1, wx.EXPAND, 0)
self.mbox.Add(self.cc6, 1, wx.EXPAND, 0)
self.mbox.Add(self.rbox1, 1, wx.CENTER, 0)
self.mbox.Add(self.rbox2, 1, wx.CENTER, 0)
self.mbox.Add(self.rbox3, 1, wx.CENTER, 0)
self.mbox.Add((10,10))
self.mbox.Add((50,10))
self.mbox.Add(self.bbox, 1, wx.CENTER, 10)
self.mbox.Add((10,10))
self.SetSizer(self.mbox)
self.bindEvents()
# Process Equations, Handling and saving of output
def OnOK(self,event):
#print "qc: ", self.qc
#print "out:", self.output
if(self.qc==''):
self.OnFileInError()
else:
self.qcF = gdal.Open(self.qc)
self.bqc = self.qcF.GetRasterBand(1)
self.test = gdal.Open(self.qc)
self.CrAr( self.qc, self.output, 'GTiff' )
self.result = gdal.Open(self.output, GA_Update)
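        # Walk the QC raster one scanline at a time, replacing each pixel with its decoded class value.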
for self.y in range(self.bqc.YSize - 1, -1, -1):
print self.y
self.scanline1=self.bqc.ReadAsArray(0, self.y, self.bqc.XSize, 1, self.bqc.XSize, 1)
for self.x in range(0, self.bqc.XSize - 1, 1):
self.pix1 = self.scanline1[0][self.x]
self.scanline1[0][self.x]=self.qcbits(self.pix1,self.qc_type,int(self.pixelres),int(self.band_no))
self.result.GetRasterBand(1).WriteArray(N.reshape(self.scanline1,(1,self.bqc.XSize)), 0, self.y)
self.Destroy()
#def bin(self,i):
#"""
#Convert Binary to Integer Bit Field
#Manish Jethani (manish.j at gmx.net)
#http://bytes.com/forum/thread20381.html
#"""
#b = ''
#while i > 0:
#j = i & 1
#b = str(j) + b
#i >>= 1
#return b
def qcbits(self,qcbit,qcflag,pixres,bandno):
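        # Dispatch on the QC flag name and pixel resolution to extract the
        # requested bit field from the raw QC value.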
outclas = 0
#calculate modland QA bits extraction
if (qcflag=="modland_qa_bits"):
if (pixres==500):
# 500m product
outclas = self.qc500a(qcbit)
else:
# 250m product
outclas = self.qc250a(qcbit)
#calculate cloud state
elif (qcflag=="cloud"):
if (pixres==500):
# 500m product
# Signal user that the flag name is badly written
# therefore not understood by the application
print "flag name unavailable for 500m, please restart"
self.OnQCInError()
else:
# ONLY 250m product!
outclas = self.qc250b(qcbit)
#calculate modland QA bits extraction
elif (qcflag=="data_quality"):
if (pixres==500):
# 500m product
outclas = self.qc500c(qcbit, bandno)
else:
# 250m product
outclas = self.qc250c(qcbit, bandno)
#calculate atmospheric correction flag
elif (qcflag=="atcorr"):
if (pixres==500):
# 500m product
outclas = self.qc500d(qcbit)
else:
# 250m product
outclas = self.qc250d(qcbit)
#calculate adjacency correction flag
elif (qcflag=="adjcorr"):
if (pixres==500):
# 500m product
outclas = self.qc500e(qcbit)
else:
# 250m product
outclas = self.qc250e(qcbit)
#calculate different orbit from 500m flag
elif (qcflag=="diff_orbit_from_500m"):
if (pixres==500):
# 500m product
# Signal user that the flag name is badly written
# therefore not understood by the application
print "flag name unavailable for 500m, please restart"
self.OnQCInError()
else:
# ONLY 250m product!
outclas = self.qc250f(qcbit)
else:
# Signal user that the flag name is badly written
# therefore not understood by the application
print "Unknown flag name, please check spelling"
self.OnQCInError()
return outclas
def qc250a(self, pixel):
"""
# MODLAND QA Bits 250m Unsigned Int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
        #01 -> class 1: Corrected product produced at less than ideal quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
        #11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands may be fill value (Note that a value of [11] overrides a value of [01])
"""
pixel = pixel & 3
return pixel
def qc250b(self, pixel):
"""
# Cloud State 250m Unsigned Int bits[2-3]
#00 -> class 0: Clear -- No clouds
#01 -> class 1: Cloudy
#10 -> class 2: Mixed
#11 -> class 3: Not Set ; Assumed Clear
"""
        pixel = pixel >> 2
pixel = pixel & 3
return pixel
def qc250c(self,pixel,bandno):
"""
        # Band-wise Data Quality 250m Unsigned Int bits[4-7][8-11]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
"""
        pixel = pixel >> (4 + (4 * (bandno - 1)))
pixel = pixel & 15
return pixel
def qc250d(self, pixel):
"""
# Atmospheric correction 250m Unsigned Int bit[12]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
        pixel = pixel >> 12
pixel = pixel & 1
return pixel
def qc250e(self,pixel):
"""
# Adjacency correction 250m Unsigned Int bit[13]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
        pixel = pixel >> 13
pixel = pixel & 1
return pixel
def qc250f(self,pixel):
"""
# Different orbit from 500m product, 250m Unsigned Int bit[14]
#0 -> class 0: same orbit as 500m
#1 -> class 1: different orbit from 500m
"""
        pixel = pixel >> 14
pixel = pixel & 1
return pixel
def qc500a(self,pixel):
"""
# MODLAND QA Bits 500m long int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
        #01 -> class 1: Corrected product produced at less than ideal quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
        #11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands may be fill value (Note that a value of [11] overrides a value of [01])
"""
pixel = pixel & 3
return pixel
def qc500c(self,pixel,bandno):
"""
# Band-wise Data Quality 500m long Int
#bits[2-5][6-9][10-13][14-17][18-21][22-25][26-29]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
"""
        pixel = pixel >> (2 + (4 * (bandno - 1)))
pixel = pixel & 15
return pixel
def qc500d(self,pixel):
"""
# Atmospheric correction 500m long Int bit[30]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
        pixel = pixel >> 30
pixel = pixel & 1
return pixel
def qc500e(self,pixel):
"""
# Adjacency correction 500m long Int bit[31]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
        pixel = pixel >> 31
pixel = pixel & 1
return pixel
def CrAr(self, src_flnm, dst_flnm, format ):
"""
CrAr(): Create Array with Georeferencing from another file (src_flnm), save it in file (dst_flnm) with format (format)
CrAr( self, src_flnm, dst_flnm, format )
"""
cr_opts=[]
# Read information from source file.
src_ds = gdal.Open(str(src_flnm))
gt = src_ds.GetGeoTransform()
pj = src_ds.GetProjection()
src_ds = None
# Standard checking on the GDAL driver
Driver = gdal.GetDriverByName( str(format) )
if Driver is None:
raise ValueError, "CrAr: No DriverFound "+format
DriverMTD = Driver.GetMetadata()
if not DriverMTD.has_key('DCAP_CREATE'):
print 'Format Driver %s does not support creation and piecewise writing.\nPlease select a format that does, such as GTiff or HFA (Erdas/Imagine).' % format
sys.exit( 1 )
# Set up the band number
nbands = 1
#print "nbands =", nbands
# Collect information on source files
flinfos = self.names_to_fileinfos( str(src_flnm) )
ulx = flinfos[0].ulx
uly = flinfos[0].uly
lrx = flinfos[0].lrx
lry = flinfos[0].lry
# get largest extends
for fi in flinfos:
ulx = min(ulx, fi.ulx)
uly = max(uly, fi.uly)
lrx = max(lrx, fi.lrx)
lry = min(lry, fi.lry)
# Set other info
psize_x = flinfos[0].geotransform[1]
psize_y = flinfos[0].geotransform[5]
band_type = flinfos[0].band_type
# Try opening as an existing file
gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
out_fh = gdal.Open( str(dst_flnm), gdal.GA_Update )
gdal.PopErrorHandler()
# Otherwise create a new file
if out_fh is None:
geot = [ulx, psize_x, 0, uly, 0, psize_y]
print geot[0], geot[1], geot[2], geot[3], geot[4]
xsize = int((lrx-ulx)/geot[1]+0.5)
ysize = int((lry-uly)/geot[5]+0.5)
out_fh=Driver.Create(str(dst_flnm),xsize,ysize,nbands,band_type,cr_opts)
if out_fh is None:
raise ValueError, "CrAr: Failed to create new file "+dst_flnm
sys.exit( 1 )
out_fh.SetGeoTransform( gt )
out_fh.SetProjection( pj )
#out_fh.GetRasterBand(1).SetRasterColorTable(flinfos[0].ct)
nodata = None
iband = 1
for fi in flinfos:
fi.copy_into( out_fh, 1, iband, nodata )
iband=iband+1
iband = 0
def names_to_fileinfos( self, name ):
file_infos = []
fi = file_info()
if fi.init_from_name( name ) == 1:
file_infos.append( fi )
return file_infos
def OnFileInError(self):
dlg = wx.MessageDialog(self,
            'Minimum files to add:\n\n One input QC file\n One output file',
'Error',wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
def OnQCInError(self):
dlg = wx.MessageDialog(self,
'QC type error\n\n Please check your input',
'Error',wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
# Path+filename seek and set
def make_fb(self):
# get current working directory
self.dirnm = os.getcwd()
self.cc2 = filebrowse.FileBrowseButton(
self, -1, size=(50, -1), labelText='QC File:',
startDirectory = self.dirnm,
fileMode=wx.OPEN,
changeCallback = self.fbbCallback2,
)
self.cc6 = filebrowse.FileBrowseButton(
self, -1, size=(50, -1), labelText='OUT File: ',
startDirectory = self.dirnm,
fileMask='*.tif',
fileMode=wx.SAVE,
changeCallback = self.fbbCallback6
)
# Collect path+filenames
def fbbCallback2(self, evt):
self.qc = str(evt.GetString())
def fbbCallback6(self, evt):
self.output = str(evt.GetString())
# Front text
def make_text(self):
self.text = wx.StaticText(self, -1, "This is processing MODIS Quality Assessment Bits through the use of gdal and numeric.")
# QC type radio buttons
def make_radiobuttons1(self):
self.rbox1 = wx.BoxSizer(wx.HORIZONTAL)
self.rb1 = wx.RadioBox(self, -1, "Select MODIS Type",
wx.DefaultPosition, wx.DefaultSize,
self.NameMOD, 2, wx.RA_SPECIFY_COLS)
self.rb1.SetToolTip(wx.ToolTip("Select MODIS type"))
self.rb1.SetLabel("MODIS Type")
self.rbox1.Add(self.rb1,1,wx.ALL,10)
def EvtRadioBox1(self, evt):
self.nb = evt.GetInt()
self.pixelres = NameMOD[self.nb]
#print self.pixelres
def make_radiobuttons2(self):
self.rbox2 = wx.BoxSizer(wx.HORIZONTAL)
self.rb2 = wx.RadioBox(self, -1, "Select Band number (data quality only)",
wx.DefaultPosition, wx.DefaultSize,
self.bandno, 7, wx.RA_SPECIFY_COLS)
self.rb2.SetToolTip(wx.ToolTip("Select Band number (for data_quality)"))
self.rb2.SetLabel("Band Number (for \"data quality\" only)")
self.rbox2.Add(self.rb2,1,wx.ALL,10)
def EvtRadioBox2(self, evt):
self.nb = evt.GetInt()
self.band_no = self.bandno[self.nb]
#print self.band_no
def make_radiobuttons3(self):
self.rbox3 = wx.BoxSizer(wx.HORIZONTAL)
self.rb3 = wx.RadioBox(self, -1, "Select QC Type",
wx.DefaultPosition, wx.DefaultSize,
self.NameQC, 2, wx.RA_SPECIFY_COLS)
self.rb3.SetToolTip(wx.ToolTip("Select QC type"))
self.rb3.SetLabel("QC Type")
self.rbox3.Add(self.rb3,1,wx.ALL,10)
def EvtRadioBox3(self, evt):
self.nb = evt.GetInt()
self.qc_type = NameQC[self.nb]
#print self.qc_type
# Bottom buttons
def make_buttons(self):
self.bbox = wx.BoxSizer(wx.HORIZONTAL)
# OnOK
bmp0 = images.getPngDialogOKBitmap()
self.b0 = wx.BitmapButton(self, 20, bmp0, (20, 20),
(bmp0.GetWidth()+50, bmp0.GetHeight()+10), style=wx.NO_BORDER)
self.b0.SetToolTipString("Process")
self.bbox.Add(self.b0,1,wx.CENTER,10)
# OnCancel
bmp1 = images.getPngDialogCancelBitmap()
self.b1 = wx.BitmapButton(self, 30, bmp1, (20, 20),
(bmp1.GetWidth()+50, bmp1.GetHeight()+10), style=wx.NO_BORDER)
self.b1.SetToolTipString("Abort")
self.bbox.Add(self.b1,1,wx.CENTER,10)
# OnInfo
bmp2 = images.getPngHelpAboutBitmap()
self.b2 = wx.BitmapButton(self, 40, bmp2, (20, 20),
(bmp2.GetWidth()+50, bmp2.GetHeight()+10), style=wx.NO_BORDER)
self.b2.SetToolTipString("Help/Info.")
self.bbox.Add(self.b2,1,wx.CENTER,10)
def bindEvents(self):
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Bind(wx.EVT_BUTTON, self.OnOK, self.b0)
self.Bind(wx.EVT_BUTTON, self.OnCancel, self.b1)
self.Bind(wx.EVT_BUTTON, self.OnInfo, self.b2)
self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox1, self.rb1)
self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox2, self.rb2)
self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox3, self.rb3)
def OnCloseWindow(self, event):
self.Destroy()
def OnCancel(self, event):
self.Destroy()
def OnInfo(self,event):
dlg = wx.MessageDialog(self, overview,
'Help', wx.OK | wx.ICON_INFORMATION
)
dlg.ShowModal()
dlg.Destroy()
class file_info:
"""A class holding information about a GDAL file."""
def init_from_name(self, filename):
"""
Initialize file_info from filename
filename -- Name of file to read.
Returns 1 on success or 0 if the file can't be opened.
"""
fh = gdal.Open( str(filename) )
if fh is None:
return 0
self.filename = filename
self.bands = fh.RasterCount
self.xsize = fh.RasterXSize
self.ysize = fh.RasterYSize
self.band_type = fh.GetRasterBand(1).DataType
self.projection = fh.GetProjection()
self.geotransform = fh.GetGeoTransform()
self.ulx = self.geotransform[0]
self.uly = self.geotransform[3]
self.lrx = self.ulx + self.geotransform[1] * self.xsize
self.lry = self.uly + self.geotransform[5] * self.ysize
ct = fh.GetRasterBand(1).GetRasterColorTable()
if ct is not None:
self.ct = ct.Clone()
else:
self.ct = None
return 1
def copy_into( self, t_fh, s_band = 1, t_band = 1, nodata_arg=None ):
"""
Copy this files image into target file.
"""
t_geotransform = t_fh.GetGeoTransform()
t_ulx = t_geotransform[0]
t_uly = t_geotransform[3]
t_lrx = t_geotransform[0] + t_fh.RasterXSize * t_geotransform[1]
t_lry = t_geotransform[3] + t_fh.RasterYSize * t_geotransform[5]
<|fim▁hole|> tgw_lrx = min(t_lrx,self.lrx)
if t_geotransform[5] < 0:
tgw_uly = min(t_uly,self.uly)
tgw_lry = max(t_lry,self.lry)
else:
tgw_uly = max(t_uly,self.uly)
tgw_lry = min(t_lry,self.lry)
# do they even intersect?
if tgw_ulx >= tgw_lrx:
return 1
if t_geotransform[5] < 0 and tgw_uly <= tgw_lry:
return 1
if t_geotransform[5] > 0 and tgw_uly >= tgw_lry:
return 1
# compute target window in pixel coordinates.
tw_xoff = int((tgw_ulx - t_geotransform[0]) / t_geotransform[1] + 0.1)
tw_yoff = int((tgw_uly - t_geotransform[3]) / t_geotransform[5] + 0.1)
tw_xsize = int((tgw_lrx-t_geotransform[0])/t_geotransform[1] + 0.5) - tw_xoff
tw_ysize = int((tgw_lry-t_geotransform[3])/t_geotransform[5] + 0.5) - tw_yoff
if tw_xsize < 1 or tw_ysize < 1:
return 1
# Compute source window in pixel coordinates.
sw_xoff = int((tgw_ulx - self.geotransform[0]) / self.geotransform[1])
sw_yoff = int((tgw_uly - self.geotransform[3]) / self.geotransform[5])
sw_xsize = int((tgw_lrx - self.geotransform[0]) / self.geotransform[1] + 0.5) - sw_xoff
sw_ysize = int((tgw_lry - self.geotransform[3]) / self.geotransform[5] + 0.5) - sw_yoff
if sw_xsize < 1 or sw_ysize < 1:
return 1
# Open the source file, and copy the selected region.
s_fh = gdal.Open( str(self.filename) )
return self.raster_copy( s_fh, sw_xoff, sw_yoff, sw_xsize, sw_ysize, s_band, t_fh, tw_xoff, tw_yoff, tw_xsize, tw_ysize, t_band, nodata_arg )
def raster_copy( self, s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n, t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n, nodata=None ):
if nodata is not None:
return self.raster_copy_with_nodata(
s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
nodata )
s_band = s_fh.GetRasterBand( s_band_n )
t_band = t_fh.GetRasterBand( t_band_n )
data = s_band.ReadRaster( s_xoff, s_yoff, s_xsize, s_ysize, t_xsize, t_ysize, t_band.DataType )
t_band.WriteRaster( t_xoff, t_yoff, t_xsize, t_ysize, data, t_xsize, t_ysize, t_band.DataType )
return 0
def raster_copy_with_nodata( self, s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n, nodata ):
import Numeric as Num
s_band = s_fh.GetRasterBand( s_band_n )
t_band = t_fh.GetRasterBand( t_band_n )
data_src = s_band.ReadAsArray( s_xoff, s_yoff, s_xsize, s_ysize, t_xsize, t_ysize )
data_dst = t_band.ReadAsArray( t_xoff, t_yoff, t_xsize, t_ysize )
nodata_test = Num.equal(data_src,nodata)
to_write = Num.choose(nodata_test, (data_src, data_dst))
t_band.WriteArray( to_write, t_xoff, t_yoff )
return 0
class MainApp(wx.App):
def OnInit(self):
        frame = MyFrame(None)
frame.Show(True)
self.SetTopWindow(frame)
return True
if __name__ == '__main__':
app = wx.App()
frame = MyFrame(None)
frame.Show()
app.MainLoop()<|fim▁end|> | # figure out intersection region
tgw_ulx = max(t_ulx,self.ulx) |
<|file_name|>server_usage.py<|end_file_name|><|fim▁begin|># Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.openstack.common import log as logging
<|fim▁hole|>authorize = extensions.soft_extension_authorizer('compute', 'server_usage')
class ServerUsageController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(ServerUsageController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
def _extend_server(self, server, instance):
for k in ['launched_at', 'terminated_at']:
key = "%s:%s" % (Server_usage.alias, k)
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server[key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsageTemplate())
server = resp_obj.obj['server']
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show' method.
self._extend_server(server, db_instance)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ServerUsagesTemplate())
servers = list(resp_obj.obj['servers'])
for server in servers:
db_instance = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'detail' method.
self._extend_server(server, db_instance)
class Server_usage(extensions.ExtensionDescriptor):
"""Adds launched_at and terminated_at on Servers."""
name = "ServerUsage"
alias = "OS-SRV-USG"
namespace = ("http://docs.openstack.org/compute/ext/"
"server_usage/api/v1.1")
updated = "2013-04-29T00:00:00Z"
def get_controller_extensions(self):
controller = ServerUsageController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
def make_server(elem):
elem.set('{%s}launched_at' % Server_usage.namespace,
'%s:launched_at' % Server_usage.alias)
elem.set('{%s}terminated_at' % Server_usage.namespace,
'%s:terminated_at' % Server_usage.alias)
class ServerUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
make_server(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})
class ServerUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('servers')
elem = xmlutil.SubTemplateElement(root, 'server', selector='servers')
make_server(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Server_usage.alias: Server_usage.namespace})<|fim▁end|> | LOG = logging.getLogger(__name__) |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#coding=utf-8
import unittest
import HTMLTestRunner
import time
from config import globalparam
from public.common import sendmail
def run():
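    # Discover all test*.py files under ./testcase, run them with an HTML report and mail the result.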
test_dir = './testcase'
suite = unittest.defaultTestLoader.discover(start_dir=test_dir,pattern='test*.py')
now = time.strftime('%Y-%m-%d_%H_%M_%S')
reportname = globalparam.report_path + '\\' + 'TestResult' + now + '.html'
with open(reportname,'wb') as f:<|fim▁hole|> description='Test the import testcase'
)
runner.run(suite)
time.sleep(3)
    # Send the test report by email
mail = sendmail.SendMail()
mail.send()
if __name__=='__main__':
run()<|fim▁end|> | runner = HTMLTestRunner.HTMLTestRunner(
stream=f,
            title='Test Report',
<|file_name|>const.py<|end_file_name|><|fim▁begin|>"""All constants related to the ZHA component."""
import enum
import logging
from typing import List
import bellows.zigbee.application
from zigpy.config import CONF_DEVICE_PATH # noqa: F401 # pylint: disable=unused-import
import zigpy_cc.zigbee.application
import zigpy_deconz.zigbee.application
import zigpy_xbee.zigbee.application
import zigpy_zigate.zigbee.application
import zigpy_znp.zigbee.application
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.number import DOMAIN as NUMBER
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from .typing import CALLABLE_T
ATTR_ARGS = "args"
ATTR_ATTRIBUTE = "attribute"
ATTR_ATTRIBUTE_ID = "attribute_id"
ATTR_ATTRIBUTE_NAME = "attribute_name"
ATTR_AVAILABLE = "available"
ATTR_CLUSTER_ID = "cluster_id"
ATTR_CLUSTER_TYPE = "cluster_type"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE_IEEE = "device_ieee"
ATTR_DEVICE_TYPE = "device_type"
ATTR_ENDPOINTS = "endpoints"
ATTR_ENDPOINT_NAMES = "endpoint_names"
ATTR_ENDPOINT_ID = "endpoint_id"
ATTR_IEEE = "ieee"
ATTR_IN_CLUSTERS = "in_clusters"
ATTR_LAST_SEEN = "last_seen"
ATTR_LEVEL = "level"
ATTR_LQI = "lqi"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURER_CODE = "manufacturer_code"
ATTR_MEMBERS = "members"
ATTR_MODEL = "model"
ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"
ATTR_QUIRK_CLASS = "quirk_class"
ATTR_RSSI = "rssi"
ATTR_SIGNATURE = "signature"
ATTR_TYPE = "type"
ATTR_UNIQUE_ID = "unique_id"
ATTR_VALUE = "value"
ATTR_WARNING_DEVICE_DURATION = "duration"
ATTR_WARNING_DEVICE_MODE = "mode"
ATTR_WARNING_DEVICE_STROBE = "strobe"
ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle"
ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"
CHANNEL_ACCELEROMETER = "accelerometer"
CHANNEL_ANALOG_INPUT = "analog_input"
CHANNEL_ANALOG_OUTPUT = "analog_output"
CHANNEL_ATTRIBUTE = "attribute"
CHANNEL_BASIC = "basic"
CHANNEL_COLOR = "light_color"
CHANNEL_COVER = "window_covering"
CHANNEL_DOORLOCK = "door_lock"
CHANNEL_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CHANNEL_EVENT_RELAY = "event_relay"
CHANNEL_FAN = "fan"
CHANNEL_HUMIDITY = "humidity"
CHANNEL_IAS_WD = "ias_wd"
CHANNEL_IDENTIFY = "identify"
CHANNEL_ILLUMINANCE = "illuminance"
CHANNEL_LEVEL = ATTR_LEVEL
CHANNEL_MULTISTATE_INPUT = "multistate_input"
CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat"
CHANNEL_ZDO = "zdo"
CHANNEL_ZONE = ZONE = "ias_zone"
CLUSTER_COMMAND_SERVER = "server"
CLUSTER_COMMANDS_CLIENT = "client_commands"
CLUSTER_COMMANDS_SERVER = "server_commands"
CLUSTER_TYPE_IN = "in"
CLUSTER_TYPE_OUT = "out"
PLATFORMS = (
BINARY_SENSOR,
CLIMATE,
COVER,
DEVICE_TRACKER,
FAN,
LIGHT,
LOCK,
NUMBER,
SENSOR,
SWITCH,
)
CONF_BAUDRATE = "baudrate"
CONF_DATABASE = "database_path"
CONF_DEVICE_CONFIG = "device_config"
CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_ZIGPY = "zigpy_config"
DATA_DEVICE_CONFIG = "zha_device_config"
DATA_ZHA = "zha"
DATA_ZHA_CONFIG = "config"
DATA_ZHA_BRIDGE_ID = "zha_bridge_id"
DATA_ZHA_CORE_EVENTS = "zha_core_events"
DATA_ZHA_DISPATCHERS = "zha_dispatchers"
DATA_ZHA_GATEWAY = "zha_gateway"
DATA_ZHA_PLATFORM_LOADED = "platform_loaded"
DEBUG_COMP_BELLOWS = "bellows"
DEBUG_COMP_ZHA = "homeassistant.components.zha"
DEBUG_COMP_ZIGPY = "zigpy"
DEBUG_COMP_ZIGPY_CC = "zigpy_cc"
DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz"
DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee"
DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate"
DEBUG_LEVEL_CURRENT = "current"
DEBUG_LEVEL_ORIGINAL = "original"
DEBUG_LEVELS = {
DEBUG_COMP_BELLOWS: logging.DEBUG,
DEBUG_COMP_ZHA: logging.DEBUG,
DEBUG_COMP_ZIGPY: logging.DEBUG,
DEBUG_COMP_ZIGPY_CC: logging.DEBUG,
DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG,
DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG,
DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG,
}
DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY]
DEFAULT_RADIO_TYPE = "ezsp"
DEFAULT_BAUDRATE = 57600
DEFAULT_DATABASE_NAME = "zigbee.db"
DEVICE_PAIRING_STATUS = "pairing_status"
DISCOVERY_KEY = "zha_discovery_info"
DOMAIN = "zha"
GROUP_ID = "group_id"
GROUP_IDS = "group_ids"
GROUP_NAME = "group_name"
MFG_CLUSTER_ID_START = 0xFC00
POWER_MAINS_POWERED = "Mains"
POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown"
class RadioType(enum.Enum):
# pylint: disable=invalid-name
"""Possible options for radio type."""
znp = (
"ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_znp.zigbee.application.ControllerApplication,
)
ezsp = (
"EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis",
bellows.zigbee.application.ControllerApplication,
)
deconz = (
"deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II",
zigpy_deconz.zigbee.application.ControllerApplication,
)
ti_cc = (
"Legacy TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_cc.zigbee.application.ControllerApplication,
)
zigate = (
"ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi",
zigpy_zigate.zigbee.application.ControllerApplication,
)
xbee = (
"XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3",
zigpy_xbee.zigbee.application.ControllerApplication,
)
@classmethod
def list(cls) -> List[str]:
"""Return a list of descriptions."""
return [e.description for e in RadioType]
@classmethod
def get_by_description(cls, description: str) -> str:
"""Get radio by description."""
for radio in cls:
if radio.description == description:
return radio.name
raise ValueError
def __init__(self, description: str, controller_cls: CALLABLE_T):
"""Init instance."""
self._desc = description
self._ctrl_cls = controller_cls
@property
def controller(self) -> CALLABLE_T:
"""Return controller class."""
return self._ctrl_cls
@property
def description(self) -> str:
"""Return radio type description."""<|fim▁hole|>
REPORT_CONFIG_MAX_INT = 900
REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800
REPORT_CONFIG_MIN_INT = 30
REPORT_CONFIG_MIN_INT_ASAP = 1
REPORT_CONFIG_MIN_INT_IMMEDIATE = 0
REPORT_CONFIG_MIN_INT_OP = 5
REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600
REPORT_CONFIG_RPT_CHANGE = 1
REPORT_CONFIG_DEFAULT = (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_ASAP = (
REPORT_CONFIG_MIN_INT_ASAP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_BATTERY_SAVE = (
REPORT_CONFIG_MIN_INT_BATTERY_SAVE,
REPORT_CONFIG_MAX_INT_BATTERY_SAVE,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_IMMEDIATE = (
REPORT_CONFIG_MIN_INT_IMMEDIATE,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_OP = (
REPORT_CONFIG_MIN_INT_OP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
SENSOR_ACCELERATION = "acceleration"
SENSOR_BATTERY = "battery"
SENSOR_ELECTRICAL_MEASUREMENT = CHANNEL_ELECTRICAL_MEASUREMENT
SENSOR_GENERIC = "generic"
SENSOR_HUMIDITY = CHANNEL_HUMIDITY
SENSOR_ILLUMINANCE = CHANNEL_ILLUMINANCE
SENSOR_METERING = "metering"
SENSOR_OCCUPANCY = CHANNEL_OCCUPANCY
SENSOR_OPENING = "opening"
SENSOR_PRESSURE = CHANNEL_PRESSURE
SENSOR_TEMPERATURE = CHANNEL_TEMPERATURE
SENSOR_TYPE = "sensor_type"
SIGNAL_ADD_ENTITIES = "zha_add_new_entities"
SIGNAL_ATTR_UPDATED = "attribute_updated"
SIGNAL_AVAILABLE = "available"
SIGNAL_MOVE_LEVEL = "move_level"
SIGNAL_REMOVE = "remove"
SIGNAL_SET_LEVEL = "set_level"
SIGNAL_STATE_ATTR = "update_state_attribute"
SIGNAL_UPDATE_DEVICE = "{}_zha_update_device"
SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed"
SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change"
UNKNOWN = "unknown"
UNKNOWN_MANUFACTURER = "unk_manufacturer"
UNKNOWN_MODEL = "unk_model"
WARNING_DEVICE_MODE_STOP = 0
WARNING_DEVICE_MODE_BURGLAR = 1
WARNING_DEVICE_MODE_FIRE = 2
WARNING_DEVICE_MODE_EMERGENCY = 3
WARNING_DEVICE_MODE_POLICE_PANIC = 4
WARNING_DEVICE_MODE_FIRE_PANIC = 5
WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6
WARNING_DEVICE_STROBE_NO = 0
WARNING_DEVICE_STROBE_YES = 1
WARNING_DEVICE_SOUND_LOW = 0
WARNING_DEVICE_SOUND_MEDIUM = 1
WARNING_DEVICE_SOUND_HIGH = 2
WARNING_DEVICE_SOUND_VERY_HIGH = 3
WARNING_DEVICE_STROBE_LOW = 0x00
WARNING_DEVICE_STROBE_MEDIUM = 0x01
WARNING_DEVICE_STROBE_HIGH = 0x02
WARNING_DEVICE_STROBE_VERY_HIGH = 0x03
WARNING_DEVICE_SQUAWK_MODE_ARMED = 0
WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1
ZHA_DISCOVERY_NEW = "zha_discovery_new_{}"
ZHA_GW_MSG = "zha_gateway_message"
ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized"
ZHA_GW_MSG_DEVICE_INFO = "device_info"
ZHA_GW_MSG_DEVICE_JOINED = "device_joined"
ZHA_GW_MSG_DEVICE_REMOVED = "device_removed"
ZHA_GW_MSG_GROUP_ADDED = "group_added"
ZHA_GW_MSG_GROUP_INFO = "group_info"
ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added"
ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed"
ZHA_GW_MSG_GROUP_REMOVED = "group_removed"
ZHA_GW_MSG_LOG_ENTRY = "log_entry"
ZHA_GW_MSG_LOG_OUTPUT = "log_output"
ZHA_GW_MSG_RAW_INIT = "raw_device_initialized"
EFFECT_BLINK = 0x00
EFFECT_BREATHE = 0x01
EFFECT_OKAY = 0x02
EFFECT_DEFAULT_VARIANT = 0x00<|fim▁end|> | return self._desc |
<|file_name|>test_missing_function_pycode.py<|end_file_name|><|fim▁begin|>"""
@brief test log(time=8s)
@author Xavier Dupre
"""
import sys
import os
import unittest
import shutil
from contextlib import redirect_stdout
from io import StringIO
from pyquickhelper.pycode import ExtTestCase
from pyquickhelper.pycode import process_standard_options_for_setup_help, get_temp_folder
from pyquickhelper.texthelper import compare_module_version
from pyquickhelper.texthelper.version_helper import numeric_module_version
from pyquickhelper.pycode.setup_helper import (
clean_notebooks_for_numbers, hash_list, process_argv_for_unittest,
process_standard_options_for_setup)
class TestMissingFunctionsPycode(ExtTestCase):
def test_process_standard_options_for_setup_help(self):
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help('--help-commands')
self.assertIn('Commands processed by pyquickhelper:', f.getvalue())
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help(['--help', 'unittests'])
self.assertIn('-f file', f.getvalue())
f = StringIO()
with redirect_stdout(f):
process_standard_options_for_setup_help(['--help', 'clean_space'])
self.assertIn('clean unnecessary spaces', f.getvalue())
@unittest.skipIf(sys.platform != 'win32', reason="not available")
def test_process_standard_options_for_setup(self):
temp = get_temp_folder(
__file__, "temp_process_standard_options_for_setup")<|fim▁hole|> process_standard_options_for_setup(
['build_script'], file_or_folder=temp, project_var_name="debug",
fLOG=print)
text = f.getvalue()
self.assertIn('[process_standard_options_for_setup]', text)
self.assertExists(os.path.join(temp, 'bin'))
def test_numeric_module_version(self):
self.assertEqual(numeric_module_version((4, 5)), (4, 5))
self.assertEqual(numeric_module_version("4.5.e"), (4, 5, 'e'))
self.assertEqual(compare_module_version(("4.5.e"), (4, 5, 'e')), 0)
self.assertEqual(compare_module_version(("4.5.e"), None), -1)
self.assertEqual(compare_module_version(None, ("4.5.e")), 1)
self.assertEqual(compare_module_version(None, None), 0)
self.assertEqual(compare_module_version(
("4.5.e"), (4, 5, 'e', 'b')), -1)
def test_clean_notebooks_for_numbers(self):
temp = get_temp_folder(__file__, "temp_clean_notebooks_for_numbers")
nb = os.path.join(temp, "..", "data", "notebook_with_svg.ipynb")
fold = os.path.join(temp, '_doc', 'notebooks')
self.assertNotExists(fold)
os.makedirs(fold)
shutil.copy(nb, fold)
res = clean_notebooks_for_numbers(temp)
self.assertEqual(len(res), 1)
with open(res[0], 'r') as f:
content = f.read()
self.assertIn('"execution_count": 1,', content)
def test_hash_list(self):
li = [4, '5']
res = hash_list(li)
self.assertEqual(res, "1402b9d4")
li = []
res = hash_list(li)
self.assertEqual(res, "d41d8cd9")
def test_process_argv_for_unittest(self):
li = ['unittests', '-d', '5']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests']
res = process_argv_for_unittest(li, None)
self.assertEmpty(res)
li = ['unittests', '-e', '.*']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests', '-g', '.*']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
li = ['unittests', '-f', 'test.py']
res = process_argv_for_unittest(li, None)
self.assertNotEmpty(res)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | os.mkdir(os.path.join(temp, '_unittests'))
f = StringIO()
with redirect_stdout(f): |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from cStringIO import StringIO
from struct import pack, unpack, error as StructError
from .log import log
from .structures import fields
class DBFile(object):
"""
Base class for WDB and DBC files
"""
@classmethod
def open(cls, file, build, structure, environment):
if isinstance(file, basestring):
file = open(file, "rb")
instance = cls(file, build, environment)
instance._readHeader()
instance.setStructure(structure)
instance._rowDynamicFields = 0 # Dynamic fields index, used when parsing a row
instance._readAddresses()
return instance
def __init__(self, file=None, build=None, environment=None):
self._addresses = {}
self._values = {}
self.file = file
self.build = build
self.environment = environment
def __repr__(self):
return "%s(file=%r, build=%r)" % (self.__class__.__name__, self.file, self.build)
def __contains__(self, id):
return id in self._addresses
def __getitem__(self, item):
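        # Rows are parsed lazily: a row is read and cached the first time it is requested.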
if isinstance(item, slice):
keys = sorted(self._addresses.keys())[item]
return [self[k] for k in keys]
if item not in self._values:
self._parse_row(item)
return self._values[item]
def __setitem__(self, item, value):
if not isinstance(item, int):
raise TypeError("DBFile indices must be integers, not %s" % (type(item)))
if isinstance(value, DBRow):
self._values[item] = value
self._addresses[item] = -1
else:
# FIXME technically we should allow DBRow, but this is untested and will need resetting parent
raise TypeError("Unsupported type for DBFile.__setitem__: %s" % (type(value)))
def __delitem__(self, item):
if item in self._values:
del self._values[item]
del self._addresses[item]
def __iter__(self):
return self._addresses.__iter__()
def __len__(self):
return len(self._addresses)
def _add_row(self, id, address, reclen):
if id in self._addresses: # Something's wrong here
log.warning("Multiple instances of row %r found in %s" % (id, self.file.name))
self._addresses[id] = (address, reclen)
def _parse_field(self, data, field, row=None):
"""
Parse a single field in stream.
"""
if field.dyn > self._rowDynamicFields:
return None # The column doesn't exist in this row, we set it to None
ret = None
try:
if isinstance(field, fields.StringField):
ret = self._parse_string(data)
elif isinstance(field, fields.DataField): # wowcache.wdb
length = getattr(row, field.master)
ret = data.read(length)
elif isinstance(field, fields.DynamicMaster):
ret, = unpack("<I", data.read(4))
self._rowDynamicFields = ret
else:
ret, = unpack("<%s" % (field.char), data.read(field.size))
except StructError:
log.warning("Field %s could not be parsed properly" % (field))
ret = None
return ret
def supportsSeeking(self):
return hasattr(self.file, "seek")
def append(self, row):
"""
Append a row at the end of the file.
If the row does not have an id, one is automatically assigned.
"""
        i = len(self) + 1  # FIXME this won't work properly in incomplete files
if "_id" not in row:
row["_id"] = i
self[i] = row
def clear(self):
"""<|fim▁hole|>
def keys(self):
return self._addresses.keys()
def items(self):
return [(k, self[k]) for k in self]
def parse_row(self, data, reclen=0):
"""
Assign data to a DBRow instance
"""
return DBRow(self, data=data, reclen=reclen)
def values(self):
"""
Return a list of the file's values
"""
return [self[id] for id in self]
def setRow(self, key, **values):
self.__setitem__(key, DBRow(self, columns=values))
def size(self):
if hasattr(self.file, "size"):
return self.file.size()
elif isinstance(self.file, file):
from os.path import getsize
return getsize(self.file.name)
raise NotImplementedError
def update(self, other):
"""
Update file from iterable other
"""
for k in other:
self[k] = other[k]
def write(self, filename=""):
"""
Write the file data on disk. If filename is not given, use currently opened file.
"""
_filename = filename or self.file.name
data = self.header.data() + self.data() + self.eof()
f = open(_filename, "wb") # Don't open before calling data() as uncached rows would be empty
f.write(data)
f.close()
log.info("Written %i bytes at %s" % (len(data), f.name))
if not filename: # Reopen self.file, we modified it
# XXX do we need to wipe self._values here?
self.file.close()
self.file = open(f.name, "rb")
class DBRow(list):
"""
A database row.
Names of the variables of that class should not be used in field names of structures
"""
initialized = False
def __init__(self, parent, data=None, columns=None, reclen=0):
self._parent = parent
self._values = {} # Columns values storage
self.structure = parent.structure
self.initialized = True # needed for __setattr__
if columns:
if type(columns) == list:
self.extend(columns)
elif type(columns) == dict:
self._default()
_cols = [k.name for k in self.structure]
for k in columns:
try:
self[_cols.index(k)] = columns[k]
except ValueError:
log.warning("Column %r not found" % (k))
elif data:
dynfields = 0
data = StringIO(data)
for field in self.structure:
_data = parent._parse_field(data, field, self)
self.append(_data)
if reclen:
real_reclen = reclen + self._parent.row_header_size
if data.tell() != real_reclen:
log.warning("Reclen not respected for row %r. Expected %i, read %i. (%+i)" % (self.id, real_reclen, data.tell(), real_reclen-data.tell()))
def __dir__(self):
result = self.__dict__.keys()
result.extend(self.structure.column_names)
return result
def __getattr__(self, attr):
if attr in self.structure:
return self._get_value(attr)
if attr in self.structure._abstractions: # Union abstractions etc
field, func = self.structure._abstractions[attr]
return func(field, self)
if "__" in attr:
return self._query(attr)
return super(DBRow, self).__getattribute__(attr)
def __int__(self):
return self.id
def __setattr__(self, attr, value):
# Do not preserve the value in DBRow! Use the save method to save.
if self.initialized and attr in self.structure:
self._set_value(attr, value)
return super(DBRow, self).__setattr__(attr, value)
def __setitem__(self, index, value):
if not isinstance(index, int):
raise TypeError("Expected int instance, got %s instead (%r)" % (type(index), index))
list.__setitem__(self, index, value)
col = self.structure[index]
self._values[col.name] = col.to_python(value, row=self)
def _get_reverse_relation(self, table, field):
"""
Return a list of rows matching the reverse relation
"""
if not hasattr(self._parent, "_reverse_relation_cache"):
self._parent._reverse_relation_cache = {}
cache = self._parent._reverse_relation_cache
tfield = table + "__" + field
if tfield not in cache:
cache[tfield] = {}
# First time lookup, let's build the cache
table = self._parent.environment.dbFile(table)
for row in table:
row = table[row]
id = row._raw(field)
if id not in cache[tfield]:
cache[tfield][id] = []
cache[tfield][id].append(row)
return cache[tfield].get(self.id, None)
def _matches(self, **kwargs):
for k, v in kwargs.items():
if not self._query(k, v):
return False
return True
def _query(self, rel, value=None):
"""
Parse a django-like multilevel relationship
"""
rels = rel.split("__")
if "" in rels: # empty string
raise ValueError("Invalid relation string")
first = rels[0]
if not hasattr(self, first):
if self._parent.environment.hasDbFile(first):
# Handle reverse relations, eg spell__item for item table
remainder = rel[len(first + "__"):]
return self._get_reverse_relation(first, remainder)
raise ValueError("Invalid relation string")
ret = self
rels = rels[::-1]
special = {
"contains": lambda x, y: x in y,
"exact": lambda x, y: x == y,
"icontains": lambda x, y: x.lower() in y.lower(),
"iexact": lambda x, y: x.lower() == y.lower(),
"gt": lambda x, y: x > y,
"gte": lambda x, y: x >= y,
"lt": lambda x, y: x < y,
"lte": lambda x, y: x <= y,
}
while rels:
if rels[-1] in special:
if len(rels) != 1:
# icontains always needs to be the last piece of the relation string
raise ValueError("Invalid relation string")
return special[rels[-1]](value, ret)
else:
ret = getattr(ret, rels.pop())
return ret
def _set_value(self, name, value):
index = self.structure.index(name)
col = self.structure[index]
self._values[name] = col.to_python(value, self)
self[index] = value
def _get_value(self, name):
if name not in self._values:
raw_value = self[self.structure.index(name)]
self._set_value(name, raw_value)
return self._values[name]
def _raw(self, name):
"""
Returns the raw value from field 'name'
"""
index = self.structure.index(name)
return self[index]
def _save(self):
for name in self._values:
index = self.structure.index(name)
col = self.structure[index]
self[index] = col.from_python(self._values[name])
def _field(self, name):
"""
Returns the field 'name'
"""
index = self.structure.index(name)
return self.structure[index]
def _default(self):
"""
Change all fields to their default values
"""
del self[:]
self._values = {}
for col in self.structure:
char = col.char
if col.dyn:
self.append(None)
elif char == "s":
self.append("")
elif char == "f":
self.append(0.0)
else:
self.append(0)
def dict(self):
"""
Return a dict of the row as colname: value
"""
return dict(zip(self.structure.column_names, self))
def update(self, other):
for k in other:
self[k] = other[k]
@property
def id(self):
"Temporary hack to transition between _id and id"
return self._id<|fim▁end|> | Delete every row in the file
"""
for k in self.keys(): # Use key, otherwise we get RuntimeError: dictionary changed size during iteration
del self[k] |
<|file_name|>DecimalRange.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.testdriver.rule;
import java.math.BigDecimal;
import java.text.MessageFormat;
/**
* Accepts iff actual decimal is in [ expected + lower-bound, expected + upper-bound ].
* @since 0.2.0<|fim▁hole|>
private final BigDecimal lowerBound;
private final BigDecimal upperBound;
/**
* Creates a new instance.
* @param lowerBound lower bound offset from expected value
* @param upperBound upper bound offset from expected value
*/
public DecimalRange(BigDecimal lowerBound, BigDecimal upperBound) {
this.lowerBound = lowerBound;
this.upperBound = upperBound;
}
@Override
public boolean accepts(BigDecimal expected, BigDecimal actual) {
if (expected == null || actual == null) {
throw new IllegalArgumentException();
}
return expected.add(lowerBound).compareTo(actual) <= 0
&& actual.compareTo(expected.add(upperBound)) <= 0;
}
@Override
public String describeExpected(BigDecimal expected, BigDecimal actual) {
if (expected == null) {
return "(error)"; //$NON-NLS-1$
}
return MessageFormat.format(
"{0} ~ {1}", //$NON-NLS-1$
Util.format(expected.add(lowerBound)),
Util.format(expected.add(upperBound)));
}
}<|fim▁end|> | */
public class DecimalRange implements ValuePredicate<BigDecimal> { |
<|file_name|>dig.rs<|end_file_name|><|fim▁begin|>extern crate argparse;
extern crate domain;
extern crate tokio_core;
use std::error;
use std::result;
use std::str::FromStr;
use domain::bits::message::{MessageBuf, RecordSection};
use domain::bits::name::{DNameBuf, DNameSlice};
use domain::iana::{Class, Rtype};
use domain::resolv::{ResolvConf, Resolver};
//------------ Options ------------------------------------------------------
struct Options {
// @server
// -b address
// -c class Class as string
// -f filename
// -k filename<|fim▁hole|> // -q name
// -t type
// -x addr
// -y [hmac:name:key]
// -4
// -6
name: String, // name
qtype: String, // type Type as string
qclass: String, // class
// queryopt...
conf: ResolvConf,
}
impl Options {
fn new() -> Options {
let mut conf = ResolvConf::new();
let _ = conf.parse_file("/etc/resolv.conf");
conf.finalize();
conf.options.use_vc = true;
Options {
name: String::new(),
qtype: String::new(), // default depends on name.
qclass: "IN".to_string(),
conf: conf,
}
}
fn from_args() -> Options {
let mut res = Options::new();
res.parse();
res
}
fn parse(&mut self) {
use argparse::{ArgumentParser, Store};
let mut parser = ArgumentParser::new();
parser.refer(&mut self.name)
.add_argument("name", Store, "name of the resource record");
parser.refer(&mut self.qtype)
.add_argument("type", Store, "query type");
parser.refer(&mut self.qclass)
.add_argument("class", Store, "query class");
parser.parse_args_or_exit();
}
}
impl Options {
fn name(&self) -> Result<DNameBuf> {
if self.name.is_empty() {
Ok(DNameSlice::root().to_owned())
}
else {
let mut res = try!(DNameBuf::from_str(&self.name));
res.append_root().unwrap();
Ok(res)
}
}
fn qtype(&self) -> Result<Rtype> {
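        // When no explicit type was given, default to NS for the root query (empty name) and to A otherwise.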
if self.qtype.is_empty() {
Ok(if self.name.is_empty() { Rtype::Ns } else { Rtype::A })
}
else {
Ok(try!(Rtype::from_str(&self.qtype)))
}
}
fn qclass(&self) -> Result<Class> {
Ok(Class::In)
}
fn conf(&self) -> &ResolvConf { &self.conf }
}
//------------ Error and Result ---------------------------------------------
type Error = Box<error::Error>;
type Result<T> = result::Result<T, Error>;
//------------ Processing Steps ---------------------------------------------
fn query(options: Options) -> MessageBuf {
Resolver::run_with_conf(options.conf().clone(), |resolv| {
resolv.query((options.name().unwrap(), options.qtype().unwrap(),
options.qclass().unwrap()))
}).unwrap()
}
fn print_result(response: MessageBuf) {
println!(";; Got answer:");
println!(";; ->>HEADER<<- opcode: {}, status: {}, id: {}",
response.header().opcode(), response.header().rcode(),
response.header().id());
print!(";; flags:");
if response.header().qr() { print!(" qr"); }
if response.header().aa() { print!(" aa"); }
if response.header().tc() { print!(" tc"); }
if response.header().rd() { print!(" rd"); }
if response.header().ra() { print!(" ra"); }
if response.header().ad() { print!(" ad"); }
if response.header().cd() { print!(" cd"); }
println!("; QUERY: {}, ANSWER: {}, AUTHORITY: {}, ADDITIONAL: {}",
response.counts().qdcount(), response.counts().ancount(),
response.counts().nscount(), response.counts().arcount());
println!("");
let mut question = response.question();
if response.counts().qdcount() > 0 {
println!(";; QUESTION SECTION");
for item in &mut question {
let item = item.unwrap();
println!("; {}\t\t{}\t{}", item.qname(),
item.qclass(), item.qtype());
}
println!("");
}
let mut answer = question.answer().unwrap();
if response.counts().ancount() > 0 {
println!(";; ANSWER SECTION");
print_records(&mut answer);
println!("");
}
let mut authority = answer.next_section().unwrap().unwrap();
if response.counts().nscount() > 0 {
println!(";; AUTHORITY SECTION");
print_records(&mut authority);
println!("");
}
let mut additional = authority.next_section().unwrap().unwrap();
if response.counts().arcount() > 0 {
println!(";; ADDITIONAL SECTION");
print_records(&mut additional);
println!("");
}
}
fn print_records(section: &mut RecordSection) {
for record in section {
println!("{}", record.unwrap());
}
}
//------------ Main Function ------------------------------------------------
fn main() {
let options = Options::from_args();
let response = query(options);
let len = response.len();
print_result(response);
println!(";; Query time: not yet available.");
println!(";; SERVER: we don't currently know.");
println!(";; WHEN: not yet available.");
println!(";; MSG SIZE rcvd: {} bytes", len);
println!("");
}<|fim▁end|> | // -m
// -p port# |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Sharing.settings")
from django.core.management import execute_from_command_line<|fim▁hole|> execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>popup.js<|end_file_name|><|fim▁begin|>"use strict";
/**
* @var jaegerhut [Object] Badge icons, one for each policy
*/
const jaegerhut = {
0: {
name: "allowall",
colour: "#D84A4A",
text: chrome.i18n.getMessage("policyAllowAll")
},
1: {
name: "relaxed",
colour: "#559FE6",
text: chrome.i18n.getMessage("policyRelaxed")
},
2: {
name: "filtered",
colour: "#73AB55",
text: chrome.i18n.getMessage("policyFiltered")
},
3: {
name: "blockall",
colour: "#26272A",
text: chrome.i18n.getMessage("policyBlockAll")
},
undefined: {
name: "undefined",
colour: "#6F7072"
}
};
/**
* @var tabInfo [Object] Holds data about the tab
* obtained from the background process
*/
let tabInfo = {};
/**
* @var port [Object] A connection Port that allows message exchanging
*/
let port;
/*
* Basic nodes for building the interface<|fim▁hole|>const nodeCheckbox = document.createElement("input");
const nodeDetails = document.createElement("label");
const nodeWebsocket = document.createElement("div");
const nodeFrames = document.createElement("div");
const nodeSubdomain = document.createElement("span");
const nodeDomain = document.createElement("span");
const nodeNumber = document.createElement("label");
const nodeResource = document.createElement("a");
const nodeHostsList = document.createElement("div");
nodeHost.className = "host blocked";
nodeDetails.className = "details";
nodeWebsocket.className = "websocket";
nodeFrames.className = "frames";
nodeSubdomain.className = "subdomain";
nodeDomain.className = "domain";
nodeNumber.className = "number";
nodeResource.className = "resource";
nodeHostsList.className = "hosts";
nodeHostsList.id = "f0";
nodeCheckbox.type = "checkbox";
nodeNumber.title = chrome.i18n.getMessage("seeResources");
nodeResource.target = "_blank";
/**
* @brief Set and save an exception rule for that script
*
* Fired whenever the user changes the checkbox of a rule.
* This will set and save the rule according to what the
* user has chosen.
*
* @param e [Event] Event interface on the checkbox change event
*/
function setScriptRule(e) {
const input = e.target;
let info = tabInfo;
const frameID = parseInt(input.dataset.frameid, 10);
if (frameID > 0) {
info = tabInfo.frames[frameID];
}
const msg = {
type: 0,
private: tabInfo.private,
site: [],
rule: {}
};
switch (parseInt(document.getElementById("settings").dataset.scope, 10)) {
// page
case 3: msg.site[2] = info.page;
// site - fallthrough
case 2: msg.site[1] = info.subdomain;
// domain - fallthrough
case 1: msg.site[0] = info.domain;
// global - fallthrough
default:
}
const domain = input.dataset.domain;
const subdomain = input.dataset.subdomain;
msg.rule[domain] = {
rule: null,
urls: {}
};
msg.rule[domain].urls[subdomain] = {
// in the DOM true means checked which means allow
// in the settings true means block
rule: !input.checked,
urls: {}
};
// The background script deals with it because the popup process will die on close
port.postMessage(msg);
}
/**
* @brief Open dropdown to choose frame policy
*
* Opens an overlay div to choose a new policy for a frame.
* This is fired when clicking on a hat (Jaegerhut)
*
* @param e [Event] Event interface on the clicked Jaegerhut
*/
function openFramePolicy(e) {
const frameid = e.target.parentNode.dataset.frameid;
const policy = parseInt(e.target.dataset.policy, 10);
const dropdown = document.getElementById("frame-edit");
const pos = e.target.getBoundingClientRect().y - 30;
dropdown.dataset.frameid = frameid;
dropdown.dataset.hidden = false;
dropdown.style = `top:${pos}px`;
dropdown.dataset.scope = 1;
dropdown.dataset.policy = policy;
}
/**
* @brief Close frame policy dropdown
*
* Closes the overlay div where you choose a new policy for a frame.
*/
function closeFramePolicy() {
document.getElementById("frame-edit").dataset.hidden = true;
}
/**
* @brief Build resource list in the DOM
*
* Injects nodes to display the list of resources that the page
* contains. Also attaches events to the elements to allow
* manipulation of the settings.
*
 * @param frameID [Number] id of the frame being built
*/
function buildList(frameID) {
const elemMainNode = document.getElementById(`f${frameID}`);
let frame = tabInfo;
if (frameID > 0) {
frame = tabInfo.frames[frameID];
}
Object.entries(frame.scripts).sort().forEach((domainData) => {
const domain = domainData[0];
Object.entries(domainData[1]).sort().forEach((subdomainData) => {
const subdomain = subdomainData[0];
const resources = subdomainData[1];
const elemHost = nodeHost.cloneNode(false);
const elemCheckbox = nodeCheckbox.cloneNode(false);
const elemDetails = nodeDetails.cloneNode(false);
const elemWebsocket = nodeWebsocket.cloneNode(false);
const elemFrames = nodeFrames.cloneNode(false);
const elemSubdomain = nodeSubdomain.cloneNode(false);
const elemDomain = nodeDomain.cloneNode(false);
const elemNumber = nodeNumber.cloneNode(false);
elemDetails.appendChild(elemWebsocket);
elemDetails.appendChild(elemFrames);
elemDetails.appendChild(elemSubdomain);
elemDetails.appendChild(elemDomain);
elemHost.appendChild(elemCheckbox);
elemHost.appendChild(elemDetails);
elemHost.appendChild(elemNumber);
elemMainNode.appendChild(elemHost);
const hostID = `${subdomain}${domain}${frameID}`;
elemCheckbox.id = hostID;
elemDetails.htmlFor = hostID;
elemSubdomain.innerHTML = `<span>${subdomain}${((subdomain.length > 0) ? "." : "")}</span>`;
elemDomain.innerHTML = `<span>${domain}</span>`;
elemNumber.innerText = resources.length;
// if the text is larger than the area, we display a tooltip
if (elemSubdomain.scrollWidth > elemSubdomain.clientWidth || elemDomain.scrollWidth > elemDomain.clientWidth) {
elemHost.title = `${elemSubdomain.textContent}${domain}`;
}
// save script exception
elemCheckbox.addEventListener("change", setScriptRule, false);
// add data to checkbox
elemCheckbox.dataset.frameid = frameID;
elemCheckbox.dataset.domain = domain;
elemCheckbox.dataset.subdomain = subdomain;
// input that controls the script list visibility
const openList = nodeCheckbox.cloneNode(false);
openList.id = `list_${hostID}`;
elemNumber.htmlFor = `list_${hostID}`;
elemMainNode.appendChild(openList);
// element that holds the list of elements from that host
const resourcesList = document.createElement("div");
resourcesList.className = "resources";
elemMainNode.appendChild(resourcesList);
let frames = 0;
let websockets = 0;
// populate scripts list
// script can be a websocket or frame
resources.forEach((script) => {
if (!script.blocked) {
elemCheckbox.checked = true;
// remove blocked class
elemHost.className = "host";
}
const url = `${script.protocol}${elemSubdomain.textContent}${domain}${script.name}${script.query}`;
const elemResource = nodeResource.cloneNode(false);
elemResource.innerText = script.name.match(/[^/]*.$/);
elemResource.title = url;
elemResource.href = url;
// websocket
if (script.protocol === "wss://" || script.protocol === "ws://") {
elemResource.className = "resource haswebsocket";
elemWebsocket.className = "websocket haswebsocket";
elemWebsocket.title = `\n${chrome.i18n.getMessage("tooltipWebsockets", (++websockets).toString())}`;
}
// if frameid exists it's a frame
// otherwise it's a normal script/websocket
if (script.frameid === undefined) {
resourcesList.appendChild(elemResource);
}
else {
const policy = jaegerhut[tabInfo.frames[script.frameid].policy];
const elemFrameDiv = document.createElement("div");
elemFrameDiv.className = "frame";
elemFrameDiv.dataset.frameid = script.frameid;
elemFrames.className = "frames hasframe";
elemFrames.title = chrome.i18n.getMessage("tooltipFrames", (++frames).toString());
const elemPolicy = document.createElement("img");
elemPolicy.src = `/images/${policy.name}38.png`;
elemPolicy.className = "frame-policy";
elemPolicy.title = policy.text;
elemPolicy.dataset.policy = tabInfo.frames[script.frameid].policy;
elemPolicy.addEventListener("click", openFramePolicy);
const elemNumberFrame = nodeNumber.cloneNode(false);
elemNumberFrame.htmlFor = `frame${script.frameid}`;
elemNumberFrame.innerText = Object.keys(tabInfo.frames[script.frameid].scripts).length;
elemFrameDiv.appendChild(elemPolicy);
elemFrameDiv.appendChild(elemResource);
elemFrameDiv.appendChild(elemNumberFrame);
resourcesList.appendChild(elemFrameDiv);
const elemCheckboxFrame = nodeCheckbox.cloneNode(false);
elemCheckboxFrame.id = `frame${script.frameid}`;
resourcesList.appendChild(elemCheckboxFrame);
const resourcesListFrame = document.createElement("div");
resourcesListFrame.className = `resources ${policy.name}`;
resourcesListFrame.id = `f${script.frameid}`;
resourcesList.appendChild(resourcesListFrame);
buildList(script.frameid);
}
elemFrames.title = `${elemFrames.title}${elemWebsocket.title}`;
});
});
});
}
/**
* @brief Sets and build the popup UI
*
* Define main classes and then call the script list builder.
*/
function startUI() {
const error = document.getElementById("error");
const settings = document.getElementById("settings");
settings.replaceChild(nodeHostsList.cloneNode(false), document.getElementById("f0"));
settings.removeAttribute("hidden");
error.hidden = true;
const blocked = tabInfo.policy ? tabInfo.allowonce ? "(T) " : `(${tabInfo.blocked}) ` : "";
document.title = `${blocked}ScriptJäger`;
document.getElementById("jaegerhut").href = `images/${jaegerhut[tabInfo.policy].name}38.png`;
document.getElementById("jaegerfarbe").content = jaegerhut[tabInfo.policy].colour;
let skip = false;
switch (tabInfo.protocol) {
case "https://":
case "http://":
break;
case "chrome://":
case "chrome-extension://":
skip = "errorInternal";
break;
case "file://":
if (!tabInfo.policy) {
skip = "errorFile";
}
break;
default:
skip = "errorInternal";
}
document.body.className = jaegerhut[tabInfo.policy].name;
if (skip !== false) {
error.innerText = chrome.i18n.getMessage(skip);
error.removeAttribute("hidden");
settings.hidden = true;
return;
}
// policy button reflects current policy
settings.dataset.policy = tabInfo.policy;
const allowonce = document.getElementById("allowonce");
// Allow once is turned on
if (tabInfo.allowonce === true) {
allowonce.title = chrome.i18n.getMessage("policyAllowOnceDisable");
allowonce.className = "allowonce";
}
// Allow once is turned off
else {
allowonce.title = chrome.i18n.getMessage("policyAllowOnce");
allowonce.className = "";
}
buildList(0);
}
/**
* @brief Get info about tab
*
* When opening the popup we request the info about the
* page scripts and create the DOM nodes with this info
*
* @param tabs [Array] Contains info about the current tab
*/
chrome.tabs.query({currentWindow: true, active: true}, (tabs) => {
port = chrome.runtime.connect({name: tabs[0].id.toString(10)});
/**
	 * @brief Perform actions according to message
*
* The background script will send the info we need
*
* Child 'type' will contain the type of the request
*
* @param msg [Object] Contains type and data for the action
*
* @note Each request has different msg children/info
*
* 0 (Re)Build UI - Whenever the UI has to be completely updated
* - data [Object] Tab info, a children of background tabStorage
*
* 1 Update interface
*
* 2 Response of allowed/blocked list for relaxed/filtered
* - tabid [Number] id of the requested tab
* - scripts [Array] Contains the url and the rule
* - name [String] DOM ID of the script
* - blocked [Boolean] Whether that level will be blocked
*/
port.onMessage.addListener((msg) => {
console.log(msg);
if (msg.type === 0) {
// save tab info in variable
tabInfo = msg.data;
startUI();
return;
}
if (msg.type === 1) {
return;
}
// msg.type === 2
// check if the user has not changed tab
if (msg.tabid === tabInfo.tabid) {
msg.scripts.forEach((domain) => {
document.getElementById(domain.name).checked = !domain.blocked;
});
}
});
});
/**
* @brief Save new policy
*
* Send to background process to save new policy for the specific scope
*
* @param policy [Number] Policy to save
* @param scope [Number] Where to change rule, e.g. domain, global
* @param frameid [Number] Frame where the policy change is being done
*/
function changePolicy(policy, scope, frameid) {
const msg = {
type: 0,
private: tabInfo.private,
site: [],
rule: policy
};
let frame = tabInfo;
if (frameid > 0) {
frame = tabInfo.frames[frameid];
}
switch (scope) {
// page
case 3: msg.site[2] = frame.page;
// site - fallthrough
case 2: msg.site[1] = frame.subdomain;
// domain - fallthrough
case 1: msg.site[0] = frame.domain;
// global - fallthrough
default:
}
port.postMessage(msg);
}
/**
* @brief Enable listeners when the DOM has loaded
*
* When the DOM is loaded we can attach the events to manipulate
* the preferences.
*/
function enableListeners() {
document.getElementById("settings").addEventListener("click", closeFramePolicy, true);
document.getElementById("cancel").addEventListener("click", closeFramePolicy);
document.getElementById("preferences").addEventListener("click", (e) => {
e.stopPropagation();
chrome.runtime.openOptionsPage();
});
// allow once
document.getElementById("allowonce").addEventListener("click", (e) => {
e.stopPropagation();
port.postMessage({
type: 1,
tabId: tabInfo.tabid,
allow: !tabInfo.allowonce
});
});
document.querySelectorAll(".scopes").forEach((scopes) => {
scopes.addEventListener("click", (e) => {
e.target.parentNode.parentNode.dataset.scope = e.target.dataset.value;
});
});
document.querySelectorAll(".policies").forEach((policies) => {
policies.addEventListener("click", (e) => {
const target = (e.target.tagName === "IMG") ? e.target.parentNode : e.target;
const frame = target.parentNode.parentNode.dataset;
const policy = parseInt(target.dataset.value, 10);
const scope = parseInt(frame.scope, 10);
frame.policy = policy;
changePolicy(policy, scope, frame.frameid);
if (frame.frameid > 0) {
document.getElementById(`f${frame.frameid}`).className = `resources ${jaegerhut[frame.policy].name}`;
} else {
document.body.className = jaegerhut[frame.policy].name;
}
// change all inputs to checked (allowed) or unchecked (blocked)
if (policy === 0 || policy === 3) {
document.querySelectorAll(`#f${frame.frameid} > .script > input`).forEach((checkbox) => {
checkbox.checked = !policy;
});
return;
}
// request list of blocked and allowed scripts from background script
port.postMessage({
type: 2,
policy: policy,
tabid: tabInfo.tabid,
frameid: frame.frameid,
window: tabInfo.window,
});
});
});
}
/**
* @brief Translate and attach events
*
* This will translate the page and attach the events to the nodes.
*/
document.addEventListener("DOMContentLoaded", () => {
const template = document.body.innerHTML;
// translate the page
document.body.innerHTML = template.replace(/__MSG_(\w+)__/g, (a, b) => {
return chrome.i18n.getMessage(b);
});
// allow resizable width on webpanel
if (document.location.search === "?webpanel") {
document.body.style = "width: 100%";
}
enableListeners();
});<|fim▁end|> | */
const nodeHost = document.createElement("div"); |
<|file_name|>0005_auto__add_field_schema_immutable.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Schema.immutable'
db.add_column('tardis_portal_schema', 'immutable', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'Schema.immutable'
db.delete_column('tardis_portal_schema', 'immutable')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tardis_portal.author_experiment': {
'Meta': {'ordering': "['order']", 'unique_together': "(('experiment', 'author'),)", 'object_name': 'Author_Experiment'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'tardis_portal.datafileparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatafileParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatafileParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datafileparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatafileParameterSet'},
'dataset_file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset_File']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.dataset': {
'Meta': {'object_name': 'Dataset'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),<|fim▁hole|> 'Meta': {'object_name': 'Dataset_File'},
'created_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5sum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'size': ('django.db.models.fields.CharField', [], {'max_length': '400', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '400'})
},
'tardis_portal.datasetparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatasetParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatasetParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datasetparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatasetParameterSet'},
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.experiment': {
'Meta': {'object_name': 'Experiment'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'created_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'handle': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution_name': ('django.db.models.fields.CharField', [], {'default': "'Monash University'", 'max_length': '400'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'tardis_portal.experimentacl': {
'Meta': {'ordering': "['experiment__id']", 'object_name': 'ExperimentACL'},
'aclOwnershipType': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'canDelete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canRead': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canWrite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'effectiveDate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'entityId': ('django.db.models.fields.CharField', [], {'max_length': '320'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'expiryDate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isOwner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pluginId': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'tardis_portal.experimentparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'ExperimentParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ExperimentParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.experimentparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'ExperimentParameterSet'},
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.groupadmin': {
'Meta': {'object_name': 'GroupAdmin'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'tardis_portal.parametername': {
'Meta': {'ordering': "('order', 'name')", 'unique_together': "(('schema', 'name'),)", 'object_name': 'ParameterName'},
'choices': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'comparison_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'data_type': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_searchable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '9999', 'null': 'True', 'blank': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'})
},
'tardis_portal.schema': {
'Meta': {'object_name': 'Schema'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'namespace': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
'subtype': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
'tardis_portal.token': {
'Meta': {'object_name': 'Token'},
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'expiry_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date(2011, 10, 19)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'tardis_portal.userauthentication': {
'Meta': {'object_name': 'UserAuthentication'},
'authenticationMethod': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'userProfile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.UserProfile']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'tardis_portal.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isDjangoAccount': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['tardis_portal']<|fim▁end|> | 'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'tardis_portal.dataset_file': { |
<|file_name|>package.rs<|end_file_name|><|fim▁begin|>use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
use semver::Version;
use core::{Dependency, Manifest, PackageId, SourceId, Target, TargetKind};
use core::{Summary, Metadata, SourceMap};
use ops;
use util::{CargoResult, Config, LazyCell, ChainError, internal, human, lev_distance};
use rustc_serialize::{Encoder,Encodable};
/// Information about a package that is available somewhere in the file system.
///
/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
// The package's manifest
manifest: Manifest,
// The root of the package
manifest_path: PathBuf,
}
#[derive(RustcEncodable)]
struct SerializedPackage<'a> {
name: &'a str,
version: &'a str,
id: &'a PackageId,
source: &'a SourceId,
dependencies: &'a [Dependency],
targets: &'a [Target],
features: &'a HashMap<String, Vec<String>>,
manifest_path: &'a str,
}
<|fim▁hole|> let summary = self.manifest.summary();
let package_id = summary.package_id();
SerializedPackage {
name: &package_id.name(),
version: &package_id.version().to_string(),
id: package_id,
source: summary.source_id(),
dependencies: summary.dependencies(),
targets: &self.manifest.targets(),
features: summary.features(),
manifest_path: &self.manifest_path.display().to_string(),
}.encode(s)
}
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.to_path_buf(),
}
}
pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult<Package> {
let path = manifest_path.parent().unwrap();
let source_id = try!(SourceId::for_path(path));
let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id,
config));
Ok(pkg)
}
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn name(&self) -> &str { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn targets(&self) -> &[Target] { self.manifest().targets() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn authors(&self) -> &Vec<String> { &self.manifest.metadata().authors }
pub fn publish(&self) -> bool { self.manifest.publish() }
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn generate_metadata(&self) -> Metadata {
self.package_id().generate_metadata()
}
pub fn find_closest_target(&self, target: &str, kind: TargetKind) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| *t.kind() == kind)
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
}
impl fmt::Display for Package {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.summary().package_id())
}
}
impl PartialEq for Package {
fn eq(&self, other: &Package) -> bool {
self.package_id() == other.package_id()
}
}
impl Eq for Package {}
impl hash::Hash for Package {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.package_id().hash(into)
}
}
pub struct PackageSet<'cfg> {
packages: Vec<(PackageId, LazyCell<Package>)>,
sources: RefCell<SourceMap<'cfg>>,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new(None))
}).collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
Box::new(self.packages.iter().map(|&(ref p, _)| p))
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| {
internal(format!("couldn't find `{}` in package set", id))
}));
let slot = &slot.1;
if let Some(pkg) = slot.borrow() {
return Ok(pkg)
}
let mut sources = self.sources.borrow_mut();
let source = try!(sources.get_mut(id.source_id()).chain_error(|| {
internal(format!("couldn't find source for `{}`", id))
}));
let pkg = try!(source.download(id).chain_error(|| {
human("unable to get packages from source")
}));
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}<|fim▁end|> | impl Encodable for Package {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { |
<|file_name|>bmotion.integrated.js<|end_file_name|><|fim▁begin|>requirejs(['bmotion.config'], function() {<|fim▁hole|><|fim▁end|> | requirejs(['bms.integrated.root'], function() {});
}); |
<|file_name|>cli.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
//
// cli.js
//
// Copyright (c) 2016-2017 Junpei Kawamoto<|fim▁hole|>// http://opensource.org/licenses/mit-license.php
//
const {
start,
crawl
} = require("../lib/crawler");
const argv = require("yargs")
.option("lang", {
describe: "Language to be used to scrape trand pages. Not used in crawl command."
})
.default("lang", "EN")
.option("dir", {
describe: "Path to the directory to store database files"
})
.demandOption(["dir"])
.command("*", "Start crawling", () => {}, (argv) => {
start(argv.lang, argv.dir);
})
.command("crawl", "Crawl comments form a video", () => {}, (argv) => {
crawl(argv.dir).catch((err) => {
console.error(err);
});
})
.help("h")
.alias("h", "help")
.argv;<|fim▁end|> | //
// This software is released under the MIT License.
// |
<|file_name|>fuse.rs<|end_file_name|><|fim▁begin|>use core::pin::Pin;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
/// Stream for the [`fuse`](super::StreamExt::fuse) method.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Fuse<St> {
stream: St,
done: bool,
}
impl<St: Unpin> Unpin for Fuse<St> {}
impl<St> Fuse<St> {
unsafe_pinned!(stream: St);
unsafe_unpinned!(done: bool);
pub(super) fn new(stream: St) -> Fuse<St> {
Fuse { stream, done: false }<|fim▁hole|> ///
/// If this method returns `true`, then all future calls to poll are
/// guaranteed to return `None`. If this returns `false`, then the
/// underlying stream is still in use.
pub fn is_done(&self) -> bool {
self.done
}
/// Acquires a reference to the underlying stream that this combinator is
/// pulling from.
pub fn get_ref(&self) -> &St {
&self.stream
}
/// Acquires a mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_mut(&mut self) -> &mut St {
&mut self.stream
}
/// Acquires a pinned mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut St> {
self.stream()
}
/// Consumes this combinator, returning the underlying stream.
///
/// Note that this may discard intermediate state of this combinator, so
/// care should be taken to avoid losing resources when this is called.
pub fn into_inner(self) -> St {
self.stream
}
}
impl<S: Stream> FusedStream for Fuse<S> {
fn is_terminated(&self) -> bool {
self.done
}
}
impl<S: Stream> Stream for Fuse<S> {
type Item = S::Item;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<S::Item>> {
if self.done {
return Poll::Ready(None);
}
let item = ready!(self.as_mut().stream().poll_next(cx));
if item.is_none() {
*self.as_mut().done() = true;
}
Poll::Ready(item)
}
fn size_hint(&self) -> (usize, Option<usize>) {
if self.done {
(0, Some(0))
} else {
self.stream.size_hint()
}
}
}
// Forwarding impl of Sink from the underlying stream
#[cfg(feature = "sink")]
impl<S: Stream + Sink<Item>, Item> Sink<Item> for Fuse<S> {
type Error = S::Error;
delegate_sink!(stream, Item);
}<|fim▁end|> | }
/// Returns whether the underlying stream has finished or not. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017 https://github.com/ping
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
<|fim▁hole|><|fim▁end|> | __version__ = '0.3.9' |
<|file_name|>DestinationFilterTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.filter;
import junit.framework.TestCase;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
public class DestinationFilterTest extends TestCase {
public void testPrefixFilter() throws Exception {
DestinationFilter filter = DestinationFilter.parseFilter(new ActiveMQQueue(">"));
assertTrue("Filter not parsed well: " + filter.getClass(), filter instanceof PrefixDestinationFilter);
System.out.println(filter);
assertFalse("Filter matched wrong destination type", filter.matches(new ActiveMQTopic(">")));
}
public void testWildcardFilter() throws Exception {
DestinationFilter filter = DestinationFilter.parseFilter(new ActiveMQQueue("A.*"));
assertTrue("Filter not parsed well: " + filter.getClass(), filter instanceof WildcardDestinationFilter);
assertFalse("Filter matched wrong destination type", filter.matches(new ActiveMQTopic("A.B")));
}
public void testCompositeFilter() throws Exception {
DestinationFilter filter = DestinationFilter.parseFilter(new ActiveMQQueue("A.B,B.C"));
assertTrue("Filter not parsed well: " + filter.getClass(), filter instanceof CompositeDestinationFilter);
assertFalse("Filter matched wrong destination type", filter.matches(new ActiveMQTopic("A.B")));
}
public void testMatchesChild() throws Exception {
DestinationFilter filter = DestinationFilter.parseFilter(new ActiveMQQueue("A.*.C"));
assertFalse("Filter matched wrong destination type", filter.matches(new ActiveMQTopic("A.B")));
assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A.B.C")));
filter = DestinationFilter.parseFilter(new ActiveMQQueue("A.*"));<|fim▁hole|> assertFalse("Filter did match", filter.matches(new ActiveMQQueue("A")));
}
public void testMatchesAny() throws Exception {
DestinationFilter filter = DestinationFilter.parseFilter(new ActiveMQQueue("A.>.>"));
assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A.C")));
assertFalse("Filter did match", filter.matches(new ActiveMQQueue("B")));
assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A.B")));
assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A.B.C.D.E.F")));
assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A")));
}
}<|fim▁end|> | assertTrue("Filter did not match", filter.matches(new ActiveMQQueue("A.B"))); |
<|file_name|>queue_test.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2014 The Syncthing Authors.
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at http://mozilla.org/MPL/2.0/.
package model
import (
"fmt"
"reflect"
"testing"
)
func TestJobQueue(t *testing.T) {
// Some random actions
q := newJobQueue()
q.Push("f1", 0, 0)
q.Push("f2", 0, 0)
q.Push("f3", 0, 0)
q.Push("f4", 0, 0)
progress, queued := q.Jobs()
if len(progress) != 0 || len(queued) != 4 {
t.Fatal("Wrong length")
}
for i := 1; i < 5; i++ {
n, ok := q.Pop()
if !ok || n != fmt.Sprintf("f%d", i) {
t.Fatal("Wrong element")
}
progress, queued = q.Jobs()
if len(progress) != 1 || len(queued) != 3 {
t.Log(progress)
t.Log(queued)
t.Fatal("Wrong length")
}
q.Done(n)
progress, queued = q.Jobs()
if len(progress) != 0 || len(queued) != 3 {
t.Fatal("Wrong length", len(progress), len(queued))
}
q.Push(n, 0, 0)
progress, queued = q.Jobs()
if len(progress) != 0 || len(queued) != 4 {
t.Fatal("Wrong length")
}
q.Done("f5") // Does not exist
progress, queued = q.Jobs()
if len(progress) != 0 || len(queued) != 4 {
t.Fatal("Wrong length")
}
}
if len(q.progress) > 0 || len(q.queued) != 4 {
t.Fatal("Wrong length")
}
for i := 4; i > 0; i-- {
progress, queued = q.Jobs()
if len(progress) != 4-i || len(queued) != i {
t.Fatal("Wrong length")
}
s := fmt.Sprintf("f%d", i)
q.BringToFront(s)
progress, queued = q.Jobs()
if len(progress) != 4-i || len(queued) != i {
t.Fatal("Wrong length")
}
n, ok := q.Pop()
if !ok || n != s {
t.Fatal("Wrong element")
}
progress, queued = q.Jobs()
if len(progress) != 5-i || len(queued) != i-1 {
t.Fatal("Wrong length")
}
q.Done("f5") // Does not exist
progress, queued = q.Jobs()
if len(progress) != 5-i || len(queued) != i-1 {
t.Fatal("Wrong length")
}
}
_, ok := q.Pop()
if len(q.progress) != 4 || ok {
t.Fatal("Wrong length")
}
q.Done("f1")
q.Done("f2")
q.Done("f3")
q.Done("f4")
q.Done("f5") // Does not exist
_, ok = q.Pop()
if len(q.progress) != 0 || ok {
t.Fatal("Wrong length")
}
progress, queued = q.Jobs()
if len(progress) != 0 || len(queued) != 0 {
t.Fatal("Wrong length")
}
q.BringToFront("")
q.Done("f5") // Does not exist
progress, queued = q.Jobs()
if len(progress) != 0 || len(queued) != 0 {
t.Fatal("Wrong length")
}
}
func TestBringToFront(t *testing.T) {
q := newJobQueue()
q.Push("f1", 0, 0)
q.Push("f2", 0, 0)
q.Push("f3", 0, 0)
q.Push("f4", 0, 0)
_, queued := q.Jobs()
if !reflect.DeepEqual(queued, []string{"f1", "f2", "f3", "f4"}) {
t.Errorf("Incorrect order %v at start", queued)
}
q.BringToFront("f1") // corner case: does nothing
_, queued = q.Jobs()
if !reflect.DeepEqual(queued, []string{"f1", "f2", "f3", "f4"}) {
t.Errorf("Incorrect order %v", queued)
}
q.BringToFront("f3")
_, queued = q.Jobs()
if !reflect.DeepEqual(queued, []string{"f3", "f1", "f2", "f4"}) {
t.Errorf("Incorrect order %v", queued)
}
q.BringToFront("f2")
_, queued = q.Jobs()
if !reflect.DeepEqual(queued, []string{"f2", "f3", "f1", "f4"}) {
t.Errorf("Incorrect order %v", queued)
}
q.BringToFront("f4") // corner case: last element
_, queued = q.Jobs()
if !reflect.DeepEqual(queued, []string{"f4", "f2", "f3", "f1"}) {
t.Errorf("Incorrect order %v", queued)
}
}
func TestShuffle(t *testing.T) {
q := newJobQueue()
q.Push("f1", 0, 0)
q.Push("f2", 0, 0)
q.Push("f3", 0, 0)
q.Push("f4", 0, 0)
// This test will fail once in eight million times (1 / (4!)^5) :)
for i := 0; i < 5; i++ {
q.Shuffle()
_, queued := q.Jobs()
if l := len(queued); l != 4 {
t.Fatalf("Weird length %d returned from Jobs()", l)
}
t.Logf("%v", queued)
if !reflect.DeepEqual(queued, []string{"f1", "f2", "f3", "f4"}) {
// The queue was shuffled
return
}
}
t.Error("Queue was not shuffled after five attempts.")
}
func TestSortBySize(t *testing.T) {
q := newJobQueue()
q.Push("f1", 20, 0)
q.Push("f2", 40, 0)
q.Push("f3", 30, 0)
q.Push("f4", 10, 0)
q.SortSmallestFirst()
_, actual := q.Jobs()
if l := len(actual); l != 4 {
t.Fatalf("Weird length %d returned from Jobs()", l)
}
expected := []string{"f4", "f1", "f3", "f2"}
if !reflect.DeepEqual(actual, expected) {
t.Errorf("SortSmallestFirst(): %#v != %#v", actual, expected)
}
q.SortLargestFirst()
_, actual = q.Jobs()<|fim▁hole|> }
expected = []string{"f2", "f3", "f1", "f4"}
if !reflect.DeepEqual(actual, expected) {
t.Errorf("SortLargestFirst(): %#v != %#v", actual, expected)
}
}
func TestSortByAge(t *testing.T) {
q := newJobQueue()
q.Push("f1", 0, 20)
q.Push("f2", 0, 40)
q.Push("f3", 0, 30)
q.Push("f4", 0, 10)
q.SortOldestFirst()
_, actual := q.Jobs()
if l := len(actual); l != 4 {
t.Fatalf("Weird length %d returned from Jobs()", l)
}
expected := []string{"f4", "f1", "f3", "f2"}
if !reflect.DeepEqual(actual, expected) {
t.Errorf("SortOldestFirst(): %#v != %#v", actual, expected)
}
q.SortNewestFirst()
_, actual = q.Jobs()
if l := len(actual); l != 4 {
t.Fatalf("Weird length %d returned from Jobs()", l)
}
expected = []string{"f2", "f3", "f1", "f4"}
if !reflect.DeepEqual(actual, expected) {
t.Errorf("SortNewestFirst(): %#v != %#v", actual, expected)
}
}
func BenchmarkJobQueueBump(b *testing.B) {
files := genFiles(b.N)
q := newJobQueue()
for _, f := range files {
q.Push(f.Name, 0, 0)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
q.BringToFront(files[i].Name)
}
}
func BenchmarkJobQueuePushPopDone10k(b *testing.B) {
files := genFiles(10000)
b.ResetTimer()
for i := 0; i < b.N; i++ {
q := newJobQueue()
for _, f := range files {
q.Push(f.Name, 0, 0)
}
for _ = range files {
n, _ := q.Pop()
q.Done(n)
}
}
}<|fim▁end|> | if l := len(actual); l != 4 {
t.Fatalf("Weird length %d returned from Jobs()", l) |
<|file_name|>CCPlugin.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: UTF-8 -*-
from abc import ABCMeta, abstractmethod<|fim▁hole|> """@Interface"""
__metaclass__ = ABCMeta
@abstractmethod
def perform(self):
pass
@abstractmethod
def __init__(self):
self._path = None
self._request = None
"""@AttributeType cloudclient.CCResponse"""<|fim▁end|> | from cloudclient import CCResponse
class CCPlugin(object): |