prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>round_trip.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 GFX Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use obj::{Obj, ObjData};
#[test]
fn round_trip_sponza_no_mtls() {
let sponza: Obj = Obj::load("test_assets/sponza.obj").unwrap();
let mut obj = Vec::new();
sponza.data.write_to_buf(&mut obj).unwrap();
let sponza_round_trip = ObjData::load_buf(obj.as_slice()).unwrap();
assert_eq!(sponza_round_trip, sponza.data);
}
#[test]
fn round_trip_sponza_with_mtl() {
let mut sponza: Obj = Obj::load("test_assets/sponza.obj").unwrap();
sponza.load_mtls().unwrap();
// Write obj to string, and then load it from that string to create a round trip Obj instance.
let mut obj = Vec::new();
sponza.data.write_to_buf(&mut obj).unwrap();
let mut sponza_round_trip: Obj = Obj {
data: ObjData::load_buf(obj.as_slice()).unwrap(),
path: sponza.path,<|fim▁hole|> for mtl in sponza.data.material_libs.iter() {
let mut out = Vec::new();
mtl.write_to_buf(&mut out).unwrap();
round_trip_mtl_libs.insert(mtl.filename.as_str(), out);
}
sponza_round_trip
.load_mtls_fn(|_, mtllib| Ok(round_trip_mtl_libs.get(mtllib).unwrap().as_slice()))
.unwrap();
assert_eq!(sponza_round_trip.data, sponza.data);
}<|fim▁end|> | };
// Write each mtl lib to a string and load it back using load_mtls_fn into sponza_round_trip.
let mut round_trip_mtl_libs = std::collections::HashMap::new(); |
<|file_name|>test_mkl.py<|end_file_name|><|fim▁begin|>import pytest
import numpy as np
import scipy.linalg
import scipy.sparse
import qutip
if qutip.settings.has_mkl:
from qutip._mkl.spsolve import mkl_splu, mkl_spsolve
pytestmark = [
pytest.mark.skipif(not qutip.settings.has_mkl,
reason='MKL extensions not found.'),
]
class Test_spsolve:
def test_single_rhs_vector_real(self):
Adense = np.array([[0, 1, 1],
[1, 0, 1],
[0, 0, 1]])
As = scipy.sparse.csr_matrix(Adense)
np.random.seed(1234)
x = np.random.randn(3)
b = As * x
x2 = mkl_spsolve(As, b, verbose=True)
np.testing.assert_allclose(x, x2)
def test_single_rhs_vector_complex(self):
A = qutip.rand_herm(10)
x = qutip.rand_ket(10).full()
b = A.full() @ x
y = mkl_spsolve(A.data, b, verbose=True)
np.testing.assert_allclose(x, y)
@pytest.mark.parametrize('dtype', [np.float64, np.complex128])
def test_multi_rhs_vector(self, dtype):
M = np.array([
[1, 0, 2],
[0, 0, 3],
[-4, 5, 6],
], dtype=dtype)
sM = scipy.sparse.csr_matrix(M)
N = np.array([
[3, 0, 1],
[0, 2, 0],
[0, 0, 0],
], dtype=dtype)
sX = mkl_spsolve(sM, N, verbose=True)
X = scipy.linalg.solve(M, N)
np.testing.assert_allclose(X, sX)
def test_rhs_shape_is_maintained(self):
A = scipy.sparse.csr_matrix(np.array([
[1, 0, 2],
[0, 0, 3],
[-4, 5, 6],
], dtype=np.complex128))
b = np.array([0, 2, 0], dtype=np.complex128)
out = mkl_spsolve(A, b, verbose=True)
assert b.shape == out.shape
b = np.array([0, 2, 0], dtype=np.complex128).reshape((3, 1))
out = mkl_spsolve(A, b, verbose=True)
assert b.shape == out.shape
def test_sparse_rhs(self):
A = scipy.sparse.csr_matrix([
[1, 2, 0],
[0, 3, 0],
[0, 0, 5],
])
b = scipy.sparse.csr_matrix([
[0, 1],
[1, 0],
[0, 0],
])
x = mkl_spsolve(A, b, verbose=True)
ans = np.array([[-0.66666667, 1],<|fim▁hole|> [0.33333333, 0],
[0, 0]])
np.testing.assert_allclose(x.toarray(), ans)
@pytest.mark.parametrize('dtype', [np.float64, np.complex128])
def test_symmetric_solver(self, dtype):
A = qutip.rand_herm(np.arange(1, 11)).data
if dtype == np.float64:
A = A.real
x = np.ones(10, dtype=dtype)
b = A.dot(x)
y = mkl_spsolve(A, b, hermitian=1, verbose=True)
np.testing.assert_allclose(x, y)
class Test_splu:
@pytest.mark.parametrize('dtype', [np.float64, np.complex128])
def test_repeated_rhs_solve(self, dtype):
M = np.array([
[1, 0, 2],
[0, 0, 3],
[-4, 5, 6],
], dtype=dtype)
sM = scipy.sparse.csr_matrix(M)
N = np.array([
[3, 0, 1],
[0, 2, 0],
[0, 0, 0],
], dtype=dtype)
test_X = np.zeros((3, 3), dtype=dtype)
lu = mkl_splu(sM, verbose=True)
for k in range(3):
test_X[:, k] = lu.solve(N[:, k])
lu.delete()
expected_X = scipy.linalg.solve(M, N)
np.testing.assert_allclose(test_X, expected_X)<|fim▁end|> | |
<|file_name|>httputil.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// Package httputil provides utility functions for bypassing
// some mundane tasks when working with net/http.
package httputil<|fim▁end|> | |
<|file_name|>filters.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
unicode_urlencode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError
from jinja2._compat import imap, string_types, text_type, iteritems
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
"""Decorator for marking context dependent filters. The current
:class:`Context` will be passed as first argument.
"""
f.contextfilter = True
return f
def evalcontextfilter(f):
"""Decorator for marking eval-context dependent filters. An eval
context object is passed as first argument. For more information
about the eval context, see :ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfilter = True
return f
def environmentfilter(f):
"""Decorator for marking evironment dependent filters. The current
:class:`Environment` is passed to the filter as first argument.
"""
f.environmentfilter = True
return f
def make_attrgetter(environment, attribute):
"""Returns a callable that looks up the given attribute from a
passed object with the rules of the environment. Dots are allowed
to access attributes of attributes. Integer parts in paths are
looked up as integers.
"""
if not isinstance(attribute, string_types) \
or ('.' not in attribute and not attribute.isdigit()):
return lambda x: environment.getitem(x, attribute)
attribute = attribute.split('.')
def attrgetter(item):
for part in attribute:
if part.isdigit():
part = int(part)
item = environment.getitem(item, part)
return item
return attrgetter
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, '__html__'):
value = value.__html__()
return escape(text_type(value))
def do_urlencode(value):
"""Escape strings for use in URLs (uses UTF-8 encoding). It accepts both
dictionaries and regular strings as well as pairwise iterables.
.. versionadded:: 2.7
"""
itemiter = None
if isinstance(value, dict):
itemiter = iteritems(value)
elif not isinstance(value, string_types):
try:
itemiter = iter(value)
except TypeError:
pass
if itemiter is None:
return unicode_urlencode(value)
return u'&'.join(unicode_urlencode(k) + '=' +
unicode_urlencode(v) for k, v in itemiter)
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
"""Return a copy of the value with all occurrences of a substring
replaced with a new one. The first argument is the substring
that should be replaced, the second is the replacement string.
If the optional third argument ``count`` is given, only the first
``count`` occurrences are replaced:
.. sourcecode:: jinja
{{ "Hello World"|replace("Hello", "Goodbye") }}
-> Goodbye World
{{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
-> d'oh, d'oh, aaargh
"""
if count is None:
count = -1
if not eval_ctx.autoescape:
return text_type(s).replace(text_type(old), text_type(new), count)
if hasattr(old, '__html__') or hasattr(new, '__html__') and \
not hasattr(s, '__html__'):
s = escape(s)
else:
s = soft_unicode(s)
return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
"""Convert a value to uppercase."""
return soft_unicode(s).upper()
def do_lower(s):
"""Convert a value to lowercase."""
return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
"""Create an SGML/XML attribute string based on the items in a dict.
All values that are neither `none` nor `undefined` are automatically
escaped:
.. sourcecode:: html+jinja
<ul{{ {'class': 'my_list', 'missing': none,
'id': 'list-%d'|format(variable)}|xmlattr }}>
...
</ul>
Results in something like this:
.. sourcecode:: html
<ul class="my_list" id="list-42">
...
</ul>
As you can see it automatically prepends a space in front of the item
if the filter returned something unless the second parameter is false.
"""
rv = u' '.join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in iteritems(d)
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
rv = u' ' + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_capitalize(s):
"""Capitalize a value. The first character will be uppercase, all others
lowercase.
"""
return soft_unicode(s).capitalize()
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
rv = []
for item in re.compile(r'([-\s]+)(?u)').split(soft_unicode(s)):
if not item:
continue
rv.append(item[0].upper() + item[1:].lower())
return ''.join(rv)
def do_dictsort(value, case_sensitive=False, by='key'):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
{% for item in mydict|dictsort %}
sort the dict by key, case insensitive
{% for item in mydict|dictsort(true) %}
sort the dict by key, case sensitive
{% for item in mydict|dictsort(false, 'value') %}
sort the dict by value, case insensitive
"""
if by == 'key':
pos = 0
elif by == 'value':
pos = 1
else:
raise FilterArgumentError('You can only sort by either '
'"key" or "value"')
def sort_func(item):
value = item[pos]
if isinstance(value, string_types) and not case_sensitive:
value = value.lower()
return value
return sorted(value.items(), key=sort_func)
@environmentfilter
def do_sort(environment, value, reverse=False, case_sensitive=False,
attribute=None):
"""Sort an iterable. Per default it sorts ascending, if you pass it
true as first argument it will reverse the sorting.
If the iterable is made of strings the third parameter can be used to
control the case sensitiveness of the comparison which is disabled by
default.
.. sourcecode:: jinja
{% for item in iterable|sort %}
...
{% endfor %}
It is also possible to sort by an attribute (for example to sort
by the date of an object) by specifying the `attribute` parameter:
.. sourcecode:: jinja
{% for item in iterable|sort(attribute='date') %}
...
{% endfor %}
.. versionchanged:: 2.6
The `attribute` parameter was added.
"""
if not case_sensitive:
def sort_func(item):
if isinstance(item, string_types):
item = item.lower()
return item
else:
sort_func = None
if attribute is not None:
getter = make_attrgetter(environment, attribute)
def sort_func(item, processor=sort_func or (lambda x: x)):
return processor(getter(item))
return sorted(value, key=sort_func, reverse=reverse)
def do_default(value, default_value=u'', boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
.. sourcecode:: jinja<|fim▁hole|> {{ my_variable|default('my_variable is not defined') }}
This will output the value of ``my_variable`` if the variable was
defined, otherwise ``'my_variable is not defined'``. If you want
to use default with variables that evaluate to false you have to
set the second parameter to `true`:
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
"""
if isinstance(value, Undefined) or (boolean and not value):
return default_value
return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u'', attribute=None):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
It is also possible to join certain attributes of an object:
.. sourcecode:: jinja
{{ users|join(', ', attribute='username') }}
.. versionadded:: 2.6
The `attribute` parameter was added.
"""
if attribute is not None:
value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
# no automatic escaping? joining is a lot eaiser then
if not eval_ctx.autoescape:
return text_type(d).join(imap(text_type, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = text_type(item)
if do_escape:
d = escape(d)
else:
d = text_type(d)
return d.join(value)
# no html involved, to normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
"""Centers the value in a field of a given width."""
return text_type(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return next(iter(seq))
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 kB,
4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
Giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (Mebi, Gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
prefixes = [
(binary and 'KiB' or 'kB'),
(binary and 'MiB' or 'MB'),
(binary and 'GiB' or 'GB'),
(binary and 'TiB' or 'TB'),
(binary and 'PiB' or 'PB'),
(binary and 'EiB' or 'EB'),
(binary and 'ZiB' or 'ZB'),
(binary and 'YiB' or 'YB')
]
if bytes == 1:
return '1 Byte'
elif bytes < base:
return '%d Bytes' % bytes
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if bytes < unit:
return '%.1f %s' % ((base * bytes / unit), prefix)
return '%.1f %s' % ((base * bytes / unit), prefix)
def do_pprint(value, verbose=False):
"""Pretty print a variable. Useful for debugging.
With Jinja 1.2 onwards you can pass it a parameter. If this parameter
is truthy the output will be more verbose (this requires `pretty`)
"""
return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
target=None):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
to that number. Also a third argument exists that makes the urls
"nofollow":
.. sourcecode:: jinja
{{ mytext|urlize(40, true) }}
links are shortened to 40 chars and defined with rel="nofollow"
If *target* is specified, the ``target`` attribute will be added to the
``<a>`` tag:
.. sourcecode:: jinja
{{ mytext|urlize(40, target='_blank') }}
.. versionchanged:: 2.8+
The *target* parameter was added.
"""
rv = urlize(value, trim_url_limit, nofollow, target)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_indent(s, width=4, indentfirst=False):
"""Return a copy of the passed string, each line indented by
4 spaces. The first line is not indented. If you want to
change the number of spaces or indent the first line too
you can pass additional parameters to the filter:
.. sourcecode:: jinja
{{ mytext|indent(2, true) }}
indent by two spaces and indent the first line too.
"""
indention = u' ' * width
rv = (u'\n' + indention).join(s.splitlines())
if indentfirst:
rv = indention + rv
return rv
def do_truncate(s, length=255, killwords=False, end='...'):
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
parameter is ``true`` the filter will cut the text at length. Otherwise
it will discard the last word. If the text was in fact
truncated it will append an ellipsis sign (``"..."``). If you want a
different ellipsis sign than ``"..."`` you can specify it using the
third parameter.
.. sourcecode:: jinja
{{ "foo bar baz"|truncate(9) }}
-> "foo ..."
{{ "foo bar baz"|truncate(9, True) }}
-> "foo ba..."
"""
if len(s) <= length:
return s
elif killwords:
return s[:length - len(end)] + end
result = s[:length - len(end)].rsplit(' ', 1)[0]
if len(result) < length:
result += ' '
return result + end
@environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True,
wrapstring=None):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`. By default, the newlines
will be the default newlines for the environment, but this can be changed
using the wrapstring keyword argument.
.. versionadded:: 2.7
Added support for the `wrapstring` parameter.
"""
if not wrapstring:
wrapstring = environment.newline_sequence
import textwrap
return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s))
def do_int(value, default=0):
"""Convert the value into an integer. If the
conversion doesn't work it will return ``0``. You can
override this default using the first parameter.
"""
try:
return int(value)
except (TypeError, ValueError):
# this quirk is necessary so that "42.23"|int gives 42.
try:
return int(float(value))
except (TypeError, ValueError):
return default
def do_float(value, default=0.0):
"""Convert the value into a floating point number. If the
conversion doesn't work it will return ``0.0``. You can
override this default using the first parameter.
"""
try:
return float(value)
except (TypeError, ValueError):
return default
def do_format(value, *args, **kwargs):
"""
Apply python string formatting on an object:
.. sourcecode:: jinja
{{ "%s - %s"|format("Hello?", "Foo!") }}
-> Hello? - Foo!
"""
if args and kwargs:
raise FilterArgumentError('can\'t handle positional and keyword '
'arguments at the same time')
return soft_unicode(value) % (kwargs or args)
def do_trim(value):
"""Strip leading and trailing whitespace."""
return soft_unicode(value).strip()
def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space.
"""
if hasattr(value, '__html__'):
value = value.__html__()
return Markup(text_type(value)).striptags()
def do_slice(value, slices, fill_with=None):
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration.
"""
seq = list(value)
length = len(seq)
items_per_slice = length // slices
slices_with_extra = length % slices
offset = 0
for slice_number in range(slices):
start = offset + slice_number * items_per_slice
if slice_number < slices_with_extra:
offset += 1
end = offset + (slice_number + 1) * items_per_slice
tmp = seq[start:end]
if fill_with is not None and slice_number >= slices_with_extra:
tmp.append(fill_with)
yield tmp
def do_batch(value, linecount, fill_with=None):
"""
A filter that batches items. It works pretty much like `slice`
just the other way round. It returns a list of lists with the
given number of items. If you provide a second parameter this
is used to fill up missing items. See this example:
.. sourcecode:: html+jinja
<table>
{%- for row in items|batch(3, ' ') %}
<tr>
{%- for column in row %}
<td>{{ column }}</td>
{%- endfor %}
</tr>
{%- endfor %}
</table>
"""
tmp = []
for item in value:
if len(tmp) == linecount:
yield tmp
tmp = []
tmp.append(item)
if tmp:
if fill_with is not None and len(tmp) < linecount:
tmp += [fill_with] * (linecount - len(tmp))
yield tmp
def do_round(value, precision=0, method='common'):
"""Round the number to a given precision. The first
parameter specifies the precision (default is ``0``), the
second the rounding method:
- ``'common'`` rounds either up or down
- ``'ceil'`` always rounds up
- ``'floor'`` always rounds down
If you don't specify a method ``'common'`` is used.
.. sourcecode:: jinja
{{ 42.55|round }}
-> 43.0
{{ 42.55|round(1, 'floor') }}
-> 42.5
Note that even if rounded to 0 precision, a float is returned. If
you need a real integer, pipe it through `int`:
.. sourcecode:: jinja
{{ 42.55|round|int }}
-> 43
"""
if not method in ('common', 'ceil', 'floor'):
raise FilterArgumentError('method must be common, ceil or floor')
if method == 'common':
return round(value, precision)
func = getattr(math, method)
return func(value * (10 ** precision)) / (10 ** precision)
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
.. versionchanged:: 2.6
It's now possible to use dotted notation to group by the child
attribute of another attribute.
"""
expr = make_attrgetter(environment, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, xxx_todo_changeme):
(key, value) = xxx_todo_changeme
return tuple.__new__(cls, (key, list(value)))
@environmentfilter
def do_sum(environment, iterable, attribute=None, start=0):
"""Returns the sum of a sequence of numbers plus the value of parameter
'start' (which defaults to 0). When the sequence is empty it returns
start.
It is also possible to sum up only certain attributes:
.. sourcecode:: jinja
Total: {{ items|sum(attribute='price') }}
.. versionchanged:: 2.6
The `attribute` parameter was added to allow suming up over
attributes. Also the `start` parameter was moved on to the right.
"""
if attribute is not None:
iterable = imap(make_attrgetter(environment, attribute), iterable)
return sum(iterable, start)
def do_list(value):
"""Convert the value into a list. If it was a string the returned list
will be a list of characters.
"""
return list(value)
def do_mark_safe(value):
"""Mark the value as safe which means that in an environment with automatic
escaping enabled this variable will not be escaped.
"""
return Markup(value)
def do_mark_unsafe(value):
"""Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
return text_type(value)
def do_reverse(value):
"""Reverse the object or return an iterator the iterates over it the other
way round.
"""
if isinstance(value, string_types):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse()
return rv
except TypeError:
raise FilterArgumentError('argument must be iterable')
@environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo.bar`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if environment.sandboxed and not \
environment.is_safe_attribute(obj, name, value):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
@contextfilter
def do_map(*args, **kwargs):
"""Applies a filter on a sequence of objects or looks up an attribute.
This is useful when dealing with lists of objects but you are really
only interested in a certain value of it.
The basic usage is mapping on an attribute. Imagine you have a list
of users but you are only interested in a list of usernames:
.. sourcecode:: jinja
Users on this page: {{ users|map(attribute='username')|join(', ') }}
Alternatively you can let it invoke a filter by passing the name of the
filter and the arguments afterwards. A good example would be applying a
text conversion filter on a sequence:
.. sourcecode:: jinja
Users on this page: {{ titles|map('lower')|join(', ') }}
.. versionadded:: 2.7
"""
context = args[0]
seq = args[1]
if len(args) == 2 and 'attribute' in kwargs:
attribute = kwargs.pop('attribute')
if kwargs:
raise FilterArgumentError('Unexpected keyword argument %r' %
next(iter(kwargs)))
func = make_attrgetter(context.environment, attribute)
else:
try:
name = args[2]
args = args[3:]
except LookupError:
raise FilterArgumentError('map requires a filter argument')
func = lambda item: context.environment.call_filter(
name, item, args, kwargs, context=context)
if seq:
for item in seq:
yield func(item)
@contextfilter
def do_select(*args, **kwargs):
"""Filters a sequence of objects by applying a test to the object and only
selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|select("odd") }}
{{ numbers|select("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, False)
@contextfilter
def do_reject(*args, **kwargs):
"""Filters a sequence of objects by applying a test to the object and
rejecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|reject("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, False)
@contextfilter
def do_selectattr(*args, **kwargs):
"""Filters a sequence of objects by applying a test to an attribute of an
object and only selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ users|selectattr("is_active") }}
{{ users|selectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, True)
@contextfilter
def do_rejectattr(*args, **kwargs):
"""Filters a sequence of objects by applying a test to an attribute of an
object or the attribute and rejecting the ones with the test succeeding.
.. sourcecode:: jinja
{{ users|rejectattr("is_active") }}
{{ users|rejectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, True)
def _select_or_reject(args, kwargs, modfunc, lookup_attr):
context = args[0]
seq = args[1]
if lookup_attr:
try:
attr = args[2]
except LookupError:
raise FilterArgumentError('Missing parameter for attribute name')
transfunc = make_attrgetter(context.environment, attr)
off = 1
else:
off = 0
transfunc = lambda x: x
try:
name = args[2 + off]
args = args[3 + off:]
func = lambda item: context.environment.call_test(
name, item, args, kwargs)
except LookupError:
func = bool
if seq:
for item in seq:
if modfunc(func(transfunc(item))):
yield item
FILTERS = {
'attr': do_attr,
'replace': do_replace,
'upper': do_upper,
'lower': do_lower,
'escape': escape,
'e': escape,
'forceescape': do_forceescape,
'capitalize': do_capitalize,
'title': do_title,
'default': do_default,
'd': do_default,
'join': do_join,
'count': len,
'dictsort': do_dictsort,
'sort': do_sort,
'length': len,
'reverse': do_reverse,
'center': do_center,
'indent': do_indent,
'title': do_title,
'capitalize': do_capitalize,
'first': do_first,
'last': do_last,
'map': do_map,
'random': do_random,
'reject': do_reject,
'rejectattr': do_rejectattr,
'filesizeformat': do_filesizeformat,
'pprint': do_pprint,
'truncate': do_truncate,
'wordwrap': do_wordwrap,
'wordcount': do_wordcount,
'int': do_int,
'float': do_float,
'string': soft_unicode,
'list': do_list,
'urlize': do_urlize,
'format': do_format,
'trim': do_trim,
'striptags': do_striptags,
'select': do_select,
'selectattr': do_selectattr,
'slice': do_slice,
'batch': do_batch,
'sum': do_sum,
'abs': abs,
'round': do_round,
'groupby': do_groupby,
'safe': do_mark_safe,
'xmlattr': do_xmlattr,
'urlencode': do_urlencode
}<|fim▁end|> | |
<|file_name|>HTMLEditorTransform.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import org.simpleframework.xml.transform.Transform;
public class HTMLEditorTransform implements Transform<HTMLEditor>
{
public HTMLEditor read(String value) throws Exception
{
HTMLEditor editor = new HTMLEditor();
editor.setHtmlText(value);
return editor;
}
@Override
public String write(HTMLEditor value) throws Exception
{
return value.getHtmlText();
}
}<|fim▁end|> | package com.dexvis.simple.transform;
import javafx.scene.web.HTMLEditor;
|
<|file_name|>revision.py<|end_file_name|><|fim▁begin|># This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for manipulating revision chains in the database."""
import d1_common.types.exceptions
import d1_gmn.app
import d1_gmn.app.did
import d1_gmn.app.model_util
import d1_gmn.app.models
def create_or_update_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
chain_model = _get_chain_by_pid(pid)
if chain_model:
_set_chain_sid(chain_model, sid)
else:
_add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid)
_update_sid_to_last_existing_pid_map(pid)
def delete_chain(pid):
pid_to_chain_model = d1_gmn.app.models.ChainMember.objects.get(pid__did=pid)
chain_model = pid_to_chain_model.chain
pid_to_chain_model.delete()
if not d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model).exists():
if chain_model.sid:
# Cascades back to chain_model.
d1_gmn.app.models.IdNamespace.objects.filter(
did=chain_model.sid.did
).delete()
else:
chain_model.delete()
def cut_from_chain(sciobj_model):
    """Remove an object from a revision chain.
    The object can be at any location in the chain, including the head or tail.
    Preconditions:
    - The object with the pid is verified to exist and to be a member of an
    revision chain. E.g., with:
    d1_gmn.app.views.asserts.is_existing_object(pid)
    d1_gmn.app.views.asserts.is_in_revision_chain(pid)
    Postconditions:
    - The given object is a standalone object with empty obsoletes, obsoletedBy and
    seriesId fields.
    - The previously adjacent objects in the chain are adjusted to close any gap that
    was created or remove dangling reference at the head or tail.
    - If the object was the last object in the chain and the chain has a SID, the SID
    reference is shifted over to the new last object in the chain.
    """
    if _is_head(sciobj_model):
        # Head (obsoletes something, not obsoleted): keep a reference to the
        # neighbor that becomes the new head.
        old_pid = sciobj_model.obsoletes.did
        _cut_head_from_chain(sciobj_model)
    elif _is_tail(sciobj_model):
        # Tail (obsoleted by something, obsoletes nothing).
        old_pid = sciobj_model.obsoleted_by.did
        _cut_tail_from_chain(sciobj_model)
    else:
        # Embedded: both neighbors exist and are stitched back together.
        old_pid = sciobj_model.obsoleted_by.did
        _cut_embedded_from_chain(sciobj_model)
    # Re-resolve the chain head from a PID that is still in the chain.
    _update_sid_to_last_existing_pid_map(old_pid)
def get_all_pid_by_sid(sid):
    """Return the PIDs of all members of the chain designated by ``sid``."""
    pid_list = []
    for chain_member_model in _get_all_chain_member_queryset_by_sid(sid):
        pid_list.append(chain_member_model.pid.did)
    return pid_list
# def set_revision(pid, obsoletes_pid=None, obsoleted_by_pid=None):
# sciobj_model = d1_gmn.app.util.get_sci_model(pid)
# set_revision_links(sciobj_model, obsoletes_pid, obsoleted_by_pid)
# sciobj_model.save()
def resolve_sid(sid):
    """Get the PID to which the ``sid`` currently maps.

    Preconditions:
      - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
    """
    chain_model = d1_gmn.app.models.Chain.objects.get(sid__did=sid)
    return chain_model.head_pid.did
def get_sid_by_pid(pid):
    """Given the ``pid`` of an object in a chain, return the SID for the chain.

    Return None if the chain has no SID. Also valid for standalone objects,
    which may or may not have a SID. This is the reverse of resolve; all known
    PIDs are associated with a chain.

    Preconditions:
      - ``pid`` is verified to exist. E.g., with
        d1_gmn.app.views.asserts.is_existing_object().
    """
    chain_model = _get_chain_by_pid(pid)
    return d1_gmn.app.did.get_did_by_foreign_key(chain_model.sid)
def set_revision_links(sciobj_model, obsoletes_pid=None, obsoleted_by_pid=None):
    """Set the forward revision references on ``sciobj_model`` and the matching
    reverse references on the referenced objects (when they exist locally).

    Referenced PIDs are created in the identifier namespace if not yet known.
    """
    if obsoletes_pid:
        sciobj_model.obsoletes = d1_gmn.app.did.get_or_create_did(obsoletes_pid)
        # Set obsoletedBy on the object that this one obsoletes.
        _set_revision_reverse(sciobj_model.pid.did, obsoletes_pid, is_obsoletes=False)
    if obsoleted_by_pid:
        sciobj_model.obsoleted_by = d1_gmn.app.did.get_or_create_did(obsoleted_by_pid)
        # Set obsoletes on the object that obsoletes this one.
        _set_revision_reverse(sciobj_model.pid.did, obsoleted_by_pid, is_obsoletes=True)
    sciobj_model.save()
def is_obsoletes_pid(pid):
    """Return True if ``pid`` is referenced in the obsoletes field of any object.

    This will return True even if the PID is in the obsoletes field of an object that
    does not exist on the local MN, such as replica that is in an incomplete chain.
    """
    obsoleting_queryset = d1_gmn.app.models.ScienceObject.objects.filter(
        obsoletes__did=pid
    )
    return obsoleting_queryset.exists()
def is_obsoleted_by_pid(pid):
    """Return True if ``pid`` is referenced in the obsoletedBy field of any object.

    This will return True even if the PID is in the obsoletedBy field of an object
    that does not exist on the local MN, such as replica that is in an incomplete
    chain.
    """
    return d1_gmn.app.models.ScienceObject.objects.filter(
        obsoleted_by__did=pid
    ).exists()
def is_revision(pid):
    """Return True if ``pid`` is referenced in the obsoletes or obsoletedBy field of
    any object.

    This will return True even if the PID is in the obsoletes field of an object that
    does not exist on the local MN, such as replica that is in an incomplete chain.
    """
    if is_obsoletes_pid(pid):
        return True
    return is_obsoleted_by_pid(pid)
def _add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid):
    """Record ``pid`` in a chain: join an existing chain via its obsoletes /
    obsoletedBy / SID references when possible, otherwise create a new
    standalone chain for it.
    """
    # Removed dead commented-out branching that duplicated this logic.
    if not _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
        _add_standalone(pid, sid)
def _add_standalone(pid, sid):
    """Create a new single-member chain for a standalone object ``pid``,
    optionally recording ``sid`` as the chain's SID.
    """
    # Removed dead commented-out call to a no-longer-present assert_sid_unused().
    _create_chain(pid, sid)
def _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
    """Attempt to link ``pid`` into an existing chain located via its obsoletes,
    obsoletedBy or SID references.

    Return True if an existing chain was found and joined, or False if the
    caller must instead add ``pid`` as a standalone object. Separately created
    chains that the new object proves to be connected are merged.
    """
    _assert_sid_is_in_chain(sid, obsoletes_pid)
    _assert_sid_is_in_chain(sid, obsoleted_by_pid)
    obsoletes_chain_model = _get_chain_by_pid(obsoletes_pid)
    obsoleted_by_chain_model = _get_chain_by_pid(obsoleted_by_pid)
    sid_chain_model = _get_chain_by_sid(sid) if sid else None
    # Any of the three references may locate the chain to join.
    chain_model = obsoletes_chain_model or obsoleted_by_chain_model or sid_chain_model
    if not chain_model:
        return False
    # The new object may reveal that separate chains are sections of one chain.
    if obsoletes_chain_model and obsoletes_chain_model != chain_model:
        _merge_chains(chain_model, obsoletes_chain_model)
    if obsoleted_by_chain_model and obsoleted_by_chain_model != chain_model:
        _merge_chains(chain_model, obsoleted_by_chain_model)
    _add_pid_to_chain(chain_model, pid)
    _set_chain_sid(chain_model, sid)
    return True
def _merge_chains(chain_model_a, chain_model_b):
    """Merge two chains.
    For use when it becomes known that two chains that were created separately
    actually are separate sections of the same chain
    E.g.:
    - A obsoleted by X is created. A has no SID. X does not exist yet. A chain is
    created for A.
    - B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created
    for B.
    - C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the
    same chain, which means that A and B are in the same chain. At this point, the
    two chains need to be merged. Merging the chains causes A to take on the SID of
    B.
    """
    # Carry chain B's SID (if any) over to chain A; raises on SID conflict.
    _set_chain_sid(
        chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid)
    )
    # Re-home every member of chain B into chain A, then drop the empty chain B.
    for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b):
        member_model.chain = chain_model_a
        member_model.save()
    chain_model_b.delete()
def _add_pid_to_chain(chain_model, pid):
    """Create a ChainMember row that links ``pid`` into ``chain_model``."""
    d1_gmn.app.models.ChainMember(
        chain=chain_model, pid=d1_gmn.app.did.get_or_create_did(pid)
    ).save()
def _set_chain_sid(chain_model, sid):
    """Set or update SID for chain.
    If the chain already has a SID, ``sid`` must either be None or match the existing
    SID.
    """
    # A None/empty SID is a no-op; it never clears an existing SID.
    if not sid:
        return
    # A SID is permanent once assigned; reassignment is an error.
    if chain_model.sid and chain_model.sid.did != sid:
        raise d1_common.types.exceptions.ServiceFailure(
            0,
            "Attempted to modify existing SID. "
            'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
        )
    chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
    chain_model.save()
def _assert_sid_is_in_chain(sid, pid):
    """Raise ServiceFailure if ``pid`` belongs to a chain whose SID differs from
    ``sid``.

    No-op when either argument is empty, when ``pid`` has no chain, or when the
    chain has no SID.
    """
    if not sid or not pid:
        return
    chain_model = _get_chain_by_pid(pid)
    if not chain_model or not chain_model.sid:
        return
    if chain_model.sid.did != sid:
        raise d1_common.types.exceptions.ServiceFailure(
            0,
            "Attempted to create object in chain with non-matching SID. "
            'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
        )
def _find_head_or_latest_connected(pid, last_pid=None):
    """Find latest existing sciobj that can be reached by walking towards the head
    from ``pid``.

    If ``pid`` does not exist, return None (or ``last_pid`` when given). If the
    chain is connected all the way to the head and the head exists, return the
    head. If the chain ends in a dangling obsoletedBy, return the last existing
    object.
    """
    # Iterative walk instead of recursion: revision chains can be arbitrarily
    # long, and the recursive version would hit Python's recursion limit.
    while True:
        try:
            sci_model = d1_gmn.app.model_util.get_sci_model(pid)
        except d1_gmn.app.models.ScienceObject.DoesNotExist:
            # Dangling reference: return the last object that did exist.
            return last_pid
        if sci_model.obsoleted_by is None:
            # Reached the head of the chain.
            return pid
        pid, last_pid = sci_model.obsoleted_by.did, pid
def _get_chain_by_pid(pid):
    """Find chain by pid.

    Return None if not found.
    """
    try:
        chain_member_model = d1_gmn.app.models.ChainMember.objects.get(pid__did=pid)
    except d1_gmn.app.models.ChainMember.DoesNotExist:
        return None
    return chain_member_model.chain
def _get_chain_by_sid(sid):
    """Find chain by sid. Return None if not found."""
    try:
        chain_model = d1_gmn.app.models.Chain.objects.get(sid__did=sid)
    except d1_gmn.app.models.Chain.DoesNotExist:
        return None
    return chain_model
def _update_sid_to_last_existing_pid_map(pid):
    """Set chain head PID to the last existing object in the chain to which ``pid``
    belongs. If SID has been set for chain, it resolves to chain head PID.
    Intended to be called in MNStorage.delete() and other chain manipulation.
    Preconditions:
    - ``pid`` must exist and be verified to be a PID.
    d1_gmn.app.views.asserts.is_existing_object()
    """
    # Walk towards the head to find the newest object that still exists locally.
    last_pid = _find_head_or_latest_connected(pid)
    chain_model = _get_chain_by_pid(last_pid)
    # No chain to update (e.g., the walk returned None for a missing object).
    if not chain_model:
        return
    chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
    chain_model.save()
def _create_chain(pid, sid):
    """Create the initial chain structure for a new standalone object. Intended to be
    called in MNStorage.create().

    The new chain has ``pid`` as its only member and head. ``sid``, when given,
    is recorded as the chain's SID.

    Preconditions:
      - ``sid`` must be verified to be available to be assigned to a new standalone
        object. E.g., with is_valid_sid_for_new_standalone().
    """
    # Removed dead commented-out ``sid=...`` kwarg; the SID is applied below via
    # _set_chain_sid(), which also validates it.
    chain_model = d1_gmn.app.models.Chain(
        head_pid=d1_gmn.app.did.get_or_create_did(pid)
    )
    chain_model.save()
    _add_pid_to_chain(chain_model, pid)
    _set_chain_sid(chain_model, sid)
    return chain_model
# def _get_or_create_chain_for_pid(pid):
# try:
# return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
# except d1_gmn.app.models.ChainMember.DoesNotExist:
# return _create_chain(pid, None)
def _map_sid_to_pid(chain_model, sid, pid):
    # Point the chain at ``pid`` as its head and, when ``sid`` is given, record
    # it as the chain's SID.
    # NOTE(review): within this module the only callers are in commented-out
    # code — confirm external usage before removing.
    if sid is not None:
        chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
    chain_model.head_pid = d1_gmn.app.did.get_or_create_did(pid)
    chain_model.save()
def _get_all_chain_member_queryset_by_sid(sid):
    """Return a queryset of all ChainMember rows in the chain that has ``sid``."""
    chain_model = d1_gmn.app.models.Chain.objects.get(sid__did=sid)
    return d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model)
def _get_all_chain_member_queryset_by_chain(chain_model):
    """Return a queryset of all ChainMember rows belonging to ``chain_model``."""
    return d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model)
def _cut_head_from_chain(sciobj_model):
    # Detach the head object: the object it obsoletes becomes the new head.
    new_head_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
    new_head_model.obsoleted_by = None
    sciobj_model.obsoletes = None
    sciobj_model.save()
    new_head_model.save()
def _cut_tail_from_chain(sciobj_model):
    # Detach the tail object: the object that obsoletes it becomes the new tail.
    new_tail_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
    new_tail_model.obsoletes = None
    sciobj_model.obsoleted_by = None
    sciobj_model.save()
    new_tail_model.save()
def _cut_embedded_from_chain(sciobj_model):
    # Detach an object from the middle of the chain and stitch its two
    # neighbors directly together to close the gap.
    prev_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
    next_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
    prev_model.obsoleted_by = next_model.pid
    next_model.obsoletes = prev_model.pid
    sciobj_model.obsoletes = None
    sciobj_model.obsoleted_by = None
    sciobj_model.save()
    prev_model.save()
    next_model.save()
def _is_head(sciobj_model):
return sciobj_model.obsoletes and not sciobj_model.obsoleted_by
def _is_tail(sciobj_model):
return sciobj_model.obsoleted_by and not sciobj_model.obsoletes
def _set_revision_reverse(to_pid, from_pid, is_obsoletes):
    """Set the obsoletes or obsoletedBy reference on the sciobj with ``from_pid``
    so that it points at ``to_pid``.

    Silently does nothing when either object does not exist locally.
    """
    try:
        sciobj_model = d1_gmn.app.model_util.get_sci_model(from_pid)
    except d1_gmn.app.models.ScienceObject.DoesNotExist:
        return
    if not d1_gmn.app.did.is_existing_object(to_pid):
        return
    did_model = d1_gmn.app.did.get_or_create_did(to_pid)
    if is_obsoletes:
        sciobj_model.obsoletes = did_model
    else:
        sciobj_model.obsoleted_by = did_model
    sciobj_model.save()
# def assert_sid_unused(sid):
# if not sid:
# return
# if find_chain_by_sid(sid):
# raise d1_common.types.exceptions.ServiceFailure(
# 0, u'Attempted to create standalone object with SID already in use. '
# 'sid="{}"'.format(sid)
# )
# def upd_sid_resolve(pid, sid=None, obsoletes_pid=None, obsoleted_by_pid=None):
# """Set SID to resolve to the newest object that exists locally for a chain"""
#
# last_pid = find_head_or_latest_connected(pid)
# def has_chain(pid):
# return d1_gmn.app.models.ChainMember.objects.filter(pid__did=pid).exists()
# def create_chain(sid, pid):
# """Create the initial chain structure for a new standalone object. Intended to
# be called in MNStorage.create().
#
# Preconditions:
# - ``sid`` must either be None or be previously unused.
# d1_gmn.app.views.asserts.is_unused()
# - ``pid`` must exist and be verified to be a PID.
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(pid)
# _map_sid_to_pid(chain_model, sid, pid)
# def add_pid_to_chain(sid, old_pid, new_pid):
# """Add a new revision ``new_pid`` to the chain that ``old_pid`` belongs to and
# update any SID to resolve to the new PID. Intended to be called in
# MNStorage.update().
#
# Preconditions:
# - ``sid`` must either be None or match the SID already assigned to the chain.
# - Both ``old_pid`` and ``new_pid`` must exist and be verified to be PIDs
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(old_pid)
# _add_pid_to_chain(chain_model, new_pid)
# _map_sid_to_pid(chain_model, sid, new_pid)
# def is_sid_in_revision_chain(sid, pid):
# """Determine if ``sid`` resolves to an object in the revision chain to which
# ``pid`` belongs.
#
# Preconditions:
# - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
# """
# chain_pid_list = get_pids_in_revision_chain(pid)
# resolved_pid = resolve_sid(sid)
# return resolved_pid in chain_pid_list
# def update_or_create_sid_to_pid_map(sid, pid):
# """Update existing or create a new ``sid`` to ``pid`` association. Then create
# or update the ``sid`` to resolve to the ``pid``.
#
# Preconditions:
# - ``sid`` is verified to be unused if creating a standalone object (that may later become
# the first object in a chain).
# - ``sid`` is verified to belong to the given chain updating.
# - ``pid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
# """
# d1_gmn.app.models.sid_to_pid(sid, pid)
# d1_gmn.app.models.sid_to_head_pid(sid, pid)
# def get_sid_by_pid(pid):
# """Get the SID to which the ``pid`` maps.
# Return None if there is no SID maps to ``pid``.
# """
# try:
# return d1_gmn.app.models.SeriesIdToPersistentId.objects.get(
# pid__did=pid
# ).sid.did
# except d1_gmn.app.models.SeriesIdToPersistentId.DoesNotExist:
# return None
# def move_sid_to_last_object_in_chain(pid):
# """Move SID to the last object in a chain to which ``pid`` belongs.
#
# - If the chain does not have a SID, do nothing.
# - If the SID already maps to the last object in the chain, do nothing.
#
# A SID always resolves to the last object in its chain. So System Metadata XML
# docs are used for introducing SIDs and setting initial mappings, but the
# database maintains the current mapping going forward.
#
# Preconditions:
# - PID is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
#
# Postconditions:
# - The SID maps to the last object in the chain.
# """
# sid = sysmeta_db.get_sid_by_pid(pid)
# if sid:
# chain_pid_list = sysmeta_db.get_pids_in_revision_chain(pid)
# update_sid(sid, chain_pid_list[-1])
# def update_revision_chain(pid, obsoletes_pid, obsoleted_by_pid, sid):
# with sysmeta_file.SysMetaFile(pid) as sysmeta_pyxb:
# sysmeta_file.update_revision_chain(
# sysmeta_pyxb, obsoletes_pid, obsoleted_by_pid, sid
# )
# sysmeta_db.update_revision_chain(sysmeta_pyxb)
# if sysmeta.obsoletes is not None:
# chain_pid_list = [pid]
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoletes:
# obsoletes_pid = sysmeta_pyxb.obsoletes.value()<|fim▁hole|># sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoleted_by:
# obsoleted_by_pid = sysmeta_pyxb.obsoleted_by.value()
# chain_pid_list.append(obsoleted_by_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoleted_by_pid)
# return chain_pid_list<|fim▁end|> | # chain_pid_list.append(obsoletes_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoletes_pid) |
<|file_name|>bitcoin_eo.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="eo" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About ncicoin</source>
<translation>Pri ncicoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>ncicoin</b> version</source>
<translation><b>ncicoin</b>-a versio</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The ncicoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Adresaro</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Duoble-klaku por redakti adreson aŭ etikedon</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Kreu novan adreson</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiu elektitan adreson al la tondejo</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Nova Adreso</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your ncicoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Kopiu Adreson</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a ncicoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified ncicoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Forviŝu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your ncicoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Kopiu &Etikedon</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Redaktu</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Eksportu Adresarajn Datumojn</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Diskoma dosiero (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Eraro dum eksportado</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ne eblis skribi al dosiero %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(ne etikedo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Enigu pasfrazon</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova pasfrazo</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ripetu novan pasfrazon</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Enigu novan pasfrazon por la monujo.<br/>Bonvolu, uzu pasfrazon kun <b>10 aŭ pli hazardaj signoj</b>, aŭ <b>ok aŭ pli vortoj</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Ĉifru monujon</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ĉi tiu operacio devas vian monujan pasfrazon, por malŝlosi la monujon.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Malŝlosu monujon</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ĉi tiu operacio devas vian monujan pasfrazon, por malĉifri la monujon.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Malĉifru monujon</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Anstataŭigu pasfrazon</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Enigu la malnovan kaj novan monujan pasfrazon.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Konfirmu ĉifrado de monujo</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR ZETACOINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Monujo ĉifrita</translation>
</message>
<message>
<location line="-56"/>
<source>ncicoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your ncicoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Monujo ĉifrado fiaskis</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Ĉifrado de monujo fiaskis, kaŭze de interna eraro. Via monujo ne ĉifritas.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>La pasfrazoj enigitaj ne samas.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Monujo malŝlosado fiaskis</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La pasfrazo enigita por ĉifrado de monujo ne konformas.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Monujo malĉifrado fiaskis</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Subskribu &mesaĝon...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sinkronigante kun reto...</translation>
</message>
<message>
<location line="-349"/><|fim▁hole|> <source>&Overview</source>
<translation>&Superrigardo</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transakcioj</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Esploru historion de transakcioj</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>&Eliru</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Eliru de aplikaĵo</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about ncicoin</source>
<translation>Vidigu informaĵon pri Bitmono</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Pri &QT</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Vidigu informaĵon pri Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opcioj...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Ĉifru Monujon...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Anstataŭigu pasfrazon...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a ncicoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for ncicoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Kontrolu mesaĝon...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>ncicoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Monujo</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About ncicoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your ncicoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified ncicoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Dosiero</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Agordoj</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Helpo</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>ncicoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to ncicoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n horo</numerusform><numerusform>%n horoj</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n tago</numerusform><numerusform>%n tagoj</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n semajno</numerusform><numerusform>%n semajnoj</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Ĝisdata</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Ĝisdatigante...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Sendita transakcio</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Envenanta transakcio</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid ncicoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj nun <b>malŝlosita</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Monujo estas <b>ĉifrita</b> kaj nun <b>ŝlosita</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. ncicoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Reta Averto</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Redaktu Adreson</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etikedo</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>La etikedo interrilatita kun ĉi tiun adreso</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adreso</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>La adreso enigita "%1" jam ekzistas en la adresaro.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid ncicoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
    <source>Could not unlock wallet.</source>
    <translation>Ne eblis malŝlosi monujon.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>ncicoin-Qt</source>
<translation>ncicoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versio</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opcioj</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start ncicoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start ncicoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the ncicoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the ncicoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting ncicoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show ncicoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Nuligu</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Apliku</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting ncicoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the ncicoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Nekonfirmita:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Monujo</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Lastaj transakcioj</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start ncicoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etikedo:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Reto</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Malfermu</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the ncicoin-Qt help message to get a list with possible ncicoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>ncicoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>ncicoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the ncicoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the ncicoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Sendu Monojn</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Sendu samtempe al multaj ricevantoj</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123,456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> al %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Ĉu vi vere volas sendi %1?</translation>
</message>
<message>
<location line="+0"/>
    <source> and </source>
    <translation> kaj </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etikedo:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Elektu adreson el adresaro</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Algluu adreson de tondejo</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Forigu ĉi tiun ricevanton</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a ncicoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Algluu adreson de tondejo</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this ncicoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified ncicoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a ncicoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter ncicoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+25"/>
<source>The ncicoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/nekonfirmita</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 konfirmoj</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ankoraŭ ne elsendita sukcese</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>nekonata</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transakciaj detaloj</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Ricevita de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pago al vi mem</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Transakcia tipo.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Ĉiuj</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hodiaŭ</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Ricevita kun</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Sendita al</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Al vi mem</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minita</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopiu adreson</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Redaktu etikedon</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Diskoma dosiero (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Konfirmita</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etikedo</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adreso</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Sumo</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Eraro dum eksportado</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ne eblis skribi al dosiero %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>ncicoin version</source>
<translation>ncicoin-a versio</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or ncicoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Listigu instrukciojn</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opcioj:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: ncicoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: ncicoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332 or testnet: 18332)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=ncicoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "ncicoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. ncicoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong ncicoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the ncicoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Ŝarĝante adresojn...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of ncicoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart ncicoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Ŝarĝante blok-indekson...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. ncicoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Ŝarĝante monujon...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Ŝarĝado finitas</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Por uzi la opcion %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Eraro</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>MediaUploader.java<|end_file_name|><|fim▁begin|>package com.lesgrosspoof.bemydiary.network;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import com.lesgrosspoof.bemydiary.AbstractActivity;
import com.lesgrosspoof.bemydiary.R;
import com.lesgrosspoof.bemydiary.entities.ItemSelection;
import com.lesgrosspoof.bemydiary.entities.Media;
import com.lesgrosspoof.bemydiary.entities.MediaToUpload;
import com.lesgrosspoof.bemydiary.models.Boards;
import com.lesgrosspoof.bemydiary.models.Medias;
import com.lesgrosspoof.bemydiary.models.Selections;
public class MediaUploader implements AsyncResultListener
{
private static MediaUploader _instance;
private ArrayList<MediaToUpload> queue;
private boolean isUploading;
private AbstractActivity activity;
public static MediaUploader getInstance() {
if (_instance == null)
_instance = new MediaUploader();
return _instance;
}
	/** Registers the activity used as the callback/notification target for uploads. */
	public void setCallbackActivity(AbstractActivity activity)
	{
		this.activity = activity;
	}
	// Private constructor: instances are obtained via getInstance() (singleton).
	private MediaUploader()
	{
		queue = new ArrayList<MediaToUpload>();
	}
public void addToQueue(MediaToUpload m)
{
queue.add(m);
if(!isUploading)
{
System.out.println("connection is not busy, let's upload !");
notifyLoading();
uploadNext();
}
else
{
System.out.println("oops, must wait until previous upload finishes...");
}
System.out.println("queue : "+queue.toString());
}
private void uploadNext()
{
if(queue.size() > 0)
{
System.out.println("beginning upload...");
isUploading = true;
MediaToUpload media = queue.get(0);
AsyncRequest request = new AsyncRequest(this, AsyncRequest.UPLOAD_MEDIA, "http://dev.bemydiary.fr/media.json", "POST", AuthManager.getInstance().getCookie());
HashMap<String, String> params = new HashMap<String, String>();
String id_selection_site = null;
List<ItemSelection> selections = Selections.getInstance().get(Boards.getInstance().getCurrentBoardId());
for(ItemSelection item : selections)
{
if(item.getId() == Integer.parseInt(media.getId_lieu()))
{<|fim▁hole|>
params.put("medium[selection_id]", id_selection_site);
params.put("authenticity_token", AuthManager.getInstance().getCsrf_token());
System.out.println("csrf : "+AuthManager.getInstance().getCsrf_token());
params.put("medium[upload]", media.getMedia().getContent());
request.execute(params);
}
else
{
System.out.println("Queue is empty, my job here is done !");
this.deleteNotification();
}
}
private void uploadFinished()
{
System.out.println("upload finished.");
isUploading = false;
queue.remove(0);
uploadNext();
}
public void callback(String result, int type)
{
JSONObject json = null;
try
{
json = new JSONObject(result);
}
catch (JSONException e)
{
e.printStackTrace();
}
if(type == AsyncRequest.UPLOAD_MEDIA)
{
if(json != null)
{
System.out.println("Response : "+json.toString());
Media lastMedia = queue.get(0).getMedia();
try
{
lastMedia.setId_site(json.getString("_id"));
}
catch (JSONException e)
{
e.printStackTrace();
}
Medias.getInstance().update(lastMedia);
}
uploadFinished();
}
}
private final void notifyLoading()
{
Notification notification = new Notification(R.drawable.wheelanim, null, System.currentTimeMillis());
PendingIntent pendingIntent = PendingIntent.getActivity(activity, 0,
new Intent(), 0);
notification.flags |= Notification.FLAG_NO_CLEAR;
notification.setLatestEventInfo(activity, "Publication du carnet",
"Mise à jour des médias...", pendingIntent);
((NotificationManager) activity.getSystemService(activity.NOTIFICATION_SERVICE)).notify(
1338, notification);
}
private final void deleteNotification()
{
((NotificationManager) activity.getSystemService(activity.NOTIFICATION_SERVICE)).cancel(1338);
}
public boolean isUploading() {
return isUploading;
}
}<|fim▁end|> | id_selection_site = item.getId_site();
break;
}
} |
<|file_name|>base.rs<|end_file_name|><|fim▁begin|>use std::io::Write;
<|fim▁hole|> White,
Grey,
}
// Shamelessly stolen from termios
// which doesn't compile on Win32
// which is why I'm doing all this nonsense in the first place
pub enum Event {
Key(Key),
Mouse(MouseEvent),
Unsupported(Vec<u32>),
}
// Derived from termios, with modifications
// Precedence **must** be Ctrl(Alt(Shift())) in that order
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
pub enum Key {
Backspace,
Left,
Right,
Up,
Down,
Home,
End,
PageUp,
PageDown,
Delete,
Insert,
F(u8),
Char(char),
Shift(Box<Key>),
Alt(Box<Key>),
Ctrl(Box<Key>),
Null,
Esc,
}
impl Key {
pub fn is_char(&self) -> bool {
match self {
&Key::Char(_) => true,
_ => false
}
}
pub fn is_navigation(&self) -> bool {
match self {
&Key::Left | &Key::Right | &Key::Up | &Key::Down |
&Key::Home | &Key::End | &Key::PageUp | &Key::PageDown => true,
_ => false
}
}
}
// Also termios
pub enum MouseEvent {
Press(MouseButton, i32, i32),
Release(i32, i32),
Hold(i32, i32),
}
// Still termios
pub enum MouseButton {
Left,
Right,
Middle,
WheelUp,
WheelDown,
}
// Me again
pub trait TermImpl: Write + Default {
fn get_size(&self) -> (i32, i32);
fn goto(&mut self, (i32, i32));
fn set_color_fg(&mut self, Color);
fn set_color_bg(&mut self, Color);
fn clear(&mut self);
// fn keys(&mut self) -> Iterator<Item = Event>;
}<|fim▁end|> | pub enum Color {
Reset,
Black, |
<|file_name|>audio_buffer.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
namespace webrtc {
namespace {
enum {
kSamplesPer8kHzChannel = 80,
kSamplesPer16kHzChannel = 160,
kSamplesPer32kHzChannel = 320
};
bool HasKeyboardChannel(AudioProcessing::ChannelLayout layout) {
switch (layout) {
case AudioProcessing::kMono:
case AudioProcessing::kStereo:
return false;
case AudioProcessing::kMonoAndKeyboard:
case AudioProcessing::kStereoAndKeyboard:
return true;
}
assert(false);
return false;
}
int KeyboardChannelIndex(AudioProcessing::ChannelLayout layout) {
switch (layout) {
case AudioProcessing::kMono:
case AudioProcessing::kStereo:
assert(false);
return -1;
case AudioProcessing::kMonoAndKeyboard:
return 1;
case AudioProcessing::kStereoAndKeyboard:
return 2;
}
assert(false);
return -1;
}
void StereoToMono(const float* left, const float* right, float* out,
int samples_per_channel) {
for (int i = 0; i < samples_per_channel; ++i) {
out[i] = (left[i] + right[i]) / 2;
}
}
void StereoToMono(const int16_t* left, const int16_t* right, int16_t* out,
int samples_per_channel) {
for (int i = 0; i < samples_per_channel; ++i) {
out[i] = (left[i] + right[i]) >> 1;
}
}
} // namespace
// One int16_t and one float ChannelBuffer that are kept in sync. The sync is
// broken when someone requests write access to either ChannelBuffer, and
// reestablished when someone requests the outdated ChannelBuffer. It is
// therefore safe to use the return value of ibuf_const() and fbuf_const()
// until the next call to ibuf() or fbuf(), and the return value of ibuf() and
// fbuf() until the next call to any of the other functions.
class IFChannelBuffer {
public:
IFChannelBuffer(int samples_per_channel, int num_channels)
: ivalid_(true),
ibuf_(samples_per_channel, num_channels),
fvalid_(true),
fbuf_(samples_per_channel, num_channels) {}
ChannelBuffer<int16_t>* ibuf() { return ibuf(false); }
ChannelBuffer<float>* fbuf() { return fbuf(false); }
const ChannelBuffer<int16_t>* ibuf_const() { return ibuf(true); }
const ChannelBuffer<float>* fbuf_const() { return fbuf(true); }
private:
ChannelBuffer<int16_t>* ibuf(bool readonly) {
RefreshI();
fvalid_ = readonly;
return &ibuf_;
}
ChannelBuffer<float>* fbuf(bool readonly) {
RefreshF();
ivalid_ = readonly;
return &fbuf_;
}
void RefreshF() {
if (!fvalid_) {
assert(ivalid_);
const int16_t* const int_data = ibuf_.data();
float* const float_data = fbuf_.data();
const int length = fbuf_.length();
for (int i = 0; i < length; ++i)
float_data[i] = int_data[i];
fvalid_ = true;
}
}
void RefreshI() {
if (!ivalid_) {
assert(fvalid_);
const float* const float_data = fbuf_.data();
int16_t* const int_data = ibuf_.data();
const int length = ibuf_.length();
for (int i = 0; i < length; ++i)
int_data[i] = WEBRTC_SPL_SAT(std::numeric_limits<int16_t>::max(),
float_data[i],
std::numeric_limits<int16_t>::min());
ivalid_ = true;
}
}
bool ivalid_;
ChannelBuffer<int16_t> ibuf_;
bool fvalid_;
ChannelBuffer<float> fbuf_;
};
AudioBuffer::AudioBuffer(int input_samples_per_channel,
int num_input_channels,
int process_samples_per_channel,
int num_process_channels,
int output_samples_per_channel)
: input_samples_per_channel_(input_samples_per_channel),
num_input_channels_(num_input_channels),
proc_samples_per_channel_(process_samples_per_channel),
num_proc_channels_(num_process_channels),
output_samples_per_channel_(output_samples_per_channel),
samples_per_split_channel_(proc_samples_per_channel_),
mixed_low_pass_valid_(false),
reference_copied_(false),
activity_(AudioFrame::kVadUnknown),
keyboard_data_(NULL),
channels_(new IFChannelBuffer(proc_samples_per_channel_,
num_proc_channels_)) {
assert(input_samples_per_channel_ > 0);
assert(proc_samples_per_channel_ > 0);
assert(output_samples_per_channel_ > 0);
assert(num_input_channels_ > 0 && num_input_channels_ <= 2);
assert(num_proc_channels_ <= num_input_channels);
if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
input_buffer_.reset(new ChannelBuffer<float>(input_samples_per_channel_,
num_proc_channels_));
}
if (input_samples_per_channel_ != proc_samples_per_channel_ ||
output_samples_per_channel_ != proc_samples_per_channel_) {
// Create an intermediate buffer for resampling.
process_buffer_.reset(new ChannelBuffer<float>(proc_samples_per_channel_,
num_proc_channels_));
}
if (input_samples_per_channel_ != proc_samples_per_channel_) {
input_resamplers_.reserve(num_proc_channels_);
for (int i = 0; i < num_proc_channels_; ++i) {
input_resamplers_.push_back(
new PushSincResampler(input_samples_per_channel_,
proc_samples_per_channel_));
}
}
if (output_samples_per_channel_ != proc_samples_per_channel_) {
output_resamplers_.reserve(num_proc_channels_);
for (int i = 0; i < num_proc_channels_; ++i) {
output_resamplers_.push_back(
new PushSincResampler(proc_samples_per_channel_,
output_samples_per_channel_));
}
}
if (proc_samples_per_channel_ == kSamplesPer32kHzChannel) {
samples_per_split_channel_ = kSamplesPer16kHzChannel;
split_channels_low_.reset(new IFChannelBuffer(samples_per_split_channel_,
num_proc_channels_));
split_channels_high_.reset(new IFChannelBuffer(samples_per_split_channel_,
num_proc_channels_));
filter_states_.reset(new SplitFilterStates[num_proc_channels_]);
}
}
AudioBuffer::~AudioBuffer() {}
void AudioBuffer::CopyFrom(const float* const* data,
int samples_per_channel,
AudioProcessing::ChannelLayout layout) {
assert(samples_per_channel == input_samples_per_channel_);
assert(ChannelsFromLayout(layout) == num_input_channels_);
InitForNewData();
if (HasKeyboardChannel(layout)) {
keyboard_data_ = data[KeyboardChannelIndex(layout)];
}
// Downmix.
const float* const* data_ptr = data;
if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
StereoToMono(data[0],
data[1],
input_buffer_->channel(0),
input_samples_per_channel_);
data_ptr = input_buffer_->channels();
}
// Resample.
if (input_samples_per_channel_ != proc_samples_per_channel_) {
for (int i = 0; i < num_proc_channels_; ++i) {
input_resamplers_[i]->Resample(data_ptr[i],
input_samples_per_channel_,
process_buffer_->channel(i),
proc_samples_per_channel_);
}
data_ptr = process_buffer_->channels();
}
// Convert to int16.
for (int i = 0; i < num_proc_channels_; ++i) {
ScaleAndRoundToInt16(data_ptr[i], proc_samples_per_channel_,
channels_->ibuf()->channel(i));
}
}
void AudioBuffer::CopyTo(int samples_per_channel,
AudioProcessing::ChannelLayout layout,
float* const* data) {
assert(samples_per_channel == output_samples_per_channel_);
assert(ChannelsFromLayout(layout) == num_proc_channels_);
// Convert to float.
float* const* data_ptr = data;
if (output_samples_per_channel_ != proc_samples_per_channel_) {
// Convert to an intermediate buffer for subsequent resampling.
data_ptr = process_buffer_->channels();
}
for (int i = 0; i < num_proc_channels_; ++i) {
ScaleToFloat(channels_->ibuf()->channel(i),
proc_samples_per_channel_,
data_ptr[i]);
}
// Resample.
if (output_samples_per_channel_ != proc_samples_per_channel_) {
for (int i = 0; i < num_proc_channels_; ++i) {
output_resamplers_[i]->Resample(data_ptr[i],
proc_samples_per_channel_,
data[i],
output_samples_per_channel_);
}
}
}
void AudioBuffer::InitForNewData() {
keyboard_data_ = NULL;
mixed_low_pass_valid_ = false;
reference_copied_ = false;
activity_ = AudioFrame::kVadUnknown;
}
const int16_t* AudioBuffer::data(int channel) const {
return channels_->ibuf_const()->channel(channel);
}
int16_t* AudioBuffer::data(int channel) {
mixed_low_pass_valid_ = false;
return channels_->ibuf()->channel(channel);
}
const float* AudioBuffer::data_f(int channel) const {
return channels_->fbuf_const()->channel(channel);
}
float* AudioBuffer::data_f(int channel) {
mixed_low_pass_valid_ = false;
return channels_->fbuf()->channel(channel);
}
const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
return split_channels_low_.get()
? split_channels_low_->ibuf_const()->channel(channel)
: data(channel);
}
int16_t* AudioBuffer::low_pass_split_data(int channel) {
mixed_low_pass_valid_ = false;
return split_channels_low_.get()
? split_channels_low_->ibuf()->channel(channel)
: data(channel);
}
const float* AudioBuffer::low_pass_split_data_f(int channel) const {
return split_channels_low_.get()
? split_channels_low_->fbuf_const()->channel(channel)
: data_f(channel);
}
float* AudioBuffer::low_pass_split_data_f(int channel) {
mixed_low_pass_valid_ = false;
return split_channels_low_.get()
? split_channels_low_->fbuf()->channel(channel)
: data_f(channel);
}
const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
return split_channels_high_.get()<|fim▁hole|>int16_t* AudioBuffer::high_pass_split_data(int channel) {
return split_channels_high_.get()
? split_channels_high_->ibuf()->channel(channel)
: NULL;
}
const float* AudioBuffer::high_pass_split_data_f(int channel) const {
return split_channels_high_.get()
? split_channels_high_->fbuf_const()->channel(channel)
: NULL;
}
float* AudioBuffer::high_pass_split_data_f(int channel) {
return split_channels_high_.get()
? split_channels_high_->fbuf()->channel(channel)
: NULL;
}
const int16_t* AudioBuffer::mixed_low_pass_data() {
// Currently only mixing stereo to mono is supported.
assert(num_proc_channels_ == 1 || num_proc_channels_ == 2);
if (num_proc_channels_ == 1) {
return low_pass_split_data(0);
}
if (!mixed_low_pass_valid_) {
if (!mixed_low_pass_channels_.get()) {
mixed_low_pass_channels_.reset(
new ChannelBuffer<int16_t>(samples_per_split_channel_, 1));
}
StereoToMono(low_pass_split_data(0),
low_pass_split_data(1),
mixed_low_pass_channels_->data(),
samples_per_split_channel_);
mixed_low_pass_valid_ = true;
}
return mixed_low_pass_channels_->data();
}
const int16_t* AudioBuffer::low_pass_reference(int channel) const {
if (!reference_copied_) {
return NULL;
}
return low_pass_reference_channels_->channel(channel);
}
const float* AudioBuffer::keyboard_data() const {
return keyboard_data_;
}
SplitFilterStates* AudioBuffer::filter_states(int channel) {
assert(channel >= 0 && channel < num_proc_channels_);
return &filter_states_[channel];
}
void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
activity_ = activity;
}
AudioFrame::VADActivity AudioBuffer::activity() const {
return activity_;
}
int AudioBuffer::num_channels() const {
return num_proc_channels_;
}
int AudioBuffer::samples_per_channel() const {
return proc_samples_per_channel_;
}
int AudioBuffer::samples_per_split_channel() const {
return samples_per_split_channel_;
}
int AudioBuffer::samples_per_keyboard_channel() const {
// We don't resample the keyboard channel.
return input_samples_per_channel_;
}
// TODO(andrew): Do deinterleaving and mixing in one step?
void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
assert(proc_samples_per_channel_ == input_samples_per_channel_);
assert(num_proc_channels_ == num_input_channels_);
assert(frame->num_channels_ == num_proc_channels_);
assert(frame->samples_per_channel_ == proc_samples_per_channel_);
InitForNewData();
activity_ = frame->vad_activity_;
int16_t* interleaved = frame->data_;
for (int i = 0; i < num_proc_channels_; i++) {
int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
for (int j = 0; j < proc_samples_per_channel_; j++) {
deinterleaved[j] = interleaved[interleaved_idx];
interleaved_idx += num_proc_channels_;
}
}
}
void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
assert(proc_samples_per_channel_ == output_samples_per_channel_);
assert(num_proc_channels_ == num_input_channels_);
assert(frame->num_channels_ == num_proc_channels_);
assert(frame->samples_per_channel_ == proc_samples_per_channel_);
frame->vad_activity_ = activity_;
if (!data_changed) {
return;
}
int16_t* interleaved = frame->data_;
for (int i = 0; i < num_proc_channels_; i++) {
int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
for (int j = 0; j < proc_samples_per_channel_; j++) {
interleaved[interleaved_idx] = deinterleaved[j];
interleaved_idx += num_proc_channels_;
}
}
}
void AudioBuffer::CopyLowPassToReference() {
reference_copied_ = true;
if (!low_pass_reference_channels_.get()) {
low_pass_reference_channels_.reset(
new ChannelBuffer<int16_t>(samples_per_split_channel_,
num_proc_channels_));
}
for (int i = 0; i < num_proc_channels_; i++) {
low_pass_reference_channels_->CopyFrom(low_pass_split_data(i), i);
}
}
} // namespace webrtc<|fim▁end|> | ? split_channels_high_->ibuf_const()->channel(channel)
: NULL;
}
|
<|file_name|>test_storage.py<|end_file_name|><|fim▁begin|>"""
Test storage
"""
from django.test import TestCase
class StorageTestCase(TestCase):
def test_import(self):
from launchlab_django_utils.storage import StaticRootS3Boto3Storage<|fim▁hole|><|fim▁end|> | from launchlab_django_utils.storage import MediaRootS3Boto3Storage |
<|file_name|>amqp_clock.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
AMQP Clock
Fires off simple messages at one-minute intervals to a topic
exchange named 'clock', with the topic of the message being
the local time as 'year.month.date.dow.hour.minute',
for example: '2007.11.26.1.12.33', where the dow (day of week)
is 0 for Sunday, 1 for Monday, and so on (similar to Unix crontab).
A consumer could then bind a queue to the routing key '#.0'
for example to get a message at the beginning of each hour.
2007-11-26 Barry Pederson <[email protected]>
"""
from datetime import datetime
from optparse import OptionParser
from time import sleep
import amqplib.client_0_8 as amqp
Message = amqp.Message
EXCHANGE_NAME = 'clock'
TOPIC_PATTERN = '%Y.%m.%d.%w.%H.%M' # Python datetime.strftime() pattern
def main():
parser = OptionParser()
parser.add_option('--host', dest='host',
help='AMQP server to connect to (default: %default)',
default='localhost')
parser.add_option('-u', '--userid', dest='userid',
help='AMQP userid to authenticate as (default: %default)',
default='guest')
parser.add_option('-p', '--password', dest='password',
help='AMQP password to authenticate with (default: %default)',
default='guest')
parser.add_option('--ssl', dest='ssl', action='store_true',
help='Enable SSL with AMQP server (default: not enabled)',
default=False)
options, args = parser.parse_args()
conn = amqp.Connection(options.host, options.userid, options.password)
ch = conn.channel()
ch.access_request('/data', write=True, active=True)
ch.exchange_declare(EXCHANGE_NAME, type='topic')
# Make sure our first message is close to the beginning
# of a minute
now = datetime.now()
if now.second > 0:
sleep(60 - now.second)
while True:
now = datetime.now()
msg = Message(timestamp=now)
msg_topic = now.strftime(TOPIC_PATTERN)
ch.basic_publish(msg, EXCHANGE_NAME, routing_key=msg_topic)
# Don't know how long the basic_publish took, so
# grab the time again.<|fim▁hole|> now = datetime.now()
sleep(60 - now.second)
ch.close()
conn.close()
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>pipelines.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|># Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class WikispiderPipeline(object):
def process_item(self, item, spider):
return item<|fim▁end|> | |
<|file_name|>test_quota_sets.py<|end_file_name|><|fim▁begin|># Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class QuotaSetsSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = "os-quota-sets"
def _get_flags(self):
f = super(QuotaSetsSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.server_group_quotas.'
'Server_group_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.quotas.Quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.extended_quotas.Extended_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.user_quotas.User_quotas')
return f
def test_show_quotas(self):
# Get api sample to show quotas.
response = self._do_get('os-quota-sets/fake_tenant')
self._verify_response('quotas-show-get-resp', {}, response, 200)
def test_show_quotas_defaults(self):
# Get api sample to show quotas defaults.
response = self._do_get('os-quota-sets/fake_tenant/defaults')
self._verify_response('quotas-show-defaults-get-resp',
{}, response, 200)
def test_update_quotas(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-post-req',
{})
self._verify_response('quotas-update-post-resp', {}, response, 200)
def test_delete_quotas(self):
# Get api sample to delete quota.
response = self._do_delete('os-quota-sets/fake_tenant')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_force(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',<|fim▁hole|> {})
return self._verify_response('quotas-update-force-post-resp', {},
response, 200)
def test_show_quotas_for_user(self):
# Get api sample to show quotas for user.
response = self._do_get('os-quota-sets/fake_tenant?user_id=1')
self._verify_response('user-quotas-show-get-resp', {}, response, 200)
def test_delete_quotas_for_user(self):
response = self._do_delete('os-quota-sets/fake_tenant?user_id=1')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_for_user(self):
# Get api sample to update quotas for user.
response = self._do_put('os-quota-sets/fake_tenant?user_id=1',
'user-quotas-update-post-req',
{})
return self._verify_response('user-quotas-update-post-resp', {},
response, 200)<|fim▁end|> | 'quotas-update-force-post-req', |
<|file_name|>authz_policy.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Edgewall Software
# Copyright (C) 2007 Alec Thomas <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Alec Thomas <[email protected]>
from fnmatch import fnmatch
from itertools import groupby
import os
from trac.core import *
from trac.config import Option
from trac.perm import PermissionSystem, IPermissionPolicy
ConfigObj = None
try:
from configobj import ConfigObj
except ImportError:
pass
class AuthzPolicy(Component):
"""Permission policy using an authz-like configuration file.
Refer to SVN documentation for syntax of the authz file. Groups are
supported.
As the fine-grained permissions brought by this permission policy are
often used in complement of the other pemission policies (like the
`DefaultPermissionPolicy`), there's no need to redefine all the
permissions here. Only additional rights or restrictions should be added.
=== Installation ===
Note that this plugin requires the `configobj` package:
http://www.voidspace.org.uk/python/configobj.html
You should be able to install it by doing a simple `easy_install configobj`
Enabling this policy requires listing it in `trac.ini:
{{{
[trac]
permission_policies = AuthzPolicy, DefaultPermissionPolicy
[authz_policy]
authz_file = conf/authzpolicy.conf
}}}
This means that the `AuthzPolicy` permissions will be checked first, and
only if no rule is found will the `DefaultPermissionPolicy` be used.
=== Configuration ===
The `authzpolicy.conf` file is a `.ini` style configuration file.
- Each section of the config is a glob pattern used to match against a
Trac resource descriptor. These descriptors are in the form:
{{{
<realm>:<id>@<version>[/<realm>:<id>@<version> ...]
}}}
Resources are ordered left to right, from parent to child. If any
component is inapplicable, `*` is substituted. If the version pattern is
not specified explicitely, all versions (`@*`) is added implicitly
Example: Match the WikiStart page
{{{
[wiki:*]
[wiki:WikiStart*]
[wiki:WikiStart@*]
[wiki:WikiStart]
}}}
Example: Match the attachment `wiki:WikiStart@117/attachment/FOO.JPG@*`
on WikiStart
{{{
[wiki:*]
[wiki:WikiStart*]
[wiki:WikiStart@*]
[wiki:WikiStart@*/attachment/*]
[wiki:WikiStart@117/attachment/FOO.JPG]
}}}
- Sections are checked against the current Trac resource '''IN ORDER''' of
appearance in the configuration file. '''ORDER IS CRITICAL'''.
- Once a section matches, the current username is matched, '''IN ORDER''',
against the keys of the section. If a key is prefixed with a `@`, it is
treated as a group. If a key is prefixed with a `!`, the permission is
denied rather than granted. The username will match any of 'anonymous',
'authenticated', <username> or '*', using normal Trac permission rules.
Example configuration:
{{{
[groups]
administrators = athomas
[*/attachment:*]
* = WIKI_VIEW, TICKET_VIEW
[wiki:WikiStart@*]
@administrators = WIKI_ADMIN
anonymous = WIKI_VIEW
* = WIKI_VIEW
# Deny access to page templates<|fim▁hole|> [wiki:PageTemplates/*]
* =
# Match everything else
[*]
@administrators = TRAC_ADMIN
anonymous = BROWSER_VIEW, CHANGESET_VIEW, FILE_VIEW, LOG_VIEW,
MILESTONE_VIEW, POLL_VIEW, REPORT_SQL_VIEW, REPORT_VIEW, ROADMAP_VIEW,
SEARCH_VIEW, TICKET_CREATE, TICKET_MODIFY, TICKET_VIEW, TIMELINE_VIEW,
WIKI_CREATE, WIKI_MODIFY, WIKI_VIEW
# Give authenticated users some extra permissions
authenticated = REPO_SEARCH, XML_RPC
}}}
"""
implements(IPermissionPolicy)
authz_file = Option('authz_policy', 'authz_file', None,
'Location of authz policy configuration file.')
authz = None
authz_mtime = None
# IPermissionPolicy methods
def check_permission(self, action, username, resource, perm):
if ConfigObj is None:
self.log.error('configobj package not found')
return None
if self.authz_file and not self.authz_mtime or \
os.path.getmtime(self.get_authz_file()) > self.authz_mtime:
self.parse_authz()
resource_key = self.normalise_resource(resource)
self.log.debug('Checking %s on %s', action, resource_key)
permissions = self.authz_permissions(resource_key, username)
if permissions is None:
return None # no match, can't decide
elif permissions == ['']:
return False # all actions are denied
# FIXME: expand all permissions once for all
ps = PermissionSystem(self.env)
for deny, perms in groupby(permissions,
key=lambda p: p.startswith('!')):
if deny and action in ps.expand_actions([p[1:] for p in perms]):
return False # action is explicitly denied
elif action in ps.expand_actions(perms):
return True # action is explicitly granted
return None # no match for action, can't decide
# Internal methods
def get_authz_file(self):
f = self.authz_file
return os.path.isabs(f) and f or os.path.join(self.env.path, f)
def parse_authz(self):
self.env.log.debug('Parsing authz security policy %s' %
self.get_authz_file())
self.authz = ConfigObj(self.get_authz_file())
self.groups_by_user = {}
for group, users in self.authz.get('groups', {}).iteritems():
if isinstance(users, basestring):
users = [users]
for user in users:
self.groups_by_user.setdefault(user, set()).add('@' + group)
self.authz_mtime = os.path.getmtime(self.get_authz_file())
def normalise_resource(self, resource):
def flatten(resource):
if not resource or not (resource.realm or resource.id):
return []
# XXX Due to the mixed functionality in resource we can end up with
# ticket, ticket:1, ticket:1@10. This code naively collapses all
# subsets of the parent resource into one. eg. ticket:1@10
parent = resource.parent
while parent and (resource.realm == parent.realm or \
(resource.realm == parent.realm and resource.id == parent.id)):
parent = parent.parent
if parent:
parent = flatten(parent)
else:
parent = []
return parent + ['%s:%s@%s' % (resource.realm or '*',
resource.id or '*',
resource.version or '*')]
return '/'.join(flatten(resource))
def authz_permissions(self, resource_key, username):
# TODO: Handle permission negation in sections. eg. "if in this
# ticket, remove TICKET_MODIFY"
valid_users = ['*', 'anonymous']
if username and username != 'anonymous':
valid_users = ['*', 'authenticated', username]
for resource_section in [a for a in self.authz.sections
if a != 'groups']:
resource_glob = resource_section
if '@' not in resource_glob:
resource_glob += '@*'
if fnmatch(resource_key, resource_glob):
section = self.authz[resource_section]
for who, permissions in section.iteritems():
if who in valid_users or \
who in self.groups_by_user.get(username, []):
self.env.log.debug('%s matched section %s for user %s'
% (resource_key, resource_glob, username))
if isinstance(permissions, basestring):
return [permissions]
else:
return permissions
return None<|fim▁end|> | |
<|file_name|>DTIPAddress.java<|end_file_name|><|fim▁begin|>// ----------------------------------------------------------------------------
// Copyright 2007-2011, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2006/08/21 Martin D. Flynn
// Initial release
// ----------------------------------------------------------------------------
package org.opengts.dbtypes;
import java.lang.*;
import java.util.*;
import java.math.*;
import java.io.*;
import java.sql.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
public class DTIPAddress
extends DBFieldType
{
// ------------------------------------------------------------------------
private IPTools.IPAddress ipAddr = null;
public DTIPAddress(IPTools.IPAddress ipAddr)
{
this.ipAddr = ipAddr;
}
public DTIPAddress(String ipAddr)
{
super(ipAddr);
this.ipAddr = new IPTools.IPAddress(ipAddr);
}
public DTIPAddress(ResultSet rs, String fldName)
throws SQLException<|fim▁hole|> // set to default value if 'rs' is null
this.ipAddr = (rs != null)? new IPTools.IPAddress(rs.getString(fldName)) : null;
}
// ------------------------------------------------------------------------
public boolean isMatch(String ipAddr)
{
if (this.ipAddr != null) {
return this.ipAddr.isMatch(ipAddr);
} else {
return true;
}
}
// ------------------------------------------------------------------------
public Object getObject()
{
return this.toString();
}
public String toString()
{
return (this.ipAddr != null)? this.ipAddr.toString() : "";
}
// ------------------------------------------------------------------------
public boolean equals(Object other)
{
if (this == other) {
// same object
return true;
} else
if (other instanceof DTIPAddress) {
DTIPAddress otherList = (DTIPAddress)other;
if (otherList.ipAddr == this.ipAddr) {
// will also match if both are null
return true;
} else
if ((this.ipAddr == null) || (otherList.ipAddr == null)) {
// one is null, the other isn't
return false;
} else {
// IPAddressList match
return this.ipAddr.equals(otherList.ipAddr);
}
} else {
return false;
}
}
// ------------------------------------------------------------------------
}<|fim▁end|> | {
super(rs, fldName); |
<|file_name|>gd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2016, Jianfeng Chen <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#<|fim▁hole|># copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
def dist(a, b):
return sum((i-j)**2 for i, j in zip(a, b))
def GD(PF0, PFc):
up = 0
for i in PFc:
up += min([dist(i, j) for j in PF0])
return up**0.5 / (len(PFc))<|fim▁end|> | # Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
<|file_name|>extract_strings_qt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2012-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import operator
import os<|fim▁hole|>OUT_CPP = "qt/bitcoinstrings.cpp"
EMPTY = ['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT = os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
print(
'Cannot extract strings: xgettext utility is not installed or not configured.',
file=sys.stderr)
print('Please install package "gettext" and re-run \'./configure\'.',
file=sys.stderr)
sys.exit(1)
child = Popen([XGETTEXT, '--output=-', '-n',
'--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w', encoding="utf8")
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "{}"),\n'.format(
os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", {}),\n'.format('\n'.join(msgid)))
f.write('};\n')
f.close()<|fim▁end|> | import sys
|
<|file_name|>simpleeval_multi.py<|end_file_name|><|fim▁begin|>from net.grinder.script.Grinder import grinder
from net.grinder.script import Test
from net.grinder.plugin.http import HTTPRequest
from HTTPClient import NVPair
from java.util import Random
newCellTest = Test(1, "Make a new Cell")
evaluationTest = Test(2, "Evaluate")
updateTest = Test(3, "Poll until evaluated")
deleteCellTest = Test(4, "Delete Cell")
class TestRunner:
def __call__(self):
random = Random()
worksheet = random.nextInt(10)
base_url = 'http://localhost:8080/home/admin/%s' % worksheet
request = newCellTest.wrap(HTTPRequest(url=base_url + "/new_cell_after"))
result = request.POST((NVPair("id","0"),))
new_cell = result.text.split()[0].rstrip('___S_A_G_E___')
request = evaluationTest.wrap(HTTPRequest(url=base_url + "/eval"))
a, b = random.nextInt(10**1), random.nextInt(10**1)
evalData = ( NVPair("id", new_cell),
NVPair("input", "%s * %s"% (a,b)),
NVPair("newcell", "0"),)
result = request.POST(evalData)
count = 0 <|fim▁hole|> while (True):
request = updateTest.wrap(HTTPRequest(url=base_url + "/cell_update"))
getData = ( NVPair("id", new_cell),)
result = request.POST(getData)
count += 1
if result.text.find('pre') != -1:
print 'wait',count,'test',a,'*',b,'=', strip_answer(result.text)
break
request = deleteCellTest.wrap(HTTPRequest(url=base_url + "/delete_cell"))
getData = ( NVPair("id", new_cell),)
result = request.POST(getData)
def strip_answer(text):
#<pre class="shrunk">532962756677</pre>
st = text.find('<pre')
end = text.find('</pre>')
return text[st + 20 : end]<|fim▁end|> | |
<|file_name|>enum-nullable-const-null-with-fields.rs<|end_file_name|><|fim▁begin|>use std::result::Result;<|fim▁hole|>static C: Result<(), Box<isize>> = Ok(());
// This is because of yet another bad assertion (ICE) about the null side of a nullable enum.
// So we won't actually compile if the bug is present, but we check the value in main anyway.
pub fn main() {
assert!(C.is_ok());
}<|fim▁end|> | use std::result::Result::Ok;
|
<|file_name|>lc826-most-profit-assigning-work.py<|end_file_name|><|fim▁begin|># coding=utf-8
import unittest
"""826. Most Profit Assigning Work
https://leetcode.com/problems/most-profit-assigning-work/description/
We have jobs: `difficulty[i]` is the difficulty of the `i`th job, and
`profit[i]` is the profit of the `i`th job.
Now we have some workers. `worker[i]` is the ability of the `i`th worker,
which means that this worker can only complete a job with difficulty at most
`worker[i]`.
Every worker can be assigned at most one job, but one job can be completed
multiple times.
For example, if 3 people attempt the same job that pays $1, then the total
profit will be $3. If a worker cannot complete any job, his profit is $0.
What is the most profit we can make?
**Example 1:**
**Input:** difficulty = [2,4,6,8,10], profit = [10,20,30,40,50], worker = [4,5,6,7]
**Output:** 100
**Explanation: W** orkers are assigned jobs of difficulty [4,4,6,6] and they get profit of [20,20,30,30] seperately.
**Notes:**
* `1 <= difficulty.length = profit.length <= 10000`
* `1 <= worker.length <= 10000`
* `difficulty[i], profit[i], worker[i]` are in range `[1, 10^5]`
Similar Questions:
"""
class Solution(object):
def maxProfitAssignment(self, difficulty, profit, worker):
"""
:type difficulty: List[int]
:type profit: List[int]
:type worker: List[int]
:rtype: int
"""
def test(self):
pass
<|fim▁hole|>
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>util.js<|end_file_name|><|fim▁begin|>module.export = {<|fim▁hole|><|fim▁end|> |
}; |
<|file_name|>GridRenderSystem-vtest.js<|end_file_name|><|fim▁begin|>goo.V.attachToGlobal();
V.describe('GridRenderSystem Test');
var gooRunner = V.initGoo();
var world = gooRunner.world;
var gridRenderSystem = new GridRenderSystem();
gooRunner.renderSystems.push(gridRenderSystem);
world.setSystem(gridRenderSystem);
V.addLights();
document.body.addEventListener('keypress', function (e) {
switch (e.keyCode) {
case 49:
break;
case 50:
break;
case 51:
break;
}
});
// camera 1 - spinning
// var cameraEntity = V.addOrbitCamera(new Vector3(25, 0, 0));
// cameraEntity.cameraComponent.camera.setFrustumPerspective(null, null, 1, 10000);
// add camera
var camera = new Camera(undefined, undefined, 1, 10000);
var cameraEntity = gooRunner.world.createEntity(camera, 'CameraEntity', [0, 10, 20]).lookAt([0, 0, 0]).addToWorld();
// camera control set up
var scripts = new ScriptComponent();
var wasdScript = Scripts.create('WASD', {
domElement: gooRunner.renderer.domElement,
walkSpeed: 1000,
crawlSpeed: 20
});
// WASD control script to move around
scripts.scripts.push(wasdScript);
// the FPCam script itself that locks the pointer and moves the camera
var fpScript = Scripts.create('MouseLookScript', {
domElement: gooRunner.renderer.domElement
});
scripts.scripts.push(fpScript);<|fim▁hole|>
world.createEntity('Box', new Box(20, 0.1, 20), new Material(ShaderLib.simpleLit)).addToWorld();
world.createEntity('Sphere', new Sphere(8, 8, 1), new Material(ShaderLib.simpleLit)).addToWorld();
V.process();<|fim▁end|> |
cameraEntity.setComponent(scripts); |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![doc(
html_logo_url = "https://openstratos.org/wp-content/uploads/2017/05/OpenStratos-768x226.png",
html_favicon_url = "https://openstratos.org/wp-content/uploads/2015/10/OpenStratos-mark.png",
html_root_url = "https://openstratos.github.io/server-rs/"
)]
//! OpenStratos balloon software.
//!
//! This crate provides the functionality required to control a stratospheric balloon. It provides
//! several modules that can be enabled using cargo features, and it can be extended by adding more
//! modules.
//!
//! ## Example:
//!
//! If you for example want to use the GPS and the GSM, but no real-time telemetry or Raspberry Pi
//! camera, it's as simple as compiling the crate as follows:
//!
//! ```text
//! cargo build --no-default-features --features="gps fona"
//! ```
//!
//! Here, the `--no-default-features` is required since by default, GPS, GSM (Adafruit FONA),
//! Raspberry Pi camera and real-time transparent serial telemetry will be activated.
//!
//! ## Configuration
//!
//! OpenStratos is highly configurable. Please refer to the [`config`](config/index.html) module for
//! further information.
//!
//! ## Launcher
//!
//! The project has a launcher in `src/main.rs` and can be launched by running `cargo run`. More
//! information can be found in the [`launcher`](../launcher/index.html) crate.
//!
//! ## Simulation mode
//!
//! *In development…*
#![deny(clippy::all)]
#![forbid(anonymous_parameters)]
#![warn(clippy::pedantic)]
#![deny(
variant_size_differences,
unused_results,
unused_qualifications,
unused_import_braces,
unsafe_code,
trivial_numeric_casts,
trivial_casts,
missing_docs,
missing_debug_implementations,
missing_copy_implementations,
box_pointers,
unused_extern_crates
)]
// Removing some warnings
#![allow(unsafe_code, box_pointers, clippy::use_self)]
/// Configuration file.
pub const CONFIG_FILE: &str = "config.toml";
/// Last state file, in the `data` directory.
pub const STATE_FILE: &str = "last_state";
pub mod config;
pub mod error;
#[cfg(feature = "fona")]
pub mod fona;
#[cfg(feature = "gps")]
pub mod gps;
pub mod logic;
#[cfg(feature = "raspicam")]
pub mod raspicam;
#[cfg(feature = "telemetry")]
pub mod telemetry;
use std::fs;
use failure::{Error, ResultExt};
pub use crate::config::CONFIG;
use crate::logic::{MainLogic, State};
/// The main logic of the program.
pub fn run() -> Result<(), Error> {
initialize_data_filesystem().context(error::Fs::DataInit)?;
if let Some(_state) = State::get_last().context(error::LastState::Read)? {
// TODO recover from last state and continue
unimplemented!()
} else {
logic::init().context(error::Logic::Init)?.main_logic()
}
}
/// Initializes the data file system for videos and images.
pub fn initialize_data_filesystem() -> Result<(), Error> {
let video_path = CONFIG.data_dir().join("video");
fs::create_dir_all(&video_path).context(error::Fs::DirectoryCreation { path: video_path })?;
let img_path = CONFIG.data_dir().join("img");
fs::create_dir_all(&img_path).context(error::Fs::DirectoryCreation { path: img_path })?;
Ok(())
}
/// Generates a stack trace string of an error.
#[allow(clippy::use_debug)]
pub fn generate_error_string<S>(error: &Error, main_error: S) -> String
where
S: AsRef<str>,
{
let mut result = format!("{}:\n{}\n", main_error.as_ref(), error);
for e in error.iter_causes() {<|fim▁hole|>
// TODO: print only on debug mode
result.push_str(&format!("\tbacktrace: {:?}\n", error.backtrace()));
result
}
/// Initializes all loggers.
pub fn init_loggers() -> Result<log4rs::Handle, Error> {
use chrono::Utc;
use log::LevelFilter;
use log4rs::{
append::{console::ConsoleAppender, file::FileAppender},
config::{Appender, Config, Logger, Root},
encode::pattern::PatternEncoder,
};
// Only required for GPS, FONA or telemetry
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
use log::Record;
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
use log4rs::filter::{threshold::ThresholdFilter, Filter, Response};
/// Filter that filters all but debug records.
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
#[derive(Debug, Clone, Copy)]
struct DebugFilter;
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
impl Filter for DebugFilter {
fn filter(&self, record: &Record) -> Response {
if record.level() == LevelFilter::Debug {
Response::Neutral
} else {
Response::Reject
}
}
}
/// Filter that filters all but trace records.
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
#[derive(Debug, Clone, Copy)]
struct TraceFilter;
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
impl Filter for TraceFilter {
fn filter(&self, record: &Record) -> Response {
if record.level() == LevelFilter::Trace {
Response::Neutral
} else {
Response::Reject
}
}
}
let now = Utc::now().format("%Y-%m-%d-%H-%M-%S");
let pattern_naive = "[{d(%Y-%m-%d %H:%M:%S %Z)(utc)}][{l}] - {m}{n}";
// Only required for GPS, FONA or telemetry
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
let pattern_exact = "[{d(%Y-%m-%d %H:%M:%S%.3f %Z)(utc)}][{l}] - {m}{n}";
let stdout = ConsoleAppender::builder().build();
let main = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_naive)))
.build(format!("data/logs/main-{}.log", now))
.context(error::Log::Appender { name: "main" })?;
let system = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_naive)))
.build(format!("data/logs/system-{}.log", now))
.context(error::Log::Appender { name: "system" })?;
// Only required for GPS, FONA or telemetry
#[cfg(any(feature = "gps", feature = "fona", feature = "telemetry"))]
let log_level = if CONFIG.debug() {
LevelFilter::Trace
} else {
LevelFilter::Debug
};
let config = Config::builder()
// Appenders
.appender(Appender::builder().build("stdout", Box::new(stdout)))
.appender(Appender::builder().build("main", Box::new(main)))
.appender(Appender::builder().build("system", Box::new(system)))
// Loggers
.logger(
Logger::builder()
.appender("system")
.additive(false)
.build("system", LevelFilter::Info),
);
#[cfg(feature = "raspicam")]
let config = {
let camera = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_naive)))
.build(format!("data/logs/camera-{}.log", now))
.context(error::Log::Appender { name: "camera" })?;
config
.appender(Appender::builder().build("camera", Box::new(camera)))
.logger(
Logger::builder()
.appender("camera")
.additive(false)
.build("os_balloon::camera", LevelFilter::Info),
)
};
#[cfg(feature = "gps")]
let config = {
let gps = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_naive)))
.build(format!("data/logs/gps-{}.log", now))
.context(error::Log::Appender {
name: "os_balloon::gps",
})?;
let gps_frames = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/gps_frames-{}.log", now))
.context(error::Log::Appender {
name: "os_balloon::gps_frames",
})?;
let gps_logger = {
let mut builder = Logger::builder()
.appender("gps")
.appender("gps_frames")
.additive(false);
if CONFIG.debug() {
builder = builder.appender("gps_serial");
}
builder.build("os_balloon::gps", log_level)
};
let config = config
.appender(
Appender::builder()
.filter(Box::new(ThresholdFilter::new(LevelFilter::Info)))
.build("gps", Box::new(gps)),
)
.appender(
Appender::builder()
.filter(Box::new(DebugFilter))
.build("gps_frames", Box::new(gps_frames)),
)
.logger(gps_logger);
if CONFIG.debug() {
let gps_serial = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/gps_serial-{}.log", now))
.context(error::Log::Appender { name: "gps_serial" })?;
config.appender(
Appender::builder()
.filter(Box::new(TraceFilter))
.build("gps_serial", Box::new(gps_serial)),
)
} else {
config
}
};
#[cfg(feature = "fona")]
let config = {
let fona = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_naive)))
.build(format!("data/logs/fona-{}.log", now))
.context(error::Log::Appender {
name: "os_balloon::fona",
})?;
let fona_frames = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/fona_frames-{}.log", now))
.context(error::Log::Appender {
name: "os_balloon::fona_frames",
})?;
let fona_logger = {
let mut builder = Logger::builder()
.appender("fona")
.appender("fona_frames")
.additive(false);
if CONFIG.debug() {
builder = builder.appender("fona_serial");
}
builder.build("os_balloon::fona", log_level)
};
let config = config
.appender(
Appender::builder()
.filter(Box::new(ThresholdFilter::new(LevelFilter::Info)))
.build("fona", Box::new(fona)),
)
.appender(
Appender::builder()
.filter(Box::new(DebugFilter))
.build("fona_frames", Box::new(fona_frames)),
)
.logger(fona_logger);
if CONFIG.debug() {
let gsm_serial = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/fona_serial-{}.log", now))
.context(error::Log::Appender {
name: "fona_serial",
})?;
config.appender(
Appender::builder()
.filter(Box::new(TraceFilter))
.build("fona_serial", Box::new(gsm_serial)),
)
} else {
config
}
};
#[cfg(feature = "telemetry")]
let config = {
let telemetry = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/telemetry-{}.log", now))
.context(error::Log::Appender { name: "telemetry" })?;
let telemetry_frames = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/telemetry_frames-{}.log", now))
.context(error::Log::Appender {
name: "telemetry_frames",
})?;
let telemetry_logger = {
let mut builder = Logger::builder()
.appender("telemetry")
.appender("telemetry_frames")
.additive(false);
if CONFIG.debug() {
builder = builder.appender("telemetry_serial");
}
builder.build("os_balloon::telemetry", log_level)
};
let config = config
.appender(
Appender::builder()
.filter(Box::new(ThresholdFilter::new(LevelFilter::Info)))
.build("telemetry", Box::new(telemetry)),
)
.appender(
Appender::builder()
.filter(Box::new(DebugFilter))
.build("telemetry_frames", Box::new(telemetry_frames)),
)
.logger(telemetry_logger);
if CONFIG.debug() {
let telemetry_serial = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(pattern_exact)))
.build(format!("data/logs/telemetry_serial-{}.log", now))
.context(error::Log::Appender {
name: "telemetry_serial",
})?;
config.appender(
Appender::builder()
.filter(Box::new(TraceFilter))
.build("telemetry_serial", Box::new(telemetry_serial)),
)
} else {
config
}
};
let config = config
.build(
Root::builder()
.appender("stdout")
.appender("main")
.build(LevelFilter::Info),
)
.context(error::Log::Build)?;
Ok(log4rs::init_config(config)?)
}<|fim▁end|> | result.push_str(&format!("\tcaused by: {}\n", e));
} |
<|file_name|>player.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from random import randint
FIGURES = ['камень', 'бумага', 'ножницы']
FIG_LEN = len(FIGURES)
class Player:
"""
Player class is needed to store tactics and to generate figures by this tactic
-- Doctests --
>>> player = Player()
>>> player.figure in FIGURES
True
"""
def __init__(self, number: int):
self.name = 'игрок{}'.format(number)
tactic = randint(0, FIG_LEN-1)
self.main_figure = FIGURES[tactic]
self.__figures = [FIGURES[(tactic+i) % FIG_LEN] for i in range(FIG_LEN)]
def __str__(self):
return '{}: {}'.format(self.name, self.main_figure)
@property
def figure(self):
rand = randint(0, FIG_LEN)
return self.__figures[rand % FIG_LEN]<|fim▁end|> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'litleleprikon' |
<|file_name|>reducer.ts<|end_file_name|><|fim▁begin|>import { documents } from './shape'
import { chats } from 'store/events'
const { getDialogs, loadSlice } = chats
<|fim▁hole|>import { Slice } from 'helpers/reselector.h'
const updater = updateStoreMap<Slice, 'documents'>('documents')
documents
.on(loadSlice.done, updater)
.on(getDialogs.done, updater)<|fim▁end|> | import { updateStoreMap } from 'helpers/reselector' |
<|file_name|>generate_arm_template_request.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class GenerateArmTemplateRequest(Model):
"""Parameters for generating an ARM template for deploying artifacts.
:param virtual_machine_name: The resource name of the virtual machine.
:type virtual_machine_name: str
:param parameters: The parameters of the ARM template.
:type parameters: list[~azure.mgmt.devtestlabs.models.ParameterInfo]
:param location: The location of the virtual machine.
:type location: str
:param file_upload_options: Options for uploading the files for the
artifact. UploadFilesAndGenerateSasTokens is the default value. Possible
values include: 'UploadFilesAndGenerateSasTokens', 'None'
:type file_upload_options: str or
~azure.mgmt.devtestlabs.models.FileUploadOptions
"""
_attribute_map = {
'virtual_machine_name': {'key': 'virtualMachineName', 'type': 'str'},<|fim▁hole|>
def __init__(self, virtual_machine_name=None, parameters=None, location=None, file_upload_options=None):
self.virtual_machine_name = virtual_machine_name
self.parameters = parameters
self.location = location
self.file_upload_options = file_upload_options<|fim▁end|> | 'parameters': {'key': 'parameters', 'type': '[ParameterInfo]'},
'location': {'key': 'location', 'type': 'str'},
'file_upload_options': {'key': 'fileUploadOptions', 'type': 'str'},
} |
<|file_name|>DataTest.java<|end_file_name|><|fim▁begin|>package me.august.lumen.data;
import me.august.lumen.compile.resolve.data.ClassData;
import me.august.lumen.compile.resolve.lookup.DependencyManager;
import org.junit.Assert;
import org.junit.Test;
public class DataTest {
@Test
public void testClassData() {
ClassData data = ClassData.fromClass(String.class);
Assert.assertEquals(
String.class.getName(),
data.getName()
);
String[] expected = new String[]{
"java.io.Serializable", "java.lang.Comparable",
"java.lang.CharSequence"
};
Assert.assertArrayEquals(
expected,
data.getInterfaces()
);
}
@Test
public void testAssignableTo() {
DependencyManager deps = new DependencyManager();
ClassData data;
data = ClassData.fromClass(String.class);
Assert.assertTrue(
"Expected String to be assignable to String",
data.isAssignableTo("java.lang.String", deps)
);
Assert.assertTrue(
"Expected String to be assignable to Object",
data.isAssignableTo("java.lang.Object", deps)
);
Assert.assertTrue(
"Expected String to be assignable to CharSequence",
data.isAssignableTo("java.lang.CharSequence", deps)<|fim▁hole|> );
data = ClassData.fromClass(Object.class);
Assert.assertFalse(
"Expected Object to not be assignable to String",
data.isAssignableTo("java.lang.String", deps)
);
data = ClassData.fromClass(CharSequence.class);
Assert.assertTrue(
"Expected CharSequence to be assignable to Object",
data.isAssignableTo("java.lang.Object", deps)
);
}
}<|fim▁end|> | |
<|file_name|>trackevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::audiotrack::AudioTrack;
use crate::dom::bindings::codegen::Bindings::EventBinding::EventBinding::EventMethods;
use crate::dom::bindings::codegen::Bindings::TrackEventBinding;
use crate::dom::bindings::codegen::Bindings::TrackEventBinding::TrackEventMethods;
use crate::dom::bindings::codegen::UnionTypes::VideoTrackOrAudioTrackOrTextTrack;
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::event::Event;
use crate::dom::globalscope::GlobalScope;
use crate::dom::texttrack::TextTrack;
use crate::dom::videotrack::VideoTrack;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
#[must_root]
#[derive(JSTraceable, MallocSizeOf)]
enum MediaTrack {
Video(Dom<VideoTrack>),
Audio(Dom<AudioTrack>),
Text(Dom<TextTrack>),
}
#[dom_struct]
pub struct TrackEvent {
event: Event,
track: Option<MediaTrack>,
}
impl TrackEvent {
#[allow(unrooted_must_root)]
fn new_inherited(track: &Option<VideoTrackOrAudioTrackOrTextTrack>) -> TrackEvent {
let media_track = match track {
Some(VideoTrackOrAudioTrackOrTextTrack::VideoTrack(VideoTrack)) => {
Some(MediaTrack::Video(Dom::from_ref(VideoTrack)))
},
Some(VideoTrackOrAudioTrackOrTextTrack::AudioTrack(AudioTrack)) => {
Some(MediaTrack::Audio(Dom::from_ref(AudioTrack)))
},
Some(VideoTrackOrAudioTrackOrTextTrack::TextTrack(TextTrack)) => {
Some(MediaTrack::Text(Dom::from_ref(TextTrack)))
},
None => None,
};
TrackEvent {
event: Event::new_inherited(),<|fim▁hole|> }
}
pub fn new(
global: &GlobalScope,
type_: Atom,
bubbles: bool,
cancelable: bool,
track: &Option<VideoTrackOrAudioTrackOrTextTrack>,
) -> DomRoot<TrackEvent> {
let te = reflect_dom_object(
Box::new(TrackEvent::new_inherited(&track)),
global,
TrackEventBinding::Wrap,
);
{
let event = te.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
te
}
pub fn Constructor(
window: &Window,
type_: DOMString,
init: &TrackEventBinding::TrackEventInit,
) -> Fallible<DomRoot<TrackEvent>> {
Ok(TrackEvent::new(
&window.global(),
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,
&init.track,
))
}
}
impl TrackEventMethods for TrackEvent {
// https://html.spec.whatwg.org/multipage/#dom-trackevent-track
fn GetTrack(&self) -> Option<VideoTrackOrAudioTrackOrTextTrack> {
match &self.track {
Some(MediaTrack::Video(VideoTrack)) => Some(
VideoTrackOrAudioTrackOrTextTrack::VideoTrack(DomRoot::from_ref(VideoTrack)),
),
Some(MediaTrack::Audio(AudioTrack)) => Some(
VideoTrackOrAudioTrackOrTextTrack::AudioTrack(DomRoot::from_ref(AudioTrack)),
),
Some(MediaTrack::Text(TextTrack)) => Some(
VideoTrackOrAudioTrackOrTextTrack::TextTrack(DomRoot::from_ref(TextTrack)),
),
None => None,
}
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}<|fim▁end|> | track: media_track, |
<|file_name|>fake.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at<|fim▁hole|>Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package unversioned
import (
"net/http"
"net/url"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/testapi"
"k8s.io/kubernetes/pkg/runtime"
)
type HTTPClientFunc func(*http.Request) (*http.Response, error)
func (f HTTPClientFunc) Do(req *http.Request) (*http.Response, error) {
return f(req)
}
// FakeRESTClient provides a fake RESTClient interface.
type FakeRESTClient struct {
Client HTTPClient
Codec runtime.Codec
Req *http.Request
Resp *http.Response
Err error
}
func (c *FakeRESTClient) Get() *Request {
return NewRequest(c, "GET", &url.URL{Host: "localhost"}, testapi.Default.Version(), c.Codec)
}
func (c *FakeRESTClient) Put() *Request {
return NewRequest(c, "PUT", &url.URL{Host: "localhost"}, testapi.Default.Version(), c.Codec)
}
func (c *FakeRESTClient) Patch(_ api.PatchType) *Request {
return NewRequest(c, "PATCH", &url.URL{Host: "localhost"}, testapi.Default.Version(), c.Codec)
}
func (c *FakeRESTClient) Post() *Request {
return NewRequest(c, "POST", &url.URL{Host: "localhost"}, testapi.Default.Version(), c.Codec)
}
func (c *FakeRESTClient) Delete() *Request {
return NewRequest(c, "DELETE", &url.URL{Host: "localhost"}, testapi.Default.Version(), c.Codec)
}
func (c *FakeRESTClient) Do(req *http.Request) (*http.Response, error) {
c.Req = req
if c.Client != HTTPClient(nil) {
return c.Client.Do(req)
}
return c.Resp, c.Err
}<|fim▁end|> |
http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>storageevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::{StorageEventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: MutNullableHeap<JS<Storage>>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
oldValue: oldValue,
newValue: newValue,
url: url,
storageArea: MutNullableHeap::new(storageArea)
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> Root<StorageEvent> {
let ev = reflect_dom_object(box StorageEvent::new_inherited(key, oldValue, newValue,
url, storageArea),
global,
StorageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<Root<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();<|fim▁hole|> let storageArea = init.storageArea.r();
let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.parent.cancelable {
EventCancelable::Cancelable
} else {
EventCancelable::NotCancelable
};
let event = StorageEvent::new(global, Atom::from(&*type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.oldValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.newValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<Root<Storage>> {
self.storageArea.get()
}
}<|fim▁end|> | let newValue = init.newValue.clone();
let url = init.url.clone(); |
<|file_name|>cpu.rs<|end_file_name|><|fim▁begin|>use super::register::Registers;
use super::keypad::Keypad;
use super::memory::Memory;
use std::fmt;
use std::fs::File;
use sdl2;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::render::Renderer;
use sdl2::EventPump;
use rand;
use time::PreciseTime;
const FRAMES_PER_SECOND: i64 = 4000;
const SKIP_TICKS: i64 = 1000 / FRAMES_PER_SECOND;
pub struct Chip8<'a> {
reg: Registers,
mem: Memory,
keys: Keypad,
sdl_event_pump: EventPump,
<|fim▁hole|> window: Renderer<'a>,
display: [[bool; 32]; 64],
display_updated: bool,
_next_step: bool,
}
impl<'a> fmt::Debug for Chip8<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:#?}{:#?}{:#?}", self.reg, self.mem, self.keys)
}
}
impl<'a> Chip8<'a> {
pub fn new() -> Chip8<'a> {
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let new_window = video_subsystem.window("Rust8", 640, 320)
.position_centered()
.opengl()
.build()
.unwrap();
let renderer = new_window.renderer().build().unwrap();
Chip8 {
reg: Registers::new(),
mem: Memory::default(),
keys: Keypad::default(),
sdl_event_pump: sdl_context.event_pump().unwrap(),
window: renderer,
display: [[false; 32]; 64],
display_updated: false,
_next_step: false,
}
}
pub fn init_display(&mut self) {
self.mem.load_fonts();
self.window.set_draw_color(Color::RGB(0, 0, 0));
self.window.clear();
self.window.present();
self.window.set_draw_color(Color::RGB(255, 255, 255));
}
pub fn run(&mut self) {
let mut quit = false;
let mut start_time = PreciseTime::now();
let mut diff;
'running: loop {
let end_time = PreciseTime::now();
diff = start_time.to(end_time).num_milliseconds();
self.cpu_cycle();
quit = self.handle_input();
if quit == true {
break 'running;
}
if diff >= SKIP_TICKS {
start_time = end_time;
let delay_timer_value = self.reg.read_delay_timer();
if delay_timer_value > 0 {
self.reg.write_delay_timer(delay_timer_value - 1);
}
let sound_timer_value = self.reg.read_sound_timer();
if sound_timer_value > 0 {
// TODO: actually output a beep or something
println!("BEEP!");
self.reg.write_sound_timer(sound_timer_value - 1);
}
}
self.render();
}
}
pub fn _run_debug(&mut self) {
let mut quit = false;
let mut start_time = PreciseTime::now();
let mut diff;
'running: loop {
let end_time = PreciseTime::now();
diff = start_time.to(end_time).num_milliseconds();
while !self._next_step {
quit = self.handle_input();
if quit == true {
break 'running;
}
}
self._next_step = false;
self.cpu_cycle();
if self.display_updated {
self.render();
}
println!("{:?}", self);
quit = self.handle_input();
if quit == true {
break 'running;
}
if diff >= SKIP_TICKS {
start_time = end_time;
let delay_timer_value = self.reg.read_delay_timer();
if delay_timer_value > 0 {
self.reg.write_delay_timer(delay_timer_value - 1);
}
let sound_timer_value = self.reg.read_sound_timer();
if sound_timer_value > 0 {
// TODO: actually output a beep or something
println!("BEEP!");
self.reg.write_sound_timer(sound_timer_value - 1);
}
}
}
}
pub fn store_program_data(&mut self, rom: File) {
self.mem.store_program_data(rom);
}
pub fn _debug_pong_rom(&self) {
self.mem._display_pong_rom();
}
pub fn _debug_font_data(&self) {
self.mem._display_font_data();
}
fn cpu_cycle(&mut self) {
let instruction = self.read_word();
self.process_instruction(instruction);
}
fn render(&mut self) {
let mut fg_rect_vec: Vec<Rect> = Vec::new();
let mut bg_rect_vec: Vec<Rect> = Vec::new();
for x in 0..64 {
for y in 0..32 {
// println!("Loading display byte at {},{}", x, y);
let nibble = self.display[x][y];
if nibble {
fg_rect_vec.push(Rect::new_unwrap((x * 10) as i32, (y * 10) as i32, 10, 10));
} else {
bg_rect_vec.push(Rect::new_unwrap((x * 10) as i32, (y * 10) as i32, 10, 10));
}
}
}
self.window.set_draw_color(Color::RGB(0, 0, 0));
for r in bg_rect_vec {
self.window.fill_rect(r);
}
self.window.set_draw_color(Color::RGB(255, 255, 255));
for r in fg_rect_vec {
self.window.fill_rect(r);
}
self.window.present();
self.display_updated = false;
}
fn handle_input(&mut self) -> bool {
for event in self.sdl_event_pump.poll_iter() {
match event {
Event::Quit {..} | Event::KeyDown {keycode: Some(Keycode::Escape), .. } => {
return true
}
Event::KeyDown {keycode: Some(Keycode::Num1), ..} => {
self.keys.keys[1] = true;
}
Event::KeyDown {keycode: Some(Keycode::Num2), ..} => {
self.keys.keys[2] = true;
}
Event::KeyDown {keycode: Some(Keycode::Num3), ..} => {
self.keys.keys[3] = true;
}
Event::KeyDown {keycode: Some(Keycode::Num4), ..} => {
self.keys.keys[12] = true;
}
Event::KeyDown {keycode: Some(Keycode::Q), ..} => {
self.keys.keys[4] = true;
}
Event::KeyDown {keycode: Some(Keycode::W), ..} => {
self.keys.keys[5] = true;
}
Event::KeyDown {keycode: Some(Keycode::E), ..} => {
self.keys.keys[6] = true;
}
Event::KeyDown {keycode: Some(Keycode::R), ..} => {
self.keys.keys[13] = true;
}
Event::KeyDown {keycode: Some(Keycode::A), ..} => {
self.keys.keys[7] = true;
}
Event::KeyDown {keycode: Some(Keycode::S), ..} => {
self.keys.keys[8] = true;
}
Event::KeyDown {keycode: Some(Keycode::D), ..} => {
self.keys.keys[9] = true;
}
Event::KeyDown {keycode: Some(Keycode::F), ..} => {
self.keys.keys[14] = true;
}
Event::KeyDown {keycode: Some(Keycode::Z), ..} => {
self.keys.keys[10] = true;
}
Event::KeyDown {keycode: Some(Keycode::X), ..} => {
self.keys.keys[0] = true;
}
Event::KeyDown {keycode: Some(Keycode::C), ..} => {
self.keys.keys[11] = true;
}
Event::KeyDown {keycode: Some(Keycode::V), ..} => {
self.keys.keys[15] = true;
}
Event::KeyUp {keycode: Some(Keycode::Num1), ..} => {
self.keys.keys[1] = false;
}
Event::KeyUp {keycode: Some(Keycode::Num2), ..} => {
self.keys.keys[2] = false;
}
Event::KeyUp {keycode: Some(Keycode::Num3), ..} => {
self.keys.keys[3] = false;
}
Event::KeyUp {keycode: Some(Keycode::Num4), ..} => {
self.keys.keys[12] = false;
}
Event::KeyUp {keycode: Some(Keycode::Q), ..} => {
self.keys.keys[4] = false;
}
Event::KeyUp {keycode: Some(Keycode::W), ..} => {
self.keys.keys[5] = false;
}
Event::KeyUp {keycode: Some(Keycode::E), ..} => {
self.keys.keys[6] = false;
}
Event::KeyUp {keycode: Some(Keycode::R), ..} => {
self.keys.keys[13] = false;
}
Event::KeyUp {keycode: Some(Keycode::A), ..} => {
self.keys.keys[7] = false;
}
Event::KeyUp {keycode: Some(Keycode::S), ..} => {
self.keys.keys[8] = false;
}
Event::KeyUp {keycode: Some(Keycode::D), ..} => {
self.keys.keys[9] = false;
}
Event::KeyUp {keycode: Some(Keycode::F), ..} => {
self.keys.keys[14] = false;
}
Event::KeyUp {keycode: Some(Keycode::Z), ..} => {
self.keys.keys[10] = false;
}
Event::KeyUp {keycode: Some(Keycode::X), ..} => {
self.keys.keys[0] = false;
}
Event::KeyUp {keycode: Some(Keycode::C), ..} => {
self.keys.keys[11] = false;
}
Event::KeyUp {keycode: Some(Keycode::V), ..} => {
self.keys.keys[15] = false;
}
Event::KeyDown {keycode: Some(Keycode::K), ..} => {
self._next_step = true;
}
Event::KeyDown {keycode: Some(Keycode::M), ..} => {
self.mem._dump_mem_to_disk();
}
_ => {}
}
}
false
}
fn read_word(&mut self) -> u16 {
let instruction_high_order = (self.mem.read_byte(self.reg.read_pc()) as u16) << 8;
let instruction_low_order = self.mem.read_byte(self.reg.read_pc() + 1) as u16;
let instruction = instruction_high_order | instruction_low_order;
self.reg.increment_pc();
instruction
}
fn process_instruction(&mut self, instruction: u16) {
let op_type: u8 = ((instruction >> 12) & 0xff) as u8;
match op_type {
0x0 => {
// we will ignore the 0nnn opcode used for jumping to machine code routines
let operation = instruction & 0x00ff;
if operation == 0xe0 {
println!("PC: {:#x} | Opcode: {:#x} | cls",
self.reg.read_pc() - 2,
instruction);
for x in 0..64 {
for y in 0..32 {
self.display[x][y] = false;
}
}
self.display_updated = true;
} else if operation == 0xee {
println!("PC: {:#x} | Opcode: {:#x} | ret",
self.reg.read_pc() - 2,
instruction);
self.reg.return_from_subroutine();
}
}
0x1 => {
let jump_addr = instruction & 0x0fff;
println!("PC: {:#x} | Opcode: {:#x} | jmp {:#x}",
self.reg.read_pc() - 2,
instruction,
jump_addr);
self.reg.jump_to_address(jump_addr, JumpType::NORMAL);
}
0x2 => {
let subroutine_addr = instruction & 0x0fff;
println!("PC: {:#x} | Opcode: {:#x} | call {:#x}",
self.reg.read_pc() - 2,
instruction,
subroutine_addr);
self.reg.jump_to_address(subroutine_addr, JumpType::SUBROUTINE);
}
0x3 => {
let target_reg = ((instruction & 0x0f00) >> 8) as u8;
let comparison_byte = (instruction & 0x00ff) as u8;
println!("PC: {:#x} | Opcode: {:#x} | se V{} {:#x}",
self.reg.read_pc() - 2,
instruction,
target_reg,
comparison_byte);
if self.reg.read_register(target_reg) == comparison_byte {
self.reg.increment_pc();
}
}
0x4 => {
let target_reg = ((instruction & 0x0f00) >> 8) as u8;
let comparison_byte = (instruction & 0x00ff) as u8;
println!("PC: {:#x} | Opcode: {:#x} | sne V{} {:#x}",
self.reg.read_pc() - 2,
instruction,
target_reg,
comparison_byte);
if self.reg.read_register(target_reg) != comparison_byte {
self.reg.increment_pc();
}
}
0x5 => {
let reg_one = ((instruction & 0x0f00) >> 8) as u8;
let reg_two = ((instruction & 0x00f0) >> 4) as u8;
println!("PC: {:#x} | Opcode: {:#x} | se V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
if self.reg.read_register(reg_one) == self.reg.read_register(reg_two) {
self.reg.increment_pc();
}
}
0x6 => {
let target_reg = ((instruction >> 8) & 0x0f) as u8;
let data_value = (instruction & 0x00ff) as u8;
println!("PC: {:#x} | Opcode: {:#x} | ld V{} {:#x}",
self.reg.read_pc() - 2,
instruction,
target_reg,
data_value);
self.reg.write_register(target_reg, data_value);
}
0x7 => {
let target_reg = ((instruction >> 8) & 0x0f) as u8;
let immediate_value = (instruction & 0x00ff) as u8;
let reg_value = self.reg.read_register(target_reg);
let data_value = immediate_value.wrapping_add(reg_value);
println!("PC: {:#x} | Opcode: {:#x} | add V{} {:#x}",
self.reg.read_pc() - 2,
instruction,
target_reg,
immediate_value);
self.reg.write_register(target_reg, data_value);
}
0x8 => {
let reg_one = ((instruction >> 8) & 0x0f) as u8;
let reg_two = ((instruction >> 4) & 0x0f) as u8;
let operation = (instruction & 0x000f) as u8;
match operation {
0 => {
let data_value = self.reg.read_register(reg_two);
println!("PC: {:#x} | Opcode: {:#x} | ld V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, data_value);
}
1 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
let data_value = reg_one_value | reg_two_value;
println!("PC: {:#x} | Opcode: {:#x} | or V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, data_value);
}
2 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
let data_value = reg_one_value & reg_two_value;
println!("PC: {:#x} | Opcode: {:#x} | and V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, data_value);
}
3 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
if reg_two_value > reg_one_value {
self.reg.write_register(0x0f, 0x01);
}
let data_value = reg_two_value - reg_one_value;
println!("PC: {:#x} | Opcode: {:#x} | xor V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, data_value);
}
4 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
let mut result: u32 = (reg_one_value as u32) + (reg_two_value as u32);
if result > 255 {
self.reg.set_vf();
} else {
self.reg.clear_vf();
}
println!("PC: {:#x} | Opcode: {:#x} | add V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, result as u8);
}
5 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
if reg_one_value > reg_two_value {
self.reg.set_vf();
} else {
self.reg.clear_vf();
}
println!("PC: {:#x} | Opcode: {:#x} | sub V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, reg_one_value.wrapping_sub(reg_two_value));
}
6 => {
let reg_one_value = self.reg.read_register(reg_one);
println!("PC: {:#x} | Opcode: {:#x} | shr V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
if (reg_one_value & 1) == 1 {
self.reg.set_vf();
} else {
self.reg.clear_vf();
}
self.reg.write_register(reg_one, reg_one_value >> 1);
}
7 => {
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
if reg_two_value > reg_one_value {
self.reg.set_vf();
} else {
self.reg.clear_vf();
}
println!("PC: {:#x} | Opcode: {:#x} | subn V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
self.reg.write_register(reg_one, reg_two_value.wrapping_sub(reg_one_value));
}
0xe => {
let reg_one_value = self.reg.read_register(reg_one);
println!("PC: {:#x} | Opcode: {:#x} | shl V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
if ((reg_one_value >> 7) & 1) == 1 {
self.reg.set_vf();
} else {
self.reg.clear_vf();
}
self.reg.write_register(reg_one, reg_one_value << 1);
}
_ => panic!("Unrecognized opcode: {:#x}", instruction),
}
}
0x9 => {
let reg_one = ((instruction & 0x0f00) >> 8) as u8;
let reg_two = ((instruction & 0x00f0) >> 4) as u8;
let reg_one_value = self.reg.read_register(reg_one);
let reg_two_value = self.reg.read_register(reg_two);
println!("PC: {:#x} | Opcode: {:#x} | sne V{} V{}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two);
if reg_one_value != reg_two_value {
self.reg.increment_pc();
}
}
0xa => {
let data_value = instruction & 0x0fff;
println!("PC: {:#x} | Opcode: {:#x} | ld i {:#x}",
self.reg.read_pc() - 2,
instruction,
data_value);
self.reg.write_register_i(data_value);
}
0xb => {
let initial_addr = instruction & 0x0fff;
let offset = self.reg.read_register(0) as u16;
println!("PC: {:#x} | Opcode: {:#x} | jp V0 {:#x}",
self.reg.read_pc() - 2,
instruction,
initial_addr + offset);
self.reg.jump_to_address(initial_addr + offset, JumpType::NORMAL);
}
0xc => {
let target_reg = ((instruction & 0x0f00) >> 8) as u8;
let combination_byte = (instruction & 0x00ff) as u8;
let rand_num: u8 = rand::random();
self.reg.write_register(target_reg, (combination_byte & rand_num));
println!("PC: {:#x} | Opcode: {:#x} | rnd V{} {:#x}",
self.reg.read_pc() - 2,
instruction,
target_reg,
combination_byte);
println!(" | rand_num: {:#x} | final byte: {:#x}",
rand_num,
combination_byte & rand_num);
}
0xd => {
let reg_one = ((instruction & 0x0F00) >> 8) as u8;
let reg_two = ((instruction & 0x00F0) >> 4) as u8;
let num_bytes = (instruction & 0x000F) as u8;
println!("PC: {:#x} | Opcode: {:#x} | drw V{} V{} {}",
self.reg.read_pc() - 2,
instruction,
reg_one,
reg_two,
num_bytes);
let sprite_x = self.reg.read_register(reg_one);
let sprite_y = self.reg.read_register(reg_two);
println!("Sprite X: {} | Sprite Y: {}", sprite_x, sprite_y);
let mut bit_vec: Vec<u8> = Vec::new();
for i in 0..num_bytes {
bit_vec.push(self.mem.read_byte(self.reg.read_register_i() + (i as u16)));
}
println!("Glyph:");
for byte in bit_vec.clone() {
println!("{:#8b}", byte);
}
println!("");
self.reg.clear_vf();
let mut y_index = sprite_y as usize;
let mut x_value = sprite_x as usize;
for byte in bit_vec.clone() {
for i in 0..8 {
let mut x_index = x_value + (7 - i);
if x_index > 63 {
x_index = 69 - x_value;
}
if y_index > 31 {
y_index = y_index - 32;
}
let mut bit_state: bool = false;
if (byte >> i) & 1 == 1 {
bit_state = true;
}
if bit_state != self.display[x_index][y_index] {
self.display[x_index][y_index] = true;
} else {
if self.display[x_index][y_index] == true {
self.reg.set_vf();
}
self.display[x_index][y_index] = false;
}
}
y_index += 1;
}
self.display_updated = true;
}
0xe => {
let optype = (instruction & 0x00ff) as u8;
let target_reg = ((instruction & 0x0f00) >> 8) as u8;
match optype {
0x9e => {
let key = self.reg.read_register(target_reg);
if self.keys.keys[key as usize] == true {
self.reg.increment_pc();
}
println!("PC: {:#x} | Opcode: {:#x} | skp V{}",
self.reg.read_pc() - 2,
instruction,
target_reg);
}
0xa1 => {
let key = self.reg.read_register(target_reg);
if self.keys.keys[key as usize] == false {
self.reg.increment_pc();
}
println!("PC: {:#x} | Opcode: {:#x} | sknp V{}",
self.reg.read_pc() - 2,
instruction,
target_reg);
}
_ => panic!("Invalid instruction: {:#4x}", instruction),
}
}
0xf => {
let operation = (instruction & 0x00FF) as u8;
let register_index = ((instruction & 0x0F00) >> 8) as u8;
match operation {
0x07 => {
println!("PC: {:#x} | Opcode: {:#x} | ld V{} DT",
self.reg.read_pc() - 2,
instruction,
register_index);
let reg_value = self.reg.read_delay_timer();
self.reg.write_register(register_index, reg_value);
}
0x15 => {
println!("PC: {:#x} | Opcode: {:#x} | ld DT V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
let reg_value = self.reg.read_register(register_index);
self.reg.write_delay_timer(reg_value);
}
0x18 => {
println!("PC: {:#x} | Opcode: {:#x} | ld ST V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
let reg_value = self.reg.read_register(register_index);
self.reg.write_sound_timer(reg_value);
}
0x1e => {
let reg_value = self.reg.read_register(register_index);
let i_value = self.reg.read_register_i();
println!("PC: {:#x} | Opcode: {:#x} | add I V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
self.reg.write_register_i((reg_value as u16) + i_value);
}
0x29 => {
println!("PC: {:#x} | Opcode: {:#x} | ld F V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
let reg_value = self.reg.read_register(register_index);
match reg_value {
0 => {
self.reg.write_register_i(0x0);
}
1 => {
self.reg.write_register_i(0x5);
}
2 => {
self.reg.write_register_i(0xa);
}
3 => {
self.reg.write_register_i(0xf);
}
4 => {
self.reg.write_register_i(0x14);
}
5 => {
self.reg.write_register_i(0x19);
}
6 => {
self.reg.write_register_i(0x1e);
}
7 => {
self.reg.write_register_i(0x23);
}
8 => {
self.reg.write_register_i(0x28);
}
9 => {
self.reg.write_register_i(0x2d);
}
0xa => {
self.reg.write_register_i(0x32);
}
0xb => {
self.reg.write_register_i(0x37);
}
0xc => {
self.reg.write_register_i(0x3c);
}
0xd => {
self.reg.write_register_i(0x41);
}
0xe => {
self.reg.write_register_i(0x46);
}
0xf => {
self.reg.write_register_i(0x4b);
}
_ => {
panic!("Should never hit this statement, all cases covered.");
}
}
}
0x33 => {
let mut reg_value = self.reg.read_register(register_index);
let ones_digit: u8 = reg_value % 10;
reg_value = reg_value / 10;
let tens_digit: u8 = reg_value % 10;
reg_value = reg_value / 10;
let hundreds_digit: u8 = reg_value % 10;
println!("PC: {:#x} | Opcode: {:#x} | ld B V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
self.mem.write_byte(self.reg.read_register_i(), hundreds_digit);
self.mem.write_byte(self.reg.read_register_i() + 1, tens_digit);
self.mem.write_byte(self.reg.read_register_i() + 2, ones_digit);
}
0x55 => {
let num_reg = register_index as usize;
let mut mem_addr = self.reg.read_register_i();
for n in 0..num_reg {
self.mem
.write_byte(mem_addr + (n as u16), self.reg.read_register(n as u8));
}
println!("PC: {:#x} | Opcode: {:#x} | ld [I] V{}",
self.reg.read_pc() - 2,
instruction,
register_index);
}
0x65 => {
println!("PC: {:#x} | Opcode: {:#x} | ld V{} [I]",
self.reg.read_pc() - 2,
instruction,
register_index);
let mem_addr = self.reg.read_register_i();
for n in 0..(register_index + 1) {
let byte = self.mem.read_byte(mem_addr + (n as u16));
self.reg.write_register(n as u8, byte);
}
}
_ => {
println!("Chip8 status at end time: {:#?}", self);
println!("*************Unrecognized opcode!*************");
panic!("PC: {:#x} | Opcode: {:#x} | various",
self.reg.read_pc() - 2,
instruction);
}
}
}
_ => {
println!("Chip8 status at end time: {:#?}", self);
panic!("Unsupported op type: {:#2x}", op_type);
}
}
}
}
pub enum JumpType {
NORMAL,
SUBROUTINE,
}<|fim▁end|> | |
<|file_name|>git_pillar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Use a git repository as a Pillar source
---------------------------------------
.. note::
This external pillar has been rewritten for the :doc:`2015.8.0
</topics/releases/2015.8.0>` release. The old method of configuring this
external pillar will be maintained for a couple releases, allowing time for
configurations to be updated to reflect the new usage.
This external pillar allows for a Pillar top file and Pillar SLS files to be
sourced from a git repository.
However, since git_pillar does not have an equivalent to the
:conf_master:`pillar_roots` parameter, configuration is slightly different. The
Pillar top file must still contain the relevant environment, like so:
.. code-block:: yaml
base:
'*':
- foo
The branch/tag which maps to that environment must then be specified along with
the repo's URL. Configuration details can be found below.
.. _git-pillar-pre-2015-8-0:
Configuring git_pillar for Salt releases before 2015.8.0
========================================================
For Salt releases earlier than :doc:`2015.8.0 </topics/releases/2015.8.0>`,
GitPython is the only supported provider for git_pillar. Individual
repositories can be configured under the :conf_master:`ext_pillar`
configuration parameter like so:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git root=subdirectory
The repository is specified in the format ``<branch> <repo_url>``, with an
optional ``root`` parameter (added in the :doc:`2014.7.0
</topics/releases/2014.7.0>` release) which allows the pillar SLS files to be
served up from a subdirectory (similar to :conf_master:`gitfs_root` in gitfs).
To use more than one branch from the same repo, multiple lines must be
specified under :conf_master:`ext_pillar`:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git
- git: dev https://gitserver/git-pillar.git
To remap a specific branch to a specific Pillar environment, use the format
``<branch>:<env>``:
.. code-block:: yaml
ext_pillar:
- git: develop:dev https://gitserver/git-pillar.git
- git: master:prod https://gitserver/git-pillar.git
In this case, the ``develop`` branch would need its own ``top.sls`` with a
``dev`` section in it, like this:
.. code-block:: yaml
dev:
'*':
- bar
The ``master`` branch would need its own ``top.sls`` with a ``prod`` section in
it:
.. code-block:: yaml
prod:
'*':
- bar
If ``__env__`` is specified as the branch name, then git_pillar will use the
branch specified by :conf_master:`gitfs_base`:
.. code-block:: yaml
ext_pillar:
- git: __env__ https://gitserver/git-pillar.git root=pillar
The corresponding Pillar top file would look like this:
.. code-block:: yaml
{{env}}:
'*':
- bar
.. _git-pillar-2015-8-0-and-later:
Configuring git_pillar for Salt releases 2015.8.0 and later
===========================================================
.. note::
In version 2015.8.0, the method of configuring git external pillars has
changed, and now more closely resembles that of the :ref:`Git Fileserver
Backend <tutorial-gitfs>`. If Salt detects the old configuration schema, it
will use the pre-2015.8.0 code to compile the external pillar. A warning
will also be logged.
Beginning with Salt version 2015.8.0, pygit2_ is now supported in addition to
GitPython_ (Dulwich_ will not be supported for the forseeable future). The
requirements for GitPython_ and pygit2_ are the same as for gitfs, as described
:ref:`here <gitfs-dependencies>`.
.. important::
git_pillar has its own set of global configuration parameters. While it may
seem intuitive to use the global gitfs configuration parameters
(:conf_master:`gitfs_base`, etc.) to manage git_pillar, this will not work.
The main difference for this is the fact that the different components
which use Salt's git backend code do not all function identically. For
instance, in git_pillar it is necessary to specify which branch/tag to be
used for git_pillar remotes. This is the reverse behavior from gitfs, where
branches/tags make up your environments.
See :ref:`here <git_pillar-config-opts>` for documentation on the
git_pillar configuration options and their usage.
Here is an example git_pillar configuration:
.. code-block:: yaml
ext_pillar:
- git:
# Use 'prod' instead of the branch name 'production' as the environment
- production https://gitserver/git-pillar.git:
- env: prod
# Use 'dev' instead of the branch name 'develop' as the environment
- develop https://gitserver/git-pillar.git:
- env: dev
# No per-remote config parameters (and no trailing colon), 'qa' will
# be used as the environment
- qa https://gitserver/git-pillar.git
# SSH key authentication
- master git@other-git-server:pillardata-ssh.git:
# Pillar SLS files will be read from the 'pillar' subdirectory in
# this repository
- root: pillar
- privkey: /path/to/key
- pubkey: /path/to/key.pub
- passphrase: CorrectHorseBatteryStaple
# HTTPS authentication
- master https://other-git-server/pillardata-https.git:
- user: git
- password: CorrectHorseBatteryStaple
The main difference between this and the old way of configuring git_pillar is
that multiple remotes can be configured under one ``git`` section under
:conf_master:`ext_pillar`. More than one ``git`` section can be used, but it is
not necessary. Remotes will be evaluated sequentially.
Per-remote configuration parameters are supported (similar to :ref:`gitfs
<gitfs-per-remote-config>`), and global versions of the git_pillar
configuration parameters can also be set.
With the addition of pygit2_ support, git_pillar can now interact with
authenticated remotes. Authentication works just like in gitfs (as outlined in
the :ref:`Git Fileserver Backend Walkthrough <gitfs-authentication>`), only
with the global authenication parameter names prefixed with ``git_pillar``
instead of ``gitfs`` (e.g. :conf_master:`git_pillar_pubkey`,
:conf_master:`git_pillar_privkey`, :conf_master:`git_pillar_passphrase`, etc.).
.. _GitPython: https://github.com/gitpython-developers/GitPython
.. _pygit2: https://github.com/libgit2/pygit2
.. _Dulwich: https://www.samba.org/~jelmer/dulwich/
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import hashlib
import os
# Import salt libs
import salt.utils.gitfs
import salt.utils.dictupdate
from salt.exceptions import FileserverConfigError
from salt.pillar import Pillar
# Import third party libs
import salt.ext.six as six
# pylint: disable=import-error
try:
import git
HAS_GITPYTHON = True
except ImportError:
HAS_GITPYTHON = False
# pylint: enable=import-error
PER_REMOTE_OVERRIDES = ('env', 'root', 'ssl_verify')
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'git'
def __virtual__():
'''
Only load if gitpython is available
'''
git_ext_pillars = [x for x in __opts__['ext_pillar'] if 'git' in x]
if not git_ext_pillars:
# No git external pillars were configured
return False
for ext_pillar in git_ext_pillars:
if isinstance(ext_pillar['git'], six.string_types):
# Verification of legacy git pillar configuration
if not HAS_GITPYTHON:
log.error(
'Git-based ext_pillar is enabled in configuration but '
'could not be loaded, is GitPython installed?'
)
return False
if not git.__version__ > '0.3.0':
return False
return __virtualname__
else:
# Verification of new git pillar configuration
try:
salt.utils.gitfs.GitPillar(__opts__)
# Initialization of the GitPillar object did not fail, so we
# know we have valid configuration syntax and that a valid
# provider was detected.
return __virtualname__
except FileserverConfigError:
pass
return False
def ext_pillar(minion_id, repo, pillar_dirs):
'''
Checkout the ext_pillar sources and compile the resulting pillar SLS
'''
if isinstance(repo, six.string_types):
return _legacy_git_pillar(minion_id, repo, pillar_dirs)
else:
opts = copy.deepcopy(__opts__)
opts['pillar_roots'] = {}
pillar = salt.utils.gitfs.GitPillar(opts)
pillar.init_remotes(repo, PER_REMOTE_OVERRIDES)
pillar.checkout()
ret = {}
merge_strategy = __opts__.get(
'pillar_source_merging_strategy',
'smart'
)
merge_lists = __opts__.get(
'pillar_merge_lists',
False
)
for pillar_dir, env in six.iteritems(pillar.pillar_dirs):
log.debug(
'git_pillar is processing pillar SLS from {0} for pillar '
'env \'{1}\''.format(pillar_dir, env)
)
all_dirs = [d for (d, e) in six.iteritems(pillar.pillar_dirs)
if env == e]
# Ensure that the current pillar_dir is first in the list, so that
# the pillar top.sls is sourced from the correct location.
pillar_roots = [pillar_dir]
pillar_roots.extend([x for x in all_dirs if x != pillar_dir])
opts['pillar_roots'] = {env: pillar_roots}
local_pillar = Pillar(opts, __grains__, minion_id, env)
ret = salt.utils.dictupdate.merge(
ret,
local_pillar.compile_pillar(ext=False),
strategy=merge_strategy,
merge_lists=merge_lists
)
return ret
# Legacy git_pillar code
class _LegacyGitPillar(object):
'''
Deal with the remote git repository for Pillar
'''
def __init__(self, branch, repo_location, opts):
'''
Try to initialize the Git repo object
'''
self.branch = self.map_branch(branch, opts)
self.rp_location = repo_location
self.opts = opts
self._envs = set()
self.working_dir = ''
self.repo = None
hash_type = getattr(hashlib, opts.get('hash_type', 'md5'))
hash_str = '{0} {1}'.format(self.branch, self.rp_location)
repo_hash = hash_type(hash_str).hexdigest()
rp_ = os.path.join(self.opts['cachedir'], 'pillar_gitfs', repo_hash)
if not os.path.isdir(rp_):
os.makedirs(rp_)
try:
self.repo = git.Repo.init(rp_)
except (git.exc.NoSuchPathError,
git.exc.InvalidGitRepositoryError) as exc:
log.error('GitPython exception caught while '
'initializing the repo: {0}. Maybe '
'git is not available.'.format(exc))
# Git directory we are working on
# Should be the same as self.repo.working_dir
self.working_dir = rp_
if isinstance(self.repo, git.Repo):
if not self.repo.remotes:
try:
self.repo.create_remote('origin', self.rp_location)
# ignore git ssl verification if requested
if self.opts.get('pillar_gitfs_ssl_verify', True):
self.repo.git.config('http.sslVerify', 'true')
else:
self.repo.git.config('http.sslVerify', 'false')
except os.error:
# This exception occurs when two processes are
# trying to write to the git config at once, go
# ahead and pass over it since this is the only
# write.
# This should place a lock down.
pass
else:
if self.repo.remotes.origin.url != self.rp_location:
self.repo.remotes.origin.config_writer.set(
'url', self.rp_location)
def map_branch(self, branch, opts=None):
opts = __opts__ if opts is None else opts
if branch == '__env__':
branch = opts.get('environment') or 'base'
if branch == 'base':
branch = opts.get('gitfs_base') or 'master'
elif ':' in branch:
branch = branch.split(':', 1)[0]
return branch
def update(self):
'''
Ensure you are following the latest changes on the remote
Return boolean whether it worked
'''
try:
log.debug('Updating fileserver for git_pillar module')
self.repo.git.fetch()
except git.exc.GitCommandError as exc:
log.error('Unable to fetch the latest changes from remote '
'{0}: {1}'.format(self.rp_location, exc))
return False
try:
self.repo.git.checkout('origin/{0}'.format(self.branch))
except git.exc.GitCommandError as exc:
log.error('Unable to checkout branch '
'{0}: {1}'.format(self.branch, exc))
return False
return True
def envs(self):<|fim▁hole|> '''
Return a list of refs that can be used as environments
'''
if isinstance(self.repo, git.Repo):
remote = self.repo.remote()
for ref in self.repo.refs:
parted = ref.name.partition('/')
short = parted[2] if parted[2] else parted[0]
if isinstance(ref, git.Head):
if short == 'master':
short = 'base'
if ref not in remote.stale_refs:
self._envs.add(short)
elif isinstance(ref, git.Tag):
self._envs.add(short)
return list(self._envs)
def _legacy_git_pillar(minion_id, repo_string, pillar_dirs):
'''
Support pre-Beryllium config schema
'''
if pillar_dirs is None:
return
# split the branch, repo name and optional extra (key=val) parameters.
options = repo_string.strip().split()
branch_env = options[0]
repo_location = options[1]
root = ''
for extraopt in options[2:]:
# Support multiple key=val attributes as custom parameters.
DELIM = '='
if DELIM not in extraopt:
log.error('Incorrectly formatted extra parameter. '
'Missing \'{0}\': {1}'.format(DELIM, extraopt))
key, val = _extract_key_val(extraopt, DELIM)
if key == 'root':
root = val
else:
log.warning('Unrecognized extra parameter: {0}'.format(key))
# environment is "different" from the branch
cfg_branch, _, environment = branch_env.partition(':')
gitpil = _LegacyGitPillar(cfg_branch, repo_location, __opts__)
branch = gitpil.branch
if environment == '':
if branch == 'master':
environment = 'base'
else:
environment = branch
# normpath is needed to remove appended '/' if root is empty string.
pillar_dir = os.path.normpath(os.path.join(gitpil.working_dir, root))
pillar_dirs.setdefault(pillar_dir, {})
if cfg_branch == '__env__' and branch not in ['master', 'base']:
gitpil.update()
elif pillar_dirs[pillar_dir].get(branch, False):
return {} # we've already seen this combo
pillar_dirs[pillar_dir].setdefault(branch, True)
# Don't recurse forever-- the Pillar object will re-call the ext_pillar
# function
if __opts__['pillar_roots'].get(branch, []) == [pillar_dir]:
return {}
opts = copy.deepcopy(__opts__)
opts['pillar_roots'][environment] = [pillar_dir]
pil = Pillar(opts, __grains__, minion_id, branch)
return pil.compile_pillar(ext=False)
def _update(branch, repo_location):
'''
Ensure you are following the latest changes on the remote
return boolean whether it worked
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.update()
def _envs(branch, repo_location):
'''
Return a list of refs that can be used as environments
'''
gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
return gitpil.envs()
def _extract_key_val(kv, delimiter='='):
'''Extract key and value from key=val string.
Example:
>>> _extract_key_val('foo=bar')
('foo', 'bar')
'''
pieces = kv.split(delimiter)
key = pieces[0]
val = delimiter.join(pieces[1:])
return key, val<|fim▁end|> | |
<|file_name|>myDevice.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
import datetime
import socket
import time
import sys
import os.path
lib_path = os.path.abspath('../utils')
sys.path.append(lib_path)
from myParser import *
from myCrypto import *
#from myDriver import *
#from myCamDriver import *
import re
import hashlib
#from PIL import Image
#host='connect.mysensors.info'
host='localhost'
port=9090
state="INITIAL"
device=""
server="mysensors"
class mySensorDatagramProtocol(DatagramProtocol):
def __init__(self, host,port,reactor):
self.ip= socket.gethostbyname(host)
self.port = port
#self._reactor=reactor
#self.ip=reactor.resolve(host)
def startProtocol(self):
self.transport.connect(self.ip,self.port)
if state=='INITIAL':
#If system is at the initial state, it will send the device creation Senze
self.register()
else:
response=raw_input("Enter your Senze:")
self.sendDatagram(response)
def stopProtocol(self):<|fim▁hole|> #self._reactor.listenUDP(0, self)
print "STOP **************"
def register(self):
global server
cry=myCrypto(name=device)
senze ='SHARE #pubkey %s @%s' %(pubkey,server)
senze=cry.signSENZE(senze)
self.transport.write(senze)
def sendDatagram(self,senze):
global server
cry=myCrypto(name=device)
senze=cry.signSENZE(senze)
print senze
self.transport.write(senze)
def datagramReceived(self, datagram, host):
print 'Datagram received: ', repr(datagram)
parser=myParser(datagram)
recipients=parser.getUsers()
sender=parser.getSender()
signature=parser.getSignature()
data=parser.getData()
sensors=parser.getSensors()
cmd=parser.getCmd()
if cmd=="DATA":
if 'UserCreated' in data['msg']:
#Creating the .devicename file and store the device name and PIN
f=open(".devicename",'w')
f.write(device+'\n')
f.close()
print device+ " was created at the server."
print "You should execute the program again."
print "The system halted!"
reactor.stop()
elif 'UserCreationFailed' in data['msg']:
print "This user name may be already taken"
print "You can try it again with different username"
print "The system halted!"
reactor.stop()
#self.sendDatagram()
def init():
#cam=myCamDriver()
global device
global pubkey
global state
#If .device name is not there, we will read the device name from keyboard
#else we will get it from .devicename file
try:
if not os.path.isfile(".devicename"):
device=raw_input("Enter the device name: ")
# Account need to be created at the server
state='INITIAL'
else:
#The device name will be read form the .devicename file
f=open(".devicename","r")
device = f.readline().rstrip("\n")
state='READY'
except:
print "ERRER: Cannot access the device name file."
raise SystemExit
#Here we will generate public and private keys for the device
#These keys will be used to perform authentication and key exchange
try:
cry=myCrypto(name=device)
#If keys are not available yet
if not os.path.isfile(cry.pubKeyLoc):
# Generate or loads an RSA keypair with an exponent of 65537 in PEM format
# Private key and public key was saved in the .devicenamePriveKey and .devicenamePubKey files
cry.generateRSA(bits=1024)
pubkey=cry.loadRSAPubKey()
except:
print "ERRER: Cannot genereate private/public keys for the device."
raise SystemExit
print pubkey
#Check the network connectivity.
#check_connectivity(ServerName)
def main():
global host
global port
protocol = mySensorDatagramProtocol(host,port,reactor)
reactor.listenUDP(0, protocol)
reactor.run()
if __name__ == '__main__':
init()
main()<|fim▁end|> | #on disconnect |
<|file_name|>Todo.py<|end_file_name|><|fim▁begin|># Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module provides the Todo class.
"""
from datetime import date
from topydo.lib.Config import config
from topydo.lib.TodoBase import TodoBase
from topydo.lib.Utils import date_string_to_date
class Todo(TodoBase):
"""
This class adds common functionality with respect to dates to the Todo
base class, mainly by interpreting the start and due dates of task.
"""
def __init__(self, p_str):
TodoBase.__init__(self, p_str)
self.attributes = {}
def get_date(self, p_tag):
""" Given a date tag, return a date object. """
string = self.tag_value(p_tag)
result = None
try:
result = date_string_to_date(string) if string else None
except ValueError:
pass
return result
def start_date(self):
""" Returns a date object of the todo's start date. """
return self.get_date(config().tag_start())
def due_date(self):
""" Returns a date object of the todo's due date. """
return self.get_date(config().tag_due())
def is_active(self):
"""
Returns True when the start date is today or in the past and the
task has not yet been completed.
"""
start = self.start_date()
return not self.is_completed() and (not start or start <= date.today())
def is_overdue(self):
"""
Returns True when the due date is in the past and the task has not
yet been completed.
"""
return not self.is_completed() and self.days_till_due() < 0
def days_till_due(self):
"""
Returns the number of days till the due date. Returns a negative number
of days when the due date is in the past.
Returns 0 when the task has no due date.
"""<|fim▁hole|> if due:
diff = due - date.today()
return diff.days
return 0
def length(self):
"""
Returns the length (in days) of the task, by considering the start date
and the due date. When there is no start date, its creation date is
used. Returns 0 when one of these dates is missing.
"""
start = self.start_date() or self.creation_date()
due = self.due_date()
if start and due and start < due:
diff = due - start
return diff.days
else:
return 0<|fim▁end|> | due = self.due_date() |
<|file_name|>palindrama.js<|end_file_name|><|fim▁begin|>var flag = "did_you_use_python's_[::-1]_notation?";
exports.get_data = function(req, callback) {
var result = [];
var s = [
"[Go, droop aloof] sides reversed, is [fool a poor dog]. I did roar again, Niagara! ... or did I?",
"Help Max, Enid -- in example, H. See, slave, I demonstrate yet arts no medieval sees.",
"Egad, a base tone denotes a bad age. So may Obadiah, even in Nineveh, aid a boy, Amos. Naomi, did I moan?",
"Sir, I soon saw Bob was no Osiris. Poor Dan is in a droop.",
"Straw? No, too stupid a fad. I put soot on warts.",
"Live on, Time; emit no evil.",
"No, it is opposition.",
"Peel's lager on red rum did murder no regal sleep.",
"Too far away, no mere clay or royal ceremony, a war afoot."
];
var rand = s[(Math.random()*s.length)|0];
var a = s[(Math.random()*s.length)|0];
while (a == rand){
a = s[(Math.random()*s.length)|0];
}
a += " " + rand;
//console.log(s,a);
result.push(a);
req.session.data = result;
callback(result);
};
exports.check_data = function(req, callback) {
var answer = req.param("answer");<|fim▁hole|> var longest = "";
var longestOrig = "";
for(var i = s.length - 1; i >= 0; i--) {
for (var j = i + 1; j < s.length; j++) {
var q = s.substring(i, j);
var t = q;
if (q.substring(0,1).match(/[0-9a-zA-Z]+$/)){
t = q.replace(/[^A-Za-z]/gi,'').toLowerCase();
}
if (t == t.split("").reverse().join("") && t.length > longest.length) {
longest = t;
longestOrig = q.trim();
}
}
}
var correct = longestOrig;
/*console.log("!"+longestOrig+"!");
console.log("!"+output+"!");
console.log(longestOrig == output);*/
// return longestOrig === answer;
if (answer) {
answer = answer.replace(/^\s+|\s+$/g,'');
correct = correct.replace(/^\s+|\s+$/g,'');
if (answer.toLowerCase() === correct.toLowerCase()) {
callback({
status: 1,
message: "Great job! Your flag is <code>" + flag + "</code>",
});
return;
} else {
callback({
status: 0,
message: "Nope, try again..."
});
return;
}
} else {
callback({
status: 0,
message: "Answer cannot be empty!"
});
return;
}
};<|fim▁end|> | var data = req.session.data;
var s = data[0];
|
<|file_name|>MTTTCore.py<|end_file_name|><|fim▁begin|>"""@brief MTTT's core commands, stems from the original version created using Gtk https://github.com/roxana-lafuente/MTTT"""
# !/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# Machine Translation Training Tool
# Copyright (C) 2016 Roxana Lafuente <[email protected]>
# Miguel Lemos <[email protected]>
# Paula Estrella <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|>#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
def install_and_import(package):
import importlib
try:
importlib.import_module(package)
except ImportError:
try:
import pip
except ImportError:
print "no pip"
os.system('python get_pip.py')
finally:
import pip
pip.main(['install', package])
finally:
globals()[package] = importlib.import_module(package)
#os is one of the modules that I know comes with 2.7, no questions asked.
import os
#these other ones I a am not so sure of. Thus the install function.
install_and_import("requests")
install_and_import("subprocess")
install_and_import("json")
install_and_import("sys")
install_and_import("time")
install_and_import("shutil")
install_and_import("urlparse")
install_and_import("itertools")
from commands import *
from files_processing import *
from constants import moses_dir_fn
from Ui_mosesDialog import MosesDialog
UI_INFO = """
<ui>
<menubar name='MenuBar'>
<menu action='VisualsMenu'>
<menu action='Visuals'>
<menuitem action='metro'/>
<menuitem action='paper'/>
<separator />
<menuitem action='lights_on_option'/>
</menu>
</menu>
</menubar>
</ui>
"""
class MTTTCore():
def __init__(self):
# Recognize OS
if os.name == 'posix': # Linux
self.is_linux, self.is_windows = True, False
elif os.name == 'nt': # Windows
self.is_linux, self.is_windows = False, True
else:
print "Unknown OS"
exit(1)
# Check Moses Config file.
self.moses_dir = ""
try:
f = open(moses_dir_fn, 'r')
self.moses_dir = f.read()
f.close()
except IOError, OSError:
# File does not exist.
self.moses_dir = self.get_moses_dir()
f = open(moses_dir_fn, 'w')
f.write(self.moses_dir)
f.close()
finally:
# File content is wrong
if not self.is_moses_dir_valid(self.moses_dir):
moses_dir = self.get_moses_dir()
f = open(moses_dir_fn, 'w')
f.write(self.moses_dir)
f.close()
self.saved_absolute_path = os.path.abspath("saved")
self.saved_relative_filepath = "./saved"
if not os.path.exists(self.saved_absolute_path):
os.makedirs(self.saved_absolute_path)
# Init
self.source_lang = None
self.target_lang = None
self.output_text= None
self.cwd = os.getcwd()
def is_moses_dir_valid(self, directory):
is_valid = True
if directory == "":
is_valid = False # Empty string
elif not os.path.exists(directory):
is_valid = False # Directory does not exist
else:
# Check if dir exists but does not contain moses installation
is_valid = self._check_moses_installation(directory)
return is_valid
def _check_moses_installation(self, directory):
# TODO: TRY catch OSError when permission denied!!
file_content = [f for f in os.listdir(directory)]
moses_files = ["/scripts/tokenizer/tokenizer.perl",
"/scripts/recaser/truecase.perl",
"/scripts/training/clean-corpus-n.perl",
"/bin/lmplz",
"/bin/build_binary",
"/scripts/training/train-model.perl",
"/bin/moses"
]
if self.is_windows:
moses_files = [f.replace("/", "\\")
for f in moses_files]
moses_files = [f + ".exe"
for f in moses_files
if "/bin" in f]
is_valid = True
for mfile in moses_files:
is_valid = is_valid and os.path.isfile(directory + mfile)
return is_valid
def get_moses_dir(self):
"""
Gets Moses directory.
"""
moses = MosesDialog()
self.moses_dir = moses.detect()
return self.moses_dir
def _prepare_corpus(self, output_text, source_lang, target_lang, st_train, tt_train, lm_text):
self.output_text = str(output_text)
self.source_lang = str(source_lang)
self.target_lang = str(target_lang)
self.lm_text = str(lm_text)
self.tt_train = str(tt_train)
self.st_train = str(st_train)
output_directory = adapt_path_for_cygwin(self.is_windows, self.output_text)
return_text = ""
if output_directory is not None:
# Change directory to the output_directory.
try:
os.chdir(self.output_text)
except:
# Output directory does not exist.
os.mkdir(self.output_text)
os.chdir(self.output_text)
cmds = []
# 1) Tokenization
# a) Target text
target_tok = generate_input_tok_fn(self.target_lang,
output_directory)
cmds.append(get_tokenize_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
self.target_lang,
adapt_path_for_cygwin(self.is_windows,self.tt_train),
target_tok))
# b) Source text
source_tok = generate_input_tok_fn(self.source_lang,
output_directory)
cmds.append(get_tokenize_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
self.source_lang,
adapt_path_for_cygwin(self.is_windows,self.st_train),
source_tok))
# c) Language model
lm_tok = generate_lm_tok_fn(output_directory)
cmds.append(get_tokenize_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
self.source_lang,
adapt_path_for_cygwin(self.is_windows,self.lm_text),
lm_tok))
# 2) Truecaser training
# a) Target text
cmds.append(get_truecaser_train_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
target_tok))
# b) Source text
cmds.append(get_truecaser_train_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
source_tok))
# c) Language model
cmds.append(get_truecaser_train_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
lm_tok))
# 3) Truecaser
input_true = output_directory + "/input.true"
# a) Target text
target_true = generate_input_true_fn(self.target_lang,
output_directory)
cmds.append(get_truecaser_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
target_tok,
target_true))
# b) Source text
source_true = generate_input_true_fn(self.source_lang,
output_directory)
cmds.append(get_truecaser_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
source_tok,
source_true))
# c) Language model
self.lm_true = lm_true = generate_lm_true_fn(output_directory)
cmds.append(get_truecaser_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
target_tok, lm_true))
# 4) Cleaner
# a) Target text
self.input_clean = input_clean = generate_input_clean_fn(output_directory)
self.source_clean = source_clean = input_true + "." + self.source_lang
self.target_clean = target_clean = input_true + "." + self.target_lang
cmds.append(get_cleaner_command(adapt_path_for_cygwin(self.is_windows, self.moses_dir),
self.source_lang,
self.target_lang,
input_true,
input_clean))
# Start threads
all_ok = True
for cmd in cmds:
#print cmd
return_text += cmd + "\n"
# all_ok = all_ok and (os.system(cmd) == 0)
proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, shell=True)
all_ok = all_ok and (proc.wait() == 0)
# print "returncode:", proc.returncode, "\n\n\n"
out, err = proc.communicate()
if all_ok:
self.is_corpus_preparation_ready = True
else:
print "TODO: Pop up error message!!"
return return_text
def _train(self):
# print "==============================>", self.is_corpus_preparation_ready
if self.output_text is not None:
#print self.output_text
output_directory = adapt_path_for_cygwin(self.is_windows, self.output_text)
else:
return "ERR"
return_text = ""
if output_directory is not None and self.is_corpus_preparation_ready:
cmds = []
output = "Log:\n\n"
# Train the language model.
self.lm_arpa = generate_lm_fn(output_directory)
#print "out:", self.lm_arpa, "\n"
cmds.append(get_lmtrain_command(self.moses_dir,
self.target_lang,
self.lm_true,
self.lm_arpa))
# Binarize arpa
self.blm = generate_blm_fn(output_directory)
#print "binarized out:", self.blm, "\n"
cmds.append(get_blmtrain_command(self.moses_dir,
self.target_lang,
self.lm_arpa,
self.blm))
# Train the translation model.
out_file = generate_tm_fn(output_directory)
cmds.append(get_tmtrain_command(self.moses_dir,
self.source_lang,
self.target_lang,
self.blm,
self.input_clean,
output_directory))
# TODO!
# Binarize phase-table.gz
# Binarize reordering-table.wbe-msd-bidirectional-fe.gz
# Change PhraseDictionaryMemory to PhraseDictionaryCompact
# Set the path of the PhraseDictionary feature to point to $HOME/working/binarised-model/phrase-table.minphr
# Set the path of the LexicalReordering feature to point to $HOME/working/binarised-model/reordering-table
for cmd in cmds:
# use Popen for non-blocking
#print cmd
output += cmd
return_text += cmd + "\n"
proc = subprocess.Popen([cmd],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
proc.wait()
(out, err) = proc.communicate()
if out != "":
output += out
elif err != "":
output += err
# Adding output from training.out
training = adapt_path_for_cygwin(self.is_windows, self.output_text) + "/training.out"
try:
with open(training, "r") as f:
output += "\n" + f.read()
except IOError:
output += "Error. Unsuccessful when attempting to create moses.ini"
# Set output to the output label.
else:
output = "ERROR: Please go to the first tab and complete the process."
return output
return return_text
def _machine_translation(self, mt_in, chooseModel):
mt_in = str(mt_in)
base=os.path.basename(mt_in)
#mt_out = os.path.dirname(mt_in) + os.path.splitext(base)[0] + "_translated" + os.path.splitext(base)[1]
mt_out = mt_in + ".translated"
in_file = adapt_path_for_cygwin(self.is_windows, mt_in)
out_file = adapt_path_for_cygwin(self.is_windows,mt_out)
#print "OUTDIR:::"+adapt_path_for_cygwin(self.is_windows, self.output_text) + "/train/model/moses.ini"
if chooseModel:
output_text = chooseModel
else:
output_text = adapt_path_for_cygwin(self.is_windows, self.output_text)
output = "Running decoder, please wait\n\n............\n\n"
# Run the decoder.
cmd = get_test_command(self.moses_dir,
adapt_path_for_cygwin(self.is_windows, output_text) + "/train/model/moses.ini",
in_file,
out_file) #---> explota si se elije choose model
# use Popen for non-blocking
#print "CMD MT:::::::"+cmd
proc = subprocess.Popen([cmd],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
(out, err) = proc.communicate()
f = open(out_file, 'r')
mt_result = f.read()
if mt_result == "":
if out != "":
output += out
elif err != "":
output += err
else:
output += "Best translation: " + mt_result
f.close()
return output<|fim▁end|> | # GNU General Public License for more details. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// pub mod run_length;<|fim▁end|> | pub mod merge;
pub mod coalesce; |
<|file_name|>myModule.js<|end_file_name|><|fim▁begin|>"use strict";
var myTable = require("../data/myTables.json");
// Dictionaries to modify/extend the original dictionaries
var devanagari_dict_mod = {};
var telugu_dict_mod = {};
var kannada_dict_mod = {};
var gujarati_dict_mod = {};
var tamil_dict_mod = {};
var bengali_dict_mod = {};
var gurmukhi_dict_mod = {};
var malayalam_dict_mod = {};
var oriya_dict_mod = {};
var english_dict_mod = {};
var devanagari_dict_rev = {};
var telugu_dict_rev = {};
var kannada_dict_rev = {};
var gujarati_dict_rev = {};
var tamil_dict_rev = {};
var bengali_dict_rev = {};
var gurmukhi_dict_rev = {};
var malayalam_dict_rev = {};
var oriya_dict_rev = {};
var english_dict_rev = {};
var katapayadi_dict = myTable.katapayadi_dict;
function init()
{
// new modified/extended dictionaries
devanagari_dict_mod = extend(myTable.devanagari_dict);
telugu_dict_mod = extend(myTable.telugu_dict);
kannada_dict_mod = extend(myTable.kannada_dict);
gujarati_dict_mod = extend(myTable.gujarati_dict);
tamil_dict_mod = extend(myTable.tamil_dict);
bengali_dict_mod = extend(myTable.bengali_dict);
gurmukhi_dict_mod = extend(myTable.gurmukhi_dict);
malayalam_dict_mod = extend(myTable.malayalam_dict);
oriya_dict_mod = extend(myTable.oriya_dict);
english_dict_mod = extend(myTable.english_dict);
// reverse dictionaries
devanagari_dict_rev = reverse(myTable.devanagari_dict);
telugu_dict_rev = reverse(myTable.telugu_dict);<|fim▁hole|> gurmukhi_dict_rev = reverse(myTable.gurmukhi_dict);
malayalam_dict_rev = reverse(myTable.malayalam_dict);
oriya_dict_rev = reverse(myTable.oriya_dict);
english_dict_rev = reverse(myTable.english_dict);
}
function detectLanguage(inp_txt){
var indx;
var chr;
for (indx = 0; indx < inp_txt.length; indx++){
chr = inp_txt.charAt(indx);
if(chr == "\u0950") { // skip Devanagari 'AUM', since it is used across all Indian languages
continue;
}
else if((array_key_exists(chr, devanagari_dict_rev["Independent_vowels"])) || (array_key_exists(chr, devanagari_dict_rev["Consonants"]))) {
return "Devanagari";
}
else if((array_key_exists(chr, telugu_dict_rev["Independent_vowels"])) || (array_key_exists(chr, telugu_dict_rev["Consonants"]))) {
return "Telugu";
}
else if((array_key_exists(chr, kannada_dict_rev["Independent_vowels"])) || (array_key_exists(chr, kannada_dict_rev["Consonants"]))) {
return "Kannada";
}
else if((array_key_exists(chr, gujarati_dict_rev["Independent_vowels"])) || (array_key_exists(chr, gujarati_dict_rev["Consonants"]))) {
return "Gujarati";
}
else if((array_key_exists(chr, tamil_dict_rev["Independent_vowels"])) || (array_key_exists(chr, tamil_dict_rev["Consonants"]))) {
return "Tamil";
}
else if((array_key_exists(chr, bengali_dict_rev["Independent_vowels"])) || (array_key_exists(chr, bengali_dict_rev["Consonants"]))) {
return "Bengali";
}
else if((array_key_exists(chr, gurmukhi_dict_rev["Independent_vowels"])) || (array_key_exists(chr, gurmukhi_dict_rev["Consonants"]))) {
return "Gurmukhi";
}
else if((array_key_exists(chr, malayalam_dict_rev["Independent_vowels"])) || (array_key_exists(chr, malayalam_dict_rev["Consonants"]))) {
return "Malayalam";
}
else if((array_key_exists(chr, oriya_dict_rev["Independent_vowels"])) || (array_key_exists(chr, oriya_dict_rev["Consonants"]))) {
return "Oriya";
}
}
return "English"; // default
}
function convert2IndicScript(inp_txt, encoding_type, indicScript, modeStrict, reverse, preferASCIIDigits)
{
var indx=0;
var out = "";
var vovel_needed_p = false;
var word_start_p = true;
var prev_Type = "NoMatch";
var insideTag_p = false;
var blk, blkLen, Type;
// Assigning the dictionary
var lang_dict;
if(reverse){
if(indicScript == "Devanagari"){lang_dict = devanagari_dict_rev;}
else if(indicScript == "Telugu"){lang_dict = telugu_dict_rev;}
else if(indicScript == "Kannada"){lang_dict = kannada_dict_rev;}
else if(indicScript == "Gujarati"){lang_dict = gujarati_dict_rev;}
else if(indicScript == "Tamil"){lang_dict = tamil_dict_rev;}
else if(indicScript == "Bengali"){lang_dict = bengali_dict_rev;}
else if(indicScript == "Gurmukhi"){lang_dict = gurmukhi_dict_rev;}
else if(indicScript == "Malayalam"){lang_dict = malayalam_dict_rev;}
else if(indicScript == "Oriya"){lang_dict = oriya_dict_rev;}
else {lang_dict = english_dict_rev;}
}
else if(modeStrict){ // orignal dictionaries if modeStrict
if(indicScript == "Devanagari"){lang_dict = myTable.devanagari_dict;}
else if(indicScript == "Telugu"){lang_dict = myTable.telugu_dict;}
else if(indicScript == "Kannada"){lang_dict = myTable.kannada_dict;}
else if(indicScript == "Gujarati"){lang_dict = myTable.gujarati_dict;}
else if(indicScript == "Tamil"){lang_dict = myTable.tamil_dict;}
else if(indicScript == "Bengali"){lang_dict = myTable.bengali_dict;}
else if(indicScript == "Gurmukhi"){lang_dict = myTable.gurmukhi_dict;}
else if(indicScript == "Malayalam"){lang_dict = myTable.malayalam_dict;}
else if(indicScript == "Oriya"){lang_dict = myTable.oriya_dict;}
else {lang_dict = myTable.english_dict;}
}
else { // modified/extended dictionaries if not modeStrict
if(indicScript == "Devanagari"){lang_dict = devanagari_dict_mod;}
else if(indicScript == "Telugu"){lang_dict = telugu_dict_mod;}
else if(indicScript == "Kannada"){lang_dict = kannada_dict_mod;}
else if(indicScript == "Gujarati"){lang_dict = gujarati_dict_mod;}
else if(indicScript == "Tamil"){lang_dict = tamil_dict_mod;}
else if(indicScript == "Bengali"){lang_dict = bengali_dict_mod;}
else if(indicScript == "Gurmukhi"){lang_dict = gurmukhi_dict_mod;}
else if(indicScript == "Malayalam"){lang_dict = malayalam_dict_mod;}
else if(indicScript == "Oriya"){lang_dict = oriya_dict_mod;}
else {lang_dict = english_dict_mod;}
}
// convert to ITRANS
if((!reverse) && ((encoding_type == "ISO") || (encoding_type == "IAST"))) {
inp_txt = convert2ITRANS(inp_txt, encoding_type);
}
while (indx < inp_txt.length){
// skip space charecter " "
if(inp_txt.substring(indx,indx+6) == " "){
if(vovel_needed_p){
out += lang_dict["VIRAMA"];
}
out += " ";
indx += 6;
word_start_p = true;
vovel_needed_p=0;
continue;
}
[blk, blkLen, Type, vovel_needed_p, insideTag_p] = getNxtIndicChr(lang_dict, inp_txt.substring(indx), modeStrict, word_start_p, vovel_needed_p, insideTag_p, reverse, preferASCIIDigits);
out += blk;
if(Type == "NoMatch"){
//document.write( inp_txt.substring(indx, indx+blkLen)+": ***** NoMatch (blkLen)<br>");
indx += 1;
word_start_p = true;
}
else{
//document.write( inp_txt.substring(indx, indx+blkLen)+": "+blk+" Match (blkLen)<br>");
indx += blkLen;
word_start_p = false;
}
}
if(vovel_needed_p){
out += lang_dict["VIRAMA"];
}
vovel_needed_p=0;
//document.getElementById("out_txt").value=out;
return out;
}
function convert2ITRANS(inp_txt, encoding_type)
{
var insideTag_p = false;
var indx=0;
var out = "";
var blk, blkLen, Type, insideTag_p;
var decoding_dict;
// selecting appropriate dict to convert to ITRANS
if(encoding_type == "ISO") {
decoding_dict = myTable.iso2itrans_dict;
}
else if(encoding_type == "IAST") {
decoding_dict = myTable.iast2itrans_dict;
}
else {
return inp_txt;
}
while (indx < inp_txt.length){
[blk, blkLen, Type, insideTag_p] = convertNextBlk2ITRANS(decoding_dict, inp_txt.substring(indx), insideTag_p);
out += blk;
if(Type == "NoMatch"){
indx += 1;
}
else{
indx += blkLen;
}
}
return out;
}
function convertNextBlk2ITRANS(trans_dict, inp_txt, insideTag_p){
var MAX=2; // *** set this
var debug=0;
var insideTag = insideTag_p;
var Type = "NoMatch"; //default
var out = "";
var blk = "";
var blkLen=MAX;
while(blkLen > 0){
//if(debug){document.write( inp_txt.substring(0, blkLen)+" <br>");}
// selecting block, skip it its a TAG i.e., inside < >
if( (!insideTag) && (inp_txt.charAt(0) == "<") ){
insideTag = true;
break;
}
else if( (insideTag) && (inp_txt.charAt(0) == ">") ){
insideTag = false;
break;
}
else if(insideTag){
break;
}
blk= inp_txt.substring(0, blkLen);
//if(debug){document.write( "<br>blk...:"+blk+" "+word_start+" "+vovel_needed+"<br>");}
if( array_key_exists(blk, trans_dict) ){
Type = "Match";
out += trans_dict[blk];
//if(debug){document.write( "5: "+"-"+blk+" "+trans_dict[blk]);}
break;
}
// No match for the taken block
else{
blkLen -= 1;
}
}
if(Type == "NoMatch"){// no match found
out += inp_txt[0];
}
else{
//if(debug){document.write( "Match "+vovel_needed+"<br>");}
}
//if(debug){document.write( "<br>returning "+out+" "+blkLen+"<br>");}
return [out, blkLen, Type, insideTag];
};
function getNxtIndicChr(lang_dict, inp_txt, modeStrict, word_start_p, vovel_needed_p, insideTag_p, reverse, preferASCIIDigits){
var MAX=4; // *** set this
var debug=0;
var out = "";
var Type = "NoMatch"; //default
var vovel_needed = vovel_needed_p;
var word_start = word_start_p;
var insideTag = insideTag_p;
var blk = "";
var blkLen=MAX;
var iteration = 1; // first time
// decoding charecter-by-charecter in reverse convertion
if(reverse){
blkLen=1;
}
while(blkLen > 0){
//if(debug){document.write( inp_txt.substring(0, blkLen)+" <br>");}
// selecting block, skip it its a TAG i.e., inside < >
if( (!insideTag) && (inp_txt.charAt(0) == "<") ){
insideTag = true;
break;
}
else if( (insideTag) && (inp_txt.charAt(0) == ">") ){
insideTag = false;
break;
}
else if(insideTag){
break;
}
else if(inp_txt.length >= blkLen){ // string is longer than or equal to blkLen
blk= inp_txt.substring(0, blkLen);
}
else if(inp_txt.length > 0){ // string is shorter than blkLen
blk = inp_txt.substring(0);
}
else{ // string is of zero length
break;
}
//if(debug){document.write( "<br>blk...:"+blk+" "+word_start+" "+vovel_needed+"<br>");}
// if not modeStrict, convert the 1st letter of every word to lower-case
if((!modeStrict) && (word_start == true)){
blk = blk.substring(0,1).toLowerCase() + blk.substring(1);
}
// 2nd iteration ==> working case-insensitive
if((!modeStrict) && (iteration == 2)){
blk = blk.toLowerCase();
}
// Accent marks : Do not change any flags for this case, except "Type"
if(array_key_exists(blk, lang_dict["Accent_marks"])){
Type = "Accent";
out += lang_dict["Accent_marks"][blk];
//if(debug){document.write( "0: "+blk+" "+lang_dict["Accent_marks"][blk]+"<br>");}
break;
}
// Independent vowels
/*else if( (reverse || !vovel_needed) // for reverse convertion, vovel_needed condition is not required
// *** This will be lossy translation ***
// e.g., रई -> rii -> री */
else if( (vovel_needed == false)
&& (array_key_exists(blk, lang_dict["Independent_vowels"])) ){
Type = "Independent";
vovel_needed=0;
out += lang_dict["Independent_vowels"][blk];
//if(debug){document.write( "5: "+"-"+blk+" "+lang_dict["Independent_vowels"][blk]);}
break;
}
// Dependent vowels
else if((vovel_needed)
&& (array_key_exists(blk, lang_dict["Dependent_vowel"])) ){
Type = "Vowel";
vovel_needed=0;
out += lang_dict["Dependent_vowel"][blk];
//if(debug){document.write( "7: "+blk+" "+lang_dict["Dependent_vowel"][blk]);}
break;
}
// Consonants
else if(array_key_exists((blk), lang_dict["Consonants"])){
if(vovel_needed){
out += lang_dict["VIRAMA"];
}
Type = "Consonants";
vovel_needed=1;
out += lang_dict["Consonants"][blk];
//if(debug){document.write( "8: "+blk+" "+lang_dict["Consonants"][blk]);}
break;
}
// Others [Do not convert ASCII Digits if option is selected]
else if( !((isASCIIDigit(blk) == true) && (preferASCIIDigits == true))
&& array_key_exists(blk, lang_dict["Others"])){
if(vovel_needed){
out += lang_dict["VIRAMA"];
}
Type = "Other";
vovel_needed = 0;
// nullify "a"+".h" in reverse conversion
if(lang_dict["Others"][blk] == ".h"){
out = out.substring(0, out.length-1);
}
else {
out += lang_dict["Others"][blk];
}
//if(debug){document.write( "9: "+blk+" "+lang_dict["Others"][blk]+"<br>");}
break;
}
// No match for the taken block
else{
// 2nd iteration ==> repeat as case-insensitive
if((!modeStrict) && (iteration == 1)){
iteration += 1;
continue;
}
blkLen -= 1;
}
}
if(Type == "NoMatch"){ // no match found
if(vovel_needed){
out += lang_dict["VIRAMA"];
}
//if(debug){document.write( "No match "+vovel_needed+"<br>");}
out += inp_txt[0];
word_start = true;
vovel_needed=0;
}
else{
//if(debug){document.write( "Match "+vovel_needed+"<br>");}
word_start = false;
}
//if(debug){document.write( "<br>returning "+out+" "+blkLen+" "+Type+" "+vovel_needed+"<br>");}
return [out, blkLen, Type, vovel_needed, insideTag];
};
function array_key_exists(key, dict)
{
if (key in dict) return true;
else return false;
}
// to extend dictionaries
function extend(org_dict) {
var ext_dict = {
"Independent_vowels" : {
"ee" : org_dict['Independent_vowels']["E"]
},
"Dependent_vowel" : {
"ee" : org_dict['Dependent_vowel']["E"]
},
"Consonants" : {
"c" : org_dict['Consonants']["k"],
"f" : org_dict['Consonants']["ph"],
"z" : org_dict['Consonants']["j"],
// Modifications eto IAST/ITRANS
"t" : org_dict['Consonants']["T"],
"tt" : org_dict['Consonants']["Th"],
"th" : org_dict['Consonants']["t"],
"tth" : org_dict['Consonants']["th"],
"d" : org_dict['Consonants']["D"],
"dd" : org_dict['Consonants']["Dh"],
"dh" : org_dict['Consonants']["d"],
"ddh" : org_dict['Consonants']["dh"]
}
};
var new_dict = cloneDict(org_dict);
for (var property in ext_dict) {
for(var key in ext_dict[property]){
if (ext_dict[property][key]) {
new_dict[property][key] = ext_dict[property][key];
}
}
}
return new_dict;
};
// clone dictionaries
function cloneDict(dict) {
if(typeof(dict) != 'object') return dict;
if(dict == null) return dict;
var new_dict = new Object();
for(var property in dict){
new_dict[property] = cloneDict(dict[property]);
}
return new_dict;
}
// to extend dictionaries
function reverse(org_dict) {
var new_dict = new Object();
for (var property in org_dict) {
new_dict[property] = cloneRevDict(org_dict[property]);
}
// nullify the adding of "VIRAMA"
new_dict["VIRAMA"] = "a";
return new_dict;
};
// clone dictionaries
function cloneRevDict(dict) {
if(typeof(dict) != 'object') return dict;
if(dict == null) return dict;
var new_dict = new Object();
for(var property in dict){
new_dict[dict[property]] = property;
}
return new_dict;
}
function isASCIIDigit(n) {
return ((n>=0) && (n<=9));
}
function convert2Katapayadi(inp_txt) {
var indx=0, dict_indx;
var out = "";
var insideTag = false;
while (indx < inp_txt.length){
// skip space charecter " "
if(inp_txt.substring(indx,indx+6) == " "){
out += " ";
indx += 6;
continue;
}
else if( (!insideTag) && (inp_txt.charAt(indx) == "<") ){
insideTag = true;
out += inp_txt.charAt(indx);
++indx;
}
else if( (insideTag) && (inp_txt.charAt(indx) == ">") ){
insideTag = false;
out += inp_txt.charAt(indx);
++indx;
}
else if(insideTag){
out += inp_txt.charAt(indx);
++indx;
}
else{
for(dict_indx=0; dict_indx<=9; ++dict_indx) {
// if next charecter is VIRAMA, this char should be neglected
if((katapayadi_dict[dict_indx].indexOf(inp_txt.charAt(indx)) >= 0) &&
(katapayadi_dict[10].indexOf(inp_txt.charAt(indx+1)) == -1)){
out += dict_indx.toString();
break;
}
}
++indx;
}
}
return out;
}
exports.init = init;
exports.detectLanguage = detectLanguage;
exports.convert2IndicScript = convert2IndicScript;
exports.convert2Katapayadi = convert2Katapayadi;<|fim▁end|> | kannada_dict_rev = reverse(myTable.kannada_dict);
gujarati_dict_rev = reverse(myTable.gujarati_dict);
tamil_dict_rev = reverse(myTable.tamil_dict);
bengali_dict_rev = reverse(myTable.bengali_dict); |
<|file_name|>test_codegen.py<|end_file_name|><|fim▁begin|>from StringIO import StringIO
from sympy.core import symbols, Eq, pi, Catalan, Lambda, Dummy
from sympy.utilities.codegen import CCodeGen, Routine, InputArgument, Result, \
CodeGenError, FCodeGen, codegen, CodeGenArgumentListError, OutputArgument, \
InOutArgument
from sympy.utilities.pytest import XFAIL, raises
from sympy.utilities.lambdify import implemented_function
# import test:
#FIXME: Fails due to circular import in with core
# from sympy import codegen
#FIXME-py3k: Many AssertionErrors here, perhaps related to unicode;
#FIXME-py3k: some are just due to an extra space at the end of the string
def get_string(dump_fn, routines, prefix="file", header=False, empty=False):
"""Wrapper for dump_fn. dump_fn writes its results to a stream object and
this wrapper returns the contents of that stream as a string. This
auxiliary function is used by many tests below.
The header and the empty lines are not generator to facilitate the
testing of the output.
"""
output = StringIO()
dump_fn(routines, output, prefix, header, empty)
source = output.getvalue()
output.close()
return source
def test_Routine_argument_order():
a, x, y, z = symbols('a x y z')
expr = (x+y)*z
raises(CodeGenArgumentListError, 'Routine("test", expr, argument_sequence=[z, x])')
raises(CodeGenArgumentListError, 'Routine("test", Eq(a, expr), argument_sequence=[z, x, y])')
r = Routine('test', Eq(a, expr), argument_sequence=[z, x, a, y])
assert [ arg.name for arg in r.arguments ] == [z, x, a, y]
assert [ type(arg) for arg in r.arguments ] == [
InputArgument, InputArgument, OutputArgument, InputArgument ]
r = Routine('test', Eq(z, expr), argument_sequence=[z, x, y])
assert [ type(arg) for arg in r.arguments ] == [
InOutArgument, InputArgument, InputArgument ]
from sympy.tensor import IndexedBase, Idx
A, B = map(IndexedBase, ['A', 'B'])
m = symbols('m', integer=True)
i = Idx('i', m)
r = Routine('test', Eq(A[i], B[i]), argument_sequence=[B, A, m])
assert [ arg.name for arg in r.arguments ] == [B.label, A.label, m]
def test_empty_c_code():
code_gen = CCodeGen()
source = get_string(code_gen.dump_c, [])
assert source == "#include \"file.h\"\n#include <math.h>\n"
def test_empty_c_code_with_comment():
code_gen = CCodeGen()
source = get_string(code_gen.dump_c, [], header=True)
assert source[:82] == (
"/******************************************************************************\n *"
)
# " Code generated with sympy 0.7.1 "
assert source[158:] == ( "*\n"
" * *\n"
" * See http://www.sympy.org/ for more information. *\n"
" * *\n"
" * This file is part of 'project' *\n"
" ******************************************************************************/\n"
"#include \"file.h\"\n"
"#include <math.h>\n"
)
def test_empty_c_header():
code_gen = CCodeGen()
source = get_string(code_gen.dump_h, [])
assert source == "#ifndef PROJECT__FILE__H\n#define PROJECT__FILE__H\n#endif\n"
def test_simple_c_code():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
routine = Routine("test", expr)
code_gen = CCodeGen()
source = get_string(code_gen.dump_c, [routine])
expected = (
"#include \"file.h\"\n"
"#include <math.h>\n"
"double test(double x, double y, double z) {\n"
" return z*(x + y);\n"
"}\n"
)
assert source == expected
def test_numbersymbol_c_code():
routine = Routine("test", pi**Catalan)
code_gen = CCodeGen()
source = get_string(code_gen.dump_c, [routine])
expected = (
"#include \"file.h\"\n"
"#include <math.h>\n"
"double test() {\n"
" double const Catalan = 0.915965594177219;\n"
" return pow(M_PI, Catalan);\n"
"}\n"
)
assert source == expected
def test_c_code_argument_order():
x,y,z = symbols('x,y,z')
expr = x + y
routine = Routine("test", expr, argument_sequence=[z, x, y])
code_gen = CCodeGen()
source = get_string(code_gen.dump_c, [routine])
expected = (
"#include \"file.h\"\n"
"#include <math.h>\n"
"double test(double z, double x, double y) {\n"
" return x + y;\n"
"}\n"
)
assert source == expected
def test_simple_c_header():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
routine = Routine("test", expr)
code_gen = CCodeGen()
source = get_string(code_gen.dump_h, [routine])
expected = (
"#ifndef PROJECT__FILE__H\n"
"#define PROJECT__FILE__H\n"
"double test(double x, double y, double z);\n"
"#endif\n"
)
assert source == expected
def test_simple_c_codegen():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
result = codegen(("test", (x+y)*z), "C", "file", header=False, empty=False)
expected = [
("file.c",
"#include \"file.h\"\n"
"#include <math.h>\n"
"double test(double x, double y, double z) {\n"
" return z*(x + y);\n"
"}\n"),
("file.h",
"#ifndef PROJECT__FILE__H\n"
"#define PROJECT__FILE__H\n"
"double test(double x, double y, double z);\n"
"#endif\n")
]
assert result == expected
def test_multiple_results_c():
x,y,z = symbols('x,y,z')
expr1 = (x+y)*z
expr2 = (x-y)*z
routine = Routine(
"test",
[expr1,expr2]
)
code_gen = CCodeGen()
raises(CodeGenError, 'get_string(code_gen.dump_h, [routine])')
def test_no_results_c():
raises(ValueError, 'Routine("test", [])')
def test_ansi_math1_codegen():
# not included: log10
from sympy import (acos, asin, atan, ceiling, cos, cosh, floor, log, ln,
sin, sinh, sqrt, tan, tanh, N, Abs)
x = symbols('x')
name_expr = [
("test_fabs", Abs(x)),
("test_acos", acos(x)),
("test_asin", asin(x)),
("test_atan", atan(x)),
("test_ceil", ceiling(x)),
("test_cos", cos(x)),
("test_cosh", cosh(x)),
("test_floor", floor(x)),
("test_log", log(x)),
("test_ln", ln(x)),
("test_sin", sin(x)),
("test_sinh", sinh(x)),
("test_sqrt", sqrt(x)),
("test_tan", tan(x)),
("test_tanh", tanh(x)),
]
result = codegen(name_expr, "C", "file", header=False, empty=False)
assert result[0][0] == "file.c"
assert result[0][1] == (
'#include "file.h"\n#include <math.h>\n'
'double test_fabs(double x) {\n return fabs(x);\n}\n'
'double test_acos(double x) {\n return acos(x);\n}\n'
'double test_asin(double x) {\n return asin(x);\n}\n'
'double test_atan(double x) {\n return atan(x);\n}\n'
'double test_ceil(double x) {\n return ceil(x);\n}\n'
'double test_cos(double x) {\n return cos(x);\n}\n'
'double test_cosh(double x) {\n return cosh(x);\n}\n'
'double test_floor(double x) {\n return floor(x);\n}\n'
'double test_log(double x) {\n return log(x);\n}\n'
'double test_ln(double x) {\n return log(x);\n}\n'
'double test_sin(double x) {\n return sin(x);\n}\n'
'double test_sinh(double x) {\n return sinh(x);\n}\n'
'double test_sqrt(double x) {\n return sqrt(x);\n}\n'
'double test_tan(double x) {\n return tan(x);\n}\n'
'double test_tanh(double x) {\n return tanh(x);\n}\n'
)
assert result[1][0] == "file.h"
assert result[1][1] == (
'#ifndef PROJECT__FILE__H\n#define PROJECT__FILE__H\n'
'double test_fabs(double x);\ndouble test_acos(double x);\n'
'double test_asin(double x);\ndouble test_atan(double x);\n'
'double test_ceil(double x);\ndouble test_cos(double x);\n'
'double test_cosh(double x);\ndouble test_floor(double x);\n'
'double test_log(double x);\ndouble test_ln(double x);\n'
'double test_sin(double x);\ndouble test_sinh(double x);\n'
'double test_sqrt(double x);\ndouble test_tan(double x);\n'
'double test_tanh(double x);\n#endif\n'
)
def test_ansi_math2_codegen():
# not included: frexp, ldexp, modf, fmod
from sympy import atan2, N
x, y = symbols('x,y')
name_expr = [
("test_atan2", atan2(x,y)),
("test_pow", x**y),
]
result = codegen(name_expr, "C", "file", header=False, empty=False)
assert result[0][0] == "file.c"
assert result[0][1] == (
'#include "file.h"\n#include <math.h>\n'
'double test_atan2(double x, double y) {\n return atan2(x, y);\n}\n'
'double test_pow(double x, double y) {\n return pow(x, y);\n}\n'
)
assert result[1][0] == "file.h"
assert result[1][1] == (
'#ifndef PROJECT__FILE__H\n#define PROJECT__FILE__H\n'
'double test_atan2(double x, double y);\n'
'double test_pow(double x, double y);\n'
'#endif\n'
)
def test_complicated_codegen():
from sympy import sin, cos, tan, N
x,y,z = symbols('x,y,z')
name_expr = [
("test1", ((sin(x)+cos(y)+tan(z))**7).expand()),
("test2", cos(cos(cos(cos(cos(cos(cos(cos(x+y+z))))))))),
]
result = codegen(name_expr, "C", "file", header=False, empty=False)
assert result[0][0] == "file.c"
assert result[0][1] == (
'#include "file.h"\n#include <math.h>\n'
'double test1(double x, double y, double z) {\n'
' return '
'pow(sin(x), 7) + '
'7*pow(sin(x), 6)*cos(y) + '
'7*pow(sin(x), 6)*tan(z) + '
'21*pow(sin(x), 5)*pow(cos(y), 2) + '
'42*pow(sin(x), 5)*cos(y)*tan(z) + '
'21*pow(sin(x), 5)*pow(tan(z), 2) + '
'35*pow(sin(x), 4)*pow(cos(y), 3) + '
'105*pow(sin(x), 4)*pow(cos(y), 2)*tan(z) + '
'105*pow(sin(x), 4)*cos(y)*pow(tan(z), 2) + '
'35*pow(sin(x), 4)*pow(tan(z), 3) + '
'35*pow(sin(x), 3)*pow(cos(y), 4) + '
'140*pow(sin(x), 3)*pow(cos(y), 3)*tan(z) + '
'210*pow(sin(x), 3)*pow(cos(y), 2)*pow(tan(z), 2) + '
'140*pow(sin(x), 3)*cos(y)*pow(tan(z), 3) + '
'35*pow(sin(x), 3)*pow(tan(z), 4) + '
'21*pow(sin(x), 2)*pow(cos(y), 5) + '
'105*pow(sin(x), 2)*pow(cos(y), 4)*tan(z) + '
'210*pow(sin(x), 2)*pow(cos(y), 3)*pow(tan(z), 2) + '
'210*pow(sin(x), 2)*pow(cos(y), 2)*pow(tan(z), 3) + '
'105*pow(sin(x), 2)*cos(y)*pow(tan(z), 4) + '
'21*pow(sin(x), 2)*pow(tan(z), 5) + '
'7*sin(x)*pow(cos(y), 6) + '
'42*sin(x)*pow(cos(y), 5)*tan(z) + '
'105*sin(x)*pow(cos(y), 4)*pow(tan(z), 2) + '
'140*sin(x)*pow(cos(y), 3)*pow(tan(z), 3) + '
'105*sin(x)*pow(cos(y), 2)*pow(tan(z), 4) + '
'42*sin(x)*cos(y)*pow(tan(z), 5) + '
'7*sin(x)*pow(tan(z), 6) + '
'pow(cos(y), 7) + '
'7*pow(cos(y), 6)*tan(z) + '
'21*pow(cos(y), 5)*pow(tan(z), 2) + '
'35*pow(cos(y), 4)*pow(tan(z), 3) + '
'35*pow(cos(y), 3)*pow(tan(z), 4) + '
'21*pow(cos(y), 2)*pow(tan(z), 5) + '
'7*cos(y)*pow(tan(z), 6) + '
'pow(tan(z), 7);\n'
'}\n'
'double test2(double x, double y, double z) {\n'
' return cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))));\n'
'}\n'
)
assert result[1][0] == "file.h"
assert result[1][1] == (
'#ifndef PROJECT__FILE__H\n'
'#define PROJECT__FILE__H\n'
'double test1(double x, double y, double z);\n'
'double test2(double x, double y, double z);\n'
'#endif\n'
)
def test_loops_c():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n,m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
(f1, code), (f2, interface) = codegen(
('matrix_vector', Eq(y[i], A[i, j]*x[j])), "C", "file", header=False, empty=False)
assert f1 == 'file.c'
expected = (
'#include "file.h"\n'
'#include <math.h>\n'
'void matrix_vector(double *A, int m, int n, double *x, double *y) {\n'
' for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
' }\n'
' for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = y[i] + %(rhs)s;\n'
' }\n'
' }\n'
'}\n'
)
assert (code == expected %{'rhs': 'A[i*n + j]*x[j]'} or
code == expected %{'rhs': 'A[j + i*n]*x[j]'} or<|fim▁hole|> assert interface == (
'#ifndef PROJECT__FILE__H\n'
'#define PROJECT__FILE__H\n'
'void matrix_vector(double *A, int m, int n, double *x, double *y);\n'
'#endif\n'
)
def test_dummy_loops_c():
from sympy.tensor import IndexedBase, Idx
# the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx(i, m)
expected = (
'#include "file.h"\n'
'#include <math.h>\n'
'void test_dummies(int m_%(mno)i, double *x, double *y) {\n'
' for (int i_%(ino)i=0; i_%(ino)i<m_%(mno)i; i_%(ino)i++){\n'
' y[i_%(ino)i] = x[i_%(ino)i];\n'
' }\n'
'}\n'
) % {'ino': i.label.dummy_index, 'mno': m.dummy_index}
r = Routine('test_dummies', Eq(y[i], x[i]))
c = CCodeGen()
code = get_string(c.dump_c, [r])
assert code == expected
def test_partial_loops_c():
# check that loop boundaries are determined by Idx, and array strides
# determined by shape of IndexedBase object.
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n,m,o,p = symbols('n m o p', integer=True)
A = IndexedBase('A', shape=(m, p))
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', (o, m - 5)) # Note: bounds are inclusive
j = Idx('j', n) # dimension n corresponds to bounds (0, n - 1)
(f1, code), (f2, interface) = codegen(
('matrix_vector', Eq(y[i], A[i, j]*x[j])), "C", "file", header=False, empty=False)
assert f1 == 'file.c'
expected = (
'#include "file.h"\n'
'#include <math.h>\n'
'void matrix_vector(double *A, int m, int n, int o, int p, double *x, double *y) {\n'
' for (int i=o; i<%(upperi)s; i++){\n'
' y[i] = 0;\n'
' }\n'
' for (int i=o; i<%(upperi)s; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = y[i] + %(rhs)s;\n'
' }\n'
' }\n'
'}\n'
) % {'upperi': m - 4, 'rhs': '%(rhs)s'}
assert (code == expected %{'rhs': 'A[i*p + j]*x[j]'} or
code == expected %{'rhs': 'A[j + i*p]*x[j]'} or
code == expected %{'rhs': 'x[j]*A[i*p + j]'} or
code == expected %{'rhs': 'x[j]*A[j + i*p]'})
assert f2 == 'file.h'
assert interface == (
'#ifndef PROJECT__FILE__H\n'
'#define PROJECT__FILE__H\n'
'void matrix_vector(double *A, int m, int n, int o, int p, double *x, double *y);\n'
'#endif\n'
)
def test_output_arg_c():
from sympy import sin, cos, Equality
x, y, z = symbols("x,y,z")
r = Routine("foo", [Equality(y, sin(x)), cos(x)])
c = CCodeGen()
result = c.write([r], "test", header=False, empty=False)
assert result[0][0] == "test.c"
expected = (
'#include "test.h"\n'
'#include <math.h>\n'
'double foo(double x, double &y) {\n'
' y = sin(x);\n'
' return cos(x);\n'
'}\n'
)
assert result[0][1] == expected
def test_empty_f_code():
code_gen = FCodeGen()
source = get_string(code_gen.dump_f95, [])
assert source == ""
def test_empty_f_code_with_header():
code_gen = FCodeGen()
source = get_string(code_gen.dump_f95, [], header=True)
assert source[:82] == (
"!******************************************************************************\n!*"
)
# " Code generated with sympy 0.7.1 "
assert source[158:] == ( "*\n"
"!* *\n"
"!* See http://www.sympy.org/ for more information. *\n"
"!* *\n"
"!* This file is part of 'project' *\n"
"!******************************************************************************\n"
)
def test_empty_f_header():
code_gen = FCodeGen()
source = get_string(code_gen.dump_h, [])
assert source == ""
def test_simple_f_code():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
routine = Routine("test", expr)
code_gen = FCodeGen()
source = get_string(code_gen.dump_f95, [routine])
expected = (
"REAL*8 function test(x, y, z)\n"
"implicit none\n"
"REAL*8, intent(in) :: x\n"
"REAL*8, intent(in) :: y\n"
"REAL*8, intent(in) :: z\n"
"test = z*(x + y)\n"
"end function\n"
)
assert source == expected
def test_numbersymbol_f_code():
routine = Routine("test", pi**Catalan)
code_gen = FCodeGen()
source = get_string(code_gen.dump_f95, [routine])
expected = (
"REAL*8 function test()\n"
"implicit none\n"
"REAL*8, parameter :: Catalan = 0.915965594177219d0\n"
"REAL*8, parameter :: pi = 3.14159265358979d0\n"
"test = pi**Catalan\n"
"end function\n"
)
assert source == expected
def test_f_code_argument_order():
x,y,z = symbols('x,y,z')
expr = x + y
routine = Routine("test", expr, argument_sequence=[z, x, y])
code_gen = FCodeGen()
source = get_string(code_gen.dump_f95, [routine])
expected = (
"REAL*8 function test(z, x, y)\n"
"implicit none\n"
"REAL*8, intent(in) :: z\n"
"REAL*8, intent(in) :: x\n"
"REAL*8, intent(in) :: y\n"
"test = x + y\n"
"end function\n"
)
assert source == expected
def test_simple_f_header():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
routine = Routine("test", expr)
code_gen = FCodeGen()
source = get_string(code_gen.dump_h, [routine])
expected = (
"interface\n"
"REAL*8 function test(x, y, z)\n"
"implicit none\n"
"REAL*8, intent(in) :: x\n"
"REAL*8, intent(in) :: y\n"
"REAL*8, intent(in) :: z\n"
"end function\n"
"end interface\n"
)
assert source == expected
def test_simple_f_codegen():
x,y,z = symbols('x,y,z')
expr = (x+y)*z
result = codegen(("test", (x+y)*z), "F95", "file", header=False, empty=False)
expected = [
("file.f90",
"REAL*8 function test(x, y, z)\n"
"implicit none\n"
"REAL*8, intent(in) :: x\n"
"REAL*8, intent(in) :: y\n"
"REAL*8, intent(in) :: z\n"
"test = z*(x + y)\n"
"end function\n"),
("file.h",
"interface\n"
"REAL*8 function test(x, y, z)\n"
"implicit none\n"
"REAL*8, intent(in) :: x\n"
"REAL*8, intent(in) :: y\n"
"REAL*8, intent(in) :: z\n"
"end function\n"
"end interface\n")
]
assert result == expected
def test_multiple_results_f():
x,y,z = symbols('x,y,z')
expr1 = (x+y)*z
expr2 = (x-y)*z
routine = Routine(
"test",
[expr1,expr2]
)
code_gen = FCodeGen()
raises(CodeGenError, 'get_string(code_gen.dump_h, [routine])')
def test_no_results_f():
raises(ValueError, 'Routine("test", [])')
def test_intrinsic_math_codegen():
# not included: log10
from sympy import (acos, asin, atan, ceiling, cos, cosh, floor, log, ln,
sin, sinh, sqrt, tan, tanh, N, Abs)
x = symbols('x')
name_expr = [
("test_abs", Abs(x)),
("test_acos", acos(x)),
("test_asin", asin(x)),
("test_atan", atan(x)),
# ("test_ceil", ceiling(x)),
("test_cos", cos(x)),
("test_cosh", cosh(x)),
# ("test_floor", floor(x)),
("test_log", log(x)),
("test_ln", ln(x)),
("test_sin", sin(x)),
("test_sinh", sinh(x)),
("test_sqrt", sqrt(x)),
("test_tan", tan(x)),
("test_tanh", tanh(x)),
]
result = codegen(name_expr, "F95", "file", header=False, empty=False)
assert result[0][0] == "file.f90"
expected = (
'REAL*8 function test_abs(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_abs = Abs(x)\n'
'end function\n'
'REAL*8 function test_acos(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_acos = acos(x)\n'
'end function\n'
'REAL*8 function test_asin(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_asin = asin(x)\n'
'end function\n'
'REAL*8 function test_atan(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_atan = atan(x)\n'
'end function\n'
'REAL*8 function test_cos(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_cos = cos(x)\n'
'end function\n'
'REAL*8 function test_cosh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_cosh = cosh(x)\n'
'end function\n'
'REAL*8 function test_log(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_log = log(x)\n'
'end function\n'
'REAL*8 function test_ln(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_ln = log(x)\n'
'end function\n'
'REAL*8 function test_sin(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_sin = sin(x)\n'
'end function\n'
'REAL*8 function test_sinh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_sinh = sinh(x)\n'
'end function\n'
'REAL*8 function test_sqrt(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_sqrt = sqrt(x)\n'
'end function\n'
'REAL*8 function test_tan(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_tan = tan(x)\n'
'end function\n'
'REAL*8 function test_tanh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'test_tanh = tanh(x)\n'
'end function\n'
)
assert result[0][1] == expected
assert result[1][0] == "file.h"
expected = (
'interface\n'
'REAL*8 function test_abs(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_acos(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_asin(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_atan(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_cos(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_cosh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_log(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_ln(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_sin(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_sinh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_sqrt(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_tan(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_tanh(x)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'end function\n'
'end interface\n'
)
assert result[1][1] == expected
def test_intrinsic_math2_codegen():
# not included: frexp, ldexp, modf, fmod
from sympy import atan2, N
x, y = symbols('x,y')
name_expr = [
("test_atan2", atan2(x,y)),
("test_pow", x**y),
]
result = codegen(name_expr, "F95", "file", header=False, empty=False)
assert result[0][0] == "file.f90"
expected = (
'REAL*8 function test_atan2(x, y)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'test_atan2 = atan2(x, y)\n'
'end function\n'
'REAL*8 function test_pow(x, y)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'test_pow = x**y\n'
'end function\n'
)
assert result[0][1] == expected
assert result[1][0] == "file.h"
expected = (
'interface\n'
'REAL*8 function test_atan2(x, y)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test_pow(x, y)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'end function\n'
'end interface\n'
)
assert result[1][1] == expected
def test_complicated_codegen_f95():
from sympy import sin, cos, tan, N
x,y,z = symbols('x,y,z')
name_expr = [
("test1", ((sin(x)+cos(y)+tan(z))**7).expand()),
("test2", cos(cos(cos(cos(cos(cos(cos(cos(x+y+z))))))))),
]
result = codegen(name_expr, "F95", "file", header=False, empty=False)
assert result[0][0] == "file.f90"
expected = (
'REAL*8 function test1(x, y, z)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'REAL*8, intent(in) :: z\n'
'test1 = sin(x)**7 + 7*sin(x)**6*cos(y) + 7*sin(x)**6*tan(z) + 21*sin(x) &\n'
' **5*cos(y)**2 + 42*sin(x)**5*cos(y)*tan(z) + 21*sin(x)**5*tan(z) &\n'
' **2 + 35*sin(x)**4*cos(y)**3 + 105*sin(x)**4*cos(y)**2*tan(z) + &\n'
' 105*sin(x)**4*cos(y)*tan(z)**2 + 35*sin(x)**4*tan(z)**3 + 35*sin( &\n'
' x)**3*cos(y)**4 + 140*sin(x)**3*cos(y)**3*tan(z) + 210*sin(x)**3* &\n'
' cos(y)**2*tan(z)**2 + 140*sin(x)**3*cos(y)*tan(z)**3 + 35*sin(x) &\n'
' **3*tan(z)**4 + 21*sin(x)**2*cos(y)**5 + 105*sin(x)**2*cos(y)**4* &\n'
' tan(z) + 210*sin(x)**2*cos(y)**3*tan(z)**2 + 210*sin(x)**2*cos(y) &\n'
' **2*tan(z)**3 + 105*sin(x)**2*cos(y)*tan(z)**4 + 21*sin(x)**2*tan &\n'
' (z)**5 + 7*sin(x)*cos(y)**6 + 42*sin(x)*cos(y)**5*tan(z) + 105* &\n'
' sin(x)*cos(y)**4*tan(z)**2 + 140*sin(x)*cos(y)**3*tan(z)**3 + 105 &\n'
' *sin(x)*cos(y)**2*tan(z)**4 + 42*sin(x)*cos(y)*tan(z)**5 + 7*sin( &\n'
' x)*tan(z)**6 + cos(y)**7 + 7*cos(y)**6*tan(z) + 21*cos(y)**5*tan( &\n'
' z)**2 + 35*cos(y)**4*tan(z)**3 + 35*cos(y)**3*tan(z)**4 + 21*cos( &\n'
' y)**2*tan(z)**5 + 7*cos(y)*tan(z)**6 + tan(z)**7\n'
'end function\n'
'REAL*8 function test2(x, y, z)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'REAL*8, intent(in) :: z\n'
'test2 = cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))\n'
'end function\n'
)
assert result[0][1] == expected
assert result[1][0] == "file.h"
expected = (
'interface\n'
'REAL*8 function test1(x, y, z)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'REAL*8, intent(in) :: z\n'
'end function\n'
'end interface\n'
'interface\n'
'REAL*8 function test2(x, y, z)\n'
'implicit none\n'
'REAL*8, intent(in) :: x\n'
'REAL*8, intent(in) :: y\n'
'REAL*8, intent(in) :: z\n'
'end function\n'
'end interface\n'
)
assert result[1][1] == expected
def test_loops():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m = symbols('n,m', integer=True)
A, x, y = map(IndexedBase, 'Axy')
i = Idx('i', m)
j = Idx('j', n)
(f1, code), (f2, interface) = codegen(
('matrix_vector', Eq(y[i], A[i, j]*x[j])), "F95", "file", header=False, empty=False)
assert f1 == 'file.f90'
expected = (
'subroutine matrix_vector(A, m, n, x, y)\n'
'implicit none\n'
'INTEGER*4, intent(in) :: m\n'
'INTEGER*4, intent(in) :: n\n'
'REAL*8, intent(in), dimension(1:m, 1:n) :: A\n'
'REAL*8, intent(in), dimension(1:n) :: x\n'
'REAL*8, intent(out), dimension(1:m) :: y\n'
'INTEGER*4 :: i\n'
'INTEGER*4 :: j\n'
'do i = 1, m\n'
' y(i) = 0\n'
'end do\n'
'do i = 1, m\n'
' do j = 1, n\n'
' y(i) = y(i) + %(rhs)s\n'
' end do\n'
'end do\n'
'end subroutine\n'
) % {'rhs': 'A(i, j)*x(j)'}
assert expected == code
assert f2 == 'file.h'
assert interface == (
'interface\n'
'subroutine matrix_vector(A, m, n, x, y)\n'
'implicit none\n'
'INTEGER*4, intent(in) :: m\n'
'INTEGER*4, intent(in) :: n\n'
'REAL*8, intent(in), dimension(1:m, 1:n) :: A\n'
'REAL*8, intent(in), dimension(1:n) :: x\n'
'REAL*8, intent(out), dimension(1:m) :: y\n'
'end subroutine\n'
'end interface\n'
)
def test_dummy_loops_f95():
from sympy.tensor import IndexedBase, Idx
# the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx(i, m)
expected = (
'subroutine test_dummies(m_%(mcount)i, x, y)\n'
'implicit none\n'
'INTEGER*4, intent(in) :: m_%(mcount)i\n'
'REAL*8, intent(in), dimension(1:m_%(mcount)i) :: x\n'
'REAL*8, intent(out), dimension(1:m_%(mcount)i) :: y\n'
'INTEGER*4 :: i_%(icount)i\n'
'do i_%(icount)i = 1, m_%(mcount)i\n'
' y(i_%(icount)i) = x(i_%(icount)i)\n'
'end do\n'
'end subroutine\n'
) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index}
r = Routine('test_dummies', Eq(y[i], x[i]))
c = FCodeGen()
code = get_string(c.dump_f95, [r])
assert code == expected
def test_loops_InOut():
    # y appears on both sides of the assignment (y = y + A*x), so the
    # generated Fortran must declare it with intent(inout).
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    i,j,n,m = symbols('i,j,n,m', integer=True)
    A,x,y = symbols('A,x,y')
    A = IndexedBase(A)[Idx(i, m), Idx(j, n)]
    x = IndexedBase(x)[Idx(j, n)]
    y = IndexedBase(y)[Idx(i, m)]
    (f1, code), (f2, interface) = codegen(
        ('matrix_vector', Eq(y, y + A*x)), "F95", "file", header=False, empty=False)
    assert f1 == 'file.f90'
    expected = (
        'subroutine matrix_vector(A, m, n, x, y)\n'
        'implicit none\n'
        'INTEGER*4, intent(in) :: m\n'
        'INTEGER*4, intent(in) :: n\n'
        'REAL*8, intent(in), dimension(1:m, 1:n) :: A\n'
        'REAL*8, intent(in), dimension(1:n) :: x\n'
        'REAL*8, intent(inout), dimension(1:m) :: y\n'
        'INTEGER*4 :: i\n'
        'INTEGER*4 :: j\n'
        'do i = 1, m\n'
        '   do j = 1, n\n'
        '      y(i) = y(i) + %(rhs)s\n'
        '   end do\n'
        'end do\n'
        'end subroutine\n'
    )
    # The factor ordering in the product is not deterministic, accept both.
    assert (code == expected % {'rhs': 'A(i, j)*x(j)'} or
            code == expected % {'rhs': 'x(j)*A(i, j)'})
    assert f2 == 'file.h'
    assert interface == (
        'interface\n'
        'subroutine matrix_vector(A, m, n, x, y)\n'
        'implicit none\n'
        'INTEGER*4, intent(in) :: m\n'
        'INTEGER*4, intent(in) :: n\n'
        'REAL*8, intent(in), dimension(1:m, 1:n) :: A\n'
        'REAL*8, intent(in), dimension(1:n) :: x\n'
        'REAL*8, intent(inout), dimension(1:m) :: y\n'
        'end subroutine\n'
        'end interface\n'
    )
def test_partial_loops_f():
    # check that loop boundaries are determined by Idx, and array strides
    # determined by shape of IndexedBase object.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n,m,o,p = symbols('n m o p', integer=True)
    # A is declared with shape (m, p) although the loops only cover part of it.
    A = IndexedBase('A', shape=(m, p))
    x = IndexedBase('x')
    y = IndexedBase('y')
    i = Idx('i', (o, m - 5))  # Note: bounds are inclusive
    j = Idx('j', n)           # dimension n corresponds to bounds (0, n - 1)
    (f1, code), (f2, interface) = codegen(
        ('matrix_vector', Eq(y[i], A[i, j]*x[j])), "F95", "file", header=False, empty=False)
    expected = (
        'subroutine matrix_vector(A, m, n, o, p, x, y)\n'
        'implicit none\n'
        'INTEGER*4, intent(in) :: m\n'
        'INTEGER*4, intent(in) :: n\n'
        'INTEGER*4, intent(in) :: o\n'
        'INTEGER*4, intent(in) :: p\n'
        'REAL*8, intent(in), dimension(1:m, 1:p) :: A\n'
        'REAL*8, intent(in), dimension(1:n) :: x\n'
        'REAL*8, intent(out), dimension(1:%(iup-ilow)s) :: y\n'
        'INTEGER*4 :: i\n'
        'INTEGER*4 :: j\n'
        'do i = %(ilow)s, %(iup)s\n'
        '   y(i) = 0\n'
        'end do\n'
        'do i = %(ilow)s, %(iup)s\n'
        '   do j = 1, n\n'
        '      y(i) = y(i) + %(rhs)s\n'
        '   end do\n'
        'end do\n'
        'end subroutine\n'
    ) % {
        # Fortran uses 1-based, inclusive bounds, hence the shifts below.
        'rhs': 'A(i, j)*x(j)',
        'iup': str(m - 4),
        'ilow': str(1+o),
        'iup-ilow': str(m - 4 -o)
    }
    assert expected == code
def test_output_arg_f():
    """An expression without a left-hand side becomes the Fortran function
    result, while an Equality produces an intent(out) argument."""
    from sympy import sin, cos, Equality
    x, y, z = symbols("x,y,z")
    routine = Routine("foo", [Equality(y, sin(x)), cos(x)])
    generator = FCodeGen()
    files = generator.write([routine], "test", header=False, empty=False)
    source_name, source_code = files[0]
    assert source_name == "test.f90"
    expected = (
        'REAL*8 function foo(x, y)\n'
        'implicit none\n'
        'REAL*8, intent(in) :: x\n'
        'REAL*8, intent(out) :: y\n'
        'y = sin(x)\n'
        'foo = cos(x)\n'
        'end function\n'
    )
    assert source_code == expected
def test_inline_function():
    # An implemented_function must be expanded inline in the generated code,
    # i.e. func(x(i)) becomes (1 + x(i))*x(i) in the loop body.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n,m = symbols('n m', integer=True)
    A, x, y = map(IndexedBase, 'Axy')
    i = Idx('i', m)
    j = Idx('j', n)
    p = FCodeGen()
    func = implemented_function('func', Lambda(n, n*(n+1)))
    routine = Routine('test_inline', Eq(y[i], func(x[i])))
    code = get_string(p.dump_f95, [routine])
    expected = (
        'subroutine test_inline(m, x, y)\n'
        'implicit none\n'
        'INTEGER*4, intent(in) :: m\n'
        'REAL*8, intent(in), dimension(1:m) :: x\n'
        'REAL*8, intent(out), dimension(1:m) :: y\n'
        'INTEGER*4 :: i\n'
        'do i = 1, m\n'
        '   y(i) = (1 + x(i))*x(i)\n'
        'end do\n'
        'end subroutine\n'
    )
    assert code == expected
def test_check_case():
    # Fortran is case insensitive: using both 'x' and 'X' as distinct symbols
    # must be rejected by the Fortran code generator.
    x, X = symbols('x,X')
    # Use the callable form of raises(); the string form evaluates code via
    # eval/exec and is the deprecated legacy API.
    raises(CodeGenError, lambda: codegen(('test', x*X), 'f95', 'prefix'))
def test_check_case_false_positive():
    # The upper case/lower case exception should not be triggered by Sympy
    # objects that differ only because of assumptions.  (It may be useful to
    # have a check for that as well, but here we only want to test against
    # false positives with respect to case checking.)
    x1 = symbols('x')
    x2 = symbols('x', my_assumption=True)
    try:
        codegen(('test', x1*x2), 'f95', 'prefix')
    # 'except E as e' replaces the Python-2-only 'except E, e' syntax.
    except CodeGenError as e:
        if e.args[0][0:21] == "Fortran ignores case.":
            raise AssertionError("This exception should not be raised!")
code == expected %{'rhs': 'x[j]*A[j + i*n]'})
assert f2 == 'file.h' |
<|file_name|>EZBXML.java<|end_file_name|><|fim▁begin|>// Copyright (C) 2012 Markus Fischer
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; version 2 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//
// Contact: [email protected]
package ch.dbs.actions.bestellung;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang.StringEscapeUtils;
import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import util.CodeUrl;
import util.Http;
import ch.dbs.form.JournalDetails;
import enums.Connect;
/**
* This class reads answers from the normal EZB UI searched with the parameter
* xmloutput=1 to get XML.
*/
public class EZBXML {
private static final Logger LOG = LoggerFactory.getLogger(EZBXML.class);
    /**
     * Searches the EZB by journal title for the given library (bibid) and
     * resolves every hit to its journal details.
     *
     * The first query matches words in the title (jq_type1=KT). If it yields
     * more than 30 hits, a narrower "title starts with" query (jq_type1=KS)
     * is issued instead.
     */
    public List<JournalDetails> searchByTitle(final String jtitle, final String bibid) {
        final Http http = new Http();
        final CodeUrl coder = new CodeUrl();
        final StringBuffer link = new StringBuffer(
                "http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=7&lang=de&jq_type1=KT&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&offset=-1&hits_per_page=30&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
        link.append(bibid);
        link.append("&jq_term1=");
        // The EZB expects ISO-8859-1 encoded query terms.
        link.append(coder.encode(jtitle, "ISO-8859-1"));
        String content = http.getContent(link.toString(), Connect.TIMEOUT_2.getValue(), Connect.TRIES_2.getValue(),
                null);
        // if we have > 30 hits, try a more concise search using: &jq_type1=KS (title starts with) instead of &jq_type1=KT (words in title)
        if (content != null && content.contains("<search_count>")) {
            // Extract the numeric hit count between the search_count tags.
            final int x = Integer.parseInt(content.substring(content.indexOf("<search_count>") + 14,
                    content.indexOf("</search_count>")));
            if (x > 30) {
                final StringBuffer link2 = new StringBuffer(
                        "http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=7&lang=de&jq_type1=KS&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&offset=-1&hits_per_page=30&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
                link2.append(bibid);
                link2.append("&jq_term1=");
                link2.append(coder.encode(jtitle, "ISO-8859-1"));
                content = http.getContent(link2.toString(), Connect.TIMEOUT_2.getValue(), Connect.TRIES_2.getValue(),
                        null);
            }
        }
        final List<String> jourids = getJourids(content);
        return searchByJourids(jourids, bibid);
    }
public List<JournalDetails> searchByIssn(final String issn, final String bibid) {
final Http http = new Http();
final StringBuffer link = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=5&lang=de&jq_type1=KT&jq_term1=&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&jq_bool4=AND&jq_not4=+&jq_type4=IS&offset=-1&hits_per_page=50&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
link.append(bibid);
link.append("&jq_term4=");
link.append(issn);
final String content = http.getContent(link.toString(), Connect.TIMEOUT_2.getValue(),
Connect.TRIES_2.getValue(), null);
final List<String> jourids = getJourids(content);
return searchByJourids(jourids, bibid);
}
public List<JournalDetails> searchByJourids(final List<String> jourids, final String bibid) {
final List<JournalDetails> list = new ArrayList<JournalDetails>();
final Http http = new Http();
final StringBuffer link = new StringBuffer(
"http://rzblx1.uni-regensburg.de/ezeit/detail.phtml?xmloutput=1&colors=7&lang=de&bibid=");
link.append(bibid);
link.append("&jour_id=");
final StringBuffer infoLink = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/detail.phtml?colors=7&lang=de&bibid=");
infoLink.append(bibid);
infoLink.append("&jour_id=");
try {
for (final String jourid : jourids) {
final JournalDetails jd = new JournalDetails();
final String content = http.getContent(link.toString() + jourid, Connect.TIMEOUT_1.getValue(),
Connect.TRIES_1.getValue(), null);
if (content != null) {
final DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
domFactory.setNamespaceAware(true);
final DocumentBuilder builder = domFactory.newDocumentBuilder();
final Document doc = builder.parse(new InputSource(new StringReader(content)));
final XPathFactory factory = XPathFactory.newInstance();
final XPath xpath = factory.newXPath();
final XPathExpression exprJournal = xpath.compile("//journal");
final XPathExpression exprPissns = xpath.compile("//journal/detail/P_ISSNs");
final XPathExpression exprEissns = xpath.compile("//journal/detail/E_ISSNs");
final NodeList resultJournal = (NodeList) exprJournal.evaluate(doc, XPathConstants.NODESET);
for (int i = 0; i < resultJournal.getLength(); i++) {
final Node firstResultNode = resultJournal.item(i);
final Element journal = (Element) firstResultNode;
// Title
String title = getValue(journal.getElementsByTagName("title"));
if (title != null) {
title = Jsoup.clean(title, Whitelist.none());
title = Jsoup.parse(title).text();
}
jd.setZeitschriftentitel(title);
// P-ISSNs
final NodeList resultPissns = (NodeList) exprPissns.evaluate(doc, XPathConstants.NODESET);
// get first pissn
for (int z = 0; z < resultPissns.getLength(); z++) {
final Node firstPissnsNode = resultPissns.item(i);
final Element pissnElement = (Element) firstPissnsNode;
final String pissn = getValue(pissnElement.getElementsByTagName("P_ISSN"));
jd.setIssn(pissn);
}
// try to get Eissn if we have no Pissn
if (jd.getIssn() == null) {
// E-ISSNs
final NodeList resultEissns = (NodeList) exprEissns.evaluate(doc, XPathConstants.NODESET);
// get first eissn
for (int z = 0; z < resultEissns.getLength(); z++) {
final Node firstEissnsNode = resultEissns.item(i);
final Element eissnElement = (Element) firstEissnsNode;
final String eissn = getValue(eissnElement.getElementsByTagName("E_ISSN"));
jd.setIssn(eissn);
}
}
// add info link
jd.setLink(infoLink.toString() + jourid);
list.add(jd);
}
}
}
} catch (final XPathExpressionException e) {
LOG.error(e.toString());
} catch (final SAXParseException e) {
LOG.error(e.toString());
} catch (final SAXException e) {
LOG.error(e.toString());
} catch (final IOException e) {
LOG.error(e.toString());
} catch (final ParserConfigurationException e) {
LOG.error(e.toString());
} catch (final Exception e) {
LOG.error(e.toString());
}
return list;
}
    /**
     * Parses an EZB search result XML document and collects the "jourid"
     * attribute of every //journals/journal element.
     *
     * @param content XML response body, may be null
     * @return list of journal ids; empty on null input or any parse error
     */
    private List<String> getJourids(final String content) {
        final List<String> result = new ArrayList<String>();
        try {
            if (content != null) {
                final DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
                domFactory.setNamespaceAware(true);
                final DocumentBuilder builder = domFactory.newDocumentBuilder();
                final Document doc = builder.parse(new InputSource(new StringReader(content)));
                final XPathFactory factory = XPathFactory.newInstance();
                final XPath xpath = factory.newXPath();
                final XPathExpression exprJournals = xpath.compile("//journals/journal");
                final NodeList journals = (NodeList) exprJournals.evaluate(doc, XPathConstants.NODESET);
                for (int i = 0; i < journals.getLength(); i++) {
                    final Node firstResultNode = journals.item(i);
                    final Element journal = (Element) firstResultNode;
                    final String id = journal.getAttribute("jourid");
                    // NOTE(review): Element.getAttribute returns "" (not null)
                    // for a missing attribute, so empty ids may be added here.
                    if (id != null) {
                        result.add(id);
                    }
                }
            }
        // Errors are logged and swallowed deliberately: a bad response simply
        // yields no hits.
        } catch (final XPathExpressionException e) {
            LOG.error(e.toString());
        } catch (final SAXParseException e) {
            LOG.error(e.toString());
        } catch (final SAXException e) {
            LOG.error(e.toString());
        } catch (final IOException e) {
            LOG.error(e.toString());
        } catch (final ParserConfigurationException e) {
            LOG.error(e.toString());
        } catch (final Exception e) {
            LOG.error(e.toString());
        }
        return result;
    }
private String getValue(final NodeList list) {
String result = null;
final Element listElement = (Element) list.item(0);
if (listElement != null) {
final NodeList textList = listElement.getChildNodes();
if (textList.getLength() > 0) {
result = StringEscapeUtils.unescapeXml(textList.item(0).getNodeValue());
}
}<|fim▁hole|>
}<|fim▁end|> |
return result;
} |
<|file_name|>core-traits-impls-length-33.rs<|end_file_name|><|fim▁begin|>// check-pass
// Each function returns a 33-element array behind a different standard trait,
// checking (per the file name) that these impls work for array lengths beyond
// the historic 32-element limit.
pub fn yes_as_ref() -> impl AsRef<[u8]> {
    [0; 33]
}
pub fn yes_as_mut() -> impl AsMut<[u8]> {
    [0; 33]
}
pub fn yes_borrow() -> impl std::borrow::Borrow<[u8]> {
    [0; 33]
}
pub fn yes_borrow_mut() -> impl std::borrow::BorrowMut<[u8]> {
    [0; 33]
}
pub fn yes_try_from_slice() -> impl std::convert::TryFrom<&'static [u8]> {
    [0; 33]
}
pub fn yes_ref_try_from_slice() -> impl std::convert::TryFrom<&'static [u8]> {
    // Promote the array to a 'static borrow so &[u8; 33] is returned.
    let a: &'static _ = &[0; 33];
    a
}
// Hash, Debug, by-reference IntoIterator and PartialEq for [u8; 33].
pub fn yes_hash() -> impl std::hash::Hash {
    [0; 33]
}
pub fn yes_debug() -> impl std::fmt::Debug {
    [0; 33]
}
pub fn yes_ref_into_iterator() -> impl IntoIterator<Item=&'static u8> {
    // &'static [u8; 33] iterates by reference, yielding &'static u8 items.
    let a: &'static _ = &[0; 33];
    a
}
pub fn yes_partial_eq() -> impl PartialEq<[u8; 33]> {
    [0; 33]
}
pub fn yes_partial_eq_slice() -> impl PartialEq<[u8]> {
[0; 33]
}<|fim▁hole|>pub fn yes_slice_partial_eq() -> impl PartialEq<[u8; 33]> {
let a: &'static _ = &[0; 33];
&a[..]
}
// Full equality and ordering traits for [u8; 33].
pub fn yes_eq() -> impl Eq {
    [0; 33]
}
pub fn yes_partial_ord() -> impl PartialOrd<[u8; 33]> {
    [0; 33]
}
pub fn yes_ord() -> impl Ord {
    [0; 33]
}
fn main() {}<|fim▁end|> | |
<|file_name|>tips.js<|end_file_name|><|fim▁begin|>angular.module('tips.tips').controller('TipsController', ['$scope', '$routeParams', '$location', 'Global', 'Tips', function ($scope, $routeParams, $location, Global, Tips) {
$scope.global = Global;
$scope.createTip = function () {
var tips = new Tips({
text: this.text,
likes: this.likes,
category: this.category
});<|fim▁hole|> this.title = "";
};
$scope.showTip = function () {
Tips.query(function (tips) {
$scope.tips = tips;
tips.linkEdit = 'tips/edit/';
// show tips size
function Settings (minLikes, maxLikes) {
var that = this;
that.size = {
min: 26,
max: 300
};
that.maxLikes = maxLikes;
that.minLikes = tips[0].likes;
that.valueOfdivision = (function(){
return (that.size.max - that.size.min)/that.maxLikes
})()
}
function startIsotope(){
var el = $('#isotope-container');
el.isotope({
itemSelector: '.isotope-element',
layoutMode: 'fitRows',
sortBy: 'number',
sortAscending: true,
});
return el;
}
var maxLikes = 0;
var minLikes = 0;
for (var i = 0; i < tips.length; i++) {
if(maxLikes <= tips[i].likes)maxLikes = tips[i].likes;
if(minLikes >= tips[i].likes)minLikes = tips[i].likes;
};
tips.settingsView = new Settings(minLikes, maxLikes);
$scope.$watch('tips', function () {
$scope.$evalAsync(function () {
var isotope = startIsotope();
});
})
});
};
$scope.updateTip = function (tip) {
var tip = new Tips(tip);
tip.$update(tip, function(){
console.log("update updateTip: ", tip._id);
}, function(){
console.warn("error updateTip:", tip._id);
});
};
$scope.getTip = function () {
Tips.query(function (tip) {
$scope.tip = tip;
console.log(tip);
});
};
$scope.editTip = function(tip){
console.log("edit tip");
};
}])<|fim▁end|> | tips.$save(function (response) {
$location.path("/");
});
|
<|file_name|>test_rhcloud_insights.py<|end_file_name|><|fim▁begin|>"""CLI tests for Insights part of RH Cloud - Inventory plugin.
:Requirement: RH Cloud - Inventory
:CaseAutomation: Automated
:CaseLevel: System
:CaseComponent: RHCloud-Inventory
:Assignee: jpathan
:TestType: Functional
:CaseImportance: High<|fim▁hole|>from broker.broker import VMBroker
from robottelo.constants import DISTRO_RHEL7
from robottelo.constants import DISTRO_RHEL8
from robottelo.hosts import ContentHost
@pytest.mark.tier4
@pytest.mark.parametrize('distro', [DISTRO_RHEL8, DISTRO_RHEL7])
def test_positive_connection_option(organization_ak_setup, default_sat, distro):
"""Verify that 'insights-client --test-connection' successfully tests the proxy connection via
the Satellite.
:id: 61a4a39e-b484-49f4-a6fd-46ffc7736e50
:Steps:
1. Create RHEL7 and RHEL8 VM and register to insights within org having manifest.
2. Run 'insights-client --test-connection'.
:expectedresults: 'insights-client --test-connection' should return 0.
:CaseImportance: Critical
"""
org, activation_key = organization_ak_setup
with VMBroker(nick=distro, host_classes={'host': ContentHost}) as vm:
vm.configure_rhai_client(default_sat, activation_key.name, org.label, distro)
result = vm.run('insights-client --test-connection')
assert result.status == 0, (
'insights-client --test-connection failed.\n'
f'status: {result.status}\n'
f'stdout: {result.stdout}\n'
f'stderr: {result.stderr}'
)<|fim▁end|> |
:Upstream: No
"""
import pytest |
<|file_name|>file.go<|end_file_name|><|fim▁begin|>/*
* Trap
* An anti-pryer server for better privacy
*
* This file is a part of Trap project
*
* Copyright 2016 Rain Lee <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package logger
import (
"github.com/raincious/trap/trap/core/types"
"bufio"
"fmt"
"time"
)
// FilePrinter is a log printer backend that appends formatted records to a
// buffered writer and flushes it periodically.
type FilePrinter struct {
	writer *bufio.Writer // destination buffer for formatted log records
	writeCounts uint16 // records written since the last flush
}
// NewFilePrinter wraps the given buffered writer in a FilePrinter.
// A zero-byte probe write is issued first so that an unusable writer is
// reported immediately instead of failing on the first real log record.
func NewFilePrinter(w *bufio.Writer) (*FilePrinter, *types.Throw) {
	if _, err := w.Write([]byte("")); err != nil {
		return nil, types.ConvertError(err)
	}

	printer := &FilePrinter{writer: w}

	return printer, nil
}
func (l *FilePrinter) save(w types.String, c types.String,
t time.Time, m types.String) {
_, err := l.writer.WriteString(fmt.Sprintf("<%s> %s [%s]: %s\r\n",
w, c, t.Format(time.StampMilli), m))
if err != nil {
panic(fmt.Errorf("Can't write log file due to error: %s", err))
}<|fim▁hole|>
if l.writeCounts > 10 {
l.writer.Flush()
l.writeCounts = 0
}
}
// Info writes an information level ("INF") log entry.
func (l *FilePrinter) Info(c types.String, t time.Time, m types.String) {
	l.save("INF", c, t, m)
}
// Debug writes a debug level ("DBG") log entry.
func (l *FilePrinter) Debug(c types.String, t time.Time, m types.String) {
	l.save("DBG", c, t, m)
}
// Warning writes a warning level ("WRN") log entry.
func (l *FilePrinter) Warning(c types.String, t time.Time, m types.String) {
	l.save("WRN", c, t, m)
}
// Error writes an error level ("ERR") log entry.
func (l *FilePrinter) Error(c types.String, t time.Time, m types.String) {
	l.save("ERR", c, t, m)
}
func (l *FilePrinter) Print(c types.String, t time.Time, m types.String) {
l.save("DEF", c, t, m)
}<|fim▁end|> |
l.writeCounts += 1 |
<|file_name|>sharded_clusters.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# coding=utf-8
# Copyright 2012-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<|fim▁hole|>
from mongo_orchestration.common import (
BaseModel, DEFAULT_SUBJECT, DEFAULT_CLIENT_CERT)
from mongo_orchestration.container import Container
from mongo_orchestration.errors import ShardedClusterError
from mongo_orchestration.servers import Servers
from mongo_orchestration.replica_sets import ReplicaSets
from mongo_orchestration.singleton import Singleton
from pymongo import MongoClient
logger = logging.getLogger(__name__)
class ShardedCluster(BaseModel):
"""class represents Sharding configuration"""
def __init__(self, params):
"""init configuration acording params"""
self.id = params.get('id', None) or str(uuid4())
self.admin_added = False
self.login = params.get('login', '')
self.password = params.get('password', '')
self.auth_key = params.get('auth_key', None)
self.auth_source = params.get('authSource', 'admin')
self._version = params.get('version')
self._configsvrs = []
self._routers = []
self._shards = {}
self.tags = {}
self.sslParams = params.get('sslParams', {})
self.kwargs = {}
self.restart_required = self.login or self.auth_key
self.x509_extra_user = False
if self.sslParams:
self.kwargs['ssl'] = True
self.kwargs['ssl_certfile'] = DEFAULT_CLIENT_CERT
configsvr_configs = params.get('configsvrs', [{}])
self.__init_configsvr(configsvr_configs)
for r in params.get('routers', [{}]):
self.router_add(r)
for cfg in params.get('shards', []):
shard_params = cfg.get('shardParams', {})
shard_tags = shard_params.pop('tags', None)
info = self.member_add(cfg.get('id', None), shard_params)
if shard_tags:
self.tags[info['id']] = shard_tags
if self.tags:
for sh_id in self.tags:
logger.debug('Add tags %r to %s' % (self.tags[sh_id], sh_id))
self.connection().config.shards.update(
{'_id': sh_id},
{'$addToSet': {'$each': self.tags[sh_id]}})
if self.login:
# Do we need to add an extra x509 user?
def only_x509(config):
set_params = config.get('setParameter', {})
auth_mechs = set_params.get('authenticationMechanisms', '')
auth_mechs = auth_mechs.split(',')
if len(auth_mechs) == 1 and auth_mechs[0] == 'MONGODB-X509':
return True
return False
any_only_x509 = lambda l: any(map(only_x509, l))
shard_configs = [s.get('shardParams', {}).get('procParams', {})
for s in params.get('shards', [])]
rs_shard_configs = [
m.get('procParams', {})
for s in params.get('shards', [])
for m in s.get('shardParams', {}).get('members', [])
]
router_configs = params.get('routers', [])
self.x509_extra_user = (any_only_x509(configsvr_configs) or
any_only_x509(shard_configs) or
any_only_x509(rs_shard_configs) or
any_only_x509(router_configs))
self._add_users(self.connection()[self.auth_source])
# Secondary user given from request.
secondary_login = {
'name': self.login,
'roles': self._user_roles
}
if self.password:
secondary_login['password'] = self.password
# Do the same for the shards.
for shard_id, config in zip(self._shards, shard_configs):
shard = self._shards[shard_id]
instance_id = shard['_id']
if shard.get('isServer'):
client = Servers()._storage[instance_id].connection
elif shard.get('isReplicaSet'):
client = ReplicaSets()._storage[instance_id].connection()
db = client[self.auth_source]
if self.x509_extra_user:
db.add_user(DEFAULT_SUBJECT, roles=self._user_roles)
if self.login:
db.add_user(**secondary_login)
if self.restart_required:
# Do we need to add clusterAuthMode back?
cluster_auth_mode = None
for cfg in shard_configs:
cam = cfg.get('clusterAuthMode')
if cam:
cluster_auth_mode = cam
break
def restart_with_auth(server_or_rs):
server_or_rs.x509_extra_user = self.x509_extra_user
server_or_rs.auth_source = self.auth_source
server_or_rs.ssl_params = self.sslParams
server_or_rs.login = self.login
server_or_rs.password = self.password
server_or_rs.auth_key = self.auth_key
def add_auth(cfg):
if self.auth_key:
cfg['keyFile'] = self.key_file
# Add clusterAuthMode back in.
if cluster_auth_mode:
cfg['clusterAuthMode'] = cam
return cfg
server_or_rs.restart(config_callback=add_auth)
for server_id in self._configsvrs:
server = Servers()._storage[server_id]
restart_with_auth(server)
for server_id in self._routers:
server = Servers()._storage[server_id]
restart_with_auth(server)
for shard_id in self._shards:
shard = self._shards[shard_id]
instance_id = shard['_id']
klass = ReplicaSets if shard.get('isReplicaSet') else Servers
instance = klass()._storage[instance_id]
restart_with_auth(instance)
self.restart_required = False
    def __init_configsvr(self, params):
        """create and start config servers"""
        self._configsvrs = []
        for cfg in params:
            # Remove flags that turn on auth.
            cfg = self._strip_auth(cfg)
            server_id = cfg.pop('server_id', None)
            # Mark the process as a config server before launching it.
            cfg.update({'configsvr': True})
            self._configsvrs.append(Servers().create(
                'mongod', cfg, sslParams=self.sslParams, autostart=True,
                version=self._version, server_id=server_id))
    def __len__(self):
        # Number of shards currently registered with the cluster.
        return len(self._shards)
    @property
    def configsvrs(self):
        """return list of config servers"""
        return [{'id': h_id, 'hostname': Servers().hostname(h_id)} for h_id in self._configsvrs]
    @property
    def routers(self):
        """return list of routers"""
        return [{'id': h_id, 'hostname': Servers().hostname(h_id)} for h_id in self._routers]
    @property
    def members(self):
        """return list of members"""
        # return [{'id': shard, 'hostname': Servers().hostname(info['_id'])} for shard, info in self._shards.items()]
        return [self.member_info(item) for item in self._shards]
    @property
    def router(self):
        """return first available router"""
        # Walks routers in registration order; implicitly returns None when no
        # router process is reported alive.
        for server in self._routers:
            info = Servers().info(server)
            if info['procInfo'].get('alive', False):
                return {'id': server, 'hostname': Servers().hostname(server)}
    def router_add(self, params):
        """add new router (mongos) into existing configuration"""
        # mongos needs the comma separated list of config server hosts.
        cfgs = ','.join([Servers().hostname(item) for item in self._configsvrs])
        server_id = params.pop('server_id', None)
        params.update({'configdb': cfgs})
        # Remove flags that turn auth on.
        params = self._strip_auth(params)
        self._routers.append(Servers().create(
            'mongos', params, sslParams=self.sslParams, autostart=True,
            version=self._version, server_id=server_id))
        return {'id': self._routers[-1], 'hostname': Servers().hostname(self._routers[-1])}
    def connection(self):
        """Return a MongoClient connected to the first live mongos.

        Authenticates with the configured credentials only after the cluster
        has been restarted with auth enabled (restart no longer pending).
        """
        c = MongoClient(self.router['hostname'],
                        w='majority', fsync=True, **self.kwargs)
        if self.login and not self.restart_required:
            try:
                c.admin.authenticate(self.login, self.password)
            except:
                # Log the failure (with credentials, for debugging) and re-raise.
                logger.exception(
                    "Could not authenticate to %s as %s/%s"
                    % (self.router['hostname'], self.login, self.password))
                raise
        return c
def router_command(self, command, arg=None, is_eval=False):
"""run command on the router server
Args:
command - command string
arg - command argument
is_eval - if True execute command as eval
return command's result
"""
mode = is_eval and 'eval' or 'command'
if isinstance(arg, tuple):
name, d = arg
else:
name, d = arg, {}
result = getattr(self.connection().admin, mode)(command, name, **d)
return result
def router_remove(self, router_id):
"""remove """
result = Servers().remove(router_id)
del self._routers[ self._routers.index(router_id) ]
return { "ok": 1, "routers": self._routers }
    def _add(self, shard_uri, name):
        """execute addShard command"""
        # Registers an existing server / replica set with the mongos.
        return self.router_command("addShard", (shard_uri, {"name": name}), is_eval=False)
    def member_add(self, member_id=None, params=None):
        """add new member into existing configuration"""
        member_id = member_id or str(uuid4())
        if 'members' in params:
            # is replica set
            rs_params = params.copy()
            # Turn 'rs_id' -> 'id', to be consistent with 'server_id' below.
            rs_params['id'] = rs_params.pop('rs_id', None)
            rs_params.update({'sslParams': self.sslParams})
            if self._version:
                rs_params['version'] = self._version
            rs_params['members'] = map(self._strip_auth, rs_params['members'])
            rs_id = ReplicaSets().create(rs_params)
            members = ReplicaSets().members(rs_id)
            # addShard URI for a replica set: "<rs_id>/host1,host2,..."
            cfgs = rs_id + r"/" + ','.join([item['host'] for item in members])
            result = self._add(cfgs, member_id)
            if result.get('ok', 0) == 1:
                self._shards[result['shardAdded']] = {'isReplicaSet': True, '_id': rs_id}
                # return self._shards[result['shardAdded']].copy()
                return self.member_info(member_id)
        else:
            # is single server
            # NOTE(review): this update() mutates the caller's dict before the
            # copy on the next line — confirm that side effect is intended.
            params.update({'autostart': True, 'sslParams': self.sslParams})
            params = params.copy()
            params['procParams'] = self._strip_auth(
                params.get('procParams', {}))
            if self._version:
                params['version'] = self._version
            logger.debug("servers create params: {params}".format(**locals()))
            server_id = Servers().create('mongod', **params)
            result = self._add(Servers().hostname(server_id), member_id)
            if result.get('ok', 0) == 1:
                self._shards[result['shardAdded']] = {'isServer': True, '_id': server_id}
            return self.member_info(member_id)
def member_info(self, member_id):
"""return info about member"""
info = self._shards[member_id].copy()
info['id'] = member_id
info['tags'] = self.tags.get(member_id, list())
return info
    def _remove(self, shard_name):
        """remove member from configuration"""
        result = self.router_command("removeShard", shard_name, is_eval=False)
        # Only forget the shard once the balancer reports the drain completed.
        if result['ok'] == 1 and result['state'] == 'completed':
            shard = self._shards.pop(shard_name)
            if shard.get('isServer', False):
                Servers().remove(shard['_id'])
            if shard.get('isReplicaSet', False):
                ReplicaSets().remove(shard['_id'])
        return result
    def member_remove(self, member_id):
        """remove member from configuration"""
        return self._remove(member_id)
    def reset(self):
        """Ensure all shards, configs, and routers are running and available."""
        # Ensure all shards by calling "reset" on each.
        for shard_id in self._shards:
            if self._shards[shard_id].get('isReplicaSet'):
                singleton = ReplicaSets()
            elif self._shards[shard_id].get('isServer'):
                singleton = Servers()
            singleton.command(self._shards[shard_id]['_id'], 'reset')
        # Ensure all config servers by calling "reset" on each.
        for config_id in self._configsvrs:
            Servers().command(config_id, 'reset')
        # Ensure all routers by calling "reset" on each.
        for router_id in self._routers:
            Servers().command(router_id, 'reset')
        return self.info()
def info(self):
"""return info about configuration"""
uri = ','.join(x['hostname'] for x in self.routers)
mongodb_uri = 'mongodb://' + uri
return {'id': self.id,
'shards': self.members,
'configsvrs': self.configsvrs,
'routers': self.routers,
'mongodb_uri': mongodb_uri,
'orchestration': 'sharded_clusters'}
    def cleanup(self):
        """cleanup configuration: stop and remove all servers"""
        # Tear down shards first, then routers, then config servers.
        for _id, shard in self._shards.items():
            if shard.get('isServer', False):
                Servers().remove(shard['_id'])
            if shard.get('isReplicaSet', False):
                ReplicaSets().remove(shard['_id'])
        for mongos in self._routers:
            Servers().remove(mongos)
        for configsvr in self._configsvrs:
            Servers().remove(configsvr)
        self._configsvrs = []
        self._routers = []
        self._shards = {}
class ShardedClusters(Singleton, Container):
""" ShardedClusters is a dict-like collection for ShardedCluster objects"""
_name = 'shards'
_obj_type = ShardedCluster
releases = {}
pids_file = tempfile.mktemp(prefix="mongo-")
    def set_settings(self, releases=None, default_release=None):
        """set path to storage"""
        super(ShardedClusters, self).set_settings(releases, default_release)
        # Replica sets created as shards must share the same release settings.
        ReplicaSets().set_settings(releases, default_release)
    def __getitem__(self, key):
        # Dict-style access returns the cluster's info document.
        return self.info(key)
    def cleanup(self):
        """remove all servers with their data"""
        for server in self:
            self.remove(server)
def create(self, params):
"""create new ShardedCluster
Args:
params - dictionary with specific params for instance
Return cluster_id
where cluster_id - id which can use to take the cluster from servers collection
"""
sh_id = params.get('id', str(uuid4()))
if sh_id in self:
raise ShardedClusterError(
"Sharded cluster with id %s already exists." % sh_id)
params['id'] = sh_id
cluster = ShardedCluster(params)
self[cluster.id] = cluster
return cluster.id
def remove(self, cluster_id):
"""remove cluster and data stuff
Args:
cluster_id - cluster identity
"""
cluster = self._storage.pop(cluster_id)
cluster.cleanup()
def info(self, cluster_id):
"""return dictionary object with info about cluster
Args:
cluster_id - cluster identity
"""
return self._storage[cluster_id].info()
def configsvrs(self, cluster_id):
"""return list of config servers"""
return self._storage[cluster_id].configsvrs
def routers(self, cluster_id):
"""return list of routers"""
return self._storage[cluster_id].routers
def router_add(self, cluster_id, params):
"""add new router"""
cluster = self._storage[cluster_id]
result = cluster.router_add(params)
self._storage[cluster_id] = cluster
return result
def router_del(self, cluster_id, router_id):
"""remove router from the ShardedCluster"""
cluster = self._storage[cluster_id]
result = cluster.router_remove(router_id)
self._storage[cluster_id] = cluster
return result
def members(self, cluster_id):
"""return list of members"""
return self._storage[cluster_id].members
def member_info(self, cluster_id, member_id):
"""return info about member"""
cluster = self._storage[cluster_id]
return cluster.member_info(member_id)
def command(self, cluster_id, command, *args):
"""Call a ShardedCluster method."""
cluster = self._storage[cluster_id]
try:
return getattr(cluster, command)(*args)
except AttributeError:
raise ValueError("Cannot issue the command %r to ShardedCluster %s"
% (command, cluster_id))
def member_del(self, cluster_id, member_id):
"""remove member from cluster cluster"""
cluster = self._storage[cluster_id]
result = cluster.member_remove(member_id)
self._storage[cluster_id] = cluster
return result
def member_add(self, cluster_id, params):
"""add new member into configuration"""
cluster = self._storage[cluster_id]
result = cluster.member_add(params.get('id', None), params.get('shardParams', {}))
self._storage[cluster_id] = cluster
return result<|fim▁end|> | import logging
import tempfile
from uuid import uuid4 |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>package consistentHash
/*
Package consistentHash provides a consistent hashing implementation using murmur3 with a 64bit ring space.
Virtual nodes are used to provide a good distribution.
This package has an almost identical API to StatHat's consistent package at https://github.com/stathat/consistent,
although that packages uses a crc32 hash and a smaller number of vnodes by default.
See:
http://en.wikipedia.org/wiki/Consistent_hashing
http://en.wikipedia.org/wiki/MurmurHash
The only time an error will be returned from a Get(), Get2(), or GetN() call is if there are not enough members added
Basic Example:
ch := consistentHash.New()
ch.Add("server1")
ch.Add("server2")
ch.Add("server3")
for _,key := range []string{"A","B","C","D","E","F","G"} {
server,err := ch.Get([]byte(key)
if err != nil {
panic(err)
}
fmt.Println("key=%s server=%s\n",key,server)<|fim▁hole|> key=A server=server3
key=B server=server3
key=C server=server1
key=D server=server3
key=E server=server2
key=F server=server2
key=G server=server1
Example with 3 servers and then removing a member:
ch := consistentHash.New()
ch.Add("server1")
ch.Add("server2")
ch.Add("server3")
keys := []string{"A", "B", "C", "D", "E", "F", "G"}
fmt.Println("3 servers")
for _, key := range keys {
server, _ := ch.Get([]byte(key))
fmt.Printf("key=%s server=%s\n", key, server)
}
fmt.Println("Removing server3")
ch.Remove("server3")
for _, key := range keys {
server, _ := ch.Get([]byte(key))
fmt.Printf("key=%s server=%s\n", key, server)
}
Output:
   Output:
key=A server=server3
key=B server=server3
key=C server=server1
key=D server=server3
key=E server=server2
key=F server=server2
key=G server=server1
Removing server3
key=A server=server1 // remapped from 3->1
key=B server=server2 // remapped from 3->2
key=C server=server1 // stayed in same location
key=D server=server1 // remapped from 3->1
key=E server=server2 // stayed in same location
key=F server=server2 // stayed in same location
key=G server=server1 // stayed in same location
*/<|fim▁end|> | }
Outputs: |
<|file_name|>issi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*- coding: utf-8 -*-
import define
import tw8836
import spi
def quad_check():
    """Report whether the QE bit (0x40) of status register 1 is set."""
    qe_set = spi.status1_read() & 0x40
    if not qe_set:
        print('SPI flash is not in QUAD mode yet')
        return define.FALSE
    print('SPI flash is already in QUAD mode')
    return define.TRUE
def quad_enable():
    """Set the QE bit in status register 1 to enable QUAD mode."""
    qe_bit = 0x40
    current = spi.status1_read()
    spi.write_enable()
    spi.status1_write(current | qe_bit)
    spi.write_disable()
def quad_disable():
    """Clear the QE bit in status register 1 to leave QUAD mode."""
    qe_bit = 0x40
    current = spi.status1_read()
    spi.write_enable()
    spi.status1_write(current & ~qe_bit)
    spi.write_disable()
def four_byte_check():
status = spi.status2_read()
if (status & 0x20):
if define.DEBUG == define.ON:
print 'SPI flash is in 4 Byte mode'
spi.bank_address_register_write(0x80)<|fim▁hole|> print 'SPI flash is not in 4 Byte mode'
spi.bank_address_register_write(0x0)
return define.FALSE
def four_byte_enter():
    """Issue the EN4B command to put the SPI flash into 4-byte address mode.

    Drives the TW8836 DMA engine: one command byte, zero-length payload.
    """
    tw8836.write_page(0x04)
    # DMA control: destination = chip register, command count = 1.
    tw8836.write(0xF3, (spi.DMA_DEST_CHIPREG << 6) + spi.DMA_CMD_COUNT_1)
    tw8836.write(0xF5, 0)  #length high
    tw8836.write(0xF8, 0)  #length middle
    tw8836.write(0xF9, 0)  #length low
    tw8836.write(0xFA, spi.SPICMD_EN4B)
    # Kick off the DMA transfer.
    tw8836.write(0xF4, spi.SPI_CMD_OPT_NONE | spi.DMA_START)
def four_byte_exit():
    """Issue the EX4B command to return the SPI flash to 3-byte address mode.

    Same DMA sequence as four_byte_enter(), only the command byte differs.
    """
    tw8836.write_page(0x04)
    # DMA control: destination = chip register, command count = 1.
    tw8836.write(0xF3, (spi.DMA_DEST_CHIPREG << 6) + spi.DMA_CMD_COUNT_1)
    tw8836.write(0xF5, 0)  #length high
    tw8836.write(0xF8, 0)  #length middle
    tw8836.write(0xF9, 0)  #length low
    tw8836.write(0xFA, spi.SPICMD_EX4B)
    # Kick off the DMA transfer.
    tw8836.write(0xF4, spi.SPI_CMD_OPT_NONE | spi.DMA_START)
def erase_fail_check():
    """Check the security register's erase-fail flag (0x40); clear it if set."""
    failed = spi.security_register_read() & 0x40
    if failed:
        print('erase failed')
        spi.sr_clear()
        return define.TRUE
    print('erase succeed')
    return define.FALSE
def dummy_cycles_config(mode, cycles):
print 'dummy_cycles_config in issi.py'
status2_register = spi.status2_read()
print hex(status2_register)<|fim▁end|> |
return define.TRUE
else:
if define.DEBUG == define.ON: |
<|file_name|>forge.rs<|end_file_name|><|fim▁begin|>use atom::header::AtomType;
use atom::types::{AtomSequence, UnknownAtomSequence};
use urid::{URID, URIDCache, URIDCacheMapping};
use std::slice;
use units::Unit;
use std::marker::PhantomData;
use atom::header::Atom;
use atom::into::ToAtom;
use std::mem;
pub trait Forger<'c, C: 'c> : Sized{
fn cache(&self) -> &'c C;
unsafe fn write_raw_padded(&mut self, data: *const u8, size: usize) -> *mut u8;
#[inline]
fn get_urid<T: AtomType>(&self) -> URID where C: URIDCacheMapping<T> {<|fim▁hole|> }
#[inline]
fn write_atom<T: AtomType, A: ToAtom<AtomType=T>>(&mut self, atom: A) where C: URIDCacheMapping<T> {
let atom = &atom.to_atom();
let dst_atom = unsafe { write_atom_inner(self, atom) };
dst_atom.update_type_id(self.cache());
}
}
impl<'c, C: URIDCache + 'c> Forger<'c, C> for Forge<'c, C> {
#[inline]
fn cache(&self) -> &'c C {
self.cache
}
unsafe fn write_raw_padded(&mut self, data: *const u8, size: usize) -> *mut u8 {
let end = self.position + size;
let dst_slice = &mut self.buffer[self.position..end];
let atom_data = slice::from_raw_parts(data, size);
dst_slice.copy_from_slice(atom_data);
self.position += ::lv2_sys::lv2_atom_pad_size(size as u32) as usize;
dst_slice.as_mut_ptr()
}
}
// TODO: add error handling
pub struct Forge<'c, C: URIDCache + 'c> {
buffer: &'c mut [u8],
cache: &'c C,
position: usize
}
/// Copies `atom` (header + body, `get_total_size()` bytes) into the forge's
/// buffer via `write_raw_padded` and returns a mutable reference to the copy.
///
/// # Safety
/// The caller must ensure the forge buffer has room for the padded atom, and
/// that the returned `&'x mut A` does not outlive the underlying buffer --
/// `'x` is not tied to the forge borrow (TODO confirm callers uphold this).
#[inline]
unsafe fn write_atom_inner<'c, 'x, A: AtomType, F: Forger<'c, C>, C: URIDCache + 'c>(forge: &mut F, atom: &A) -> &'x mut A {
    let size = atom.get_total_size() as usize;
    // Reinterpret the atom as raw bytes; the forge copies and pads them.
    let data = atom as *const A as *const u8;
    &mut *(forge.write_raw_padded(data, size) as *mut A)
}
impl<'c, C: URIDCache> Forge<'c, C> {
#[inline]
pub fn new(buffer: &'c mut [u8], cache: &'c C) -> Forge<'c, C> {
Forge {
buffer, cache, position: 0
}
}
#[inline]
pub fn begin_sequence<'a, U: Unit + 'c>(&'a mut self) -> ForgeSequence<'a, 'c, C, U, Self> where C: URIDCacheMapping<U> + URIDCacheMapping<UnknownAtomSequence> {
ForgeSequence::new(self)
}
}
pub struct ForgeSequence<'a, 'c, C: 'c, U: Unit, F: Forger<'c, C> + 'a> {
parent: &'a mut F,
atom: &'a mut Atom,
_unit_type: PhantomData<U>,
_cache_type: PhantomData<&'c C>
}
impl<'a, 'c, C: URIDCache + 'c, U: Unit + 'c, F: Forger<'c, C> + 'a> ForgeSequence<'a, 'c, C, U, F> {
#[inline]
fn new<'b>(parent: &'b mut F) -> ForgeSequence<'b, 'c, C, U, F> where C: URIDCacheMapping<UnknownAtomSequence> + URIDCacheMapping<U>, U: 'b {
let seq_header = AtomSequence::<U>::new_header(parent.cache());
let atom = unsafe { write_atom_inner(parent, &seq_header) }.get_header_mut();
ForgeSequence {
parent,
atom,
_unit_type: PhantomData,
_cache_type: PhantomData
}
}
#[inline]
pub fn write_event<T: AtomType, A: ToAtom<AtomType=T>>(&mut self, time: &U, atom: A) where C: URIDCacheMapping<T> {
unsafe {
self.write_raw_padded(time as *const U as *const u8, mem::size_of::<U>());
}
self.write_atom(atom);
}
#[inline]
pub fn begin_sequence<'b, U2: Unit + 'c>(&'b mut self, time: &U) -> ForgeSequence<'b, 'c, C, U2, Self>
where C: URIDCacheMapping<U2> + URIDCacheMapping<UnknownAtomSequence> + 'b
{
unsafe {
self.write_raw_padded(time as *const U as *const u8, mem::size_of::<U>());
}
ForgeSequence::new(self)
}
}
impl<'a, 'c, C: URIDCache + 'c, U: Unit, F: Forger<'c, C> + 'a> Forger<'c, C> for ForgeSequence<'a, 'c, C, U, F> {
#[inline]
fn cache(&self) -> &'c C {
self.parent.cache()
}
unsafe fn write_raw_padded(&mut self, data: *const u8, size: usize) -> *mut u8 {
self.atom.add_size(::lv2_sys::lv2_atom_pad_size(size as u32));
self.parent.write_raw_padded(data, size)
}
}<|fim▁end|> | URIDCacheMapping::get_urid(self.cache()).urid() |
<|file_name|>notification.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { ToastsManager } from 'ng2-toastr/ng2-toastr';
@Injectable()
export class NotificationService {
constructor(private toastr: ToastsManager) { }
public info(message: string, title?: string) {
this.toastr.info(message, title);
}
public error(message: string, title?: string) {
this.toastr.error(message, title);
}
public success(message: string, title?: string) {
this.toastr.success(message, title);<|fim▁hole|> this.toastr.warning(message, title);
}
}<|fim▁end|> | }
public warn(message: string, title?: string) { |
<|file_name|>zmq_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the ZMQ API."""
import configparser
import os
import struct
from test_framework.test_framework import StatusquoTestFramework, SkipTest
from test_framework.util import (assert_equal,
bytes_to_hex_str,
)
class ZMQTest (StatusquoTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
    def setup_nodes(self):
        """Skip unless ZMQ support is available, then start the nodes with a
        SUB socket subscribed to hashblock/hashtx notifications."""
        # Try to import python3-zmq. Skip this test if the import fails.
        try:
            import zmq
        except ImportError:
            raise SkipTest("python3-zmq module not available.")
        # Check that statusquo has been built with ZMQ enabled
        config = configparser.ConfigParser()
        if not self.options.configfile:
            self.options.configfile = os.path.dirname(__file__) + "/../config.ini"
        config.read_file(open(self.options.configfile))
        if not config["components"].getboolean("ENABLE_ZMQ"):
            raise SkipTest("statusquod has not been built with zmq enabled.")
        self.zmqContext = zmq.Context()
        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
        # 60s receive timeout so a missing notification fails the test
        # instead of hanging forever.
        self.zmqSubSocket.set(zmq.RCVTIMEO, 60000)
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
        ip_address = "tcp://127.0.0.1:26121"
        self.zmqSubSocket.connect(ip_address)
        # Only node 0 publishes ZMQ notifications; node 1 runs with defaults.
        extra_args = [['-zmqpubhashtx=%s' % ip_address, '-zmqpubhashblock=%s' % ip_address], []]
        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
    def run_test(self):
        """Run the ZMQ checks, always destroying the zmq context afterwards."""
        try:
            self._zmq_test()
        finally:
            # Destroy the zmq context even if the test body raised.
            self.log.debug("Destroying zmq context")
            self.zmqContext.destroy(linger=None)
def _zmq_test(self):
genhashes = self.nodes[0].generate(1)
self.sync_all()
self.log.info("Wait for tx")
msg = self.zmqSubSocket.recv_multipart()
topic = msg[0]
assert_equal(topic, b"hashtx")
body = msg[1]
msgSequence = struct.unpack('<I', msg[-1])[-1]
assert_equal(msgSequence, 0) # must be sequence 0 on hashtx
self.log.info("Wait for block")
msg = self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
msgSequence = struct.unpack('<I', msg[-1])[-1]
assert_equal(msgSequence, 0) # must be sequence 0 on hashblock
blkhash = bytes_to_hex_str(body)
assert_equal(genhashes[0], blkhash) # blockhash from generate must be equal to the hash received over zmq
self.log.info("Generate 10 blocks (and 10 coinbase txes)")
n = 10
genhashes = self.nodes[1].generate(n)
self.sync_all()
zmqHashes = []
blockcount = 0
for x in range(n * 2):
msg = self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
if topic == b"hashblock":
zmqHashes.append(bytes_to_hex_str(body))
msgSequence = struct.unpack('<I', msg[-1])[-1]<|fim▁hole|> blockcount += 1
for x in range(n):
assert_equal(genhashes[x], zmqHashes[x]) # blockhash from generate must be equal to the hash received over zmq
self.log.info("Wait for tx from second node")
# test tx from a second node
hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
self.sync_all()
# now we should receive a zmq msg because the tx was broadcast
msg = self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
assert_equal(topic, b"hashtx")
hashZMQ = bytes_to_hex_str(body)
msgSequence = struct.unpack('<I', msg[-1])[-1]
assert_equal(msgSequence, blockcount + 1)
assert_equal(hashRPC, hashZMQ) # txid from sendtoaddress must be equal to the hash received over zmq
if __name__ == '__main__':
ZMQTest().main()<|fim▁end|> | assert_equal(msgSequence, blockcount + 1) |
<|file_name|>screenInfo.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/james4k/terminal"
)
// ScreenInfo type
type ScreenInfo struct {
Width int
Height int
Chars []rune
Fcolors []terminal.Color
Bcolors []terminal.Color
CursorX int
CursorY int
CursorVisible bool
top int
bottom int
left int
right int
}
// NewScreenInfo creates a ScreenInfo with every field set to an
// "uninitialized" sentinel (-1 / empty slices / false) so the first
// GetRedrawRange call reports a full redraw.
func NewScreenInfo() *ScreenInfo {
	s := &ScreenInfo{
		Width:   -1,
		Height:  -1,
		CursorX: -1,
		CursorY: -1,
		top:     -1,
		bottom:  -1,
		left:    -1,
		right:   -1,
	}
	s.Chars = []rune{}
	s.Fcolors = []terminal.Color{}
	s.Bcolors = []terminal.Color{}
	s.CursorVisible = false
	return s
}
func (s *ScreenInfo) save(width int, height int, state *terminal.State) {
if s.Width != width || s.Height != height {
s.Width = width
s.Height = height
s.Chars = make([]rune, width*height)
s.Fcolors = make([]terminal.Color, width*height)
s.Bcolors = make([]terminal.Color, width*height)
}
for row := 0; row < s.Height; row++ {
for col := 0; col < s.Width; col++ {
ch, fg, bg := state.Cell(col, row)
s.Chars[row*s.Width+col] = ch
s.Fcolors[row*s.Width+col] = fg
s.Bcolors[row*s.Width+col] = bg
}<|fim▁hole|>
// updateRedrawRange grows the dirty rectangle so that it covers cell (x, y).
func (s *ScreenInfo) updateRedrawRange(x int, y int) {
	if x < s.left {
		s.left = x
	}
	if x+1 > s.right {
		s.right = x + 1
	}
	if y < s.top {
		s.top = y
	}
	if y+1 > s.bottom {
		s.bottom = y + 1
	}
}
// GetRedrawRange returns redraw range.
func (s *ScreenInfo) GetRedrawRange(width int, height int, state *terminal.State) (left int, top int, right int, bottom int) {
defer s.save(width, height, state)
if s.Width != width || s.Height != height {
return 0, 0, width, height
}
s.top = height
s.bottom = 0
s.left = width
s.right = 0
for row := 0; row < height; row++ {
for col := 0; col < width; col++ {
ch, fg, bg := state.Cell(col, row)
ch0 := s.Chars[row*width+col]
fg0 := s.Fcolors[row*width+col]
bg0 := s.Bcolors[row*width+col]
if ch != ch0 || fg != fg0 || bg != bg0 {
s.updateRedrawRange(col, row)
}
}
}
cursorVisible := state.CursorVisible()
cursorX, cursorY := state.Cursor()
if s.CursorVisible && !cursorVisible {
s.updateRedrawRange(s.CursorX, s.CursorY)
}
if !s.CursorVisible && cursorVisible {
s.updateRedrawRange(cursorX, cursorY)
}
if s.CursorVisible && cursorVisible && (s.CursorX != cursorX || s.CursorY != cursorY) {
s.updateRedrawRange(s.CursorX, s.CursorY)
s.updateRedrawRange(cursorX, cursorY)
}
return s.left, s.top, s.right, s.bottom
}<|fim▁end|> | }
s.CursorX, s.CursorY = state.Cursor()
s.CursorVisible = state.CursorVisible()
} |
<|file_name|>terrain.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
__author__ = 'jfernandez'
from lettuce import world, before, after
from commons.terrain_steps import setup_feature, setup_scenario, setup_outline, tear_down
from commons.provisioning_steps import ProvisioningSteps
from commons.rest_utils import RestUtils
from commons.configuration import CONFIG_VM_HOSTNAME
from commons.fabric_utils import execute_chef_client, execute_puppet_agent, remove_chef_client_cert_file, \
remove_puppet_agent_cert_file, execute_chef_client_stop, execute_puppet_agent_stop, \
remove_all_generated_test_files, remove_puppet_agent_catalog
provisioning_steps = ProvisioningSteps()
rest_utils = RestUtils()
@before.each_feature
def before_each_feature(feature):
"""
Hook: Will be executed before each feature. Configures global vars and gets token from keystone.
Launch agents (puppet and chef) in the target VM
"""
setup_feature(feature)
@before.each_scenario
def before_each_scenario(scenario):
"""
Hook: Will be executed before each Scenario.
Setup Scenario: initialize World vars and launch agents (puppet and chef) in the target VM
"""
setup_scenario(scenario)
execute_chef_client()
execute_puppet_agent()
@before.outline
def before_outline(param1, param2, param3, param4):
""" Hook: Will be executed before each Scenario Outline. Same behaviour as 'before_each_scenario'"""
setup_outline(param1, param2, param3, param4)
remove_all_generated_test_files()<|fim▁hole|>
@after.each_scenario
def after_each_scenario(scenario):
    """
    Hook: Will be executed after each scenario.
    Stops the Chef and Puppet agents in the target VM, removes their
    certificates/catalog and generated test files, and deregisters the node.
    """
    execute_chef_client_stop()
    execute_puppet_agent_stop()
    remove_chef_client_cert_file()
    remove_puppet_agent_cert_file()
    remove_all_generated_test_files()
    remove_puppet_agent_catalog()
    rest_utils.delete_node(world.headers, world.tenant_id, CONFIG_VM_HOSTNAME)
@after.all
def after_all(scenario):
"""
Hook: Will be executed after all each scenario
Removes Feature data and cleans the system. Kills all agents running in the VM.
"""
after_each_scenario(scenario)
tear_down(scenario)<|fim▁end|> | remove_puppet_agent_catalog()
|
<|file_name|>nl.js<|end_file_name|><|fim▁begin|>CKEDITOR.plugins.setLang('oembed', 'nl', {
title : "Integratie van media-inhoud (foto's, video, content)",
button : "Media-inhoud van externe websites",
pasteUrl : "Geef een URL van een pagina in dat ondersteund wordt (Bijv.: YouTube, Flickr, Qik, Vimeo, Hulu, Viddler, MyOpera, etc.) ...",
invalidUrl : "Please provide an valid URL!",
noEmbedCode : "No embed code found, or site is not supported!",
url : "URL:",
width: "Breedte:",
height: "Hoogte:",
widthTitle: "Width for the embeded Content",
heightTitle: "Height for the embeded Content",
maxWidth: "Maximale breedte:",
maxHeight: "Maximale hoogte:",
maxWidthTitle: "Maximum Width for the embeded Content",
maxHeightTitle: "Maximum Height for the embeded Content",
resizeType: "Resize Type (Only Video's):",
noresize: "No Resize (use default)",
responsive: "Responsive Resize",
custom: "Specific Resize",
autoClose: "Automatically Close Dialog after Code is Embeded",
noVimeo: "The owner of this video has set domain restrictions and you will not be able to embed it on your website.",
Error: "Media Content could not been retrieved, please try a different URL."
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>person_test.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from unittest import TestCase
from application.backend.models.person import gender, create_person_from_dict
from application.backend.tests.test_helpers import create_person
class PersonTest(TestCase):
def test_should_jsonify_itself(self):
date_of_birth = datetime.now()
person = create_person(
surname='Nekesa', first_name='Patricia',
date_of_birth=date_of_birth, gender=gender.FEMALE
)
expected_json = {
u"surname": u"Nekesa",
u"first_name": u"Patricia",
u"date_of_birth": unicode(date_of_birth),
u"gender": unicode(gender.FEMALE),
u"id": unicode(None)
}
person_json = person.jsonify()
self.assertEqual(person_json, expected_json)
def test_should_construct_self_from_dict(self):<|fim▁hole|> surname=u'Nekesa', first_name=u'Patricia',
date_of_birth=u'2012-02-02', gender=gender.FEMALE
)
person = create_person_from_dict(dictionary)
self.assertEqual(person, expected_person)
def test_should_equate_two_people_if_ids_are_the_same(self):
person_1 = create_person()
person_2 = create_person()
self.assertEqual(person_1, person_2)<|fim▁end|> | dictionary = {"surname": "Nekesa", "first_name": "Patricia", "date_of_birth": "2012-02-02", "gender": "female"}
expected_person = create_person( |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod models;
mod schema;
use diesel::prelude::*;
use diesel::r2d2::ConnectionManager;
use std::env;
#[derive(Clone)]
pub struct DbContext {
pool: r2d2::Pool<ConnectionManager<PgConnection>>,
}
<|fim▁hole|> }
    /// Builds a `DbContext` backed by an r2d2 connection pool for the given
    /// Postgres connection string.
    ///
    /// # Panics
    /// Panics if the pool cannot be created (e.g. unreachable database).
    pub fn new(connection_string: &str) -> Self {
        let pg_connection_manager = ConnectionManager::<PgConnection>::new(connection_string);
        let pool = r2d2::Pool::builder()
            .build(pg_connection_manager)
            .expect("Failed to create pool.");
        DbContext { pool }
    }
}<|fim▁end|> | impl DbContext {
pub fn new_from_env() -> Self {
let connection_string = env::var("DATABASE_URL").expect("DATABASE_URL environment variable was not set");
DbContext::new(&connection_string) |
<|file_name|>insertion.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include <cstddef>
#include <ostream>
#include <utility>
#include <vector>
#include <insertionfinder/algorithm.hpp>
namespace InsertionFinder {class Solution;};
std::ostream& operator<<(std::ostream&, const InsertionFinder::Solution&);
namespace InsertionFinder {
// One insertion step: `insertion` (not owned; may be null) is the algorithm
// inserted into `skeleton` at move index `insert_place`.
struct Insertion {
    Algorithm skeleton;
    std::size_t insert_place;
    const Algorithm* insertion;
    // Copy-construct from a skeleton; insertion pointer defaults to "none".
    explicit Insertion(
        const Algorithm& skeleton,
        std::size_t insert_place = 0,
        const Algorithm* insertion = nullptr
    ): skeleton(skeleton), insert_place(insert_place), insertion(insertion) {}
    // Move-construct from an rvalue skeleton (avoids copying the algorithm).
    explicit Insertion(
        Algorithm&& skeleton,
        std::size_t insert_place = 0,
        const Algorithm* insertion = nullptr
    ): skeleton(std::move(skeleton)), insert_place(insert_place), insertion(insertion) {}
    // Pretty-print this insertion; `index` is its ordinal in the solution.
    void print(std::ostream& out, std::size_t index) const;
};
struct Solution;
struct MergedInsertion {
struct SubInsertion {
const Algorithm* insertion;
std::size_t order;
};
Algorithm skeleton;
Algorithm final_solution;
std::vector<std::pair<std::size_t, std::vector<std::size_t>>> insert_places;
std::vector<Algorithm> insertions;
std::vector<std::pair<std::size_t, std::vector<SubInsertion>>> get_insertions() const;
void print(std::ostream& out, std::size_t initial_order, const Solution& solution) const;
};
struct Solution {
Algorithm final_solution;
std::vector<Insertion> insertions;<|fim▁hole|> std::size_t cancellation = 0;
Solution(const Algorithm& final_solution): final_solution(final_solution) {}
Solution(const Algorithm& final_solution, const std::vector<Insertion>& insertions):
final_solution(final_solution), insertions(insertions) {}
Solution(const Algorithm& final_solution, std::vector<Insertion>&& insertions):
final_solution(final_solution), insertions(std::move(insertions)) {}
std::vector<MergedInsertion> merge_insertions(const Algorithm& skeleton) const;
void print(std::ostream& out, const std::vector<MergedInsertion>& merged_insertions) const;
};
};<|fim▁end|> | |
<|file_name|>home-service.service.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import { TestBed, async, inject } from '@angular/core/testing';
import { HomeServiceService } from './home-service.service';
describe('Service: HomeService', () => {
beforeEach(() => {
TestBed.configureTestingModule({
providers: [HomeServiceService]
});
});
it('should get created', inject([HomeServiceService], (service: HomeServiceService) => {<|fim▁hole|> expect(service).toBeTruthy();
}));
});<|fim▁end|> | |
<|file_name|>db.rs<|end_file_name|><|fim▁begin|>/* Copyright 2013 10gen Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use bson::encode::*;
use bson::formattable::*;
use util::*;
use tools::md5::*;
use client::Client;
use coll::Collection;
static L_END: bool = true;
pub struct DB {
name : ~str,
priv client : @Client,
}
/**
* Having created a `Client` and connected as desired
* to a server or cluster, users may interact with
* databases by creating `DB` handles to those databases.
*/
impl DB {
/**
* Creates a new Mongo DB with given name and associated Client.
*
* # Arguments
* * `name` - name of DB
* * `client` - Client with which this DB is associated
*
* # Returns
* DB (handle to database)
*/
pub fn new(name : ~str, client : @Client) -> DB {
DB {
name : name,
client : client
}
}
// COLLECTION INTERACTION
/**
* Gets names of all collections in this `DB`, returning error
* if any fail. Names do not include `DB` name, i.e. are not
* full namespaces.
*
* # Returns
* vector of collection names on success, `MongoErr` on failure
*
* # Failure Types
* * error querying `system.indexes` collection
* * response from server not in expected form (must contain
* vector of `BsonDocument`s each containing "name" fields of
* `UString`s)
*/
pub fn get_collection_names(&self) -> Result<~[~str], MongoErr> {
let mut names : ~[~str] = ~[];
// query on namespace collection
let coll = Collection::new(self.name.clone(), SYSTEM_NAMESPACE.to_owned(), self.client);
let mut cur = match coll.find(None, None, None) {
Ok(cursor) => cursor,
Err(e) => return Err(e),
};
// pull out all the names, returning error if any fail
for cur.advance |doc| {
match doc.find(~"name") {
Some(val) => {
match val {
&UString(ref s) => {
// ignore special collections (with "$")
if !s.contains_char('$') {
names.push(s.slice_from(self.name.len()+1).to_owned());
}
},
_ => return Err(MongoErr::new(
~"db::get_collection_names",
fmt!("db %s", self.name),
~"got non-string collection name")),
}
},
None => return Err(MongoErr::new(
~"db::get_collection_names",
fmt!("db %s", self.name),
~"got no name for collection")),
}
}
Ok(names)
}
/**
* Gets `Collection`s in this `DB`, returning error if any fail.
*
* # Returns
* vector of `Collection`s on success, `MongoErr` on failure
*
* # Failure Types
* * errors propagated from `get_collection_names`
*/
pub fn get_collections(&self) -> Result<~[Collection], MongoErr> {
let names = match self.get_collection_names() {
Ok(n) => n,
Err(e) => return Err(e),
};
let mut coll : ~[Collection] = ~[];
for names.iter().advance |&n| {
coll.push(Collection::new(self.name.clone(), n, self.client));
}
Ok(coll)
}
/**
* Creates collection with given options.
*
* # Arguments
* * `coll` - name of collection to create
* * `flag_array` - collection creation flags
* * `option_array` - collection creation options
*
* # Returns
* handle to collection on success, `MongoErr` on failure
*/
pub fn create_collection( &self,
coll : ~str,
flag_array : Option<~[COLLECTION_FLAG]>,
option_array : Option<~[COLLECTION_OPTION]>)
-> Result<Collection, MongoErr> {
let flags = process_flags!(flag_array);
let cmd = fmt!( "{ \"create\":\"%s\", %s }",
coll,
self.process_create_ops(flags, option_array));
let old_pref = self.client.set_read_pref(PRIMARY_ONLY);
let result = match self.run_command(SpecNotation(cmd)) {
Ok(_) => Ok(Collection::new(self.name.clone(), coll, self.client)),
Err(e) => Err(e),
};
match old_pref {
Ok(p) => { self.client.set_read_pref(p); }
Err(_) => (),
}
result
}<|fim▁hole|> priv fn process_create_ops(&self, flags : i32, options : Option<~[COLLECTION_OPTION]>)
-> ~str {
let mut opts_str = ~"";
opts_str.push_str(fmt!( "\"autoIndexId\":%? ",
(flags & AUTOINDEX_ID as i32) != 0i32));
match options {
None => (),
Some(opt_arr) => {
for opt_arr.iter().advance |&opt| {
opts_str.push_str(match opt {
CAPPED(sz) => fmt!(", \"capped\":true, \"size\":%?", sz),
SIZE(sz) => fmt!(", \"size\":%?", sz),
MAX_DOCS(k) => fmt!(", \"max\":%?", k),
})
}
}
}
opts_str
}
/**
* Gets handle to collection with given name, from this `DB`.
*
* # Arguments
* * `coll` - name of `Collection` to get
*
* # Returns
* handle to collection
*/
pub fn get_collection(&self, coll : ~str) -> Collection {
Collection::new(self.name.clone(), coll, self.client)
}
/**
* Drops given collection from database associated with this `DB`.
*
* # Arguments
* * `coll` - name of collection to drop
*
* # Returns
* () on success, `MongoErr` on failure
*/
    pub fn drop_collection(&self, coll : &str) -> Result<(), MongoErr> {
        // Force the command to run against the primary, remembering the
        // previous read preference so it can be restored afterwards.
        let old_pref = self.client.set_read_pref(PRIMARY_ONLY);
        let result = match self.run_command(SpecNotation(fmt!("{ \"drop\":\"%s\" }", coll))) {
            Ok(_) => Ok(()),
            Err(e) => Err(e),
        };
        // Restore the caller's read preference (best effort; ignored on error).
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        result
    }
// TODO make take options? (not strictly necessary but may be good?)
// TODO allow other query options, e.g. SLAVE_OK, with helper function
/**
* Runs given command (taken as `BsonDocument` or `~str`).
*
* # Arguments
* * `cmd` - command to run, taken as `SpecObj(BsonDocument)` or
* `SpecNotation(~str)`
*
* # Returns
* `~BsonDocument` response from server on success that must be parsed
* appropriately by caller, `MongoErr` on failure
*/
pub fn run_command(&self, cmd : QuerySpec) -> Result<~BsonDocument, MongoErr> {
let coll = Collection::new(self.name.clone(), SYSTEM_COMMAND.to_owned(), self.client);
//let ret_msg = match coll.find_one(Some(cmd), None, None, None) {
let ret_msg = match coll.find_one(Some(cmd.clone()), None, Some(~[NO_CUR_TIMEOUT])) {
Ok(msg) => msg,
Err(e) => return Err(MongoErr::new(
~"db::run_command",
fmt!("error getting return value from run_command %s", cmd.to_str()),
fmt!("-->\n%s", e.to_str()))),
};
// check if run_command succeeded
let ok = match ret_msg.find(~"ok") {
Some(x) => match *x {
Double(v) => v,
_ => return Err(MongoErr::new(
~"db::run_command",
fmt!("error in returned value from run_command %s", cmd.to_str()),
fmt!("\"ok\" field contains %?", *x))),
},
None => return Err(MongoErr::new(
~"db::run_command",
fmt!("error in returned value from run_command %s", cmd.to_str()),
~"no \"ok\" field in return message!")),
};
match ok {
0f64 => (),
_ => return Ok(ret_msg)
}
// otherwise, extract error message
let errmsg = match ret_msg.find(~"errmsg") {
Some(x) => match *x {
UString(ref s) => s.to_owned(),
_ => return Err(MongoErr::new(
~"db::run_command",
fmt!("error in returned value from run_command %s", cmd.to_str()),
fmt!("\"errmsg\" field contains %?", *x))),
},
None => return Err(MongoErr::new(
~"db::run_command",
fmt!("error in returned value from run_comand %s", cmd.to_str()),
~"run_command failed without msg!")),
};
Err(MongoErr::new(
~"db::run_command",
fmt!("run_command %s failed", cmd.to_str()),
errmsg))
}
    /**
     * Parses write concern into bytes and sends to server.
     *
     * # Arguments
     * * `wc` - write concern, i.e. getLastError specifications
     *
     * # Returns
     * `Option<~BsonDocument>` with full response on success (or None
     * if write concern was 0), `MongoErr` on failure
     *
     * # Failure Types
     * * invalid write concern specification (should never happen)
     * * network
     * * getLastError error, e.g. duplicate ```_id```s
     */
    pub fn get_last_error(&self, wc : Option<~[WRITE_CONCERN]>)
                -> Result<Option<~BsonDocument>, MongoErr>{
        // set default write concern (to 1) if not specified
        let concern = match wc {
            None => ~[W_N(1), FSYNC(false)],
            Some(w) => w,
        };
        let mut concern_doc = BsonDocument::new();
        concern_doc.put(~"getLastError", Bool(true));
        // parse write concern, early exiting if set to <= 0
        // (a write concern of 0 means "fire and forget": no ack requested)
        for concern.iter().advance |&opt| {
            match opt {
                JOURNAL(j) => concern_doc.put(~"j", Bool(j)),
                W_N(w) => {
                    if w <= 0 { return Ok(None); }
                    else { concern_doc.put(~"w", Int32(w as i32)) }
                }
                W_STR(w) => concern_doc.put(~"w", UString(w)),
                W_TAGSET(ts) => concern_doc.union(ts.to_bson_t()),
                WTIMEOUT(t) => concern_doc.put(~"wtimeout", Int32(t as i32)),
                FSYNC(s) => concern_doc.put(~"fsync", Bool(s)),
            }
        }
        // run_command and get entire doc; getLastError must go to a primary
        let old_pref = self.client.set_read_pref(PRIMARY_PREF(None));
        let err_doc_tmp = match self.run_command(SpecObj(concern_doc)) {
            Ok(doc) => doc,
            Err(e) => return Err(MongoErr::new(
                                    ~"db::get_last_error",
                                    ~"run_command error",
                                    fmt!("-->\n%s", e.to_str()))),
        };
        // restore the caller's read preference
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        // error field name possibilities
        let err_field = ~[  err_doc_tmp.find(~"err"),
                            err_doc_tmp.find(~"$err")];
        // search for error field; the last one found wins
        let mut err_found = false;
        let mut err_doc = Int32(1); // [invalid err_doc]
        for err_field.iter().advance |&err_result| {
            match err_result {
                None => (),
                Some(doc) => {
                    err_found = true;
                    err_doc = doc.clone();
                }
            }
        };
        if !err_found {
            return Err(MongoErr::new(
                ~"db::get_last_error",
                ~"getLastError unexpected format",
                ~"no $err field in reply"));
        }
        // unwrap error message: Null means "no error occurred"
        match err_doc {
            Null => Ok(Some(err_doc_tmp.clone())),
            UString(s) => Err(MongoErr::new(
                ~"db::get_last_error",
                ~"getLastError error",
                s)),
            _ => Err(MongoErr::new(
                ~"db::get_last_error",
                ~"getLastError unexpected format",
                ~"unknown last error in reply")),
        }
    }
///Enable sharding on this database.
pub fn enable_sharding(&self) -> Result<(), MongoErr> {
let old_pref = self.client.set_read_pref(PRIMARY_PREF(None)); // XXX check
let result = match self.run_command(SpecNotation(fmt!("{ \"enableSharding\": %s }", self.name))) {
Ok(doc) => match *doc.find(~"ok").unwrap() {
Double(1f64) => Ok(()),
Int32(1i32) => Ok(()),
Int64(1i64) => Ok(()),
_ => Err(MongoErr::new(
~"db::logout",
~"error while logging out",
~"the server returned ok: 0")),
},
Err(e) => Err(e),
};
match old_pref {
Ok(p) => { self.client.set_read_pref(p); }
Err(_) => (),
}
result
}
    ///Add a new database user with the given username and password.
    ///If the system.users collection becomes unavailable, this will fail.
    ///
    ///Upserts into system.users: reuses the existing user document when one
    ///is found, otherwise creates a fresh one.
    pub fn add_user(&self, username: ~str, password: ~str, roles: ~[~str]) -> Result<(), MongoErr>{
        let coll = self.get_collection(SYSTEM_USERS.to_owned());
        let mut user = match coll.find_one(Some(SpecNotation(fmt!("{ \"user\": \"%s\" }", username))), None, None)
            {
                Ok(u) => u,
                Err(_) => {
                    let mut doc = BsonDocument::new();
                    doc.put(~"user", UString(username.clone()));
                    ~doc
                }
        };
        // Password digest follows the legacy MONGODB-CR scheme:
        // md5("<user>:mongo:<password>").
        user.put(~"pwd", UString(md5(fmt!("%s:mongo:%s", username, password))));
        user.put(~"roles", roles.to_bson_t());
        coll.save(user, None)
    }
    ///Become authenticated as the given username with the given password.
    ///
    ///Implements the legacy MONGODB-CR challenge/response: fetch a server
    ///nonce, then send md5(nonce + user + md5("user:mongo:password")).
    pub fn authenticate(&self, username: ~str, password: ~str) -> Result<(), MongoErr> {
        let nonce = match self.run_command(SpecNotation(~"{ \"getnonce\": 1 }")) {
            Ok(doc) => match *doc.find(~"nonce").unwrap() { //this unwrap should always succeed
                UString(ref s) => s.to_owned(),
                _ => return Err(MongoErr::new(
                        ~"db::authenticate",
                        ~"error while getting nonce",
                        fmt!("an invalid nonce (%?) was returned by the server", *doc.find(~"nonce").unwrap())))
            },
            Err(e) => return Err(e)
        };
        // Authentication must run against a primary.
        let old_pref = self.client.set_read_pref(PRIMARY_PREF(None));
        let result = match self.run_command(SpecNotation(fmt!(" {
                \"authenticate\": 1,
                \"user\": \"%s\",
                \"nonce\": \"%s\",
                \"key\": \"%s\" } ",
                username,
                nonce,
                md5(fmt!("%s%s%s", nonce, username, md5(fmt!("%s:mongo:%s",username, password))))))) {
            Ok(_) => Ok(()),
            Err(e) => Err(e)
        };
        // restore the caller's read preference
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        result
    }
    ///Log out of the current user.
    ///Closing a connection will also log out.
    ///
    ///Interprets the numeric `ok` field of the server reply; any value
    ///other than 1 is treated as failure.
    pub fn logout(&self) -> Result<(), MongoErr> {
        let old_pref = self.client.set_read_pref(PRIMARY_PREF(None));
        let result = match self.run_command(SpecNotation(~"{ \"logout\": 1 }")) {
            Ok(doc) => match *doc.find(~"ok").unwrap() {
                Double(1f64) => Ok(()),
                Int32(1i32) => Ok(()),
                Int64(1i64) => Ok(()),
                _ => Err(MongoErr::new(
                        ~"db::logout",
                        ~"error while logging out",
                        ~"the server returned ok: 0")),
            },
            Err(e) => Err(e),
        };
        // restore the caller's read preference
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        result
    }
    ///Get the profiling level of the database.
    ///
    ///Returns the current level ("was" field) and, when reported by the
    ///server, the slow-operation threshold in ms ("slowms" field).
    // XXX return type; potential for change
    pub fn get_profiling_level(&self) -> Result<(int, Option<int>), MongoErr> {
        let old_pref = self.client.set_read_pref(PRIMARY_PREF(None));
        // profile:-1 queries the current level without changing it
        let result = match self.run_command(SpecNotation(~"{ 'profile':-1 }")) {
            Ok(d) => {
                let mut err = None;
                let mut level = None;
                let mut thresh = None;
                match d.find(~"was") {
                    Some(&Double(f)) => level = Some(f as int),
                    _ => err = Some(MongoErr::new(
                            ~"db::get_profiling_level",
                            ~"could not get profiling level",
                            ~"an invalid profiling level was returned"))
                }
                // "slowms" is optional; absence is not an error
                match d.find(~"slowms") {
                    None => (),
                    Some(&Double(ms)) => thresh = Some(ms as int),
                    _ => err = Some(MongoErr::new(
                            ~"db::get_profiling_level",
                            ~"could not get profiling threshold",
                            ~"an invalid profiling threshold was returned"))
                };
                if err.is_none() { Ok((level.unwrap(), thresh)) }
                else { Err(err.unwrap()) }
            }
            Err(e) => Err(e),
        };
        // restore the caller's read preference
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        result
    }
    ///Set the profiling level of the database.
    ///
    ///Returns the full command reply (which includes the previous level
    ///in its "was" field) on success.
    // XXX argument types; potential for change
    pub fn set_profiling_level(&self, level: int)
                -> Result<~BsonDocument, MongoErr> {
        let old_pref = self.client.set_read_pref(PRIMARY_PREF(None));
        let result = self.run_command(SpecNotation(fmt!("{ \"profile\": %d }", level)));
        // restore the caller's read preference
        match old_pref {
            Ok(p) => { self.client.set_read_pref(p); }
            Err(_) => (),
        }
        result
    }
}<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import time
import unittest
from django.core.cache import cache
from django.utils.cache import patch_vary_headers
from django.http import HttpResponse
# functions/classes for complex data type tests
def f():
    """Module-level function used to exercise caching of callables."""
    answer = 42
    return answer
class C:
    # Old-style class used to exercise caching of class objects.
    # NOTE: `m` is an instance method whose single positional argument
    # plays the role of `self` (it has no explicit `self` parameter).
    def m(n):
        return 24
class Cache(unittest.TestCase):
    """Exercises the configured cache backend's basic API surface:
    set/get/add/delete, bulk reads, membership, complex values, expiry,
    and unicode keys/values."""

    def test_simple(self):
        # simple set/get
        cache.set("key", "value")
        self.assertEqual(cache.get("key"), "value")

    def test_add(self):
        # test add (only add if key isn't already in cache)
        cache.add("addkey1", "value")
        cache.add("addkey1", "newvalue")
        self.assertEqual(cache.get("addkey1"), "value")

    def test_non_existent(self):
        # get with non-existent keys; default is returned when supplied
        self.assertEqual(cache.get("does_not_exist"), None)
        self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")

    def test_get_many(self):
        # get_many returns only the keys that exist
        cache.set('a', 'a')
        cache.set('b', 'b')
        cache.set('c', 'c')
        cache.set('d', 'd')
        self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
        self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})

    def test_delete(self):
        # delete removes only the targeted key
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        self.assertEqual(cache.get("key1"), "spam")
        cache.delete("key1")
        self.assertEqual(cache.get("key1"), None)
        self.assertEqual(cache.get("key2"), "eggs")

    def test_has_key(self):
        # has_key
        cache.set("hello1", "goodbye1")
        self.assertEqual(cache.has_key("hello1"), True)
        self.assertEqual(cache.has_key("goodbye1"), False)

    def test_in(self):
        # `in` operator delegates to the cache's __contains__
        cache.set("hello2", "goodbye2")
        self.assertEqual("hello2" in cache, True)
        self.assertEqual("goodbye2" in cache, False)

    def test_data_types(self):
        # values of many types must round-trip through the backend
        stuff = {
            'string' : 'this is a string',
            'int' : 42,
            'list' : [1, 2, 3, 4],
            'tuple' : (1, 2, 3, 4),
            'dict' : {'A': 1, 'B' : 2},
            'function' : f,
            'class' : C,
        }
        cache.set("stuff", stuff)
        self.assertEqual(cache.get("stuff"), stuff)

    def test_expiration(self):
        # entries with a 1s timeout must be gone after sleeping 2s;
        # add() must then succeed and has_key() must report absence
        cache.set('expire1', 'very quickly', 1)
        cache.set('expire2', 'very quickly', 1)
        cache.set('expire3', 'very quickly', 1)
        time.sleep(2)
        self.assertEqual(cache.get("expire1"), None)
        cache.add("expire2", "newvalue")
        self.assertEqual(cache.get("expire2"), "newvalue")
        self.assertEqual(cache.has_key("expire3"), False)

    def test_unicode(self):
        # BUGFIX: the dict previously used the key u'ascii' twice, so the
        # plain-ascii-string case was silently overwritten by the dict value
        # and never tested. The second entry now uses a distinct key.
        stuff = {
            u'ascii': u'ascii_value',
            u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
            u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
            u'ascii2': {u'x' : 1 }
        }
        for (key, value) in stuff.items():
            cache.set(key, value)
            self.assertEqual(cache.get(key), value)
import os
import md5
import shutil
import tempfile
from django.core.cache.backends.filebased import CacheClass as FileCache
class FileBasedCacheTests(unittest.TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
self.dirname = tempfile.mktemp()
os.mkdir(self.dirname)
self.cache = FileCache(self.dirname, {})
def tearDown(self):
shutil.rmtree(self.dirname)
def test_hashing(self):
"""Test that keys are hashed into subdirectories correctly"""
self.cache.set("foo", "bar")
keyhash = md5.new("foo").hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assert_(os.path.exists(keypath))
def test_subdirectory_removal(self):
"""<|fim▁hole|> keyhash = md5.new("foo").hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assert_(os.path.exists(keypath))
self.cache.delete("foo")
self.assert_(not os.path.exists(keypath))
self.assert_(not os.path.exists(os.path.dirname(keypath)))
self.assert_(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))
class CacheUtils(unittest.TestCase):
    """TestCase for django.utils.cache functions."""
    def test_patch_vary_headers(self):
        # Table-driven: each row is (existing Vary header or None,
        # headers to add, expected resulting Vary header). Existing
        # entries must be matched case-insensitively and not duplicated.
        # NOTE(review): rows 6 and 8 are identical; one was presumably
        # meant to cover a different combination - worth confirming.
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            response = HttpResponse()
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | Make sure that the created subdirectories are correctly removed when empty.
"""
self.cache.set("foo", "bar") |
<|file_name|>NumPyUtil.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
from typing import Union, List, cast
import numpy
from copy import deepcopy
def immutableNDArray(nda: Union[List, numpy.ndarray]) -> numpy.ndarray:
    """Creates an immutable copy of the given narray

    If the array is already immutable then it just returns it.

    :param nda: :type{numpy.ndarray} the array to copy. May be a list or None
    :return: :type{numpy.ndarray} an immutable narray (or None for None input)
    """
    # NOTE: reconstructed - the None guard had been displaced to the end of
    # the block by a data-corruption seam; it belongs before any attribute
    # access on `nda`.
    if nda is None:
        return None

    if type(nda) is list:
        # Lists are converted to a float32 array and frozen in place.
        data = numpy.array(nda, numpy.float32)
        data.flags.writeable = False
    else:
        data = cast(numpy.ndarray, nda)
        if not data.flags.writeable:
            # Already immutable: return as-is, no copy needed.
            return data

    # Deep-copy so the caller's array stays writable, then freeze the copy.
    copy = deepcopy(data)
    copy.flags.writeable = False
    return copy
<|file_name|>test_corddisease.py<|end_file_name|><|fim▁begin|>import unittest
from biothings_explorer.registry import Registry
from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher
from .utils import get_apis
reg = Registry()
class TestSingleHopQuery(unittest.TestCase):
    def test_disease2protein(self):
        """Test disease-protein."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="Protein",
            input_cls="Disease",
            input_id="DOID",
            pred="related_to",
            output_id="PR",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("PR:000007572" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["PR:000007572"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2genomicentity(self):
        """Test disease-genomic entity."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="GenomicEntity",
            input_cls="Disease",
            pred="related_to",
            input_id="DOID",
            output_id="SO",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("SO:0000999" in seqd.G)
        self.assertTrue("SO:0001853" in seqd.G)

    def test_disease2chemicalsubstance(self):
        """Test disease-chemical substance."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="ChemicalSubstance",
            input_cls="Disease",
            input_id="DOID",
            values="DOID:12143",
            output_id="CHEBI",
        )
        seqd.query()
        self.assertTrue("CHEBI:65349" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["CHEBI:65349"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2gene(self):
        """Test disease-gene."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="Gene", input_cls="Disease", input_id="DOID", values="DOID:12143"
        )
        seqd.query()
        self.assertTrue("DHDDS" in seqd.G)
        self.assertTrue("RPL3" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["DHDDS"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2anatomy(self):
        """Test disease-anatomical entity."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="AnatomicalEntity",
            input_cls="Disease",
            input_id="DOID",
            output_id="UBERON",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("UBERON:0007023" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["UBERON:0007023"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2ma(self):
        """Test disease-molecular activity."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="MolecularActivity",
            input_cls="Disease",
            input_id="DOID",
            output_id="GO",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("GO:0004935" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["GO:0004935"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2bp(self):
        """Test disease-biological process."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="BiologicalProcess",
            input_cls="Disease",
            input_id="DOID",
            values="DOID:12143",
            output_id="GO",
        )
        seqd.query()
        self.assertTrue("GO:0007605" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["GO:0007605"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2cc(self):
        """Test disease-cellular component."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="CellularComponent",
            input_cls="Disease",
            input_id="DOID",
            output_id="GO",
            values="DOID:0001816",
        )
        seqd.query()
        self.assertTrue("GO:0030017" in seqd.G)
        edges = seqd.G["DOID:DOID:0001816"]["GO:0030017"]
        self.assertTrue("CORD Disease API" in get_apis(edges))

    def test_disease2cell(self):
        """Test disease-cell."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="Cell",
            input_cls="Disease",
            input_id="DOID",
            output_id="CL",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("CL:0000731" in seqd.G)
def test_disease2disease(self):
"""Test gene-disease"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Disease",
input_cls="Disease",
input_id="DOID",
output_id="DOID",
values="DOID:12143",
)
seqd.query()
self.assertTrue("DOID:225" in seqd.G)<|fim▁hole|> self.assertTrue("CORD Disease API" in get_apis(edges))<|fim▁end|> | edges = seqd.G["DOID:DOID:12143"]["DOID:225"] |
<|file_name|>neutron.py<|end_file_name|><|fim▁begin|># Various utilies for dealing with Neutron and the renaming from Quantum.
from subprocess import check_output
from charmhelpers.core.hookenv import (
config,
log,
ERROR,
)
from charmhelpers.contrib.openstack.utils import os_release
def headers_package():
    """Ensures correct linux-headers for running kernel are installed,
    for building DKMS package"""
    # NOTE(review): written for Python 2 where check_output() returns str;
    # under Python 3 this would interpolate a bytes repr - confirm runtime.
    kver = check_output(['uname', '-r']).strip()
    return 'linux-headers-%s' % kver
QUANTUM_CONF_DIR = '/etc/quantum'
def kernel_version():
    """ Retrieve the current major kernel version as a tuple e.g. (3, 13) """
    # NOTE(review): Python 2 semantics assumed - under Python 3,
    # check_output() returns bytes and int(b'3') raises; verify runtime.
    kver = check_output(['uname', '-r']).strip()
    kver = kver.split('.')
    # Only (major, minor) are kept; patch level and suffixes are ignored.
    return (int(kver[0]), int(kver[1]))
def determine_dkms_package():
    """ Determine which DKMS package should be used based on kernel version """
    # NOTE: kernels >= 3.13 ship native GRE and VXLAN support, so no DKMS
    # datapath module is needed on those releases.
    needs_dkms = kernel_version() < (3, 13)
    return ['openvswitch-datapath-dkms'] if needs_dkms else []
# legacy
def quantum_plugins():
    """Plugin metadata (config path, driver, contexts, packages, services)
    for the legacy 'quantum' network manager, keyed by plugin name."""
    # Imported lazily to avoid a circular import at module load time.
    from charmhelpers.contrib.openstack import context
    return {
        'ovs': {
            'config': '/etc/quantum/plugins/openvswitch/'
                      'ovs_quantum_plugin.ini',
            'driver': 'quantum.plugins.openvswitch.ovs_quantum_plugin.'
                      'OVSQuantumPluginV2',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=QUANTUM_CONF_DIR)],
            'services': ['quantum-plugin-openvswitch-agent'],
            # First package group: kernel headers + optional DKMS module
            # for building the OVS datapath on older kernels.
            'packages': [[headers_package()] + determine_dkms_package(),
                         ['quantum-plugin-openvswitch-agent']],
            'server_packages': ['quantum-server',
                                'quantum-plugin-openvswitch'],
            'server_services': ['quantum-server']
        },
        'nvp': {
            'config': '/etc/quantum/plugins/nicira/nvp.ini',
            'driver': 'quantum.plugins.nicira.nicira_nvp_plugin.'
                      'QuantumPlugin.NvpPluginV2',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=QUANTUM_CONF_DIR)],
            'services': [],
            'packages': [],
            'server_packages': ['quantum-server',
                                'quantum-plugin-nicira'],
            'server_services': ['quantum-server']
        }
    }
NEUTRON_CONF_DIR = '/etc/neutron'
def neutron_plugins():
    """Plugin metadata (config path, driver, contexts, packages, services)
    for the 'neutron' network manager, keyed by plugin name. Entries are
    patched per OpenStack release below."""
    # Imported lazily to avoid a circular import at module load time.
    from charmhelpers.contrib.openstack import context
    release = os_release('nova-common')
    plugins = {
        'ovs': {
            'config': '/etc/neutron/plugins/openvswitch/'
                      'ovs_neutron_plugin.ini',
            'driver': 'neutron.plugins.openvswitch.ovs_neutron_plugin.'
                      'OVSNeutronPluginV2',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=NEUTRON_CONF_DIR)],
            'services': ['neutron-plugin-openvswitch-agent'],
            # Kernel headers + optional DKMS module for the OVS datapath.
            'packages': [[headers_package()] + determine_dkms_package(),
                         ['neutron-plugin-openvswitch-agent']],
            'server_packages': ['neutron-server',
                                'neutron-plugin-openvswitch'],
            'server_services': ['neutron-server']
        },
        'nvp': {
            'config': '/etc/neutron/plugins/nicira/nvp.ini',
            'driver': 'neutron.plugins.nicira.nicira_nvp_plugin.'
                      'NeutronPlugin.NvpPluginV2',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=NEUTRON_CONF_DIR)],
            'services': [],
            'packages': [],
            'server_packages': ['neutron-server',
                                'neutron-plugin-nicira'],
            'server_services': ['neutron-server']
        },
        'nsx': {
            'config': '/etc/neutron/plugins/vmware/nsx.ini',
            'driver': 'vmware',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=NEUTRON_CONF_DIR)],
            'services': [],
            'packages': [],
            'server_packages': ['neutron-server',
                                'neutron-plugin-vmware'],
            'server_services': ['neutron-server']
        },
        'n1kv': {
            'config': '/etc/neutron/plugins/cisco/cisco_plugins.ini',
            'driver': 'neutron.plugins.cisco.network_plugin.PluginV2',
            'contexts': [
                context.SharedDBContext(user=config('neutron-database-user'),
                                        database=config('neutron-database'),
                                        relation_prefix='neutron',
                                        ssl_dir=NEUTRON_CONF_DIR)],
            'services': [],
            'packages': [['neutron-plugin-cisco']],
            'server_packages': ['neutron-server',
                                'neutron-plugin-cisco'],
            'server_services': ['neutron-server']
        }
    }
    if release >= 'icehouse':
        # NOTE: patch in ml2 plugin for icehouse onwards
        plugins['ovs']['config'] = '/etc/neutron/plugins/ml2/ml2_conf.ini'
        plugins['ovs']['driver'] = 'neutron.plugins.ml2.plugin.Ml2Plugin'
        plugins['ovs']['server_packages'] = ['neutron-server',
                                            'neutron-plugin-ml2']
        # NOTE: patch in vmware renames nvp->nsx for icehouse onwards
        plugins['nvp'] = plugins['nsx']
    return plugins
def neutron_plugin_attribute(plugin, attr, net_manager=None):
    """Look up a single attribute (e.g. 'config', 'packages') of the named
    plugin for the given (or currently configured) network manager.
    Returns None when the plugin exists but lacks the attribute; raises
    on unknown manager or unknown plugin."""
    manager = net_manager or network_manager()
    if manager == 'quantum':
        plugins = quantum_plugins()
    elif manager == 'neutron':
        plugins = neutron_plugins()
    else:
        log('Error: Network manager does not support plugins.')
        raise Exception
    try:
        _plugin = plugins[plugin]
    except KeyError:
        log('Unrecognised plugin for %s: %s' % (manager, plugin), level=ERROR)
        raise Exception
    try:
        return _plugin[attr]
    except KeyError:
        # Missing attribute is a soft failure, unlike a missing plugin.
        return None
def network_manager():
    '''
    Deals with the renaming of Quantum to Neutron in H and any situations
    that require compatability (eg, deploying H with network-manager=quantum,
    upgrading from G).
    '''
    release = os_release('nova-common')
    manager = config('network-manager').lower()
    if manager not in ['quantum', 'neutron']:
        # Non-SDN managers (e.g. flatdhcp) pass straight through.
        return manager
    if release in ['essex']:
        # E does not support neutron
        log('Neutron networking not supported in Essex.', level=ERROR)
        raise Exception
    elif release in ['folsom', 'grizzly']:
        # neutron is named quantum in F and G
        return 'quantum'
    else:
        # ensure accurate naming for all releases post-H
        # NOTE: reconstructed - this return had been severed from the
        # function by a data-corruption seam.
        return 'neutron'
<|file_name|>borrow.rs<|end_file_name|><|fim▁begin|>pub trait Reborrow {
type Target;
fn reborrow(&self) -> &Self::Target;
}
pub trait ReborrowMut: Reborrow {
fn reborrow_mut(&mut self) -> &mut Self::Target;
}
pub trait ReborrowInto<'a>: Reborrow {
fn reborrow_into(self) -> &'a Self::Target;
}
impl<'a, T> Reborrow for &'a T {
type Target = T;<|fim▁hole|> fn reborrow(&self) -> &Self::Target {
*self
}
}
impl<'a, T> Reborrow for &'a mut T {
type Target = T;
fn reborrow(&self) -> &Self::Target {
&**self
}
}
impl<'a, T> ReborrowMut for &'a mut T {
fn reborrow_mut(&mut self) -> &mut Self::Target {
*self
}
}
impl<'a, T> ReborrowInto<'a> for &'a T {
fn reborrow_into(self) -> &'a Self::Target {
self
}
}
impl<'a, T> ReborrowInto<'a> for &'a mut T {
fn reborrow_into(self) -> &'a Self::Target {
&*self
}
}<|fim▁end|> | |
<|file_name|>imp.py<|end_file_name|><|fim▁begin|># vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
from powerline.lint.selfcheck import havemarks
class WithPath(object):
	'''Context manager that temporarily prepends the given directories to
	``sys.path``, restoring the previous value on exit.'''
	def __init__(self, import_paths):
		self.import_paths = import_paths

	def __enter__(self):
		# Keep a handle on the current list object so that __exit__ can
		# restore the exact original ``sys.path``.
		self.oldpath = sys.path
		sys.path = self.import_paths + self.oldpath

	def __exit__(self, *args):
		sys.path = self.oldpath
def import_function(function_type, name, data, context, echoerr, module):
	'''Import the callable ``name`` from ``module``, searching
	``data['import_paths']`` in addition to the normal ``sys.path``.

	Emits a lint error via ``echoerr`` and returns ``None`` when the module
	cannot be imported, the attribute is missing, or the imported object is
	not callable.
	'''
	havemarks(name, module)

	# NOTE: reconstructed - the WithPath wrapper had been severed from the
	# function body by a data-corruption seam; the import must run with the
	# configured import paths prepended.
	with WithPath(data['import_paths']):
		try:
			func = getattr(__import__(str(module), fromlist=[str(name)]), str(name))
		except ImportError:
			echoerr(context='Error while checking segments (key {key})'.format(key=context.key),
			        context_mark=name.mark,
			        problem='failed to import module {0}'.format(module),
			        problem_mark=module.mark)
			return None
		except AttributeError:
			echoerr(context='Error while loading {0} function (key {key})'.format(function_type, key=context.key),
			        problem='failed to load function {0} from module {1}'.format(name, module),
			        problem_mark=name.mark)
			return None

	if not callable(func):
		echoerr(context='Error while checking segments (key {key})'.format(key=context.key),
		        context_mark=name.mark,
		        problem='imported “function” {0} from module {1} is not callable'.format(name, module),
		        problem_mark=module.mark)
		return None

	return func


def import_segment(*args, **kwargs):
	return import_function('segment', *args, **kwargs)
<|file_name|>try-now.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { BroadcastService } from '../shared/services/broadcast.service';
import { BroadcastEvent } from '../shared/models/broadcast-event';
import { TryFunctionsService } from '../shared/services/try-functions.service';
import { TranslateService } from '@ngx-translate/core';
import { PortalResources } from '../shared/models/portal-resources';
import { GlobalStateService } from '../shared/services/global-state.service';
import { AiService } from '../shared/services/ai.service';
@Component({
selector: 'try-now',
templateUrl: './try-now.component.html',
styleUrls: ['./try-now.component.scss']
})
export class TryNowComponent implements OnInit {
private endTime: Date;
public freeTrialUri: string;
public timerText: string;
public discoverMoreUri: string;
    constructor(private _functionsService: TryFunctionsService,
        private _broadcastService: BroadcastService,
        private _globalStateService: GlobalStateService,
        private _translateService: TranslateService,
        private _aiService: AiService) {
        // TODO: [fashaikh] Add cookie referer details like in try
        // Locale-aware marketing links rendered next to the trial timer.
        this.freeTrialUri = `${window.location.protocol}//azure.microsoft.com/${window.navigator.language}/free`;
        this.discoverMoreUri = `${window.location.protocol}//azure.microsoft.com/${window.navigator.language}/services/functions/`;
        // Re-arms itself once per second while trial time remains; on expiry
        // it flips global state and broadcasts TrialExpired instead.
        const callBack = () => {
            window.setTimeout(() => {
                let mm;
                const now = new Date();
                const msLeft = this.endTime.getTime() - now.getTime();
                if (this.endTime >= now) {
                    // http://stackoverflow.com/questions/1787939/check-time-difference-in-javascript
                    mm = Math.floor(msLeft / 1000 / 60);
                    if (mm < 1) {
                        this.timerText = (this._translateService.instant(PortalResources.tryNow_lessThanOneMinute));
                    } else {
                        this.timerText = this.pad(mm, 2) + ' ' + this._translateService.instant(PortalResources.tryNow_minutes);
                    }
                    window.setTimeout(callBack, 1000);
                } else {
                    this.timerText = this._translateService.instant(PortalResources.tryNow_trialExpired);
                    this._globalStateService.TrialExpired = true;
                    this._broadcastService.broadcast(BroadcastEvent.TrialExpired);
                }
            });
        };
        // Compute the absolute end time from the server-reported seconds
        // remaining, then start the countdown loop.
        this._functionsService.getTrialResource()
            .subscribe((resource) => {
                this.endTime = new Date();
                this.endTime.setSeconds(this.endTime.getSeconds() + resource.timeLeft);
                callBack();
            });
    }
<|fim▁hole|> pad(n, width) {
const z = '0';
n = n + '';
return n.length >= width ? n : new Array(width - n.length + 1).join(z) + n;
}
    // Sends a link-click telemetry event tagged with the current trial
    // state; telemetry failures are themselves reported and never surface
    // to the UI.
    trackLinkClick(buttonName: string) {
        if (buttonName) {
            try {
                this._aiService.trackLinkClick(buttonName, this._globalStateService.TrialExpired.toString());
            } catch (error) {
                this._aiService.trackException(error, 'trackLinkClick');
            }
        }
    }
}<|fim▁end|> | ngOnInit() { }
// http://stackoverflow.com/questions/10073699/pad-a-number-with-leading-zeros-in-javascript |
<|file_name|>Equipment.js<|end_file_name|><|fim▁begin|>Equipment=null;
EquipmentPanel = Popup.extend({
getIdentifier:function(){
return "Equipment";
},
getLayoutObject:function(){
var equipment_panel={};
equipment_panel["head"] = {
texture:"GUI/defaultitem.png",
position:cc.p(88,162),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["legs"] = {
texture:"GUI/defaultitem.png",
position:cc.p(88,82),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["feet"] = {
texture:"GUI/defaultitem.png",
position:cc.p(88,40),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["body"] = {
texture:"GUI/defaultitem.png",
position:cc.p(88,122),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["lArm"] = {
texture:"GUI/defaultitem.png",
position:cc.p(48,122),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["rArm"] = {
texture:"GUI/defaultitem.png",
position:cc.p(128,122),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
equipment_panel["mod"] = {
texture:"GUI/defaultitem.png",
position:cc.p(8,162),
size: cc.size(32,32),
anchorPoint:cc.p(0,1),
};
return {
"panels":{
position:cc.p(100,300),
children:{
"main_panel":{
anchorPoint:cc.p(0,0),
size: cc.size(168,168),
texture:"GUI/equipment.png",
children: equipment_panel,
},
"control_panel":{
anchorPoint:cc.p(0,0),
position: cc.p(0,168),
size: cc.size(168,32),
children:{
"header":{
label:settingsData["Equipment Header"],
fontSize:20,
anchorPoint:cc.p(0,0.5),
position:cc.p(8,16),
},
"exitBtn":{
position: cc.p(144,6),
size: cc.size(20,20),
anchorPoint:cc.p(0,0),
texture:"GUI/close.png"
}
}
},
"item_name":{
position:cc.p(0,0),<|fim▁hole|> "content":{
label:"",
fontSize:14,
color:cc.c3b(0,0,0),
anchorPoint:cc.p(0.5,0.5),
position:cc.p(32,8),
}
}
},
}
}
};
},
updateTileGrid:function(){
var equipmentList = PlayersController.getYou().getEquipment();
this.panels["main_panel"]["head"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["body"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["legs"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["feet"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["lArm"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["rArm"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
this.panels["main_panel"]["mod"].setTexture(cc.TextureCache.getInstance().addImage("GUI/defaultitem.png"));
for(var i in equipmentList){
if(equipmentList[i]){
var item = ObjectLists.getItemList()[equipmentList[i]["number"]];
for(var j in tileTextureList){
if(tileTextureList[j]["name"]==item["sprite"]["texture"]){
var texture=tileTextureList[j]["texture"];
}
}
if(this.panels["main_panel"][i]){
this.panels["main_panel"][i].setAnchorPoint(0,1);
this.panels["main_panel"][i].setTexture(texture);
this.panels["main_panel"][i].setTextureRect(cc.rect(item["sprite"]["position"].x*32, (item["sprite"]["position"].y*32),32,32));
}
}
}
},
listItemSelected:function(val){
switch(val){
case 0:PlayersController.getYou().dequipItem(this.delegate.itemContext);break;
case 1: PlayersController.getYou().dropItem(this.delegate.itemContext,"equipped"); break;
}
},
onTouchBegan:function(touch){
if(this._super(touch)){
return true;
}
this.prevMovPos=null;
var truePos = this.panels["main_panel"].convertToNodeSpace(touch._point);
var equipmentList = PlayersController.getYou().getEquipment();
for(var i in equipmentList){
if(equipmentList[i]){
var item = ObjectLists.getItemList()[equipmentList[i]["number"]];
var reducer= 32;
if(isTouching(this.panels["main_panel"][i],cc.p(truePos.x,truePos.y+reducer))){
this.itemContext=i;
this.panels["item_name"].setVisible(false)
var firstItem = settingsData["Item Dropdown Unequip"]+"";
firstItem = firstItem.replace("<ITEM>",(item["name"]));
var secondItem = settingsData["Item Dropdown Drop"]+"";
secondItem = secondItem.replace("<ITEM>",(item["name"]));
this.addChild(DropDownList.createWithListAndPosition(this,this.listItemSelected,[firstItem,secondItem],touch._point));
return true;
}
}
}
},
onMouseMoved:function(event){
var pos = event.getLocation();
var truePos = this.panels["main_panel"].convertToNodeSpace(pos);
this.panels["item_name"].setVisible(false);
var equipmentList = PlayersController.getYou().getEquipment();
for(var i in equipmentList){
if(equipmentList[i]){
var item = ObjectLists.getItemList()[equipmentList[i]["number"]];
var reducer= 32;
if(isTouching(this.panels["main_panel"][i],cc.p(truePos.x,truePos.y+reducer))){
this.panels["item_name"]["content"].setString(item["name"]);
this.panels["item_name"].setVisible(true);
this.panels["item_name"].setContentSize(this.panels["item_name"]["content"].getContentSize());
this.panels["item_name"]["content"].setPositionX(this.panels["item_name"]["content"].getContentSize().width/2);
this.panels["item_name"].setPosition(cc.p(this.panels["main_panel"][i].getPositionX()-(this.panels["item_name"]["content"].getContentSize().width/2)+16,this.panels["main_panel"][i].getPositionY()));
return true;
}
}
}
},
scheduledupdateTileGrid:function(){
if(this.panels["main_panel"]["head"].getTexture()){
this.unschedule(this.scheduledupdateTileGrid);
this.updateTileGrid();
}
},
didBecomeActive:function(){
this._super();
if(!this.panels["main_panel"]["head"].getTexture()){
this.schedule(this.scheduledupdateTileGrid);
}else{
this.updateTileGrid();
}
},
});<|fim▁end|> | color:cc.c4b(200,200,200,200),
size:cc.size(64,16),
visible:false,
children:{ |
<|file_name|>oscilloscope.py<|end_file_name|><|fim▁begin|>import wave
import struct
BPM = 320.0
BEAT_LENGTH = 60.0 / BPM
START_BEAT = 80.0 # beat 80 is the 'huh' of the first 'uuh-huh'
START_TIME = START_BEAT * BEAT_LENGTH
BEAT_COUNT = 16.0
CLIP_LENGTH = BEAT_LENGTH * BEAT_COUNT
FRAME_RATE = 50.0
FRAME_LENGTH = 1 / FRAME_RATE
FRAME_COUNT = int(FRAME_RATE * CLIP_LENGTH)<|fim▁hole|>PICK_RANGE = 0.008 # the duration of the span over which we pick our 32 samples each frame
PICK_INTERVAL = PICK_RANGE / 32
wav = wave.open('clips/kisskill_wipmix2_leadin.wav')
SAMPLE_RATE = wav.getframerate()
for frame in range(0, FRAME_COUNT):
frame_offset = START_TIME + (frame * FRAME_LENGTH)
for pick in range(0, 32):
pick_offset = frame_offset + (pick * PICK_INTERVAL)
wav.setpos(int(pick_offset * SAMPLE_RATE))
v1, v2 = struct.unpack('<hh', wav.readframes(1))
v = (v1 + v2) / 2
print "\tdb %d" % ((v / 5000) + 12)
print<|fim▁end|> | |
<|file_name|>keyboard_controller.py<|end_file_name|><|fim▁begin|>import pygame
import rospy
import time
from std_msgs.msg import Float64
from std_msgs.msg import Float64MultiArray
#pygame setup
pygame.init()
pygame.display.set_mode([100,100])
delay = 100
interval = 50
pygame.key.set_repeat(delay, interval)
#really this should be passed in or something but for now if you want to change the name just do it here
robot_namespace = "qubo/"
effort = 50
num_thrusters = 8
rospy.init_node('keyboard_node', anonymous=False)
#rospy spins all these up in their own thread, no need to call spin()
roll_pub = rospy.Publisher(robot_namespace + "roll_cmd" , Float64, queue_size = 10 )
pitch_pub = rospy.Publisher(robot_namespace + "pitch_cmd" , Float64, queue_size = 10 )
yaw_pub = rospy.Publisher(robot_namespace + "yaw_cmd" , Float64, queue_size = 10 )
depth_pub = rospy.Publisher(robot_namespace + "depth_cmd" , Float64, queue_size = 10 )
surge_pub = rospy.Publisher(robot_namespace + "surge_cmd" , Float64, queue_size = 10 )
sway_pub = rospy.Publisher(robot_namespace + "sway_cmd" , Float64, queue_size = 10 )
thruster_pub = rospy.Publisher(robot_namespace + "thruster_cmds" , Float64MultiArray, queue_size = 10)
thruster_msg = Float64MultiArray()
pygame.key.set_repeat(10,10)
while(True):
<|fim▁hole|> print event.key
keys_pressed = pygame.key.get_pressed()
sway = surge = yaw = depth = 0
thruster_msg.data = [0]*num_thrusters
if keys_pressed[pygame.K_a]:
sway_pub.publish(effort)
elif keys_pressed[pygame.K_d]:
sway_pub.publish(-effort)
if keys_pressed[pygame.K_w]:
surge_pub.publish(effort)
print "asdasd"
elif keys_pressed[pygame.K_s]:
surge_pub.publish(-effort)
if keys_pressed[pygame.K_q]:
yaw_pub.publish(effort)
elif keys_pressed[pygame.K_e]:
yaw_pub.publish(-effort)
if keys_pressed[pygame.K_r]:
depth_pub.publish(effort)
elif keys_pressed[pygame.K_f]:
depth_pub.publish(-effort)
if keys_pressed[pygame.K_MINUS]:
sign = -1
else:
sign = 1
#this only works because pygame.k_X is a number and k_0 - k_8 are contiguous
for i in range(0, 8):
if keys_pressed[pygame.K_0 + i]:
thruster_msg.data[i] = (effort*sign)
print thruster_msg.data
thruster_pub.publish(thruster_msg)
time.sleep(.05)<|fim▁end|> | for event in pygame.event.get():
if event.type == pygame.KEYDOWN: |
<|file_name|>filter.test.ts<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { CHART_LIST } from './chart_list.helper';
describe('chart card view filters', () => {
beforeEach(() => {
cy.login();
cy.server();
cy.visit(CHART_LIST);
cy.get('[data-test="card-view"]').click();
});
it('should filter by owners correctly', () => {
// filter by owners
cy.get('.Select__control').first().click();
cy.get('.Select__menu').contains('alpha user').click();
cy.get('.ant-card').should('not.exist');
cy.get('.Select__control').first().click();
cy.get('.Select__menu').contains('gamma user').click();
cy.get('.ant-card').should('not.exist');
});
it('should filter by created by correctly', () => {
// filter by created by
cy.get('.Select__control').eq(1).click();
cy.get('.Select__menu').contains('alpha user').click();
cy.get('.ant-card').should('not.exist');
cy.get('.Select__control').eq(1).click();
cy.get('.Select__menu').contains('gamma user').click();
cy.get('.ant-card').should('not.exist');
});
it('should filter by viz type correctly', () => {
// filter by viz type
cy.get('.Select__control').eq(2).click();
cy.get('.Select__menu').contains('area').click({ timeout: 5000 });
cy.get('.ant-card').its('length').should('be.gt', 0);
cy.get('.ant-card').contains("World's Pop Growth").should('exist');
cy.get('.Select__control').eq(2).click();
cy.get('.Select__control').eq(2).type('world_map{enter}');
cy.get('.ant-card').should('have.length', 1);
cy.get('.ant-card').contains('% Rural').should('exist');
});
it('should filter by datasource correctly', () => {
// filter by datasource
cy.get('.Select__control').eq(3).click();
cy.get('.Select__menu').contains('unicode_test').click();
cy.get('.ant-card').should('have.length', 1);
cy.get('.ant-card').contains('Unicode Cloud').should('exist');
cy.get('.Select__control').eq(3).click();
cy.get('.Select__control').eq(3).type('energy_usage{enter}{enter}');
cy.get('.ant-card').its('length').should('be.gt', 0);
});
});
describe('chart list view filters', () => {
beforeEach(() => {
cy.login();
cy.server();
cy.visit(CHART_LIST);
cy.get('[data-test="list-view"]').click();
});
it('should filter by owners correctly', () => {
// filter by owners
cy.get('.Select__control').first().click();
cy.get('.Select__menu').contains('alpha user').click();
cy.get('.table-row').should('not.exist');
cy.get('.Select__control').first().click();
cy.get('.Select__menu').contains('gamma user').click();
cy.get('.table-row').should('not.exist');
});
it('should filter by created by correctly', () => {
// filter by created by
cy.get('.Select__control').eq(1).click();
cy.get('.Select__menu').contains('alpha user').click();
cy.get('.table-row').should('not.exist');
cy.get('.Select__control').eq(1).click();
cy.get('.Select__menu').contains('gamma user').click();
cy.get('.table-row').should('not.exist');
});
it('should filter by viz type correctly', () => {
// filter by viz type
cy.get('.Select__control').eq(2).click();
cy.get('.Select__menu').contains('area').click({ timeout: 5000 });
cy.get('.table-row').its('length').should('be.gt', 0);
cy.get('.table-row').contains("World's Pop Growth").should('exist');
cy.get('.Select__control').eq(2).click();
cy.get('.Select__control').eq(2).type('world_map{enter}');
cy.get('.table-row').should('have.length', 1);
cy.get('.table-row').contains('% Rural').should('exist');
});
it('should filter by datasource correctly', () => {
// filter by datasource
cy.get('.Select__control').eq(3).click();
cy.get('.Select__menu').contains('unicode_test').click();
cy.get('.table-row').should('have.length', 1);
cy.get('.table-row').contains('Unicode Cloud').should('exist');
cy.get('.Select__control').eq(3).click();
cy.get('.Select__control').eq(3).type('energy_usage{enter}{enter}');
cy.get('.table-row').its('length').should('be.gt', 0);
});
});<|fim▁end|> | *
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an |
<|file_name|>circle_network_0.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import division
import matplotlib.pyplot as plt
import random
import numpy as np
class Player:
def __init__(self, state, q):
self.state = state
self.gain_matrix = np.array([[q, 0], [0, 1-q]])
def action_distribution(self, N, m, players):
adj_matrix = np.zeros((N, N)) #両隣と対戦する様にしました。
adj_matrix[N-1,0], adj_matrix[N-1,N-2] = 1, 1
for i in range(N-1):
adj_matrix[i, i-1], adj_matrix[i, i+1] = 1, 1
current_state = [player.state for player in players]
num_op_state_1 = np.dot(adj_matrix, current_state)
position = players.index(self)
act_dist = [(m-num_op_state_1[position])/m, num_op_state_1[position]/m]
return act_dist
def play(self):
act_dist = self.action_distribution(N, m, players)
<|fim▁hole|> elif payoff_vec[0] == payoff_vec[1]:
action = random.choice([0, 1])
else:
action = 1
return action
def update_player(self):
action = self.play()
self.state = action
def count_action(players):
actions = []
for player in players:
actions.append(player.state)
return actions.count(1)
num_0 = 15
num_1 = 1
N = num_0 + num_1
m = 2 #num_opponent
q = 1/3
T = 200
players = [Player(0, q) for i in range(num_0)]
players_1 = [Player(1, q) for i in range(num_1)]
for player_1 in players_1:
players.insert(random.randint(0, num_0), player_1)
transition = []
for t in range(T):
transition.append(count_action(players))
#print [player.action_distribution(N, m, players) for player in players]
i = random.randint(0, N-1)
players[i].update_player()
plt.plot(transition, label="action transition")
plt.legend()
plt.show()<|fim▁end|> | payoff_vec = np.dot(self.gain_matrix, act_dist)
if payoff_vec[0] > payoff_vec[1]:
action = 0 |
<|file_name|>getUrl.js<|end_file_name|><|fim▁begin|>"use strict"
/**
* Construct URL for Magento Admin.
*
* @param {string} path path to Magento page (absolute path started with '/', alias - w/o)
*/
var result = function getUrl(path) {
/* shortcuts for globals */<|fim▁hole|> var casper = global.casper;
var mobi = global.mobi;
var root = mobi.opts.navig.mage;
/* functionality */
casper.echo(" construct Magento Admin URL for path '" + path + "'.", "PARAMETER");
var isAlias = path.indexOf('/') === -1; // absolute path contains at least one '/' char
var result, url;
if (isAlias) {
/* compose URI based on "route.to.page" */
var route = mobi.objPath.get(root.admin, path);
url = route.self;
} else {
/* absolute path is used */
url = path
}
/* "http://mage2.local.host.com" + "/admin" + "url" */
result = root.self + root.admin.self + url;
casper.echo(" result URL: " + result, "PARAMETER");
return result;
}
module.exports = result;<|fim▁end|> | |
<|file_name|>gdebug.py<|end_file_name|><|fim▁begin|>import time
import traceback
from .optionparser import args
debug_level=args.debug or 0
debug_file=args.debug_file
timestamp=args.time
if debug_file:
import re
debug_file = re.compile(debug_file)
if debug_level > 0: print('DEBUG_LEVEL=',debug_level)
if debug_file: print('DEBUG_FILE=',debug_file)
<|fim▁hole|> stack = traceback.extract_stack()
if len(stack) >= 2:
caller=stack[-2]
finame=caller[0]
line = caller[1]
else:
finame = " ".join(stack)
line = ""
if args.debug_file:
if debug_file.search(finame):
print("DEBUG: ",ts,"%s: %s"%(finame,line),message)
else:
print("DEBUG: ",ts,"%s: %s"%(finame,line),message)
timers = {}
class TimeAction:
def __init__ (self, name, level=10):
self.level = level
if level <= debug_level:
self.name = name
self.start = time.time()
def end (self):
if self.level <= debug_level:
end = time.time()
t=end-self.start
# grab our location
stack=traceback.extract_stack()
if len(stack)>2:
caller=stack[-2]
finame=caller[0]
line = caller[1]
else:
finame = " ".join(stack)
line = ""
if not args.debug_file or debug_file.search(finame):
print("DEBUG: %s TOOK %s SECONDS"%(self.name,t))
if self.name not in timers: timers[self.name]=[t]
else: timers[self.name].append(t)
def print_timer_info ():
for n,times in list(timers.items()):
print("%s:"%n, end=' ')
for t in times: print("%.02e"%t,",", end=' ')
print("")
if __name__ == '__main__':
t=TimeAction('this is a test',0)
debug('This is a test',0)
debug('This is another test',0)
t.end()
print_timer_info()<|fim▁end|> | def debug (message, level=10):
if timestamp: ts= '%s:'%time.time()
else: ts = ''
if level <= debug_level: |
<|file_name|>profiler.py<|end_file_name|><|fim▁begin|>import os
import sys
def start():
# enable profling by adding to local conf.yaml "with_internal_profiling: True"
# required: "pip install GreenletProfiler"
# Provides function stats in formats 'pstat', 'callgrind', 'ystat'
# stats are saved at "/var/lib/tendrl/profiling/$NS.publisher_id/last_run_func_stat.$stat_type"
# eg: tendrl-node-agent : /var/lib/tendrl/profiling/node_agent/last_run_func_stat.pstat
import atexit
import GreenletProfiler
GreenletProfiler.set_clock_type('cpu')
GreenletProfiler.start()
sys.stdout.write("\nStarted Tendrl profiling...")
@atexit.register
def finish():
GreenletProfiler.stop()
sys.stdout.write("\nStopped Tendrl profiling...")
stats = GreenletProfiler.get_func_stats()
_base_path = "/var/lib/tendrl/profiling/{0}/".format(NS.publisher_id)
if not os.path.exists(_base_path):
os.makedirs(_base_path)
for stat_type in ['pstat', 'callgrind', 'ystat']:
_stat_file = "last_run_func_stat.{0}".format(stat_type)
_stat_path = os.path.join(_base_path, _stat_file)<|fim▁hole|><|fim▁end|> | stats.save(_stat_path, type=stat_type)
sys.stdout.write("\nSaved Tendrl profiling stats at %s" % _base_path) |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
setup(name='gym_square',
version='0.0.1',
author='Guillaume de Chambrier',
author_email='[email protected]',
description='A simple square world environment for openai/gym',
packages=find_packages(),
url='https://github.com/gpldecha/gym-square',
license='MIT',
install_requires=['gym']
)<|fim▁end|> | from setuptools import setup
from setuptools import find_packages |
<|file_name|>hotels.controller.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular
.module('app.hotels')
.controller('HotelsController', HotelsController);
/* @ngInject */
function HotelsController($q, hotelService, logger) {
/*jshint validthis: true */
var vm = this;
vm.title = 'Hotels';
vm.categories = [];
<|fim▁hole|>
function activate() {
var promises = [
/*
* We get the first set of categories
* In a real application we would need to perform paging here
*/
getHotelCategories({from: 0, to:3}),
];
return $q.all(promises).then(function() {
logger.info('Activated Hotels View');
});
}
function getHotelCategories(indices) {
return hotelService.getHotelCategories(indices)
.subscribe(categories => {
vm.categories = categories;
return vm.categories;
});
}
}
})();<|fim▁end|> | activate(); |
<|file_name|>client_sync.cc<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <cassert>
#include <chrono>
#include <memory>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
#include <gflags/gflags.h>
#include <grpc++/channel.h>
#include <grpc++/client_context.h>
#include <grpc++/server.h><|fim▁hole|>#include <grpc/support/alloc.h>
#include <grpc/support/histogram.h>
#include <grpc/support/host_port.h>
#include <grpc/support/log.h>
#include <grpc/support/time.h>
#include <gtest/gtest.h>
#include "src/core/lib/profiling/timers.h"
#include "src/proto/grpc/testing/services.grpc.pb.h"
#include "test/cpp/qps/client.h"
#include "test/cpp/qps/histogram.h"
#include "test/cpp/qps/interarrival.h"
#include "test/cpp/qps/usage_timer.h"
namespace grpc {
namespace testing {
static std::unique_ptr<BenchmarkService::Stub> BenchmarkStubCreator(
std::shared_ptr<Channel> ch) {
return BenchmarkService::NewStub(ch);
}
class SynchronousClient
: public ClientImpl<BenchmarkService::Stub, SimpleRequest> {
public:
SynchronousClient(const ClientConfig& config)
: ClientImpl<BenchmarkService::Stub, SimpleRequest>(
config, BenchmarkStubCreator) {
num_threads_ =
config.outstanding_rpcs_per_channel() * config.client_channels();
responses_.resize(num_threads_);
SetupLoadTest(config, num_threads_);
}
virtual ~SynchronousClient(){};
protected:
void WaitToIssue(int thread_idx) {
if (!closed_loop_) {
gpr_sleep_until(NextIssueTime(thread_idx));
}
}
size_t num_threads_;
std::vector<SimpleResponse> responses_;
};
class SynchronousUnaryClient GRPC_FINAL : public SynchronousClient {
public:
SynchronousUnaryClient(const ClientConfig& config)
: SynchronousClient(config) {
StartThreads(num_threads_);
}
~SynchronousUnaryClient() { EndThreads(); }
bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
WaitToIssue(thread_idx);
auto* stub = channels_[thread_idx % channels_.size()].get_stub();
double start = UsageTimer::Now();
GPR_TIMER_SCOPE("SynchronousUnaryClient::ThreadFunc", 0);
grpc::ClientContext context;
grpc::Status s =
stub->UnaryCall(&context, request_, &responses_[thread_idx]);
histogram->Add((UsageTimer::Now() - start) * 1e9);
return s.ok();
}
};
class SynchronousStreamingClient GRPC_FINAL : public SynchronousClient {
public:
SynchronousStreamingClient(const ClientConfig& config)
: SynchronousClient(config) {
context_ = new grpc::ClientContext[num_threads_];
stream_ = new std::unique_ptr<
grpc::ClientReaderWriter<SimpleRequest, SimpleResponse>>[num_threads_];
for (size_t thread_idx = 0; thread_idx < num_threads_; thread_idx++) {
auto* stub = channels_[thread_idx % channels_.size()].get_stub();
stream_[thread_idx] = stub->StreamingCall(&context_[thread_idx]);
}
StartThreads(num_threads_);
}
~SynchronousStreamingClient() {
EndThreads();
for (auto stream = &stream_[0]; stream != &stream_[num_threads_];
stream++) {
if (*stream) {
(*stream)->WritesDone();
EXPECT_TRUE((*stream)->Finish().ok());
}
}
delete[] stream_;
delete[] context_;
}
bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
WaitToIssue(thread_idx);
GPR_TIMER_SCOPE("SynchronousStreamingClient::ThreadFunc", 0);
double start = UsageTimer::Now();
if (stream_[thread_idx]->Write(request_) &&
stream_[thread_idx]->Read(&responses_[thread_idx])) {
histogram->Add((UsageTimer::Now() - start) * 1e9);
return true;
}
return false;
}
private:
// These are both conceptually std::vector but cannot be for old compilers
// that expect contained classes to support copy constructors
grpc::ClientContext* context_;
std::unique_ptr<grpc::ClientReaderWriter<SimpleRequest, SimpleResponse>>*
stream_;
};
std::unique_ptr<Client> CreateSynchronousUnaryClient(
const ClientConfig& config) {
return std::unique_ptr<Client>(new SynchronousUnaryClient(config));
}
std::unique_ptr<Client> CreateSynchronousStreamingClient(
const ClientConfig& config) {
return std::unique_ptr<Client>(new SynchronousStreamingClient(config));
}
} // namespace testing
} // namespace grpc<|fim▁end|> | #include <grpc++/server_builder.h>
#include <grpc/grpc.h> |
<|file_name|>addImagefile.go<|end_file_name|><|fim▁begin|>package main
import (
"errors"
"fmt"
"os"
"github.com/Symantec/Dominator/imageserver/client"
"github.com/Symantec/Dominator/lib/filesystem/util"
"github.com/Symantec/Dominator/lib/image"
objectclient "github.com/Symantec/Dominator/lib/objectserver/client"
"github.com/Symantec/Dominator/lib/srpc"
)
func addImagefileSubcommand(args []string) {
imageSClient, objectClient := getClients()
err := addImagefile(imageSClient, objectClient, args[0], args[1], args[2],<|fim▁hole|> fmt.Fprintf(os.Stderr, "Error adding image: \"%s\": %s\n", args[0], err)
os.Exit(1)
}
os.Exit(0)
}
func addImagefile(imageSClient *srpc.Client,
objectClient *objectclient.ObjectClient,
name, imageFilename, filterFilename, triggersFilename string) error {
imageExists, err := client.CheckImage(imageSClient, name)
if err != nil {
return errors.New("error checking for image existance: " + err.Error())
}
if imageExists {
return errors.New("image exists")
}
newImage := new(image.Image)
if err := loadImageFiles(newImage, objectClient, filterFilename,
triggersFilename); err != nil {
return err
}
newImage.FileSystem, err = buildImage(imageSClient, newImage.Filter,
imageFilename)
if err != nil {
return errors.New("error building image: " + err.Error())
}
if err := spliceComputedFiles(newImage.FileSystem); err != nil {
return err
}
if err := copyMtimes(imageSClient, newImage, *copyMtimesFrom); err != nil {
return err
}
return addImage(imageSClient, name, newImage)
}
func copyMtimes(imageSClient *srpc.Client, img *image.Image,
oldImageName string) error {
if oldImageName == "" {
return nil
}
fs := img.FileSystem
oldFs, err := getFsOfImage(imageSClient, oldImageName)
if err != nil {
return err
}
util.CopyMtimes(oldFs, fs)
return nil
}<|fim▁end|> | args[3])
if err != nil { |
<|file_name|>dir_d5c97c2750cda5d5e748b76e78cc7d4b.js<|end_file_name|><|fim▁begin|>var dir_d5c97c2750cda5d5e748b76e78cc7d4b =
[
[ "Masking.cu", "_masking_8cu.html", "_masking_8cu" ]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>PlayerFetchService.java<|end_file_name|><|fim▁begin|>package fi.ozzi.tapsudraft.service;
import com.google.common.collect.ImmutableMap;
import fi.ozzi.tapsudraft.model.Player;
import fi.ozzi.tapsudraft.model.Position;
import fi.ozzi.tapsudraft.repository.PlayerRepository;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClients;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class PlayerFetchService {
private static final Logger LOG = LoggerFactory.getLogger(PlayerFetchService.class);
@Value("${service.playerfetch.baseUrl}")
private String baseUrl;
@NonNull
private PlayerRepository playerRepository;
@Scheduled(cron = "${service.playerfetch.cron}")
public void fetchPlayers() throws IOException {
Map<String, String> params = ImmutableMap.<String, String>builder()
.put("js", "1")
.put("rand", Float.toString(new Random().nextFloat()))
.put("player_team", "all")
.put("player_value", "all")
.put("type", "player_search")
.put("phase", "0")
.build();
String url = buildUrlFromMap(baseUrl, params);
LOG.debug(url);
HttpClient httpClient = HttpClients.createDefault();
HttpGet httpGet = new HttpGet(url);
HttpResponse response = httpClient.execute(httpGet);
if (response.getStatusLine().getStatusCode() != 200) {
LOG.debug("HTTP resp " + response.getStatusLine().getStatusCode());
}
Document doc = Jsoup.parse(response.getEntity().getContent(), "UTF-8", url);
Elements positionGroups = doc.getElementsByTag("tbody");
for (Element group : positionGroups) {
Elements rows = group.getElementsByTag("tr");
for (Element row : rows) {
Element nameData = row.select("td.name_data").first();
String name = nameData.text().trim();
String team = nameData.select("a.logo").first().attr("title");
team = team.substring(team.indexOf("(")+1, team.indexOf(")"));
String position = nameData.select("input[name=player_position]").first().val();
String playerUid = nameData.select("input[name=player_id]").first().val();<|fim▁hole|> Player.builder()
.name(name)
.uid(Long.parseLong(playerUid))
.position(Position.fromCharacter(position))
.build());
}
String price = row.select("td[id~=.*_player_value]").first().text();
String points = row.select("td[title=IS Liigapörssi-pisteet]").first().text();
}
}
}
private static String buildUrlFromMap(String baseUrl, Map<String, String> params) {
String queryString = params.keySet().stream()
.map(key -> {
try {
return key + "=" + URLEncoder.encode(params.get(key), "UTF-8");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
return "Error: could not URL-encode value";
}).collect(Collectors.joining("&"));
return baseUrl + "?" + queryString;
}
}<|fim▁end|> |
Player player = playerRepository.findByUid(Long.parseLong(playerUid));
if (player == null) {
playerRepository.save( |
<|file_name|>htmlfieldsetelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding;
use dom::bindings::codegen::Bindings::HTMLFieldSetElementBinding::HTMLFieldSetElementMethods;
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{MutNullableJS, Root};
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmlformelement::{FormControl, HTMLFormElement};
use dom::htmllegendelement::HTMLLegendElement;
use dom::node::{Node, window_from_node};
use dom::validitystate::ValidityState;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use std::default::Default;
use style::element_state::*;
#[dom_struct]
pub struct HTMLFieldSetElement {
htmlelement: HTMLElement,
form_owner: MutNullableJS<HTMLFormElement>,
}
impl HTMLFieldSetElement {
fn new_inherited(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> HTMLFieldSetElement {
HTMLFieldSetElement {
htmlelement:
HTMLElement::new_inherited_with_state(IN_ENABLED_STATE,
local_name, prefix, document),
form_owner: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> Root<HTMLFieldSetElement> {
Node::reflect_node(box HTMLFieldSetElement::new_inherited(local_name, prefix, document),
document,
HTMLFieldSetElementBinding::Wrap)
}
}
impl HTMLFieldSetElementMethods for HTMLFieldSetElement {
// https://html.spec.whatwg.org/multipage/#dom-fieldset-elements
fn Elements(&self) -> Root<HTMLCollection> {
#[derive(JSTraceable, HeapSizeOf)]
struct ElementsFilter;
impl CollectionFilter for ElementsFilter {
fn filter<'a>(&self, elem: &'a Element, _root: &'a Node) -> bool {
elem.downcast::<HTMLElement>()
.map_or(false, HTMLElement::is_listed_element)
}
}
let filter = box ElementsFilter;
let window = window_from_node(self);
HTMLCollection::create(&window, self.upcast(), filter)
}
// https://html.spec.whatwg.org/multipage/#dom-cva-validity
fn Validity(&self) -> Root<ValidityState> {
let window = window_from_node(self);
ValidityState::new(&window, self.upcast())
}
// https://html.spec.whatwg.org/multipage/#dom-fieldset-disabled
make_bool_getter!(Disabled, "disabled");
<|fim▁hole|> make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fae-form
fn GetForm(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner()
}
}
impl VirtualMethods for HTMLFieldSetElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&local_name!("disabled") => {
let disabled_state = match mutation {
AttributeMutation::Set(None) => true,
AttributeMutation::Set(Some(_)) => {
// Fieldset was already disabled before.
return;
},
AttributeMutation::Removed => false,
};
let node = self.upcast::<Node>();
let el = self.upcast::<Element>();
el.set_disabled_state(disabled_state);
el.set_enabled_state(!disabled_state);
let mut found_legend = false;
let children = node.children().filter(|node| {
if found_legend {
true
} else if node.is::<HTMLLegendElement>() {
found_legend = true;
false
} else {
true
}
});
let fields = children.flat_map(|child| {
child.traverse_preorder().filter(|descendant| {
match descendant.type_id() {
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLButtonElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(
ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLTextAreaElement)) => {
true
},
_ => false,
}
})
});
if disabled_state {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.set_disabled_state(true);
el.set_enabled_state(false);
}
} else {
for field in fields {
let el = field.downcast::<Element>().unwrap();
el.check_disabled_attribute();
el.check_ancestors_disabled_state_for_form_control();
}
}
},
&local_name!("form") => {
self.form_attribute_mutated(mutation);
},
_ => {},
}
}
}
// Form-owner plumbing: a <fieldset> is a listed form-associated element, so
// it tracks which <form> owns it via the cached `form_owner` cell.
impl FormControl for HTMLFieldSetElement {
    fn form_owner(&self) -> Option<Root<HTMLFormElement>> {
        self.form_owner.get()
    }

    fn set_form_owner(&self, form: Option<&HTMLFormElement>) {
        self.form_owner.set(form);
    }

    fn to_element<'a>(&'a self) -> &'a Element {
        self.upcast::<Element>()
    }
}
<|file_name|>json_schema.py<|end_file_name|><|fim▁begin|># ~*~ coding: utf-8 ~*~
"""Module that contains a Marshmallow schema that generate JSON schemas.
JSON Schemas can be a pain to write by hand. For example, the product
requirements change, thus your schema changes. If you are maintaining your
schemas by hand, you have to go through all of them and update them, or, even
worse, you just don't maintain them. With this class, you should never need to
hand write a JSON Schema again. Just pass your schema to it and it'll generate
it for you.
Example:
This module is super easy to use. All you need to do is pass a schema or
a Python path to a schema and this library will do the rest for you!
.. code-block:: python
# This is the schema we want to generate the schema for.
class UserSchema(Schema):
first_name = fields.String(**STR_REQUIRED)
last_name = fields.String(**STR_REQUIRED)
phone = PhoneNumberField(**REQUIRED)
company_id = ForeignKeyField(**REQUIRED)
joined = PendulumField(format='iso', **REQUIRED)
last_login = ArrowField(allow_none=True, format='iso')
class Meta(object):
# This will dictate the filename that this schema will be
# dumped to. If not provided, the filename will be
# UserSchema.json
json_schema_filename = 'user.json'
# You can dump the schema to a file in a folder
json_schema = FleakerJSONSchema.write_schema_to_file(
# This library doesn't care if the schema has been initialized
UserSchema,
# The folder to write this schema to
folder='docs/raml/schemas',
# The context can control certain things about how the schema will
# be dumped.
context={'dump_schema': True}
)
# Now, you can find the dumped schema in docs/raml/schemas/user.json
# You also have the end result stored in the json_schema variable
# If you'd like for fine grained control over the filename or want to
# use the file object further, a file pointer can be passed to the
# creation method.
with open('user_schema.json', 'w') as fp:
FleakerJSONSchema.write_schema_to_file(UserSchema, file_pointer=fp)
# Maybe you just want the schema in dict form. Super easy.<|fim▁hole|> 'app.schemata.user.UserSchema'
)
"""
import decimal
import json
import os.path
from inspect import isclass
from importlib import import_module
from sys import stdout
from marshmallow import Schema
from marshmallow_jsonschema import JSONSchema
from marshmallow_jsonschema.base import TYPE_MAP
from fleaker._compat import string_types
from fleaker.constants import DEFAULT_DICT, MISSING
# Update the built in TYPE_MAP to match our style better
TYPE_MAP.update({
int: {
'type': 'integer',
},
float: {
'type': 'number',
},
decimal.Decimal: {
'type': 'number',
},
})
class FleakerJSONSchema(JSONSchema):
    """Marshmallow schema that can be used to generate JSON schemas."""

    @classmethod
    def generate_json_schema(cls, schema, context=DEFAULT_DICT):
        """Generate a JSON Schema from a Marshmallow schema.

        Args:
            schema (marshmallow.Schema|str): The Marshmallow schema, or the
                Python path to one, to create the JSON schema for.

        Keyword Args:
            context (dict, optional): The Marshmallow context to be pushed to
                the schema when the JSON schema is generated.

        Returns:
            dict: The JSON schema in dictionary form.
        """
        schema = cls._get_schema(schema)

        # Generate the JSON Schema
        return cls(context=context).dump(schema).data

    @classmethod
    def write_schema_to_file(cls, schema, file_pointer=stdout,
                             folder=MISSING, context=DEFAULT_DICT):
        """Given a Marshmallow schema, create a JSON Schema for it.

        Args:
            schema (marshmallow.Schema|str): The Marshmallow schema, or the
                Python path to one, to create the JSON schema for.

        Keyword Args:
            file_pointer (file, optional): The pointer to the file to write
                this schema to. If not provided, the schema will be dumped to
                ``sys.stdout``. Ignored when ``folder`` is given.
            folder (str, optional): The folder in which to save the JSON
                schema. The name of the schema file can be optionally
                controlled by the schema's ``Meta.json_schema_filename``. If
                that attribute is not set, the class's name will be used for
                the filename. If writing the schema to a specific file is
                desired, please pass in a ``file_pointer``.
            context (dict, optional): The Marshmallow context to be pushed to
                the schema when the JSON schema is generated.

        Returns:
            dict: The JSON schema in dictionary form.
        """
        schema = cls._get_schema(schema)
        json_schema = cls.generate_json_schema(schema, context=context)

        if folder:
            schema_filename = getattr(
                schema.Meta,
                'json_schema_filename',
                '.'.join([schema.__class__.__name__, 'json'])
            )
            json_path = os.path.join(folder, schema_filename)

            # We opened this file ourselves, so close it deterministically
            # (the previous implementation leaked the handle).
            with open(json_path, 'w') as folder_fp:
                json.dump(json_schema, folder_fp, indent=2)
        else:
            json.dump(json_schema, file_pointer, indent=2)

        return json_schema

    @classmethod
    def _get_schema(cls, schema):
        """Method that will fetch a Marshmallow schema flexibly.

        Args:
            schema (marshmallow.Schema|str): Either the schema class, an
                instance of a schema, or a Python path to a schema.

        Returns:
            marshmallow.Schema: The desired schema.

        Raises:
            TypeError: This is raised if the provided object isn't
                a Marshmallow schema.
        """
        if isinstance(schema, string_types):
            schema = cls._get_object_from_python_path(schema)

        if isclass(schema):
            schema = schema()

        if not isinstance(schema, Schema):
            raise TypeError("The schema must be a path to a Marshmallow "
                            "schema or a Marshmallow schema.")

        return schema

    @staticmethod
    def _get_object_from_python_path(python_path):
        """Method that will fetch a Marshmallow schema from a path to it.

        Args:
            python_path (str): The dotted Python path to the Marshmallow
                schema, e.g. ``'app.schemata.user.UserSchema'``.

        Returns:
            marshmallow.Schema: The schema matching the provided path.
        """
        # Split "pkg.module.Class" into the module path and the object name.
        # (The old split/join dance included an always-true isinstance check.)
        module_path, _, object_class = python_path.rpartition('.')

        # Grab the object
        module = import_module(module_path)
        schema = getattr(module, object_class)

        if isclass(schema):
            schema = schema()

        return schema
# For all creation methods in this module can be loaded either by
# the instance/class of the schema or by passing a Python path to
# it, like so. |
<|file_name|>Aggregated.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/<|fim▁hole|>
import java.util.Objects;
/**
* Class for aggregated state.
*
* @param <K> the record key type.
*/
public class Aggregated<K> {
private final K key;
private final Aggregate aggregate;
/**
* Creates a new {@link Aggregated} instance.
* @param key the record key
* @param aggregate the instance of {@link Aggregate}.
*/
public Aggregated(final K key, final Aggregate aggregate) {
this.key = key;
this.aggregate = aggregate;
}
public K getKey() {
return key;
}
public Aggregate getAggregate() {
return aggregate;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Aggregated<?> that = (Aggregated<?>) o;
return Objects.equals(key, that.key) &&
Objects.equals(aggregate, that.aggregate);
}
@Override
public int hashCode() {
return Objects.hash(key, aggregate);
}
@Override
public String toString() {
return "Aggregated{" +
"key=" + key +
", aggregate=" + aggregate +
'}';
}
}<|fim▁end|> | package com.github.fhuss.kafka.streams.cep.core.state.internal; |
<|file_name|>v1.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cdn.manager import base
# Static document returned for the API root.  The shape follows the
# "JSON Home" draft format (resources keyed by link relation, with URI
# templates and hints) — presumably draft-nottingham-json-home; confirm.
JSON_HOME = {
    "resources": {
        "rel/cdn": {
            "href-template": "services{?marker,limit}",
            "href-vars": {
                "marker": "param/marker",
                "limit": "param/limit"
            },
            "hints": {
                "allow": [
                    "GET"
                ],
                "formats": {
                    "application/json": {}
                }
            }
        }
    }
}


class DefaultV1Controller(base.V1Controller):
    # v1 API home-document controller: serves the JSON_HOME constant above.

    def __init__(self, manager):
        # Store the module-level constant on the instance so handlers can
        # return it without reaching for the global.
        super(DefaultV1Controller, self).__init__(manager)
        self.JSON_HOME = JSON_HOME
<|fim▁hole|><|fim▁end|> | def get(self):
return self.JSON_HOME |
<|file_name|>vpsubusb.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;<|fim▁hole|>
// Generated encoding check: assembles `vpsubusb xmm0, xmm1, [ebx]` in 32-bit
// mode and verifies the exact VEX-encoded byte sequence.  The sibling
// vpsubusb_* functions below follow the same machine-generated pattern.
// NOTE(review): these look like `#[test]` cases but no attribute is visible
// in this excerpt — confirm against the generator/upstream file.
fn vpsubusb_2() {
    run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM1)), operand3: Some(Indirect(EBX, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 241, 216, 3], OperandSize::Dword)
}
fn vpsubusb_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 216, 251], OperandSize::Qword)
}
fn vpsubusb_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledIndexedDisplaced(RDX, RSI, Eight, 715345256, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 216, 180, 242, 104, 77, 163, 42], OperandSize::Qword)
}
fn vpsubusb_5() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(YMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 229, 216, 193], OperandSize::Dword)
}
fn vpsubusb_6() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM2)), operand3: Some(Indirect(EBX, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 237, 216, 19], OperandSize::Dword)
}
fn vpsubusb_7() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 216, 238], OperandSize::Qword)
}
fn vpsubusb_8() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledDisplaced(RSI, Two, 1318634075, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 237, 216, 36, 117, 91, 194, 152, 78], OperandSize::Qword)
}
fn vpsubusb_9() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 125, 138, 216, 212], OperandSize::Dword)
}
fn vpsubusb_10() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledIndexed(EAX, ESI, Four, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 109, 139, 216, 44, 176], OperandSize::Dword)
}
fn vpsubusb_11() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM22)), operand2: Some(Direct(XMM9)), operand3: Some(Direct(XMM18)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 161, 53, 143, 216, 242], OperandSize::Qword)
}
fn vpsubusb_12() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM8)), operand2: Some(Direct(XMM9)), operand3: Some(IndirectScaledIndexedDisplaced(RAX, RBX, Four, 487116962, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 113, 53, 143, 216, 132, 152, 162, 208, 8, 29], OperandSize::Qword)
}
fn vpsubusb_13() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM2)), operand3: Some(Direct(YMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 109, 170, 216, 225], OperandSize::Dword)
}
fn vpsubusb_14() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM5)), operand3: Some(Indirect(EAX, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 85, 170, 216, 56], OperandSize::Dword)
}
fn vpsubusb_15() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM31)), operand2: Some(Direct(YMM31)), operand3: Some(Direct(YMM8)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 65, 5, 164, 216, 248], OperandSize::Qword)
}
fn vpsubusb_16() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(YMM13)), operand2: Some(Direct(YMM4)), operand3: Some(Indirect(RSI, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 113, 93, 175, 216, 46], OperandSize::Qword)
}
fn vpsubusb_17() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(ZMM3)), operand2: Some(Direct(ZMM7)), operand3: Some(Direct(ZMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 69, 204, 216, 223], OperandSize::Dword)
}
fn vpsubusb_18() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(ZMM6)), operand2: Some(Direct(ZMM3)), operand3: Some(IndirectScaledIndexed(EBX, ECX, Four, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 101, 205, 216, 52, 139], OperandSize::Dword)
}
fn vpsubusb_19() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(ZMM9)), operand2: Some(Direct(ZMM22)), operand3: Some(Direct(ZMM8)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 81, 77, 194, 216, 200], OperandSize::Qword)
}
fn vpsubusb_20() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(ZMM6)), operand2: Some(Direct(ZMM21)), operand3: Some(Indirect(RDI, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 85, 197, 216, 55], OperandSize::Qword)
}<|fim▁end|> |
fn vpsubusb_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPSUBUSB, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 216, 208], OperandSize::Dword)
} |
<|file_name|>migration.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
from nova import flags
import sqlalchemy
from migrate.versioning import api as versioning_api
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
try:
# python-migration changed location of exceptions after 1.6.3
# See LP Bug #717467
from migrate import exceptions as versioning_exceptions
except ImportError:
sys.exit(_("python-migrate is not installed. Exiting."))
FLAGS = flags.FLAGS
def db_sync(version=None):<|fim▁hole|> db_version()
repo_path = _find_migrate_repo()
return versioning_api.upgrade(FLAGS.sql_connection, repo_path, version)
def db_version():
    """Return the database's current migration version.

    If the database predates version control, detect the legacy schema by
    the presence of the known tables and stamp the matching baseline.
    """
    repo_path = _find_migrate_repo()
    try:
        return versioning_api.db_version(FLAGS.sql_connection, repo_path)
    except versioning_exceptions.DatabaseNotControlledError:
        # If we aren't version controlled we may already have the database
        # in the state from before we started version control, check for that
        # and set up version_control appropriately
        meta = sqlalchemy.MetaData()
        engine = sqlalchemy.create_engine(FLAGS.sql_connection, echo=False)
        meta.reflect(bind=engine)
        legacy_tables = ('auth_tokens', 'zones', 'export_devices',
                         'fixed_ips', 'floating_ips', 'instances',
                         'key_pairs', 'networks', 'projects', 'quotas',
                         'security_group_instance_association',
                         'security_group_rules', 'security_groups',
                         'services', 'migrations',
                         'users', 'user_project_association',
                         'user_project_role_association',
                         'user_role_association',
                         'volumes')
        if all(name in meta.tables for name in legacy_tables):
            # Every baseline table exists: this is a pre-versioning schema.
            return db_version_control(1)
        return db_version_control(0)
def db_version_control(version=None):
    # Put the database under python-migrate's version control, stamped at
    # ``version`` (None lets the library choose its default), and echo the
    # version back to the caller.
    repo_path = _find_migrate_repo()
    versioning_api.version_control(FLAGS.sql_connection, repo_path, version)
    return version
def _find_migrate_repo():
    """Get the path for the migrate repository."""
    # The repository lives in a 'migrate_repo' directory next to this module.
    module_dir = os.path.dirname(os.path.abspath(__file__))
    path = os.path.join(module_dir, 'migrate_repo')
    assert os.path.exists(path)
    return path
<|file_name|>fixtures.ts<|end_file_name|><|fim▁begin|>import { Chance } from 'chance';
const CHANCE = new Chance();<|fim▁hole|> country: CHANCE.country(),
nearestPlace: CHANCE.address(),
language: CHANCE.locale(),
rank: 2,
distanceToFocusKm: CHANCE.natural(),
};
}
export function generateCoordinate() {
return { lat: CHANCE.latitude(), lng: CHANCE.longitude() };
}<|fim▁end|> |
export function generateAutosuggestSuggestion() {
return {
words: `${CHANCE.word()}.${CHANCE.word()}.${CHANCE.word()}`, |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import django
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, Client
from django.test.client import RequestFactory
from django.test.utils import override_settings
from custard.conf import (CUSTOM_TYPE_TEXT, CUSTOM_TYPE_INTEGER,
CUSTOM_TYPE_BOOLEAN, CUSTOM_TYPE_FLOAT,
CUSTOM_TYPE_DATE, CUSTOM_TYPE_DATETIME,
CUSTOM_TYPE_TIME, settings)
from custard.builder import CustomFieldsBuilder
from custard.utils import import_class
from .models import (SimpleModelWithManager, SimpleModelWithoutManager,
CustomFieldsModel, CustomValuesModel, builder)
#==============================================================================
class SimpleModelWithManagerForm(builder.create_modelform()):
    # ModelForm whose base class is generated by the custom-fields builder,
    # so the form exposes the model's regular fields plus its custom fields.
    class Meta:
        model = SimpleModelWithManager
        fields = '__all__'
#class ExampleAdmin(admin.ModelAdmin):
# form = ExampleForm
# search_fields = ('name',)
#
# def get_search_results(self, request, queryset, search_term):
# queryset, use_distinct = super(ExampleAdmin, self).get_search_results(request, queryset, search_term)
# queryset |= self.model.objects.search(search_term)
# return queryset, use_distinct
#
# admin.site.register(Example, ExampleAdmin)
#==============================================================================
class CustomModelsTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.simple_with_manager_ct = ContentType.objects.get_for_model(SimpleModelWithManager)
self.simple_without_manager_ct = ContentType.objects.get_for_model(SimpleModelWithoutManager)
self.cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='text_field',
label="Text field",
data_type=CUSTOM_TYPE_TEXT)
self.cf.save()
self.cf2 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='another_text_field',
label="Text field 2",
data_type=CUSTOM_TYPE_TEXT,
required=True,
searchable=False)
self.cf2.clean()
self.cf2.save()
self.cf3 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='int_field', label="Integer field",
data_type=CUSTOM_TYPE_INTEGER)
self.cf3.save()
self.cf4 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='boolean_field', label="Boolean field",
data_type=CUSTOM_TYPE_BOOLEAN)
self.cf4.save()
self.cf5 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='float_field', label="Float field",
data_type=CUSTOM_TYPE_FLOAT)
self.cf5.save()
self.cf6 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='date_field', label="Date field",
data_type=CUSTOM_TYPE_DATE)
self.cf6.save()
self.cf7 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='datetime_field', label="Datetime field",
data_type=CUSTOM_TYPE_DATETIME)
self.cf7.save()
self.cf8 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='time_field', label="Time field",
data_type=CUSTOM_TYPE_TIME)
self.cf8.save()
self.obj = SimpleModelWithManager.objects.create(name='old test')
self.obj.save()
def tearDown(self):
CustomFieldsModel.objects.all().delete()
def test_import_class(self):
self.assertEqual(import_class('custard.builder.CustomFieldsBuilder'), CustomFieldsBuilder)
def test_model_repr(self):
self.assertEqual(repr(self.cf), "<CustomFieldsModel: text_field>")
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="abcdefg")
val.save()
self.assertEqual(repr(val), "<CustomValuesModel: text_field: abcdefg>")
@override_settings(CUSTOM_CONTENT_TYPES=['simplemodelwithmanager'])
def test_field_creation(self):
builder2 = CustomFieldsBuilder('tests.CustomFieldsModel',
'tests.CustomValuesModel',
settings.CUSTOM_CONTENT_TYPES)
class TestCustomFieldsModel(builder2.create_fields()):
class Meta:
app_label = 'tests'
self.assertQuerysetEqual(ContentType.objects.filter(builder2.content_types_query),
ContentType.objects.filter(Q(name__in=['simplemodelwithmanager'])))
def test_mixin(self):
self.assertIn(self.cf, self.obj.get_custom_fields())
self.assertIn(self.cf, SimpleModelWithManager.get_model_custom_fields())
self.assertEqual(self.cf, self.obj.get_custom_field('text_field'))
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="123456")
val.save()
self.assertEqual("123456", self.obj.get_custom_value('text_field'))
self.obj.set_custom_value('text_field', "abcdefg")
self.assertEqual("abcdefg", self.obj.get_custom_value('text_field'))
val.delete()
def test_field_model_clean(self):
cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='another_text_field',
label="Text field already present",
data_type=CUSTOM_TYPE_INTEGER)
with self.assertRaises(ValidationError):
cf.full_clean()
cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='name',
label="Text field already in model",
data_type=CUSTOM_TYPE_TEXT)
with self.assertRaises(ValidationError):
cf.full_clean()
def test_value_model_clean(self):
val = CustomValuesModel.objects.create(custom_field=self.cf2,
object_id=self.obj.pk)
val.value = "qwertyuiop"
val.save()
val = CustomValuesModel.objects.create(custom_field=self.cf2,
object_id=self.obj.pk)
val.value = "qwertyuiop"
with self.assertRaises(ValidationError):
val.full_clean()
def test_value_creation(self):
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="qwertyuiop")
val.save()
self.assertEqual(val.content_type, self.simple_with_manager_ct)
self.assertEqual(val.content_type, val.custom_field.content_type)
self.assertEqual(val.value_text, "qwertyuiop")
self.assertEqual(val.value, "qwertyuiop")
    def test_value_search(self):
        # Attach custom-field values to two instances: two values share the
        # text "qwertyuiop" (one per object) and a third, distinct value
        # exists only on the new object.
        newobj = SimpleModelWithManager.objects.create(name='new simple')
        newobj.save()

        v1 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=self.obj.pk,
                                              value="qwertyuiop")
        v1.save()

        v2 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=newobj.pk,
                                              value="qwertyuiop")
        v2.save()

        v3 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=newobj.pk,
                                              value="000asdf123")
        v3.save()

        # "asdf" appears only in v3, which belongs to newobj.
        qs1 = SimpleModelWithManager.objects.search("asdf")
        self.assertQuerysetEqual(qs1, [repr(newobj)])

        # "qwerty" matches values attached to both objects.
        qs2 = SimpleModelWithManager.objects.search("qwerty")
        self.assertQuerysetEqual(qs2, [repr(self.obj), repr(newobj)], ordered=False)
def test_value_search_not_searchable_field(self):
v1 = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="12345")
v1.save()
v2 = CustomValuesModel.objects.create(custom_field=self.cf2,
object_id=self.obj.pk,
value="67890")
v2.save()
qs1 = SimpleModelWithManager.objects.search("12345")
self.assertQuerysetEqual(qs1, [repr(self.obj)])
qs2 = SimpleModelWithManager.objects.search("67890")
self.assertQuerysetEqual(qs2, [])
def test_get_formfield_for_field(self):
with self.settings(CUSTOM_FIELD_TYPES={CUSTOM_TYPE_TEXT: 'django.forms.fields.EmailField'}):
builder2 = CustomFieldsBuilder('tests.CustomFieldsModel', 'tests.CustomValuesModel')
class SimpleModelWithManagerForm2(builder2.create_modelform(field_types=settings.CUSTOM_FIELD_TYPES)):
class Meta:
model = SimpleModelWithManager
fields = '__all__'
form = SimpleModelWithManagerForm2(data={}, instance=self.obj)
self.assertIsNotNone(form.get_formfield_for_field(self.cf))
self.assertEqual(django.forms.fields.EmailField, form.get_formfield_for_field(self.cf).__class__)
def test_get_widget_for_field(self):
with self.settings(CUSTOM_WIDGET_TYPES={CUSTOM_TYPE_TEXT: 'django.forms.widgets.CheckboxInput'}):
builder2 = CustomFieldsBuilder('tests.CustomFieldsModel', 'tests.CustomValuesModel')
class SimpleModelWithManagerForm2(builder2.create_modelform(widget_types=settings.CUSTOM_WIDGET_TYPES)):
class Meta:
fields = '__all__'
model = SimpleModelWithManager
form = SimpleModelWithManagerForm2(data={}, instance=self.obj)
self.assertIsNotNone(form.get_widget_for_field(self.cf))
self.assertEqual(django.forms.widgets.CheckboxInput, form.get_widget_for_field(self.cf).__class__)
def test_form(self):
class TestForm(builder.create_modelform()):
custom_name = 'My Custom Fields'
custom_description = 'Edit the Example custom fields here'
custom_classes = 'zzzap-class'
class Meta:
fields = '__all__'
model = SimpleModelWithManager
request = self.factory.post('/', { 'text_field': '123' })
form = TestForm(request.POST, instance=self.obj)
self.assertFalse(form.is_valid())
self.assertIn('another_text_field', form.errors)
self.assertRaises(ValueError, lambda: form.save())
request = self.factory.post('/', { 'id': self.obj.pk,
'name': 'xxx',
'another_text_field': 'wwwzzzyyyxxx' })
form = TestForm(request.POST, instance=self.obj)
self.assertTrue(form.is_valid())
form.save()<|fim▁hole|> #self.assertInHTML(TestForm.custom_description, form.as_p())
#self.assertInHTML(TestForm.custom_classes, form.as_p())
def test_admin(self):
modeladmin_class = builder.create_modeladmin()
#c = Client()
#if c.login(username='fred', password='secret'):
# response = c.get('/admin/', follow=True)
# print(response)<|fim▁end|> | self.assertEqual(self.obj.get_custom_value('another_text_field'), 'wwwzzzyyyxxx')
self.assertEqual(self.obj.name, 'xxx')
#self.assertInHTML(TestForm.custom_name, form.as_p()) |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>import Ember from "ember";
var oneWay = Ember.computed.oneWay,
equal = Ember.computed.equal;
export default Ember.Controller.extend({
needs: ['mixin-stack', 'mixin-details'],
emberApplication: false,
navWidth: 180,
inspectorWidth: 360,
mixinStack: oneWay('controllers.mixin-stack').readOnly(),
mixinDetails: oneWay('controllers.mixin-details').readOnly(),
isChrome: equal('port.adapter.name', 'chrome'),
// Indicates that the extension window is focused,
active: true,
inspectorExpanded: false,
pushMixinDetails: function(name, property, objectId, details) {
details = {
name: name,
property: property,
objectId: objectId,
mixins: details
};
this.get('mixinStack').pushObject(details);
this.set('mixinDetails.model', details);
},
popMixinDetails: function() {
var mixinStack = this.get('controllers.mixin-stack');
var item = mixinStack.popObject();
this.set('mixinDetails.model', mixinStack.get('lastObject'));
this.get('port').send('objectInspector:releaseObject', { objectId: item.objectId });
},
activateMixinDetails: function(name, details, objectId) {
var self = this;
this.get('mixinStack').forEach(function(item) {<|fim▁hole|>
this.set('mixinStack.model', []);
this.pushMixinDetails(name, undefined, objectId, details);
},
droppedObject: function(objectId) {
var mixinStack = this.get('mixinStack.model');
var obj = mixinStack.findProperty('objectId', objectId);
if (obj) {
var index = mixinStack.indexOf(obj);
var objectsToRemove = [];
for(var i = index; i >= 0; i--) {
objectsToRemove.pushObject(mixinStack.objectAt(i));
}
objectsToRemove.forEach(function(item) {
mixinStack.removeObject(item);
});
}
if (mixinStack.get('length') > 0) {
this.set('mixinDetails.model', mixinStack.get('lastObject'));
} else {
this.set('mixinDetails.model', null);
}
}
});<|fim▁end|> | self.get('port').send('objectInspector:releaseObject', { objectId: item.objectId });
}); |
<|file_name|>transport.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="node" />
import { Wire, WireConstructor, READY_STATE, ExistingConnectConfig, ConnectConfig, InternalConnectConfig } from './wire';
import { Identity } from '../identity';
import { EventEmitter } from 'events';<|fim▁hole|>import { View } from '../api/view/view';
import { Frame, Window } from '../main';
import { EntityTypeHelpers } from '../util/entity-type';
export declare type MessageHandler = (data: any) => boolean;
/**
 * Transport multiplexes request/response and event traffic over a single
 * wire connection to the runtime. Correlated replies appear to be matched
 * to their callers through `wireListeners` (keyed by correlation id —
 * confirm in the implementation).
 */
declare class Transport extends EventEmitter {
    /** Pending request callbacks, keyed by correlation id. */
    protected wireListeners: Map<number, {
        resolve: Function;
        reject: Function;
    }>;
    /** Fallback handler for replies that carry no correlation id. */
    protected uncorrelatedListener: Function;
    /** Identity of this connection's own entity (view, window, frame, or none). */
    me: (View | Window | Frame | {}) & Identity & EntityTypeHelpers;
    environment: Environment;
    /** Per-topic reference bookkeeping — presumably subscription refcounts; confirm. */
    topicRefMap: Map<string, number>;
    /** Raw send function, taken directly from the underlying wire. */
    sendRaw: Wire['send'];
    eventAggregator: EventAggregator;
    /** Handlers given a chance at each incoming message; each returns a boolean. */
    protected messageHandlers: MessageHandler[];
    constructor(wireType: WireConstructor, environment: Environment);
    connectSync: (config: ConnectConfig) => void;
    getPort: () => string;
    shutdown(): Promise<void>;
    connect(config: InternalConnectConfig): Promise<string>;
    connectByPort(config: ExistingConnectConfig): Promise<string>;
    READY_STATE: typeof READY_STATE;
    ferryAction(data: any): Promise<Message<any>>;
    registerMessageHandler(handler: MessageHandler): void;
    protected addWireListener(id: number, resolve: Function, reject: Function, uncorrelated: boolean): void;
    protected onmessage(data: Message<Payload>): void;
    protected handleMessage(data: Message<Payload>): boolean;
}
export default Transport;
interface Transport {
sendAction(action: 'request-external-authorization', payload: {}, uncorrelated: true): Promise<Message<AuthorizationPayload>>;
sendAction(action: string, payload: {}, uncorrelated: boolean): Promise<Message<Payload>>;
topicRefMap: Map<string, number>;
}
/** Generic envelope for messages exchanged over the wire. */
export declare class Message<T> {
    action: string;
    payload: T;
    /** Set on request/response traffic so replies can be matched to requests. */
    correlationId?: number;
}
/** Message delivering a desktop runtime event. */
export declare class EventMessage implements Message<RuntimeEvent> {
    action: 'process-desktop-event';
    payload: RuntimeEvent;
}
/** Message delivering a notification-center event. */
export declare class NotificationEventMessage implements Message<NotificationEvent> {
    action: 'process-notification-event';
    payload: NotificationEvent;
}
export interface NotificationEvent {
    payload: {
        notificationId: string;
    };
    type: string | symbol;
}
/** Standard response payload: a success flag plus arbitrary data. */
export declare class Payload {
    success: boolean;
    data: any;
}
export declare class AuthorizationPayload {
token: string;
file: string;
}<|fim▁end|> | import { Environment } from '../environment/environment';
import { RuntimeEvent } from '../api/events/base';
import { EventAggregator } from '../api/events/eventAggregator'; |
<|file_name|>expr-block.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// Tests for standalone blocks as expressions
<|fim▁hole|>
// A block expression may end in a struct literal; RS is declared elsewhere in this file.
fn test_rec() { let rs = { RS {v1: 10, v2: 20} }; assert_eq!(rs.v2, 20); }
// A block may contain statements (here a counting loop) and still evaluate
// to its trailing expression.
fn test_filled_with_stuff() {
    let result = {
        let mut counter = 0;
        for _ in 0..10 {
            counter += 1;
        }
        counter
    };
    assert_eq!(result, 10);
}
pub fn main() { test_basic(); test_rec(); test_filled_with_stuff(); }<|fim▁end|> | fn test_basic() { let rs: bool = { true }; assert!((rs)); }
struct RS { v1: isize, v2: isize } |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
class PartnerLogoForm(forms.Form):<|fim▁hole|> label='Select a file',
)<|fim▁end|> | partner_logo = forms.ImageField( |
<|file_name|>KeyboardExercise.java<|end_file_name|><|fim▁begin|>/*
* This file is part of InTEL, the Interactive Toolkit for Engineering Learning.
* http://intel.gatech.edu
*
* InTEL is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* InTEL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with InTEL. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package keyboard;
import com.jme.math.Vector3f;
import com.jme.renderer.ColorRGBA;
import com.jme.system.DisplaySystem;
import edu.gatech.statics.application.StaticsApplication;
import edu.gatech.statics.exercise.Diagram;
import edu.gatech.statics.exercise.DiagramType;
import edu.gatech.statics.exercise.Schematic;
import edu.gatech.statics.math.Unit;
import edu.gatech.statics.math.Vector3bd;
import edu.gatech.statics.modes.description.Description;
import edu.gatech.statics.modes.equation.EquationDiagram;
import edu.gatech.statics.modes.equation.EquationMode;
import edu.gatech.statics.modes.equation.EquationState;
import edu.gatech.statics.modes.equation.worksheet.TermEquationMathState;
import edu.gatech.statics.modes.frame.FrameExercise;
import edu.gatech.statics.objects.Body;
import edu.gatech.statics.objects.DistanceMeasurement;
import edu.gatech.statics.objects.Force;
import edu.gatech.statics.objects.Point;
import edu.gatech.statics.objects.bodies.Bar;
import edu.gatech.statics.objects.bodies.Beam;
import edu.gatech.statics.objects.connectors.Connector2ForceMember2d;
import edu.gatech.statics.objects.connectors.Pin2d;
import edu.gatech.statics.objects.connectors.Roller2d;
import edu.gatech.statics.objects.representations.ModelNode;
import edu.gatech.statics.objects.representations.ModelRepresentation;
import edu.gatech.statics.tasks.Solve2FMTask;
import edu.gatech.statics.ui.AbstractInterfaceConfiguration;
import edu.gatech.statics.ui.windows.navigation.Navigation3DWindow;
import edu.gatech.statics.ui.windows.navigation.ViewConstraints;
import java.math.BigDecimal;
import java.util.Map;
/**
*
* @author Calvin Ashmore
*/
public class KeyboardExercise extends FrameExercise {
/**
 * Builds the UI configuration for this exercise: a 3D navigation window,
 * tuned camera speeds, and bounds on panning, zooming and rotation.
 */
@Override
public AbstractInterfaceConfiguration createInterfaceConfiguration() {
    AbstractInterfaceConfiguration config = super.createInterfaceConfiguration();

    // Constrain how far the user can pan, zoom and rotate the view.
    ViewConstraints constraints = new ViewConstraints();
    constraints.setPositionConstraints(-2, 2, -1, 4);
    constraints.setZoomConstraints(0.5f, 1.5f);
    constraints.setRotationConstraints(-5, 5, 0, 5);

    config.setNavigationWindow(new Navigation3DWindow());
    config.setCameraSpeed(.2f, 0.02f, .05f);
    config.setViewConstraints(constraints);
    return config;
}
/**
 * Builds the problem description shown to the student: title, narrative,
 * problem statement, goal, and illustrative images.
 */
@Override
public Description getDescription() {
    Description description = new Description();
    description.setTitle("Keyboard Stand");
    description.setNarrative(
            "Kasiem Hill is in a music group comprised of Civil Engineering " +
            "students from Georgia Tech, in which he plays the keyboard. " +
            "For his birthday, he received a new keyboard, but it is much bigger " +
            "(both in size and weight) than his last one, so he needs to buy a " +
            "new keyboard stand. He finds one he really likes from a local " +
            "dealer and is unsure if the connections will be able to support " +
            "the weight of the new keyboard. He measures the dimensions of " +
            "the stand and he wants to calculate how much force he can expect " +
            "at each connection in the cross bar before he makes the investment.");
    description.setProblemStatement(
            "The stand can be modeled as a frame and is supported by two beams and a cross bar PQ. " +
            "The supports at B and E are rollers and the floor is frictionless.");
    description.setGoals(
            "Find the force in PQ and define whether it is in tension or compression.");
    // Reference photos displayed alongside the text.
    description.addImage("keyboard/assets/keyboard 1.png");
    description.addImage("keyboard/assets/keyboard 2.jpg");
    description.addImage("keyboard/assets/keyboard 3.jpg");
    return description;
}
/**
 * Configures exercise-wide units and display constants, then applies a
 * hotfix that restores the observation directions of the F[x], F[y] and
 * M[p] equation states in every saved equation diagram.
 */
@Override
public void initExercise() {
//        setName("Keyboard Stand");
//
//        setDescription(
//                "This is a keyboard stand supported by two beams and a cross bar, PQ. " +
//                "Find the force in PQ and define whether it is in tension or compression. " +
//                "The supports at B and E are rollers, and the floor is frictionless.");

    // Unit suffixes and display scaling for this exercise.
    Unit.setSuffix(Unit.distance, " m");
    Unit.setSuffix(Unit.moment, " N*m");
    Unit.setDisplayScale(Unit.distance, new BigDecimal("10"));

    // Sizes of the schematic glyphs (moments, forces, points, cylinders).
    getDisplayConstants().setMomentSize(0.5f);
    getDisplayConstants().setForceSize(0.5f);
    getDisplayConstants().setPointSize(0.5f);
    getDisplayConstants().setCylinderRadius(0.5f);
    //getDisplayConstants().setForceLabelDistance(1f);
    //getDisplayConstants().setMomentLabelDistance(0f);
    //getDisplayConstants().setMeasurementBarSize(0.1f);

    // 10/21/2010 HOTFIX: THIS CORRECTS AN ISSUE IN WHICH OBSERVATION DIRECTION IS SET TO NULL IN EQUATIONS
    // For every stored diagram set, rebuild the equation state with explicit
    // X/Y/Z observation directions and clear the undo stack afterwards.
    for (Map<DiagramType, Diagram> diagramMap : getState().allDiagrams().values()) {
        EquationDiagram eqDiagram = (EquationDiagram) diagramMap.get(EquationMode.instance.getDiagramType());
        if(eqDiagram == null) continue;
        EquationState.Builder builder = new EquationState.Builder(eqDiagram.getCurrentState());
        TermEquationMathState.Builder xBuilder = new TermEquationMathState.Builder((TermEquationMathState) builder.getEquationStates().get("F[x]"));
        xBuilder.setObservationDirection(Vector3bd.UNIT_X);
        TermEquationMathState.Builder yBuilder = new TermEquationMathState.Builder((TermEquationMathState) builder.getEquationStates().get("F[y]"));
        yBuilder.setObservationDirection(Vector3bd.UNIT_Y);
        TermEquationMathState.Builder zBuilder = new TermEquationMathState.Builder((TermEquationMathState) builder.getEquationStates().get("M[p]"));
        zBuilder.setObservationDirection(Vector3bd.UNIT_Z);
        builder.putEquationState(xBuilder.build());
        builder.putEquationState(yBuilder.build());
        builder.putEquationState(zBuilder.build());
        eqDiagram.pushState(builder.build());
        eqDiagram.clearStateStack();
    }
}
// Geometry and connectivity of the stand, built in loadExercise().
Point A, B, C, D, E, P, Q;              // labeled points of the frame
Pin2d jointC;                           // pin connecting the two legs at C
Connector2ForceMember2d jointP, jointQ; // connections of the cross bar PQ
Roller2d jointB, jointE;                // roller supports at the floor
Body leftLeg, rightLeg;                 // the two leg beams
Bar bar;                                // the cross bar PQ (two-force member)
@Override
public void loadExercise() {
Schematic schematic = getSchematic();
DisplaySystem.getDisplaySystem().getRenderer().setBackgroundColor(new ColorRGBA(.7f, .8f, .9f, 1.0f));
StaticsApplication.getApp().getCamera().setLocation(new Vector3f(0.0f, 0.0f, 65.0f));
A = new Point("A", "0", "6", "0");
D = new Point("D", "8", "6", "0");
B = new Point("B", "8", "0", "0");
E = new Point("E", "0", "0", "0");
C = new Point("C", "4", "3", "0");
P = new Point("P", "2.7", "4", "0");
Q = new Point("Q", "5.3", "4", "0");
leftLeg = new Beam("Left Leg", B, A);
bar = new Bar("Bar", P, Q);
rightLeg = new Beam("Right Leg", E, D);
jointC = new Pin2d(C);
jointP = new Connector2ForceMember2d(P, bar); //Pin2d(P);
jointQ = new Connector2ForceMember2d(Q, bar); //new Pin2d(Q);
jointB = new Roller2d(B);
jointE = new Roller2d(E);
jointB.setDirection(Vector3bd.UNIT_Y);
jointE.setDirection(Vector3bd.UNIT_Y);
DistanceMeasurement distance1 = new DistanceMeasurement(D, A);
distance1.setName("Measure AD");
distance1.createDefaultSchematicRepresentation(0.5f);
distance1.addPoint(E);
distance1.addPoint(B);
schematic.add(distance1);
DistanceMeasurement distance2 = new DistanceMeasurement(C, D);
distance2.setName("Measure CD");
distance2.createDefaultSchematicRepresentation(0.5f);
distance2.forceVertical();
distance2.addPoint(A);
schematic.add(distance2);
DistanceMeasurement distance3 = new DistanceMeasurement(C, Q);
distance3.setName("Measure CQ");
distance3.createDefaultSchematicRepresentation(1f);
distance3.forceVertical();
distance3.addPoint(P);
schematic.add(distance3);
DistanceMeasurement distance4 = new DistanceMeasurement(B, D);
distance4.setName("Measure BD");
distance4.createDefaultSchematicRepresentation(2.4f);<|fim▁hole|> distance4.addPoint(A);
distance4.addPoint(E);
schematic.add(distance4);
Force keyboardLeft = new Force(A, Vector3bd.UNIT_Y.negate(), new BigDecimal(50));
keyboardLeft.setName("Keyboard Left");
leftLeg.addObject(keyboardLeft);
Force keyboardRight = new Force(D, Vector3bd.UNIT_Y.negate(), new BigDecimal(50));
keyboardRight.setName("Keyboard Right");
rightLeg.addObject(keyboardRight);
jointC.attach(leftLeg, rightLeg);
jointC.setName("Joint C");
jointP.attach(leftLeg, bar);
jointP.setName("Joint P");
jointQ.attach(bar, rightLeg);
jointQ.setName("Joint Q");
jointE.attachToWorld(rightLeg);
jointE.setName("Joint E");
jointB.attachToWorld(leftLeg);
jointB.setName("Joint B");
A.createDefaultSchematicRepresentation();
B.createDefaultSchematicRepresentation();
C.createDefaultSchematicRepresentation();
D.createDefaultSchematicRepresentation();
E.createDefaultSchematicRepresentation();
P.createDefaultSchematicRepresentation();
Q.createDefaultSchematicRepresentation();
keyboardLeft.createDefaultSchematicRepresentation();
keyboardRight.createDefaultSchematicRepresentation();
//leftLeg.createDefaultSchematicRepresentation();
//bar.createDefaultSchematicRepresentation();
//rightLeg.createDefaultSchematicRepresentation();
schematic.add(leftLeg);
schematic.add(bar);
schematic.add(rightLeg);
ModelNode modelNode = ModelNode.load("keyboard/assets/", "keyboard/assets/keyboard.dae");
float scale = .28f;
ModelRepresentation rep = modelNode.extractElement(leftLeg, "VisualSceneNode/stand/leg1");
rep.setLocalScale(scale);
rep.setModelOffset(new Vector3f(14f, 0, 0));
leftLeg.addRepresentation(rep);
rep.setSynchronizeRotation(false);
rep.setSynchronizeTranslation(false);
rep.setHoverLightColor(ColorRGBA.yellow);
rep.setSelectLightColor(ColorRGBA.yellow);
rep = modelNode.extractElement(rightLeg, "VisualSceneNode/stand/leg2");
rep.setLocalScale(scale);
rep.setModelOffset(new Vector3f(14f, 0, 0));
rightLeg.addRepresentation(rep);
rep.setSynchronizeRotation(false);
rep.setSynchronizeTranslation(false);
rep.setHoverLightColor(ColorRGBA.yellow);
rep.setSelectLightColor(ColorRGBA.yellow);
rep = modelNode.extractElement(bar, "VisualSceneNode/stand/middle_support");
rep.setLocalScale(scale);
rep.setModelOffset(new Vector3f(14f, 0, 0));
bar.addRepresentation(rep);
rep.setSynchronizeRotation(false);
rep.setSynchronizeTranslation(false);
rep.setHoverLightColor(ColorRGBA.yellow);
rep.setSelectLightColor(ColorRGBA.yellow);
rep = modelNode.getRemainder(schematic.getBackground());
schematic.getBackground().addRepresentation(rep);
rep.setLocalScale(scale);
rep.setModelOffset(new Vector3f(14f, 0, 0));
rep.setSynchronizeRotation(false);
rep.setSynchronizeTranslation(false);
addTask(new Solve2FMTask("Solve PQ", bar, jointP));
}
}<|fim▁end|> | |
<|file_name|>test_python_ctools.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# ==========================================================================
# This scripts performs unit tests for the ctools package
#
# Copyright (C) 2012-2017 Juergen Knoedlseder
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ==========================================================================
import gammalib
import ctools
import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import test_ctobssim
import test_ctselect
import test_ctphase
import test_ctfindvar
import test_ctbin
import test_ctlike
import test_cttsmap
import test_ctmodel
import test_ctskymap
import test_ctexpcube
import test_ctpsfcube
import test_ctedispcube
import test_ctbkgcube
import test_ctmapcube
import test_ctcubemask
import test_ctbutterfly
import test_ctulimit
import test_cterror
import test_ctprob
import test_pipelines<|fim▁hole|>
# ========================= #
# Perform ctools unit tests #
# ========================= #
def test(installed=False):
    """
    Perform unit testing for ctools.

    Prepares a local "pfiles" parameter directory, collects all ctool test
    suites, runs them and saves an XML report. For an installed package the
    test runs inside a temporary directory that is removed afterwards.

    Parameters
    ----------
    installed : bool, optional
        Flag indicating whether the script has been installed or not

    Raises
    ------
    RuntimeError
        If at least one test suite reported an error.
    """
    # If we have an installed version then create a temporary
    # directory and copy over all information that is needed
    if installed:

        # Create temporary working directory
        import tempfile
        path = tempfile.mkdtemp()
        os.chdir(path)

        # Get test directory
        import inspect
        testdir = inspect.getfile(ctools.tests)
        dirname = os.path.dirname(testdir)

        # Copy test data in "data" directory
        os.system('cp -r %s %s' % (dirname+'/data', 'data'))

        # Set test data environment variable
        os.environ['TEST_DATA'] = 'data'

    # ... otherwise set the calibration database to the one shipped with the
    # package; we don't need to set the 'TEST_DATA', this is done by the
    # test environment
    else:
        os.environ['CALDB'] = '%s/caldb' % (os.environ['TEST_SRCDIR'])

    # Create a local "pfiles" directory and set PFILES environment variable.
    # Only tolerate an already existing directory here: the previous bare
    # "except" also hid unrelated failures (e.g. permission errors) and
    # would even swallow KeyboardInterrupt/SystemExit.
    try:
        os.mkdir('pfiles')
    except OSError:
        pass
    os.environ['PFILES'] = 'pfiles'

    # Copy the ctools parameter files into the "pfiles" directory. For a
    # non-installed test we copy the parameter files from the respective
    # source directories into the "pfiles" directory, for an installed version
    # we get all parameter files from the "syspfiles" directory. Also make
    # sure that all parameter files are writable.
    if not installed:
        os.system('cp -r %s/src/*/*.par pfiles/' % (os.environ['TEST_SRCDIR']))
        os.system('chmod u+w pfiles/*')
    else:
        os.system('cp -r %s/syspfiles/*.par pfiles/' % (os.environ['CTOOLS']))
        os.system('chmod u+w pfiles/*')

    # Define list of test suites
    tests = [test_ctobssim.Test(),
             test_ctselect.Test(),
             test_ctphase.Test(),
             test_ctfindvar.Test(),
             test_ctbin.Test(),
             test_ctlike.Test(),
             test_cttsmap.Test(),
             test_ctmodel.Test(),
             test_ctskymap.Test(),
             test_ctexpcube.Test(),
             test_ctpsfcube.Test(),
             test_ctedispcube.Test(),
             test_ctbkgcube.Test(),
             test_ctmapcube.Test(),
             test_ctcubemask.Test(),
             test_ctbutterfly.Test(),
             test_ctulimit.Test(),
             test_cterror.Test(),
             test_ctprob.Test(),
             test_pipelines.Test()]

    # Allocate test suite container
    suites = gammalib.GTestSuites('ctools unit testing')

    # Set test suites and append them to suite container
    for suite in tests:
        suite.set()
        suites.append(suite)

    # Run test suite
    success = suites.run()

    # Save test results
    if not installed:
        suites.save('reports/ctools.xml')
    else:
        suites.save('ctools_reports.xml')

    # Remove temporary directory
    if installed:
        os.system('rm -rf %s' % (path))

    # Raise an exception in case of failure
    if not success:
        raise RuntimeError('At least one error occured during the test.')

    # Return
    return
# ======================== #
# Main routine entry point #
# ======================== #
if __name__ == '__main__':
# Run tests
test()<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa
from .elementwise import (
Unary,
Binary,
ColsBinary,
Reduce,
func2class_name,
unary_module,
make_unary,
binary_module,
make_binary,
reduce_module,
make_reduce,
binary_dict_int_tst,
unary_dict_gen_tst,
binary_dict_gen_tst,
)
from .linear_map import LinearMap
from .nexpr import NumExprABC
from .mixufunc import make_local, make_local_dict, get_ufunc_args, MixUfuncABC
from ._elementwise import (
BitwiseNot,
Absolute,
Arccos,
Arccosh,
Arcsin,
Arcsinh,
Arctan,
Arctanh,
Cbrt,
Ceil,
Conj,
Conjugate,
Cos,
Cosh,
Deg2rad,
Degrees,
Exp,
Exp2,
Expm1,
Fabs,
Floor,
Frexp,
Invert,
Isfinite,
Isinf,
Isnan,
Isnat,
Log,
Log10,
Log1p,
Log2,
LogicalNot,
Modf,
Negative,
Positive,
Rad2deg,
Radians,
Reciprocal,
Rint,
Sign,
Signbit,
Sin,
Sinh,
Spacing,
Sqrt,
Square,
Tan,
Tanh,
Trunc,
Abs,
Add,
Arctan2,
BitwiseAnd,
BitwiseOr,
BitwiseXor,
Copysign,
Divide,
Divmod,
Equal,
FloorDivide,
FloatPower,
Fmax,
Fmin,
Fmod,
Gcd,
Greater,
GreaterEqual,
Heaviside,
Hypot,
Lcm,
Ldexp,
LeftShift,
Less,
LessEqual,
Logaddexp,
Logaddexp2,
LogicalAnd,
LogicalOr,
LogicalXor,
Maximum,
Minimum,
Mod,
Multiply,
Nextafter,
NotEqual,
Power,
Remainder,
RightShift,
Subtract,
TrueDivide,
ColsAdd,
ColsArctan2,
ColsBitwiseAnd,
ColsBitwiseOr,
ColsBitwiseXor,
ColsCopysign,
ColsDivide,
ColsDivmod,
ColsEqual,
ColsFloorDivide,
ColsFloatPower,
ColsFmax,
ColsFmin,
ColsFmod,
ColsGcd,
ColsGreater,
ColsGreaterEqual,
ColsHeaviside,
ColsHypot,
ColsLcm,
ColsLdexp,
ColsLeftShift,
ColsLess,
ColsLessEqual,
ColsLogaddexp,
ColsLogaddexp2,
ColsLogicalAnd,
ColsLogicalOr,
ColsLogicalXor,
ColsMaximum,
ColsMinimum,
ColsMod,
ColsMultiply,
ColsNextafter,
ColsNotEqual,
ColsPower,
ColsRemainder,
ColsRightShift,
ColsSubtract,
ColsTrueDivide,
AddReduce,
Arctan2Reduce,
BitwiseAndReduce,
BitwiseOrReduce,
BitwiseXorReduce,
CopysignReduce,
DivideReduce,
DivmodReduce,
EqualReduce,
FloorDivideReduce,
FloatPowerReduce,
FmaxReduce,
FminReduce,
FmodReduce,
GcdReduce,
GreaterReduce,
GreaterEqualReduce,
HeavisideReduce,
HypotReduce,
LcmReduce,
LdexpReduce,
LeftShiftReduce,
LessReduce,
LessEqualReduce,
LogaddexpReduce,
Logaddexp2Reduce,
LogicalAndReduce,
LogicalOrReduce,
LogicalXorReduce,
MaximumReduce,
MinimumReduce,
ModReduce,
MultiplyReduce,
NextafterReduce,
NotEqualReduce,
PowerReduce,
RemainderReduce,
RightShiftReduce,<|fim▁hole|> TrueDivideReduce,
)
__all__ = [
"Unary",
"Binary",
"ColsBinary",
"Reduce",
"func2class_name",
"unary_module",
"make_unary",
"binary_module",
"make_binary",
"reduce_module",
"make_reduce",
"binary_dict_int_tst",
"unary_dict_gen_tst",
"binary_dict_gen_tst",
"LinearMap",
"NumExprABC",
"make_local",
"make_local_dict",
"get_ufunc_args",
"MixUfuncABC",
"BitwiseNot",
"Absolute",
"Arccos",
"Arccosh",
"Arcsin",
"Arcsinh",
"Arctan",
"Arctanh",
"Cbrt",
"Ceil",
"Conj",
"Conjugate",
"Cos",
"Cosh",
"Deg2rad",
"Degrees",
"Exp",
"Exp2",
"Expm1",
"Fabs",
"Floor",
"Frexp",
"Invert",
"Isfinite",
"Isinf",
"Isnan",
"Isnat",
"Log",
"Log10",
"Log1p",
"Log2",
"LogicalNot",
"Modf",
"Negative",
"Positive",
"Rad2deg",
"Radians",
"Reciprocal",
"Rint",
"Sign",
"Signbit",
"Sin",
"Sinh",
"Spacing",
"Sqrt",
"Square",
"Tan",
"Tanh",
"Trunc",
"Abs",
"Add",
"Arctan2",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"Copysign",
"Divide",
"Divmod",
"Equal",
"FloorDivide",
"FloatPower",
"Fmax",
"Fmin",
"Fmod",
"Gcd",
"Greater",
"GreaterEqual",
"Heaviside",
"Hypot",
"Lcm",
"Ldexp",
"LeftShift",
"Less",
"LessEqual",
"Logaddexp",
"Logaddexp2",
"LogicalAnd",
"LogicalOr",
"LogicalXor",
"Maximum",
"Minimum",
"Mod",
"Multiply",
"Nextafter",
"NotEqual",
"Power",
"Remainder",
"RightShift",
"Subtract",
"TrueDivide",
"ColsAdd",
"ColsArctan2",
"ColsBitwiseAnd",
"ColsBitwiseOr",
"ColsBitwiseXor",
"ColsCopysign",
"ColsDivide",
"ColsDivmod",
"ColsEqual",
"ColsFloorDivide",
"ColsFloatPower",
"ColsFmax",
"ColsFmin",
"ColsFmod",
"ColsGcd",
"ColsGreater",
"ColsGreaterEqual",
"ColsHeaviside",
"ColsHypot",
"ColsLcm",
"ColsLdexp",
"ColsLeftShift",
"ColsLess",
"ColsLessEqual",
"ColsLogaddexp",
"ColsLogaddexp2",
"ColsLogicalAnd",
"ColsLogicalOr",
"ColsLogicalXor",
"ColsMaximum",
"ColsMinimum",
"ColsMod",
"ColsMultiply",
"ColsNextafter",
"ColsNotEqual",
"ColsPower",
"ColsRemainder",
"ColsRightShift",
"ColsSubtract",
"ColsTrueDivide",
"AddReduce",
"Arctan2Reduce",
"BitwiseAndReduce",
"BitwiseOrReduce",
"BitwiseXorReduce",
"CopysignReduce",
"DivideReduce",
"DivmodReduce",
"EqualReduce",
"FloorDivideReduce",
"FloatPowerReduce",
"FmaxReduce",
"FminReduce",
"FmodReduce",
"GcdReduce",
"GreaterReduce",
"GreaterEqualReduce",
"HeavisideReduce",
"HypotReduce",
"LcmReduce",
"LdexpReduce",
"LeftShiftReduce",
"LessReduce",
"LessEqualReduce",
"LogaddexpReduce",
"Logaddexp2Reduce",
"LogicalAndReduce",
"LogicalOrReduce",
"LogicalXorReduce",
"MaximumReduce",
"MinimumReduce",
"ModReduce",
"MultiplyReduce",
"NextafterReduce",
"NotEqualReduce",
"PowerReduce",
"RemainderReduce",
"RightShiftReduce",
"SubtractReduce",
"TrueDivideReduce",
]<|fim▁end|> | SubtractReduce, |
<|file_name|>test_index.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015 OpenCraft <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#<|fim▁hole|>"""
Views - Index - Tests
"""
# Imports #####################################################################
from instance.tests.base import WithUserTestCase
# Tests #######################################################################
class IndexViewsTestCase(WithUserTestCase):
    """
    Test cases for views
    """
    def test_index_unauthenticated(self):
        """
        Index view - Unauthenticated users go to login page
        """
        # Anonymous requests to the index must redirect to the admin login,
        # preserving the original destination in the "next" parameter.
        response = self.client.get('/')
        self.assertRedirects(response, 'http://testserver/admin/login/?next=/')

    def test_index_authenticated(self):
        """
        Index view - Authenticated
        """
        # Credentials 'user1'/'pass' — presumably a fixture created by
        # WithUserTestCase; confirm in instance.tests.base.
        self.client.login(username='user1', password='pass')
        # The logged-in index should serve the Angular app shell.
        response = self.client.get('/')
        self.assertContains(response, 'ng-app="InstanceApp"')
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# |
class Solution {
public:
    // Returns true when keeping character `ch` at (row, col) causes no
    // conflict with any other filled cell in the same row, column or 3x3 box.
    bool is_fill(vector<vector<char>>&board, int row, int col, char ch)
    {
        // Row and column scan, skipping the cell itself.
        for(int i = 0; i<9; ++i)
        {
            if((i!=row && board[i][col]==ch)||(i!=col && board[row][i]==ch))
                return false;
        }
        // 3x3 box scan. The box origin is simply (row/3*3, col/3*3), which
        // replaces the previous case analysis on row%3 / col%3. Cells that
        // share the row or the column with (row, col) were already covered
        // above, so skipping i==row or j==col here loses nothing.
        int box_row = row / 3 * 3;
        int box_col = col / 3 * 3;
        for(int i = box_row; i <= box_row + 2; ++i)
            for(int j = box_col; j <= box_col + 2; ++j)
                if(i!=row && j!=col && board[i][j]==ch)
                    return false;
        return true;
    }
    // A board is valid when no filled cell ('.' means empty) conflicts with
    // another cell in its row, column or 3x3 box.
    bool isValidSudoku(vector<vector<char> > &board) {
        // Robustness fix: also reject rows of the wrong length; previously a
        // jagged board made is_fill index out of bounds. (board.empty() was
        // redundant: an empty board already fails the size check.)
        if(board.size()!=9)
            return false;
        for(int i = 0; i<9; ++i)
            if(board[i].size()!=9)
                return false;
        for(int i = 0; i<9; ++i)
        {
            for(int j = 0; j<9; ++j)
            {
                if(board[i][j]!='.' && !is_fill(board, i, j, board[i][j]))
                    return false;
            }
        }
        return true;
    }
};
//Solution Two
class Solution {
private:
bool rowArr[9], colArr[9], nineGrid[9];
// Validates row `row`: returns false on a duplicate digit. A clean row is
// memoized in rowArr so it is scanned at most once per isValidSudoku() call.
bool checkRow(vector<vector<char> > &board, int row){
    if(rowArr[row])
        return true;
    // vec[d] marks that digit d has been seen in this row ('.' is skipped).
    vector<bool>vec(10, false);
    for(int i = 0; i<9; ++i) {
        if(board[row][i]=='.')
            continue;
        if(vec[board[row][i]-'0'])
            return false;
        vec[board[row][i]-'0'] = true;
    }
    rowArr[row] = true;
    return true;
}
bool checkCol(vector<vector<char> > &board, int col) {<|fim▁hole|> for(int i = 0; i<9; ++i) {
if(board[i][col]=='.')
continue;
if(vec[board[i][col]-'0'])
return false;
vec[board[i][col]-'0'] = true;
}
colArr[col] = true;
return true;
}
// Validates the 3x3 box containing (row, col): returns false on a duplicate
// digit. A clean box is memoized in nineGrid so it is scanned at most once
// per isValidSudoku() call, mirroring checkRow/checkCol.
bool checkNineGrid(vector<vector<char> > &board, int row, int col) {
    // Boxes are numbered 0..8 in row-major order: (row/3)*3 + col/3.
    int cnt = (row / 3)*3 + col / 3;
    if(nineGrid[cnt])
        return true;
    vector<bool>vec(10, false);
    for(int i = row / 3 * 3; i <= row / 3 * 3 + 2; ++i) {
        for(int j = col / 3 * 3; j <= col / 3 * 3 + 2; ++j) {
            if(board[i][j]=='.')
                continue;
            if(vec[board[i][j]-'0'])
                return false;
            vec[board[i][j]-'0'] = true;
        }
    }
    // Bug fix: record the verified box. Previously the memo was read but
    // never written, so the cache (and cnt) had no effect.
    nineGrid[cnt] = true;
    return true;
}
public:
// Entry point: the board is valid when every filled cell's row, column and
// 3x3 box contain no duplicate digit. Assumes a 9x9 board — TODO confirm
// the caller guarantees the shape.
bool isValidSudoku(vector<vector<char> > &board) {
    // Reset the per-call memo arrays.
    memset(rowArr, false, sizeof(rowArr));
    memset(colArr, false, sizeof(colArr));
    memset(nineGrid, false, sizeof(nineGrid));
    for(int i = 0; i<9; ++i) {
        for(int j = 0; j<9; ++j) {
            if(board[i][j]!='.') {
                if(!checkRow(board, i) || !checkCol(board, j) || !checkNineGrid(board, i, j))
                    return false;
            }
        }
    }
    return true;
}
};<|fim▁end|> | if(colArr[col])
return true;
vector<bool>vec(10, false); |
<|file_name|>route.go<|end_file_name|><|fim▁begin|>package core
import (
"crypto/rand"
"fmt"
"regexp"
"time"
"github.com/jzaikovs/core/loggy"
"github.com/jzaikovs/core/session"
"github.com/jzaikovs/t"
"github.com/jzaikovs/tokenbucket"
)
const (
HeaderXRateLimit = `X-Rate-Limit-Limit`
HeaderXRateLimitRemaining = `X-Rate-Limit-Remaining`
)
// RouteFunc is function type used in routes
type RouteFunc func(Context)
// Route handles single route
type Route struct {
app Router
patternStr string
pattern *regexp.Regexp
callback RouteFunc
method string
handler bool
jsonRequest bool
noCache bool
// authorized user test config
authRequest bool // to call route function, session must be authorized
redirect string // if doredirect set then redirects to redirect value
doredirect bool
// rate-limits for guest and authorized user
limits *tokenbucket.Buckets // this is rate limit for each IP address
limitsAuth *tokenbucket.Buckets
rules []func(context Context) error
validateCSRFToken bool
emitCSRFToken bool
needs []string
}
// newRoute creates a route for the given method and URL pattern, compiling
// the pattern as a regular expression (regexp.MustCompile panics on an
// invalid pattern).
func newRoute(method, pattern string, callback RouteFunc, router Router) *Route {
	return &Route{
		app:        router,
		patternStr: pattern,
		pattern:    regexp.MustCompile(pattern),
		callback:   callback,
		method:     method,
	}
}
// ReqAuth marks the route so it can be accessed only by an authorized
// session. The first optional argument, when given, is the location to
// which unauthorized requests are redirected instead of receiving 401.
func (route *Route) ReqAuth(args ...string) *Route {
	route.authRequest = true
	if len(args) == 0 {
		return route
	}
	route.redirect = args[0]
	route.doredirect = true
	return route
}
// Need adds a validation rule requiring every listed field to be present
// in the request data; e.g. a sign-in route can require login and password.
func (route *Route) Need(fields ...string) *Route {
	validator := func(context Context) error {
		data := context.Data()
		for _, field := range fields {
			if _, present := data[field]; !present {
				// TODO: if string, then check for empty string?
				return fmt.Errorf("field [%s] required", field)
			}
		}
		return nil
	}
	route.rules = append(route.rules, validator)
	return route
}
// RateLimitAuth sets routes maximum request rate per time for authorized users
// (a token bucket of the given capacity and refill rate per remote address —
// exact units depend on the tokenbucket package; confirm there).
func (route *Route) RateLimitAuth(rate, per float32) *Route {
	route.limitsAuth = tokenbucket.NewBuckets(int(rate), rate/per)
	return route
}
// RateLimit sets routes maximum request rate per second from specific remote IP
// for unauthorized (guest) requests, using a token bucket per remote address.
func (route *Route) RateLimit(rate, per float32) *Route {
	route.limits = tokenbucket.NewBuckets(int(rate), rate/per)
	return route
}
// Match adds a validation rule requiring the two named input fields to
// carry identical string values (e.g. password / password-confirmation).
func (route *Route) Match(nameA, nameB string) *Route {
	rule := func(context Context) error {
		data := context.Data()
		if data.Str(nameA) == data.Str(nameB) {
			return nil
		}
		return fmt.Errorf(`field [%s] not match field [%s]`, nameA, nameB)
	}
	route.rules = append(route.rules, rule)
	return route
}
// JSON adds validation for request content so that only requests with
// content type json is handled; other content types receive
// 415 Unsupported Media Type (see handle).
func (route *Route) JSON() *Route {
	route.jsonRequest = true
	return route
}
// NoCache marks request handler output of route will not be cached in any way
// to client will be sent headers to not cache response
// (handle sets If-Modified-Since and Cache-Control: no-cache).
func (route *Route) NoCache() *Route {
	route.noCache = true
	return route
}
// CSRF route option for setting CSRF validations:
// emit - generate a fresh token into the session and the "_csrf" cookie;
// need - require a matching "_csrf" cookie before the handler runs
// (the token is consumed on use; see handle).
func (route *Route) CSRF(emit, need bool) *Route {
	route.emitCSRFToken = emit
	route.validateCSRFToken = need
	return route
}
// exeedsRateLimit reports whether the request at time t from the context's
// remote address has exhausted the route's token bucket; when allowed, it
// attaches X-Rate-Limit headers describing capacity and remaining space.
// NOTE(review): name has a typo ("exeeds" -> "exceeds"); renaming requires
// updating the caller in handle().
func (route *Route) exeedsRateLimit(context Context, t time.Time) bool {
	// if session is authorized then check auth rate limits
	if context.Session().IsAuth() {
		if route.limitsAuth != nil {
			space, ok := route.limitsAuth.Add(context.RemoteAddr(), t)
			if !ok { // bucket exhausted: authorized user over the limit
				return true
			}
			context.AddHeader(HeaderXRateLimit, route.limitsAuth.Capacity())
			context.AddHeader(HeaderXRateLimitRemaining, space)
		}
		return false
	}
	// guest (unauthorized) limits, tracked per remote IP
	if route.limits != nil {
		space, ok := route.limits.Add(context.RemoteAddr(), t)
		if !ok { // reached guest rate limit for IP, too many requests from this IP
			return true
		}
		context.AddHeader(HeaderXRateLimit, route.limits.Capacity())
		context.AddHeader(HeaderXRateLimitRemaining, space)
	}
	return false
}
// handle runs the full request pipeline for this route: content-type check,
// session linkage, rate limiting, authorization, CSRF validation, custom
// rules, and finally the route callback. Buffered output is flushed on return.
func (route *Route) handle(args []t.T, startTime time.Time, context Context) {
	// ensure buffered response data is written out when we are done
	defer context.Flush()

	// route asks for JSON as content type
	if route.jsonRequest {
		if context.ContentType() != ContentType_JSON {
			context.Response(Response_Unsupported_Media_Type)
			return
		}
	}

	// connect our request to session manager
	context.linkArgs(args)
	context.linkSession(session.New(context))
	// defer some cleanup when done routing
	defer context.Session().Unlink()

	// testing rate limits (buckets keyed by remote IP, see exeedsRateLimit)
	// TODO: need testing
	if route.exeedsRateLimit(context, startTime) {
		context.Response(Response_Too_Many_Requests)
		return
	}

	// route has a flag that the session must be authorized to access it
	if route.authRequest && !context.Session().IsAuth() {
		// if a redirect target was configured, send unauthorized users there
		if route.doredirect {
			context.Redirect(route.redirect)
			return
		}
		// else just say that we are unauthorized
		context.Response(Response_Unauthorized)
		return
	}

	if route.validateCSRFToken {
		csrf, ok := context.CookieValue("_csrf")
		if !ok || len(csrf) == 0 || csrf != context.Session().Data.Str("_csrf") {
			context.Response(Response_Forbidden) // TODO: what is best status code for CSRF violation
			return
		}
		// token is single-use: remove it from the session and clear the cookie
		delete(context.Session().Data, "_csrf")
		context.SetCookieValue("_csrf", "")
	}

	// expose session auth status in request data for handlers/templates
	// TODO: need some mark to identify core added data, example, $is_auth, $base_url, etc..
	context.addData("is_auth", context.Session().IsAuth())

	if route.noCache {
		// headers keeping clients (notably IE with JSON responses) from caching
		context.AddHeader("If-Modified-Since", "01 Jan 1970 00:00:00 GMT")
		context.AddHeader("Cache-Control", "no-cache")
	}

	// TODO: verify that route is good way to emit CSRF tokens
	if route.emitCSRFToken {
		// generate a random CSRF token, stored in both session and cookie
		b := make([]byte, 16)
		rand.Read(b)
		csrf := Base64Encode(b)
		context.Session().Data["_csrf"] = csrf
		context.SetCookieValue("_csrf", csrf)
	}

	// validate all added rules; the first failing rule aborts with Bad Request
	for _, rule := range route.rules {
		if err := rule(context); err != nil {
			loggy.Warning.Println(context.RemoteAddr(), err)
			context.WriteJSON(DefaultConfig.err_object_func(Response_Bad_Request, err))
			context.Response(Response_Bad_Request)
			return
		}
	}

	// call route function
	route.callback(context)
}
<|file_name|>main.py<|end_file_name|><|fim▁begin|>"""
Ludolph: Monitoring Jabber Bot
Copyright (C) 2012-2017 Erigones, s. r. o.
This file is part of Ludolph.
See the LICENSE file for copying permission.
"""
import os
import re
import sys
import signal
import logging
from collections import namedtuple
try:
# noinspection PyCompatibility,PyUnresolvedReferences
from configparser import RawConfigParser
except ImportError:
# noinspection PyCompatibility,PyUnresolvedReferences
from ConfigParser import RawConfigParser
try:
# noinspection PyCompatibility
from importlib import reload
except ImportError:
# noinspection PyUnresolvedReferences
from imp import reload
from ludolph.utils import parse_loglevel
from ludolph.bot import LudolphBot
from ludolph.plugins.plugin import LudolphPlugin
from ludolph import __version__
# Log line format used by logging.basicConfig() in start().
LOGFORMAT = '%(asctime)s %(levelname)-8s %(name)s: %(message)s'
logger = logging.getLogger('ludolph.main')
# Descriptor for a loaded plugin: config section name, module path, plugin class.
Plugin = namedtuple('Plugin', ('name', 'module', 'cls'))
def daemonize():
    """Detach from the controlling terminal and run in the background
    as a daemon, using the classic UNIX double-fork technique.

    Closes all inherited file descriptors and redirects stdin/stdout/stderr
    to /dev/null. Returns 0 in the daemonized (grand)child process; the
    intermediate parents exit via sys.exit().

    http://code.activestate.com/recipes/278731-creating-a-daemon-the-python-way/
    http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/
    """
    try:
        pid = os.fork()  # Fork #1
        if pid > 0:
            sys.exit(0)  # Exit first parent
    except OSError as e:
        sys.stderr.write('Fork #1 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)

    # The first child. Decouple from parent environment.
    # Become session leader of this new session.
    # Also be guaranteed not to have a controlling terminal.
    os.chdir('/')
    # noinspection PyArgumentList
    os.setsid()
    os.umask(0o022)

    try:
        pid = os.fork()  # Fork #2
        if pid > 0:
            sys.exit(0)  # Exit from second parent
    except OSError as e:
        sys.stderr.write('Fork #2 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)

    # Close all open file descriptors up to the process fd limit
    import resource  # Resource usage information
    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if maxfd == resource.RLIM_INFINITY:
        maxfd = 1024

    # os.closerange() closes fds [0, maxfd) ignoring those that are not
    # open — same semantics as the per-fd loop, but done in C instead of
    # one Python call (and possible exception) per descriptor.
    os.closerange(0, maxfd)

    # Redirect standard file descriptors to /dev/null
    sys.stdout.flush()
    sys.stderr.flush()
    si = open(os.devnull, 'r')
    so = open(os.devnull, 'a+')
    se = open(os.devnull, 'a+')
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    return 0
def start():
    """
    Start the daemon: locate and parse the configuration file, configure
    logging, optionally daemonize and write a pidfile, import plugins,
    install signal handlers (SIGINT/SIGTERM shutdown, SIGHUP reload) and
    run the XMPP bot until it disconnects or is shut down.
    """
    ret = 0
    cfg = 'ludolph.cfg'
    cfg_fp = None
    # Config file search order: ~/.ludolph.cfg, <sys.prefix>/etc/ludolph.cfg, /etc/ludolph.cfg
    cfg_lo = ((os.path.expanduser('~'), '.' + cfg), (sys.prefix, 'etc', cfg), ('/etc', cfg))
    # Sections consumed by the core; any other section is treated as a plugin name.
    config_base_sections = ('global', 'xmpp', 'webserver', 'cron', 'ludolph.bot')

    # Try to read config file from ~/.ludolph.cfg or /etc/ludolph.cfg
    for i in cfg_lo:
        try:
            cfg_fp = open(os.path.join(*i))
        except IOError:
            continue
        else:
            break

    if not cfg_fp:
        sys.stderr.write("""\nLudolph can't start!\n
You need to create a config file in one these locations: \n%s\n
You can rename ludolph.cfg.example and update the required options.
The example file is located in: %s\n\n""" % (
            '\n'.join([os.path.join(*i) for i in cfg_lo]),
            os.path.dirname(os.path.abspath(__file__))))
        sys.exit(1)

    # Read and parse configuration
    # noinspection PyShadowingNames
    def load_config(fp, reopen=False):
        # Parse the config file; on reopen (SIGHUP reload) re-open it by name
        # because the original file object has already been consumed and closed.
        config = RawConfigParser()
        if reopen:
            fp = open(fp.name)

        try:  # config.readfp() is Deprecated since python 3.2
            # noinspection PyDeprecation
            read_file = config.readfp
        except AttributeError:
            read_file = config.read_file

        read_file(fp)
        fp.close()

        return config

    config = load_config(cfg_fp)

    # Prepare logging configuration
    logconfig = {
        'level': parse_loglevel(config.get('global', 'loglevel')),
        'format': LOGFORMAT,
    }

    if config.has_option('global', 'logfile'):
        logfile = config.get('global', 'logfile').strip()
        if logfile:
            logconfig['filename'] = logfile

    # Daemonize before writing the pidfile so the daemon's PID is recorded
    if config.has_option('global', 'daemon'):
        if config.getboolean('global', 'daemon'):
            ret = daemonize()

    # Save pid file
    if config.has_option('global', 'pidfile'):
        try:
            with open(config.get('global', 'pidfile'), 'w') as fp:
                fp.write('%s' % os.getpid())
        except Exception as ex:
            # Setup logging just to show this error
            logging.basicConfig(**logconfig)
            logger.critical('Could not write to pidfile (%s)\n', ex)
            sys.exit(1)

    # Setup logging
    logging.basicConfig(**logconfig)

    # All unhandled exceptions will be logged without exiting the process
    def log_except_hook(*exc_info):
        logger.critical('Unhandled exception!', exc_info=exc_info)

    sys.excepthook = log_except_hook

    # Default configuration
    use_tls = True
    use_ssl = False
    address = []

    # Starting
    logger.info('Starting Ludolph %s (%s %s)', __version__, sys.executable, sys.version.split()[0])
    logger.info('Loaded configuration from %s', cfg_fp.name)

    # Load plugins
    # noinspection PyShadowingNames
    def load_plugins(config, reinit=False):
        # Import every non-core config section as a plugin class; a bare name
        # maps to ludolph.plugins.<name>, a dotted name is used as-is.
        # On reinit (SIGHUP reload) previously imported modules are reloaded.
        plugins = []

        for config_section in config.sections():
            config_section = config_section.strip()

            if config_section in config_base_sections:
                continue

            # Parse other possible imports
            parsed_plugin = config_section.split('.')

            if len(parsed_plugin) == 1:
                modname = 'ludolph.plugins.' + config_section
                plugin = config_section
            else:
                modname = config_section
                plugin = parsed_plugin[-1]

            logger.info('Loading plugin: %s', modname)

            try:
                # Translate super_ludolph_plugin into SuperLudolphPlugin
                clsname = plugin[0].upper() + re.sub(r'_+([a-zA-Z0-9])', lambda m: m.group(1).upper(), plugin[1:])
                module = __import__(modname, fromlist=[clsname])

                if reinit and getattr(module, '_loaded_', False):
                    reload(module)

                # _loaded_ marks the module so a later reload is detected
                module._loaded_ = True
                imported_class = getattr(module, clsname)

                if not issubclass(imported_class, LudolphPlugin):
                    raise TypeError('Plugin: %s is not LudolphPlugin instance' % modname)

                plugins.append(Plugin(config_section, modname, imported_class))
            except Exception as ex:
                # A broken plugin is logged and skipped; the bot still starts
                logger.exception(ex)
                logger.critical('Could not load plugin: %s', modname)

        return plugins

    plugins = load_plugins(config)

    # XMPP connection settings; without an explicit host a DNS SRV lookup is used
    if config.has_option('xmpp', 'host'):
        address = [config.get('xmpp', 'host'), '5222']
        if config.has_option('xmpp', 'port'):
            address[1] = config.get('xmpp', 'port')
        logger.info('Connecting to jabber server %s', ':'.join(address))
    else:
        logger.info('Using DNS SRV lookup to find jabber server')

    if config.has_option('xmpp', 'tls'):
        use_tls = config.getboolean('xmpp', 'tls')

    if config.has_option('xmpp', 'ssl'):
        use_ssl = config.getboolean('xmpp', 'ssl')

    # Here we go
    xmpp = LudolphBot(config, plugins=plugins)

    signal.signal(signal.SIGINT, xmpp.shutdown)
    signal.signal(signal.SIGTERM, xmpp.shutdown)

    if hasattr(signal, 'SIGHUP'):  # Windows does not support SIGHUP - bug #41
        # noinspection PyUnusedLocal,PyShadowingNames
        def sighup(signalnum, handler):
            # Reload configuration and plugins; the reloading flag guards
            # against overlapping reloads from repeated signals.
            if xmpp.reloading:
                logger.warning('Reload already in progress')
            else:
                xmpp.reloading = True

                try:
                    config = load_config(cfg_fp, reopen=True)
                    logger.info('Reloaded configuration from %s', cfg_fp.name)
                    xmpp.prereload()
                    plugins = load_plugins(config, reinit=True)
                    xmpp.reload(config, plugins=plugins)
                finally:
                    xmpp.reloading = False

        signal.signal(signal.SIGHUP, sighup)
        # signal.siginterrupt(signal.SIGHUP, false)  # http://stackoverflow.com/a/4302037

    if xmpp.client.connect(tuple(address), use_tls=use_tls, use_ssl=use_ssl):
        # process(block=True) runs the event loop until disconnect/shutdown
        xmpp.client.process(block=True)
        sys.exit(ret)
    else:
        logger.error('Ludolph is unable to connect to jabber server')
        sys.exit(2)
if __name__ == '__main__':
start()<|fim▁end|> | |
<|file_name|>selfkeyword.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------------------------------------
Function:
【整理】Python中:self和init__的含义 + 为何要有self和__init__
http://www.crifan.com/summary_the_meaning_of_self_and___init___in_python_and_why_need_them
Author: Crifan
Verison: 2012-11-27
-------------------------------------------------------------------------------
"""
#注:此处全局的变量名,写成name,只是为了演示而用
#实际上,好的编程风格,应该写成gName之类的名字,以表示该变量是Global的变量
name = "whole global name";
class Person:
name = "class global name"
def __init__(self, newPersionName):
#self.name = newPersionName;
#此处,没有使用self.name
#而使得此处的name,实际上仍是局部变量name
#虽然此处赋值了,但是后面没有被利用到,属于被浪费了的局部变量name
self.name = newPersionName;
    def sayYourName(self):
        # No AttributeError can occur here: even if the instance lacked a
        # ``name`` attribute, lookup would fall back to the class attribute
        # Person.name. Since __init__ sets self.name, the instance value
        # is what gets printed.
        print 'My name is %s'%(self.name); # -> name passed to __init__ (instance attribute)
        # A bare ``name`` inside a method body refers to the module-level
        # global, NOT the class attribute of the same name.
        print 'name within class Person is actually the global name: %s'%(name); #-> whole global name
        print "only access Person's name via Person.name=%s"%(Person.name); # -> class global name
def changeGlobalName(self,newName):
global name <|fim▁hole|>class Child(Person):
    def say(self):
        # Same lookup rules as Person.sayYourName: self.name finds the
        # instance attribute (set via the inherited Person.__init__),
        # falling back to the class attribute if it were missing.
        print 'My name is %s'%(self.name); # -> name passed to __init__ (instance attribute)
        # A bare ``name`` inside a method refers to the module-level global.
        print 'name within class Person is actually the global name: %s'%(name); #-> whole global name
        print "only access Person's name via Person.name=%s"%(Person.name); # -> class global name
#def __init__(self):
def selfAndInitDemo():
persionInstance = Person("crifan");
persionInstance.sayYourName();
personInstance2 = Person("michael")
personInstance2.sayYourName()
personInstance2.changeGlobalName("newName")
personInstance2.sayYourName()
print "whole global name is %s"%(name); # -> whole global name
child = Child('child')
child.say()
###############################################################################
# Run the demonstration when executed as a script.
if __name__=="__main__":
    selfAndInitDemo();
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.