prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>54388.cc<|end_file_name|><|fim▁begin|>// { dg-do run { target c++11 } }
//
// Copyright (C) 2012-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include <array><|fim▁hole|>struct A
{
bool valid = true;
~A() { valid = false; }
};
void
test01()
{
const std::array<A, 1> a;
const A& aa = a.at(0);
VERIFY(aa.valid);
}
int main()
{
test01();
}<|fim▁end|>
|
#include <testsuite_hooks.h>
|
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>extern crate edit_distance;
#[test]
fn simple() {
assert_eq!(edit_distance::edit_distance("kitten", "sitting"), 3);
assert_eq!(edit_distance::edit_distance("Tier", "Tor"), 2);
}
#[test]
fn same() {
assert_eq!(edit_distance::edit_distance("kitten", "kitten"), 0);
}
#[test]
fn empty_a() {
assert_eq!(edit_distance::edit_distance("", "kitten"), 6);
}
#[test]<|fim▁hole|> assert_eq!(edit_distance::edit_distance("sitting", ""), 7);
}
#[test]
fn empty_both() {
assert_eq!(edit_distance::edit_distance("", ""), 0);
}
#[test]
fn unicode_misc() {
assert_eq!(edit_distance::edit_distance("üö", "uo"), 2);
}
#[test]
fn unicode_thai() {
assert_eq!(edit_distance::edit_distance("ฎ ฏ ฐ", "a b c"), 3);
}
#[test]
fn unicode_misc_equal() {
assert_eq!(
edit_distance::edit_distance("☀☂☃☄", "☀☂☃☄"),
0
);
}
extern crate quickcheck;
use quickcheck::quickcheck;
#[test]
fn at_least_size_difference_property() {
fn at_least_size_difference(a: String, b: String) -> bool {
let size_a = a.chars().count();
let size_b = b.chars().count();
let diff = if size_a > size_b {
size_a - size_b
} else {
size_b - size_a
};
edit_distance::edit_distance(&a, &b) >= diff
}
quickcheck(at_least_size_difference as fn(a: String, b: String) -> bool);
}
#[test]
fn at_most_length_of_longer_property() {
fn at_most_size_of_longer(a: String, b: String) -> bool {
let upper_bound = *[a.chars().count(), b.chars().count()].iter().max().unwrap();
edit_distance::edit_distance(&a, &b) <= upper_bound
}
quickcheck(at_most_size_of_longer as fn(a: String, b: String) -> bool);
}
#[test]
fn zero_iff_a_equals_b_property() {
fn zero_iff_a_equals_b(a: String, b: String) -> bool {
let d = edit_distance::edit_distance(&a, &b);
if a == b {
d == 0
} else {
d > 0
}
}
quickcheck(zero_iff_a_equals_b as fn(a: String, b: String) -> bool);
}
#[test]
fn triangle_inequality_property() {
fn triangle_inequality(a: String, b: String, c: String) -> bool {
edit_distance::edit_distance(&a, &b)
<= edit_distance::edit_distance(&a, &c) + edit_distance::edit_distance(&b, &c)
}
quickcheck(triangle_inequality as fn(a: String, b: String, c: String) -> bool);
}<|fim▁end|>
|
fn empty_b() {
|
<|file_name|>api.py<|end_file_name|><|fim▁begin|>import detectlanguage
<|fim▁hole|>
def simple_detect(data):
result = detect(data)
return result[0]['language']
def user_status():
return detectlanguage.client.get('user/status')
def languages():
return detectlanguage.client.get('languages')<|fim▁end|>
|
def detect(data):
result = detectlanguage.client.post('detect', { 'q': data })
return result['data']['detections']
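# Illustrative usage sketch (not part of the original file): the
# `configuration.api_key` attribute is assumed from the client's conventions
# and the key below is a placeholder.
#   detectlanguage.configuration.api_key = "YOUR_API_KEY"
#   simple_detect("Buenos dias")  # -> "es"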
|
<|file_name|>test_query_transform.py<|end_file_name|><|fim▁begin|># LIBRARIES
from django.db import models, connections, connection as default_connection
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.query import Q
from google.appengine.api import datastore
# DJANGAE
from djangae.db.backends.appengine.query import transform_query, Query, WhereNode
from djangae.test import TestCase
DEFAULT_NAMESPACE = default_connection.ops.connection.settings_dict.get("NAMESPACE")
class TransformTestModel(models.Model):
field1 = models.CharField(max_length=255)
field2 = models.CharField(max_length=255, unique=True)
field3 = models.CharField(null=True, max_length=255)
field4 = models.TextField()
class Meta:
app_label = "djangae"
class InheritedModel(TransformTestModel):
class Meta:
app_label = "djangae"
class TransformQueryTest(TestCase):
def test_polymodel_filter_applied(self):
query = transform_query(
connections['default'],
InheritedModel.objects.filter(field1="One").all().query
)
query.prepare()
self.assertEqual(2, len(query.where.children))
self.assertTrue(query.where.children[0].children[0].is_leaf)
self.assertTrue(query.where.children[1].children[0].is_leaf)
self.assertEqual("class", query.where.children[0].children[0].column)
self.assertEqual("field1", query.where.children[1].children[0].column)
def test_basic_query(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.all().query
)
self.assertEqual(query.model, TransformTestModel)
self.assertEqual(query.kind, 'SELECT')
self.assertEqual(query.tables, [ TransformTestModel._meta.db_table ])
self.assertIsNone(query.where)
def test_and_filter(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.filter(field1="One", field2="Two").all().query
)
self.assertEqual(query.model, TransformTestModel)
self.assertEqual(query.kind, 'SELECT')
self.assertEqual(query.tables, [ TransformTestModel._meta.db_table ])
self.assertTrue(query.where)
self.assertEqual(2, len(query.where.children)) # Two child nodes
def test_exclude_filter(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.exclude(field1="One").all().query
)
self.assertEqual(query.model, TransformTestModel)
self.assertEqual(query.kind, 'SELECT')
self.assertEqual(query.tables, [ TransformTestModel._meta.db_table ])
self.assertTrue(query.where)
self.assertEqual(1, len(query.where.children)) # One child node
self.assertTrue(query.where.children[0].negated)
self.assertEqual(1, len(query.where.children[0].children))
def test_ordering(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.filter(field1="One", field2="Two").order_by("field1", "-field2").query
)
self.assertEqual(query.model, TransformTestModel)
self.assertEqual(query.kind, 'SELECT')
self.assertEqual(query.tables, [ TransformTestModel._meta.db_table ])
self.assertTrue(query.where)
self.assertEqual(2, len(query.where.children)) # Two child nodes
self.assertEqual(["field1", "-field2"], query.order_by)
def test_projection(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.only("field1").query
)
self.assertItemsEqual(["id", "field1"], query.columns)
query = transform_query(
connections['default'],
TransformTestModel.objects.values_list("field1").query
)
self.assertEqual(set(["field1"]), query.columns)
query = transform_query(
connections['default'],
TransformTestModel.objects.defer("field1", "field4").query
)
self.assertItemsEqual(set(["id", "field2", "field3"]), query.columns)
def test_no_results_returns_emptyresultset(self):
self.assertRaises(
EmptyResultSet,
transform_query,
connections['default'],
TransformTestModel.objects.none().query
)
def test_offset_and_limit(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.all()[5:10].query
)
self.assertEqual(5, query.low_mark)
self.assertEqual(10, query.high_mark)
def test_isnull(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.filter(field3__isnull=True).all()[5:10].query
)
self.assertTrue(query.where.children[0].value)<|fim▁hole|> def test_distinct(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.distinct("field2", "field3").query
)
self.assertTrue(query.distinct)
self.assertEqual(query.columns, set(["field2", "field3"]))
query = transform_query(
connections['default'],
TransformTestModel.objects.distinct().values("field2", "field3").query
)
self.assertTrue(query.distinct)
self.assertEqual(query.columns, set(["field2", "field3"]))
def test_order_by_pk(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.order_by("pk").query
)
self.assertEqual("__key__", query.order_by[0])
query = transform_query(
connections['default'],
TransformTestModel.objects.order_by("-pk").query
)
self.assertEqual("-__key__", query.order_by[0])
def test_reversed_ordering(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.order_by("pk").reverse().query
)
self.assertEqual("-__key__", query.order_by[0])
def test_clear_ordering(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.order_by("pk").order_by().query
)
self.assertFalse(query.order_by)
def test_projection_on_textfield_disabled(self):
query = transform_query(
connections['default'],
TransformTestModel.objects.values_list("field4").query
)
self.assertFalse(query.columns)
self.assertFalse(query.projection_possible)
from djangae.tests.test_connector import Relation
from djangae.db.backends.appengine.dnf import normalize_query
class QueryNormalizationTests(TestCase):
"""
The parse_dnf function takes a Django where tree, and converts it
into a tree of one of the following forms:
[ (column, operator, value), (column, operator, value) ] <- AND only query
[ [(column, operator, value)], [(column, operator, value) ]] <- OR query, of multiple ANDs
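
An illustrative example (not from the original test suite): a filter such as
Q(a=1) & (Q(b=2) | Q(c=3)) is normalized by distributing the AND over the OR,
giving [ [("a", "=", 1), ("b", "=", 2)], [("a", "=", 1), ("c", "=", 3)] ].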
"""
def test_and_with_child_or_promoted(self):
from .test_connector import TestUser
"""
Given the following tree:

         AND
        / | \
       A  B  OR
            /  \
           C    D

The OR should be promoted, so the resulting tree is

            OR
          /    \
        AND     AND
       / | \   / | \
      A  B  C  A  B  D
"""
query = Query(TestUser, "SELECT")
query.where = WhereNode()
query.where.children.append(WhereNode())
query.where.children[-1].column = "A"
query.where.children[-1].operator = "="
query.where.children.append(WhereNode())
query.where.children[-1].column = "B"
query.where.children[-1].operator = "="
query.where.children.append(WhereNode())
query.where.children[-1].connector = "OR"
query.where.children[-1].children.append(WhereNode())
query.where.children[-1].children[-1].column = "C"
query.where.children[-1].children[-1].operator = "="
query.where.children[-1].children.append(WhereNode())
query.where.children[-1].children[-1].column = "D"
query.where.children[-1].children[-1].operator = "="
query = normalize_query(query)
self.assertEqual(query.where.connector, "OR")
self.assertEqual(2, len(query.where.children))
self.assertFalse(query.where.children[0].is_leaf)
self.assertFalse(query.where.children[1].is_leaf)
self.assertEqual(query.where.children[0].connector, "AND")
self.assertEqual(query.where.children[1].connector, "AND")
self.assertEqual(3, len(query.where.children[0].children))
self.assertEqual(3, len(query.where.children[1].children))
def test_and_queries(self):
from .test_connector import TestUser
qs = TestUser.objects.filter(username="test").all()
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(1, len(query.where.children))
self.assertEqual(query.where.children[0].children[0].column, "username")
self.assertEqual(query.where.children[0].children[0].operator, "=")
self.assertEqual(query.where.children[0].children[0].value, "test")
qs = TestUser.objects.filter(username="test", email="[email protected]")
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(2, len(query.where.children[0].children))
self.assertEqual(query.where.connector, "OR")
self.assertEqual(query.where.children[0].connector, "AND")
self.assertEqual(query.where.children[0].children[0].column, "username")
self.assertEqual(query.where.children[0].children[0].operator, "=")
self.assertEqual(query.where.children[0].children[0].value, "test")
self.assertEqual(query.where.children[0].children[1].column, "email")
self.assertEqual(query.where.children[0].children[1].operator, "=")
self.assertEqual(query.where.children[0].children[1].value, "[email protected]")
qs = TestUser.objects.filter(username="test").exclude(email="[email protected]")
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(2, len(query.where.children[0].children))
self.assertEqual(query.where.connector, "OR")
self.assertEqual(query.where.children[0].connector, "AND")
self.assertEqual(query.where.children[0].children[0].column, "username")
self.assertEqual(query.where.children[0].children[0].operator, "=")
self.assertEqual(query.where.children[0].children[0].value, "test")
self.assertEqual(query.where.children[0].children[1].column, "email")
self.assertEqual(query.where.children[0].children[1].operator, "<")
self.assertEqual(query.where.children[0].children[1].value, "[email protected]")
self.assertEqual(query.where.children[1].children[0].column, "username")
self.assertEqual(query.where.children[1].children[0].operator, "=")
self.assertEqual(query.where.children[1].children[0].value, "test")
self.assertEqual(query.where.children[1].children[1].column, "email")
self.assertEqual(query.where.children[1].children[1].operator, ">")
self.assertEqual(query.where.children[1].children[1].value, "[email protected]")
instance = Relation(pk=1)
qs = instance.related_set.filter(headline__startswith='Fir')
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(2, len(query.where.children[0].children))
self.assertEqual(query.where.connector, "OR")
self.assertEqual(query.where.children[0].connector, "AND")
self.assertEqual(query.where.children[0].children[0].column, "relation_id")
self.assertEqual(query.where.children[0].children[0].operator, "=")
self.assertEqual(query.where.children[0].children[0].value, 1)
self.assertEqual(query.where.children[0].children[1].column, "_idx_startswith_headline")
self.assertEqual(query.where.children[0].children[1].operator, "=")
self.assertEqual(query.where.children[0].children[1].value, u"Fir")
def test_or_queries(self):
from .test_connector import TestUser
qs = TestUser.objects.filter(
username="python").filter(
Q(username__in=["ruby", "jruby"]) | (Q(username="php") & ~Q(username="perl"))
)
query = normalize_query(transform_query(
connections['default'],
qs.query
))
# After IN and != explosion, we have...
# (AND: (username='python', OR: (username='ruby', username='jruby', AND: (username='php', AND: (username < 'perl', username > 'perl')))))
# Working backwards,
# AND: (username < 'perl', username > 'perl') can't be simplified
# AND: (username='php', AND: (username < 'perl', username > 'perl')) can become (OR: (AND: username = 'php', username < 'perl'), (AND: username='php', username > 'perl'))
# OR: (username='ruby', username='jruby', (OR: (AND: username = 'php', username < 'perl'), (AND: username='php', username > 'perl')) can't be simplified
# (AND: (username='python', OR: (username='ruby', username='jruby', (OR: (AND: username = 'php', username < 'perl'), (AND: username='php', username > 'perl'))
# becomes...
# (OR: (AND: username='python', username = 'ruby'), (AND: username='python', username='jruby'), (AND: username='python', username='php', username < 'perl') \
# (AND: username='python', username='php', username > 'perl')
self.assertEqual(4, len(query.where.children))
self.assertEqual(query.where.children[0].connector, "AND")
self.assertEqual(query.where.children[0].children[0].column, "username")
self.assertEqual(query.where.children[0].children[0].operator, "=")
self.assertEqual(query.where.children[0].children[0].value, "python")
self.assertEqual(query.where.children[0].children[1].column, "username")
self.assertEqual(query.where.children[0].children[1].operator, "=")
self.assertEqual(query.where.children[0].children[1].value, "php")
self.assertEqual(query.where.children[0].children[2].column, "username")
self.assertEqual(query.where.children[0].children[2].operator, "<")
self.assertEqual(query.where.children[0].children[2].value, "perl")
self.assertEqual(query.where.children[1].connector, "AND")
self.assertEqual(query.where.children[1].children[0].column, "username")
self.assertEqual(query.where.children[1].children[0].operator, "=")
self.assertEqual(query.where.children[1].children[0].value, "python")
self.assertEqual(query.where.children[1].children[1].column, "username")
self.assertEqual(query.where.children[1].children[1].operator, "=")
self.assertEqual(query.where.children[1].children[1].value, "jruby")
self.assertEqual(query.where.children[2].connector, "AND")
self.assertEqual(query.where.children[2].children[0].column, "username")
self.assertEqual(query.where.children[2].children[0].operator, "=")
self.assertEqual(query.where.children[2].children[0].value, "python")
self.assertEqual(query.where.children[2].children[1].column, "username")
self.assertEqual(query.where.children[2].children[1].operator, "=")
self.assertEqual(query.where.children[2].children[1].value, "php")
self.assertEqual(query.where.children[2].children[2].column, "username")
self.assertEqual(query.where.children[2].children[2].operator, ">")
self.assertEqual(query.where.children[2].children[2].value, "perl")
self.assertEqual(query.where.connector, "OR")
self.assertEqual(query.where.children[3].connector, "AND")
self.assertEqual(query.where.children[3].children[0].column, "username")
self.assertEqual(query.where.children[3].children[0].operator, "=")
self.assertEqual(query.where.children[3].children[0].value, "python")
self.assertEqual(query.where.children[3].children[1].column, "username")
self.assertEqual(query.where.children[3].children[1].operator, "=")
self.assertEqual(query.where.children[3].children[1].value, "ruby")
qs = TestUser.objects.filter(username="test") | TestUser.objects.filter(username="cheese")
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(query.where.connector, "OR")
self.assertEqual(2, len(query.where.children))
self.assertTrue(query.where.children[0].is_leaf)
self.assertEqual("cheese", query.where.children[0].value)
self.assertTrue(query.where.children[1].is_leaf)
self.assertEqual("test", query.where.children[1].value)
qs = TestUser.objects.using("default").filter(username__in=set()).values_list('email')
with self.assertRaises(EmptyResultSet):
query = normalize_query(transform_query(
connections['default'],
qs.query
))
qs = TestUser.objects.filter(username__startswith='Hello') | TestUser.objects.filter(username__startswith='Goodbye')
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(2, len(query.where.children))
self.assertEqual("_idx_startswith_username", query.where.children[0].column)
self.assertEqual(u"Goodbye", query.where.children[0].value)
self.assertEqual("_idx_startswith_username", query.where.children[1].column)
self.assertEqual(u"Hello", query.where.children[1].value)
qs = TestUser.objects.filter(pk__in=[1, 2, 3])
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(3, len(query.where.children))
self.assertEqual("__key__", query.where.children[0].column)
self.assertEqual("__key__", query.where.children[1].column)
self.assertEqual("__key__", query.where.children[2].column)
self.assertEqual({
datastore.Key.from_path(TestUser._meta.db_table, 1, namespace=DEFAULT_NAMESPACE),
datastore.Key.from_path(TestUser._meta.db_table, 2, namespace=DEFAULT_NAMESPACE),
datastore.Key.from_path(TestUser._meta.db_table, 3, namespace=DEFAULT_NAMESPACE),
}, {
query.where.children[0].value,
query.where.children[1].value,
query.where.children[2].value,
}
)
qs = TestUser.objects.filter(pk__in=[1, 2, 3]).filter(username="test")
query = normalize_query(transform_query(
connections['default'],
qs.query
))
self.assertEqual(3, len(query.where.children))
self.assertEqual("__key__", query.where.children[0].children[0].column)
self.assertEqual("test", query.where.children[0].children[1].value)
self.assertEqual("__key__", query.where.children[1].children[0].column)
self.assertEqual("test", query.where.children[1].children[1].value)
self.assertEqual("__key__", query.where.children[2].children[0].column)
self.assertEqual("test", query.where.children[2].children[1].value)
self.assertEqual({
datastore.Key.from_path(TestUser._meta.db_table, 1, namespace=DEFAULT_NAMESPACE),
datastore.Key.from_path(TestUser._meta.db_table, 2, namespace=DEFAULT_NAMESPACE),
datastore.Key.from_path(TestUser._meta.db_table, 3, namespace=DEFAULT_NAMESPACE),
}, {
query.where.children[0].children[0].value,
query.where.children[1].children[0].value,
query.where.children[2].children[0].value,
}
)<|fim▁end|>
|
self.assertEqual("ISNULL", query.where.children[0].operator)
|
<|file_name|>on_PRIVMSG.py<|end_file_name|><|fim▁begin|>import shelve, time, random
def main(connection, info) :
"""This is the old plugin"""
#"""Run every time a message is seen"""
if info["message"].startswith("\x01ACTION") and info["message"].endswith("\x01") :
on_ACTION(connection, info)
return None
# if info["sender"] == "OperServ" :
# words = info["message"].split(" ")
# if words[0] == "REGISTER:" :
# newchannel = words[1].replace("\002", "")
# registeree = words[3].replace("\002", "")
# connection.rawsend("JOIN %s\n" % (newchannel))
# connection.rawsend("MODE %s +o %s\n" % (newchannel, conf.nick))
# connection.msg(newchannel, "Hello %s, I am sonicbot and I am here to help you with IRC." % (registeree))
seendb = shelve.open("seen.db", writeback=True)
if not seendb.has_key("users") :
seendb["users"] = {}
seendb.sync()
seendb["users"][info["sender"].lower()] = [time.time(), info["message"]]
seendb.sync()
seendb.close()
badwords = shelve.open("badwords.db", writeback=True)
if badwords.has_key(connection.host) :
if badwords[connection.host].has_key(info["channel"]) :
nosay = badwords[connection.host][info["channel"]]["badwords"]
for word in nosay :
if word in [message.replace(".", "").replace("!","").replace("?", "") for message in info["message"].lower().split(" ")] :
if info["sender"] not in badwords[connection.host][info["channel"]]["users"] :
badwords[connection.host][info["channel"]]["users"][info["sender"]] = 0
badwords.sync()
# if badwords[connection.host][info["channel"]]["users"][info["sender"]] > 0 :
# if info["sender"] in connection.hostnames.keys() :
# target = "*!*@%s" % (connection.hostnames[info["sender"]])
# else : target = "%s*!*@*" % (info["sender"])
# connection.rawsend("MODE %s +b %s\n" % (info["channel"], target))
connection.rawsend("KICK %s %s :%s (%s)\n" % (info["channel"], info["sender"], "Don't use that word!", word))
badwords[connection.host][info["channel"]]["users"][info["sender"]] += 1
badwords.sync()
badwords.close()
if info["sender"] not in connection.ignorelist :
if info["message"].lower().startswith("hi") or info["message"].lower().startswith("hello") or info["message"].lower().startswith("hey") :
if connection.nick.lower() in info["message"].lower() :
connection.msg(info["channel"], _("Hello %(sender)s!") % dict(sender=info["sender"]))
contextdb = shelve.open("context.db", writeback=True)
if not contextdb.has_key(info["channel"]) and info["channel"].startswith("#") :
contextdb[info["channel"]] = ["<%s> %s" % (info["sender"], info["message"])]
contextdb.sync()
elif contextdb.has_key(info["channel"]) :
contextdb[info["channel"]].append("<%s> %s" % (info["sender"], info["message"]))
contextdb.sync()
if len(contextdb[info["channel"]]) > 10 :
contextdb[info["channel"]].pop(0)
contextdb.sync()
contextdb.close()
memos = shelve.open("memos.db", writeback=True)
if memos.has_key(info["sender"].lower()) :
for memo in memos[info["sender"].lower()] :
connection.ircsend(info["channel"], "%(sender)s: %(memoer)s sent you a memo! '%(memo)s'" % {"sender":info["sender"], "memoer":memo["sender"], "memo":memo["message"]})
memos[info["sender"].lower()] = []
memos.sync()
memos.close()
<|fim▁hole|># combos = shelve.open("combos.db", writeback=True)
# if info["channel"] not in combos.keys() :
# combos[info["channel"]] = []
# combos.sync()
# combos[info["channel"]].append(info["message"])
# combos.sync()
# if len(combos[info["channel"]]) > 3 :
# combos[info["channel"]].pop(0)
# combos.sync()
# if len(combos[info["channel"]]) == 3 :
# temp = combos[info["channel"]]
# if temp[1].lower().startswith(temp[0].lower()) and temp[2].lower().startswith(temp[0].lower()) :
# connection.msg(info["channel"], temp[0])
# del combos[info["channel"]]
# combos.sync()
# combos.close()
if info["message"].startswith("PING") : connection.notice(info["sender"], info["message"])
mail = shelve.open("mail.db", writeback=True)
if info["sender"].replace("[", "").replace("]", "") in mail.keys() :
if info["hostname"] in mail[info["sender"].replace("[", "").replace("]", "")]["hostname"] :
if mail[info["sender"].replace("[", "").replace("]", "")]["notify"] :
connection.msg(info["sender"], _("You have new mail."))
mail[info["sender"].replace("[", "").replace("]", "")]["notify"] = False
mail.sync()
mail.close()
emotions = shelve.open("emotions.db", writeback=True)
info["sender"] = info["sender"].lower()
if info["sender"].lower() not in emotions.keys() and happiness_detect(info) :
emotions[info["sender"].lower()] = {}
emotions.sync()
emotions[info["sender"].lower()]["happy"] = 0
emotions.sync()
emotions[info["sender"].lower()]["sad"] = 0
emotions.sync()
if info["sender"].lower() in emotions.keys() :
for emotion in [":)", ":D", "C:", "=D", ";p", "=)", "C=", "(=", "(:", "xD", "=p", ":p"] :
if emotion in info["message"] :
emotions[info["sender"].lower()]["happy"] += 1
emotions.sync()
break
for emotion in [":(", "D:", "=(", "D=", "):", ")=", "=C", ":C"] :
if emotion in info["message"] :
emotions[info["sender"].lower()]["sad"] += 1
emotions.sync()
break
if ":P" in info["message"] :
emotions[info["sender"].lower()]["happy"] += .5
emotions.sync()
emotions.close()
notify = shelve.open("notify.db", writeback=True)
if info["sender"] in notify.keys() :
temp = notify[info["sender"]]
for user in temp :
connection.msg(user, _("%(nick)s has just said something in %(channel)s") % dict(nick=info["sender"], channel=info["channel"]))
notify[info["sender"]].remove(user)
notify.sync()
if notify[info["sender"]] == [] :
del notify[info["sender"]]
notify.sync()
notify.close()
def happiness_detect(info) :
"""Checks to see if a smiley is in the message"""
for emotion in [":)", ":D", "C:", "=D", "=)", "C=", "(=", "(:", "xD", ":p", ";p", "=p", ":(", "D:", "=(", "D=", "):", ")=", "=C", ":C", ":P"] :
if emotion in info["message"] : return True
return False
def on_ACTION(connection, info) :
"""Runs every time somebody does an action (/me)"""
badwords = shelve.open("badwords.db", writeback=True)
if badwords.has_key(connection.host) :
if badwords[connection.host].has_key(info["channel"]) :
nosay = badwords[connection.host][info["channel"]]["badwords"]
for word in nosay :
if word in [message.replace(".", "").replace("!","").replace("?", "") for message in info["message"].lower().split(" ")] :
if info["sender"] not in badwords[connection.host][info["channel"]]["users"] :
badwords[connection.host][info["channel"]]["users"][info["sender"]] = 0
badwords.sync()
# if badwords[connection.host][info["channel"]]["users"][info["sender"]] > 0 :
# if info["sender"] in connection.hostnames.keys() :
# target = "*!*@%s" % (connection.hostnames[info["sender"]])
# else : target = "%s*!*@*" % (info["sender"])
# connection.rawsend("MODE %s +b %s\n" % (info["channel"], target))
connection.rawsend("KICK %s %s :%s (%s)\n" % (info["channel"], info["sender"], "Don't use that word!", word))
badwords[connection.host][info["channel"]]["users"][info["sender"]] += 1
badwords.sync()
badwords.close()
memos = shelve.open("memos.db", writeback=True)
if memos.has_key(info["sender"].lower()) :
for memo in memos[info["sender"].lower()] :
connection.ircsend(info["channel"], "%(sender)s: %(memoer)s sent you a memo! '%(memo)s'" % {"sender":info["sender"], "memoer":memo["sender"], "memo":memo["message"]})
memos[info["sender"].lower()] = []
memos.sync()
memos.close()
args = info["message"].replace("\x01", "").split(" ")[1:]
contextdb = shelve.open("context.db", writeback=True)
if not contextdb.has_key(info["channel"]) and info["channel"].startswith("#") :
contextdb[info["channel"]] = ["<%s> %s" % (info["sender"], info["message"])]
contextdb.sync()
elif contextdb.has_key(info["channel"]) :
contextdb[info["channel"]].append("*%s %s" % (info["sender"], " ".join(args).replace("", "")))
contextdb.sync()
if len(contextdb[info["channel"]]) > 10 :
contextdb[info["channel"]].pop(0)
contextdb.sync()
contextdb.close()
seendb = shelve.open("seen.db", writeback=True)
if not seendb.has_key("users") :
seendb["users"] = {}
seendb.sync()
seendb["users"][info["sender"].lower()] = [time.time(), "*%s %s" % (info["sender"], " ".join(args).replace("", ""))]
seendb.close()
if len(args) > 1 :
if args[0] in ["slaps", "punches", "stomps", "hurts", "rapes", "hits", "fucks", "smacks", "crunches", "kicks", "barfs", "forces", "force", "squishes", "bodyslams", "shoots", "compresses", "tackles", "stabs"] :
if args[1] == connection.nick or args[-1] == connection.nick :
connection.msg(info["channel"], random.choice(["Oww!", "Ouch, that hurt!", "\x01ACTION curls up in fetal position\x01", "\x01ACTION slaps %s\x01" % (info["sender"]), "\x01ACTION smacks %s\x01" % (info["sender"]), "\x01ACTION kicks %s\x01" % (info["sender"]), "\x01ACTION explodes\x01"]))
if len(args) > 1 :
if args[0].lower() == "hugs" and args[1] == connection.nick :
connection.msg(info["channel"], "\x01ACTION hugs %(sender)s\x01" % dict(sender=info["sender"]))<|fim▁end|>
|
# if info["sender"] not in conf.ignorelist and info["hostname"] not in conf.hostignores :
|
<|file_name|>Core.java<|end_file_name|><|fim▁begin|>package sample.multiversion;<|fim▁hole|> String getDependencyVersion();
}<|fim▁end|>
|
public interface Core {
String getVersion();
|
<|file_name|>cascadenik-style.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os.path
import optparse
import cascadenik
# monkey with sys.path due to some weirdness inside cssutils
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
from cssutils.tokenize2 import Tokenizer as cssTokenizer
def main(filename):
""" Given an input file containing nothing but styles, print out an
unrolled list of declarations in cascade order.
"""
input = open(filename, 'r').read()
declarations = cascadenik.stylesheet_declarations(input, is_merc=True)
for dec in declarations:
print dec.selector,
print '{',
print dec.property.name+':',
if cascadenik.style.properties[dec.property.name] in (cascadenik.style.color, cascadenik.style.boolean, cascadenik.style.numbers):
print str(dec.value.value)+';',
elif cascadenik.style.properties[dec.property.name] is cascadenik.style.uri:
print 'url("'+str(dec.value.value)+'");',
elif cascadenik.style.properties[dec.property.name] is str:
print '"'+str(dec.value.value)+'";',
elif cascadenik.style.properties[dec.property.name] in (int, float) or type(cascadenik.style.properties[dec.property.name]) is tuple:
print str(dec.value.value)+';',
print '}'
return 0
parser = optparse.OptionParser(usage="""cascadenik-style.py <style file>""")
if __name__ == '__main__':
(options, args) = parser.parse_args()
if not args:
parser.error('Please specify a .mss file')
stylefile = args[0]<|fim▁hole|><|fim▁end|>
|
if not stylefile.endswith('.mss'):
parser.error('Only accepts an .mss file')
sys.exit(main(stylefile))
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import isArray from 'lodash/isArray';
import { BaseModule } from '../baseModule';
import { HlrCheckResponse } from './types/HlrCheckResponse';
export class Hlr extends BaseModule {
async check(
numbers: string | string[],
idx?: string | string[]
): Promise<HlrCheckResponse | HlrCheckResponse[]> {
const params: Record<string, unknown> = {
number: isArray(numbers) ? numbers.join(',') : numbers,
};
if (idx) {
params.idx = isArray(idx) ? idx.join(',') : idx;
}<|fim▁hole|> HlrCheckResponse | HlrCheckResponse[],
HlrCheckResponse | HlrCheckResponse[]
>('/hlr.do', {
params: {
format: 'json',
...params,
},
});
}
}<|fim▁end|>
|
return await this.httpClient.get<
|
<|file_name|>_wdg_spec.js<|end_file_name|><|fim▁begin|>describe("OCombo:", function () {
var wtest, $p;
beforeEach(function () {
wtest = frames[0];
$p = wtest.$p;
});<|fim▁hole|>
it("The constructor should return an object of type OCombo", function () {
expect(typeof $p).toBe("object");
});
});<|fim▁end|>
| |
<|file_name|>storage.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { timeout } from 'vs/base/common/async';
import { Event } from 'vs/base/common/event';
import { mapToString, setToString } from 'vs/base/common/map';
import { basename } from 'vs/base/common/path';
import { Promises } from 'vs/base/node/pfs';
import { IStorageDatabase, IStorageItemsChangeEvent, IUpdateRequest } from 'vs/base/parts/storage/common/storage';
import type { Database, Statement } from '@vscode/sqlite3';
interface IDatabaseConnection {
readonly db: Database;
readonly isInMemory: boolean;
isErroneous?: boolean;
lastError?: string;
}
export interface ISQLiteStorageDatabaseOptions {
readonly logging?: ISQLiteStorageDatabaseLoggingOptions;
}
export interface ISQLiteStorageDatabaseLoggingOptions {
logError?: (error: string | Error) => void;
logTrace?: (msg: string) => void;
}
export class SQLiteStorageDatabase implements IStorageDatabase {
static readonly IN_MEMORY_PATH = ':memory:';
get onDidChangeItemsExternal(): Event<IStorageItemsChangeEvent> { return Event.None; } // since we are the only client, there can be no external changes
private static readonly BUSY_OPEN_TIMEOUT = 2000; // timeout in ms to retry when opening DB fails with SQLITE_BUSY
private static readonly MAX_HOST_PARAMETERS = 256; // maximum number of parameters within a statement
private readonly name = basename(this.path);
private readonly logger = new SQLiteStorageDatabaseLogger(this.options.logging);
private readonly whenConnected = this.connect(this.path);
constructor(private readonly path: string, private readonly options: ISQLiteStorageDatabaseOptions = Object.create(null)) { }
async getItems(): Promise<Map<string, string>> {
const connection = await this.whenConnected;
const items = new Map<string, string>();
const rows = await this.all(connection, 'SELECT * FROM ItemTable');
rows.forEach(row => items.set(row.key, row.value));
if (this.logger.isTracing) {
this.logger.trace(`[storage ${this.name}] getItems(): ${items.size} rows`);
}
return items;
}
async updateItems(request: IUpdateRequest): Promise<void> {
const connection = await this.whenConnected;
return this.doUpdateItems(connection, request);
}
private doUpdateItems(connection: IDatabaseConnection, request: IUpdateRequest): Promise<void> {
if (this.logger.isTracing) {
this.logger.trace(`[storage ${this.name}] updateItems(): insert(${request.insert ? mapToString(request.insert) : '0'}), delete(${request.delete ? setToString(request.delete) : '0'})`);
}
return this.transaction(connection, () => {
const toInsert = request.insert;
const toDelete = request.delete;
// INSERT
if (toInsert && toInsert.size > 0) {
const keysValuesChunks: (string[])[] = [];
keysValuesChunks.push([]); // seed with initial empty chunk
// Split key/values into chunks of SQLiteStorageDatabase.MAX_HOST_PARAMETERS
// so that we can efficiently run the INSERT with as many HOST parameters as possible
let currentChunkIndex = 0;
toInsert.forEach((value, key) => {
let keyValueChunk = keysValuesChunks[currentChunkIndex];
if (keyValueChunk.length > SQLiteStorageDatabase.MAX_HOST_PARAMETERS) {
currentChunkIndex++;
keyValueChunk = [];
keysValuesChunks.push(keyValueChunk);
}
keyValueChunk.push(key, value);
});
keysValuesChunks.forEach(keysValuesChunk => {
this.prepare(connection, `INSERT INTO ItemTable VALUES ${new Array(keysValuesChunk.length / 2).fill('(?,?)').join(',')}`, stmt => stmt.run(keysValuesChunk), () => {
const keys: string[] = [];
let length = 0;
toInsert.forEach((value, key) => {
keys.push(key);
length += value.length;
});
return `Keys: ${keys.join(', ')} Length: ${length}`;
});
});
}
// DELETE
if (toDelete && toDelete.size) {
const keysChunks: (string[])[] = [];
keysChunks.push([]); // seed with initial empty chunk
// Split keys into chunks of SQLiteStorageDatabase.MAX_HOST_PARAMETERS
// so that we can efficiently run the DELETE with as many HOST parameters
// as possible
let currentChunkIndex = 0;
toDelete.forEach(key => {
let keyChunk = keysChunks[currentChunkIndex];
if (keyChunk.length > SQLiteStorageDatabase.MAX_HOST_PARAMETERS) {
currentChunkIndex++;
keyChunk = [];
keysChunks.push(keyChunk);
}
keyChunk.push(key);
});
keysChunks.forEach(keysChunk => {
this.prepare(connection, `DELETE FROM ItemTable WHERE key IN (${new Array(keysChunk.length).fill('?').join(',')})`, stmt => stmt.run(keysChunk), () => {
const keys: string[] = [];
toDelete.forEach(key => {
keys.push(key);
});
return `Keys: ${keys.join(', ')}`;
});
});
}
});
}
async close(recovery?: () => Map<string, string>): Promise<void> {
this.logger.trace(`[storage ${this.name}] close()`);
const connection = await this.whenConnected;
return this.doClose(connection, recovery);
}
private doClose(connection: IDatabaseConnection, recovery?: () => Map<string, string>): Promise<void> {
return new Promise((resolve, reject) => {
connection.db.close(closeError => {
if (closeError) {
this.handleSQLiteError(connection, `[storage ${this.name}] close(): ${closeError}`);
}
// Return early if this storage was created only in-memory
// e.g. when running tests we do not need to backup.
if (this.path === SQLiteStorageDatabase.IN_MEMORY_PATH) {
return resolve();
}
// If the DB closed successfully and we are not running in-memory
// and the DB did not get errors during runtime, make a backup
// of the DB so that we can use it as fallback in case the actual
// DB becomes corrupt in the future.
if (!connection.isErroneous && !connection.isInMemory) {
return this.backup().then(resolve, error => {
this.logger.error(`[storage ${this.name}] backup(): ${error}`);
return resolve(); // ignore failing backup
});
}
// Recovery: if we detected errors while using the DB or we are using
// an in-memory DB (as a fallback to not being able to open the DB initially)
// and we have a recovery function provided, we recreate the DB with this
// data to recover all known data without loss if possible.
if (typeof recovery === 'function') {
// Delete the existing DB. If the path does not exist or fails to
// be deleted, we do not try to recover anymore because we assume
// that the path is no longer writeable for us.
return Promises.unlink(this.path).then(() => {
// Re-open the DB fresh
return this.doConnect(this.path).then(recoveryConnection => {
const closeRecoveryConnection = () => {
return this.doClose(recoveryConnection, undefined /* do not attempt to recover again */);
};
// Store items
return this.doUpdateItems(recoveryConnection, { insert: recovery() }).then(() => closeRecoveryConnection(), error => {
// In case of an error updating items, still ensure to close the connection
// to prevent SQLITE_BUSY errors when the connection is reestablished
closeRecoveryConnection();
return Promise.reject(error);
});
});
}).then(resolve, reject);
}
// Finally without recovery we just reject
return reject(closeError || new Error('Database has errors or is in-memory without recovery option'));
});
});
}
private backup(): Promise<void> {
const backupPath = this.toBackupPath(this.path);
return Promises.copy(this.path, backupPath, { preserveSymlinks: false });
}
private toBackupPath(path: string): string {
return `${path}.backup`;
}
async checkIntegrity(full: boolean): Promise<string> {
this.logger.trace(`[storage ${this.name}] checkIntegrity(full: ${full})`);
const connection = await this.whenConnected;
const row = await this.get(connection, full ? 'PRAGMA integrity_check' : 'PRAGMA quick_check');
const integrity = full ? (row as any)['integrity_check'] : (row as any)['quick_check'];
if (connection.isErroneous) {
return `${integrity} (last error: ${connection.lastError})`;
}
if (connection.isInMemory) {
return `${integrity} (in-memory!)`;
}
return integrity;
}
private async connect(path: string, retryOnBusy: boolean = true): Promise<IDatabaseConnection> {
this.logger.trace(`[storage ${this.name}] open(${path}, retryOnBusy: ${retryOnBusy})`);
try {
return await this.doConnect(path);
} catch (error) {
this.logger.error(`[storage ${this.name}] open(): Unable to open DB due to ${error}`);
// SQLITE_BUSY should only arise if another process is locking the same DB we want
// to open at that time. This typically never happens because a DB connection is
// limited per window. However, in the event of a window reload, it may be possible
// that the previous connection was not properly closed while the new connection is
// already established.
//
// In this case we simply wait for some time and retry once to establish the connection.
//
if (error.code === 'SQLITE_BUSY' && retryOnBusy) {
await timeout(SQLiteStorageDatabase.BUSY_OPEN_TIMEOUT);
return this.connect(path, false /* not another retry */);
}
// Otherwise, best we can do is to recover from a backup if that exists, as such we
// move the DB to a different filename and try to load from backup. If that fails,
// a new empty DB is being created automatically.
//
// The final fallback is to use an in-memory DB which should only happen if the target
// folder is really not writeable for us.
//
try {
await Promises.unlink(path);
try {
await Promises.rename(this.toBackupPath(path), path);
} catch (error) {
// ignore
}
return await this.doConnect(path);
} catch (error) {
this.logger.error(`[storage ${this.name}] open(): Unable to use backup due to ${error}`);
// In case of any error to open the DB, use an in-memory
// DB so that we always have a valid DB to talk to.
return this.doConnect(SQLiteStorageDatabase.IN_MEMORY_PATH);
}
}
}
private handleSQLiteError(connection: IDatabaseConnection, msg: string): void {
connection.isErroneous = true;
connection.lastError = msg;
this.logger.error(msg);
}
private doConnect(path: string): Promise<IDatabaseConnection> {
return new Promise((resolve, reject) => {
import('@vscode/sqlite3').then(sqlite3 => {
const connection: IDatabaseConnection = {
db: new (this.logger.isTracing ? sqlite3.verbose().Database : sqlite3.Database)(path, error => {
if (error) {
return connection.db ? connection.db.close(() => reject(error)) : reject(error);
}
// The following exec() statement serves two purposes:
// - create the DB if it does not exist yet
// - validate that the DB is not corrupt (the open() call does not throw otherwise)
return this.exec(connection, [
'PRAGMA user_version = 1;',
'CREATE TABLE IF NOT EXISTS ItemTable (key TEXT UNIQUE ON CONFLICT REPLACE, value BLOB)'
].join('')).then(() => {
return resolve(connection);
}, error => {
return connection.db.close(() => reject(error));
});
}),
isInMemory: path === SQLiteStorageDatabase.IN_MEMORY_PATH
};
// Errors
connection.db.on('error', error => this.handleSQLiteError(connection, `[storage ${this.name}] Error (event): ${error}`));
// Tracing
if (this.logger.isTracing) {
connection.db.on('trace', sql => this.logger.trace(`[storage ${this.name}] Trace (event): ${sql}`));
}
}, reject);
});
}
private exec(connection: IDatabaseConnection, sql: string): Promise<void> {
return new Promise((resolve, reject) => {
connection.db.exec(sql, error => {
if (error) {
this.handleSQLiteError(connection, `[storage ${this.name}] exec(): ${error}`);
return reject(error);
}
return resolve();
});
});
}
private get(connection: IDatabaseConnection, sql: string): Promise<object> {
return new Promise((resolve, reject) => {
connection.db.get(sql, (error, row) => {
if (error) {
this.handleSQLiteError(connection, `[storage ${this.name}] get(): ${error}`);
return reject(error);
}
return resolve(row);
});
});
}
private all(connection: IDatabaseConnection, sql: string): Promise<{ key: string; value: string }[]> {
return new Promise((resolve, reject) => {
connection.db.all(sql, (error, rows) => {
if (error) {
this.handleSQLiteError(connection, `[storage ${this.name}] all(): ${error}`);
return reject(error);
}
return resolve(rows);<|fim▁hole|> });
});
}
private transaction(connection: IDatabaseConnection, transactions: () => void): Promise<void> {
return new Promise((resolve, reject) => {
connection.db.serialize(() => {
connection.db.run('BEGIN TRANSACTION');
transactions();
connection.db.run('END TRANSACTION', error => {
if (error) {
this.handleSQLiteError(connection, `[storage ${this.name}] transaction(): ${error}`);
return reject(error);
}
return resolve();
});
});
});
}
private prepare(connection: IDatabaseConnection, sql: string, runCallback: (stmt: Statement) => void, errorDetails: () => string): void {
const stmt = connection.db.prepare(sql);
const statementErrorListener = (error: Error) => {
this.handleSQLiteError(connection, `[storage ${this.name}] prepare(): ${error} (${sql}). Details: ${errorDetails()}`);
};
stmt.on('error', statementErrorListener);
runCallback(stmt);
stmt.finalize(error => {
if (error) {
statementErrorListener(error);
}
stmt.removeListener('error', statementErrorListener);
});
}
}
class SQLiteStorageDatabaseLogger {
// to reduce lots of output, require an environment variable to enable tracing
// this helps when running with --verbose normally where the storage tracing
// might hide useful output to look at
static readonly VSCODE_TRACE_STORAGE = 'VSCODE_TRACE_STORAGE';
private readonly logTrace: ((msg: string) => void) | undefined;
private readonly logError: ((error: string | Error) => void) | undefined;
constructor(options?: ISQLiteStorageDatabaseLoggingOptions) {
if (options && typeof options.logTrace === 'function' && process.env[SQLiteStorageDatabaseLogger.VSCODE_TRACE_STORAGE]) {
this.logTrace = options.logTrace;
}
if (options && typeof options.logError === 'function') {
this.logError = options.logError;
}
}
get isTracing(): boolean {
return !!this.logTrace;
}
trace(msg: string): void {
if (this.logTrace) {
this.logTrace(msg);
}
}
error(error: string | Error): void {
if (this.logError) {
this.logError(error);
}
}
}<|fim▁end|>
| |
<|file_name|>TestGraph.java<|end_file_name|><|fim▁begin|>/* *******************************************************
* Released under the MIT License (MIT) --- see LICENSE
* Copyright (c) 2014 Ankit Singla, Sangeetha Abdu Jyothi,
* Chi-Yao Hong, Lucian Popa, P. Brighten Godfrey,
* Alexandra Kolla, Simon Kassing
* ******************************************************** */
package ch.ethz.topobench.graph;
public class TestGraph extends Graph {
public TestGraph(String name, int size) {
super(name, size);
}
public TestGraph(String name, int size, int uniformWeight) {
super(name, size, uniformWeight);
}<|fim▁hole|>
public void setNodeWeight(int i, int weight) {
super.setNodeWeight(i, weight);
}
}<|fim▁end|>
|
public boolean addBidirNeighbor(int n1, int n2) {
return super.addBidirNeighbor(n1, n2);
}
|
<|file_name|>huffman.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use crate::decoders::basics::*;
const DECODE_CACHE_BITS: u32 = 13;
pub struct HuffTable {
// These two fields directly represent the contents of a JPEG DHT marker
pub bits: [u32;17],
pub huffval: [u32;256],
// Represent the weird shifts that are needed for some NEF files
pub shiftval: [u32;256],
// Enable the workaround for 16 bit decodes in DNG that need to consume those
// bits instead of the value being implied
pub dng_bug: bool,
// In CRW we only use the len code so the cache is not needed
pub disable_cache: bool,
// The remaining fields are computed from the above to allow more
// efficient coding and decoding and thus private
// The max number of bits in a huffman code and the table that converts those
// bits into how many bits to consume and the decoded length and shift
nbits: u32,
hufftable: Vec<(u8,u8,u8)>,
// A pregenerated table that goes straight to decoding a diff without first
// finding a length, fetching bits, and sign extending them. The table is
// sized by DECODE_CACHE_BITS and can have 99%+ hit rate with 13 bits
decodecache: [Option<(u8,i16)>; 1<< DECODE_CACHE_BITS],
initialized: bool,
}
struct MockPump {
bits: u64,
nbits: u32,
}
impl MockPump {
pub fn empty() -> Self {
MockPump {
bits: 0,
nbits: 0,
}
}
pub fn set(&mut self, bits: u32, nbits: u32) {
self.bits = (bits as u64) << 32;
self.nbits = nbits + 32;
}
pub fn validbits(&self) -> i32 {<|fim▁hole|>
impl BitPump for MockPump {
fn peek_bits(&mut self, num: u32) -> u32 {
(self.bits >> (self.nbits-num)) as u32
}
fn consume_bits(&mut self, num: u32) {
self.nbits -= num;
self.bits &= (1 << self.nbits) - 1;
}
}
impl HuffTable {
pub fn empty() -> HuffTable {
HuffTable {
bits: [0;17],
huffval: [0;256],
shiftval: [0;256],
dng_bug: false,
disable_cache: false,
nbits: 0,
hufftable: Vec::new(),
decodecache: [None; 1 << DECODE_CACHE_BITS],
initialized: false,
}
}
pub fn new(bits: [u32;17], huffval: [u32;256], dng_bug: bool) -> Result<HuffTable,String> {
let mut tbl = HuffTable {
bits: bits,
huffval: huffval,
shiftval: [0;256],
dng_bug: dng_bug,
disable_cache: false,
nbits: 0,
hufftable: Vec::new(),
decodecache: [None; 1 << DECODE_CACHE_BITS],
initialized: false,
};
tbl.initialize()?;
Ok(tbl)
}
pub fn initialize(&mut self) -> Result<(), String> {
// Find out the max code length and allocate a table with that size
self.nbits = 16;
for i in 0..16 {
if self.bits[16-i] != 0 {
break;
}
self.nbits -= 1;
}
self.hufftable = vec![(0,0,0); 1 << self.nbits];
// Fill in the table itself
let mut h = 0;
let mut pos = 0;
for len in 0..self.nbits {
for _ in 0..self.bits[len as usize + 1] {
for _ in 0..(1 << (self.nbits-len-1)) {
self.hufftable[h] = (len as u8 + 1, self.huffval[pos] as u8, self.shiftval[pos] as u8);
h += 1;
}
pos += 1;
}
}
// Create the decode cache by running the slow code over all the possible
// values DECODE_CACHE_BITS wide
if !self.disable_cache {
let mut pump = MockPump::empty();
let mut i = 0;
loop {
pump.set(i, DECODE_CACHE_BITS);
let (bits, decode) = self.huff_decode_slow(&mut pump);
if pump.validbits() >= 0 {
self.decodecache[i as usize] = Some((bits, decode as i16));
}
i += 1;
if i >= 1 << DECODE_CACHE_BITS {
break;
}
}
}
self.initialized = true;
Ok(())
}
#[inline(always)]
pub fn huff_decode(&self, pump: &mut dyn BitPump) -> Result<i32,String> {
let code = pump.peek_bits(DECODE_CACHE_BITS) as usize;
if let Some((bits,decode)) = self.decodecache[code] {
pump.consume_bits(bits as u32);
Ok(decode as i32)
} else {
let decode = self.huff_decode_slow(pump);
Ok(decode.1)
}
}
#[inline(always)]
pub fn huff_decode_slow(&self, pump: &mut dyn BitPump) -> (u8,i32) {
let len = self.huff_len(pump);
(len.0+len.1, self.huff_diff(pump, len))
}
#[inline(always)]
pub fn huff_len(&self, pump: &mut dyn BitPump) -> (u8,u8,u8) {
let code = pump.peek_bits(self.nbits) as usize;
let (bits, len, shift) = self.hufftable[code];
pump.consume_bits(bits as u32);
(bits, len, shift)
}
#[inline(always)]
pub fn huff_get_bits(&self, pump: &mut dyn BitPump) -> u32 {
let code = pump.peek_bits(self.nbits) as usize;
let (bits, len, _) = self.hufftable[code];
pump.consume_bits(bits as u32);
len as u32
}
#[inline(always)]
pub fn huff_diff(&self, pump: &mut dyn BitPump, input: (u8,u8,u8)) -> i32 {
let (_, len, shift) = input;
match len {
0 => 0,
16 => {
if self.dng_bug {
pump.get_bits(16); // consume can fail because we haven't peeked yet
}
-32768
},
len => {
// decode the difference and extend sign bit
let fulllen: i32 = len as i32 + shift as i32;
let shift: i32 = shift as i32;
let bits = pump.get_bits(len as u32) as i32;
let mut diff: i32 = ((bits << 1) + 1) << shift >> 1;
if (diff & (1 << (fulllen - 1))) == 0 {
diff -= (1 << fulllen) - ((shift == 0) as i32);
}
diff
},
}
}
}
impl fmt::Debug for HuffTable {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.initialized {
write!(f, "HuffTable {{ bits: {:?} huffval: {:?} }}", self.bits, &self.huffval[..])
} else {
write!(f, "HuffTable {{ uninitialized }}")
}
}
}<|fim▁end|>
|
self.nbits as i32 - 32
}
}
|
<|file_name|>home.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { HomeComponent } from './home.component';<|fim▁hole|>
@NgModule({
imports: [CommonModule, CarouselModule, DropdownModule, AlertModule, ChartModule],
declarations: [HomeComponent, TimelineComponent, ChatComponent, NotificationComponent],
exports: [HomeComponent, TimelineComponent, ChatComponent, NotificationComponent]
})
export class HomeModule {}<|fim▁end|>
|
import { CarouselModule, DropdownModule, AlertModule } from 'ng2-bootstrap';
import { ChartModule } from 'angular2-highcharts';
import { TimelineComponent, ChatComponent, NotificationComponent } from './home.component';
|
<|file_name|>adapt_fit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
# Import the relevant PTS classes and modules
from pts.modeling.core.environment import load_modeling_environment_cwd
from pts.modeling.config.component import definition
# -----------------------------------------------------------------
# Load environment and model suite
environment = load_modeling_environment_cwd()
runs = environment.fitting_runs
# -----------------------------------------------------------------
properties = ["representation", "filters", "ranges", "genetic", "grid", "units", "types"]
# -----------------------------------------------------------------
definition = definition.copy()
# -----------------------------------------------------------------
# The fitting run for which to adapt the configuration<|fim▁hole|>elif runs.has_single: definition.add_fixed("name", "name of the fitting run", runs.single_name)
else: definition.add_required("name", "string", "name of the fitting run", choices=runs.names)
# -----------------------------------------------------------------
# Dust or stellar
definition.add_positional_optional("properties", "string_list", "properties to adapt", default=properties, choices=properties)
# -----------------------------------------------------------------
# Select certain properties
definition.add_optional("contains", "string", "only adapt properties containing this string in their name")
definition.add_optional("not_contains", "string", "don't adapt properties containing this string in their name")
definition.add_optional("exact_name", "string", "only adapt properties with this exact string as their name")
definition.add_optional("exact_not_name", "string", "don't adapt properties with this exact string as their name")
definition.add_optional("startswith", "string", "only adapt properties whose name starts with this string")
definition.add_optional("endswith", "string", "only adapt properties whose name starts with this string")
# -----------------------------------------------------------------
# Save
definition.add_flag("save", "save adapted properties", True)
# -----------------------------------------------------------------<|fim▁end|>
|
if runs.empty: raise RuntimeError("No fitting runs are present")
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import sys
import re<|fim▁hole|> os.system("python setup.py sdist upload")
sys.exit()
packages = [
"the_big_username_blacklist"
]
# Handle requirements
install_requires = []
tests_requires = [
"pytest==3.0.5",
]
# Convert markdown to rst
try:
from pypandoc import convert
long_description = convert("README.md", "rst")
except:
long_description = ""
version = ''
with io.open('the_big_username_blacklist/__init__.py', 'r', encoding='utf-8') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name="the_big_username_blacklist",
version=version,
description="Validate usernames against a blacklist", # NOQA
long_description=long_description,
author="Martin Sandström",
author_email="[email protected]",
url="https://github.com/marteinn/the-big-username-blacklist-python",
packages=packages,
package_data={"": ["LICENSE", ], "the_big_username_blacklist": ["*.txt"]},
package_dir={"the_big_username_blacklist": "the_big_username_blacklist"},
include_package_data=True,
install_requires=install_requires,
license="MIT",
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy"
],
)<|fim▁end|>
|
from setuptools import setup
if sys.argv[-1] == "publish":
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(lang_items)]
#![feature(asm)]
#![no_std]
#![no_main]
mod exceptions;
mod gpio;
#[no_mangle]
pub fn start() -> ! {
/*
let mut i = 0;
unsafe {
let ram_boundary = *(0x0000_0000 as *const u32); // Get stack boundary
let crash = *(ram_boundary as *const u32); // Crash the program
}
loop {
i += 1;
}
*/
gpio::GPIO::enable(gpio::GPIOGroup::B);
//turn_on_gpiob();
let mut pb3 = gpio::GPIOPort::new(3, gpio::GPIOGroup::B);
pb3.set_mode(gpio::GPIOMode::Output);
pb3.set_type(gpio::GPIOType::PushPull);
//put_pb3_in_output_mode();
// Just looking...
let pb3_mode = pb3.get_mode();
let pb3_type = pb3.get_type();
    let ticks: u32 = 5_000;
loop {
//set_pb3_high();
pb3.set();
delay(ticks);
//set_pb3_low();
pb3.reset();
delay(ticks);
}
}
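// Crude busy-wait; note that an empty counting loop like this can be
// optimized away entirely in release builds, so a volatile access (or an
// `asm!` nop, which this crate already enables) would be more robust.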
fn delay(n: u32) {
for _ in 0..n {}
}
/*
fn turn_on_gpiob() {
// Start address of the RCC register block
const RCC: u32 = 0x4002_1000;
const RCC_AHBENR: u32 = 0x14;
const RCC_AHBENR_IOPBEN: u32 = (1 << 18);
unsafe {
let ahbenr = (RCC + RCC_AHBENR) as *mut u32;
*ahbenr |= RCC_AHBENR_IOPBEN; // Enable GPIOB
}
}
const GPIOB: u32 = 0x4800_0400;
const GPIOB_BSRR: u32 = 0x18;
fn put_pb3_in_output_mode() {
const GPIOB_MODER: u32 = 0x0;
const GPIOB_OTYPER: u32 = 0x4;
unsafe {
let moder = (GPIOB + GPIOB_MODER) as *mut u32;
let otyper = (GPIOB + GPIOB_OTYPER) as *mut u32;
<|fim▁hole|>
        *otyper &= !(1 << 3); // Set type to output push-pull (bit must be cleared; OR-ing with 0b0 was a no-op)
}
}
fn set_pb3_high() {
unsafe {
let bsrr = (GPIOB + GPIOB_BSRR) as *mut u32;
*bsrr |= 1 << 3;
}
}
fn set_pb3_low() {
unsafe {
let bsrr = (GPIOB + GPIOB_BSRR) as *mut u32;
*bsrr |= 1 << (16 + 3);
}
}
*/
mod vector_table {
#[link_section = ".reset"]
static RESET: fn() -> ! = ::start;
}
#[lang = "eh_personality"] extern fn eh_personality() {}
#[lang = "panic_fmt"] extern fn panic_fmt() -> ! {loop{}}<|fim▁end|>
|
*moder |= 0b01 << 6; // Set mode to general purpose output
|
<|file_name|>0005_auto_20170704_1452.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-07-04 14:52
from __future__ import unicode_literals
<|fim▁hole|>class Migration(migrations.Migration):
dependencies = [("events", "0004_create_basic_calendars")]
operations = [
migrations.AlterField(
model_name="event",
name="name",
field=models.CharField(
help_text="Le nom de l'événement", max_length=255, verbose_name="nom"
),
)
]<|fim▁end|>
|
from django.db import migrations, models
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import hashlib
import jwt
from six.moves.urllib.parse import quote
from sentry.shared_integrations.exceptions import ApiError
def percent_encode(val):
# see https://en.wikipedia.org/wiki/Percent-encoding
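    # e.g. percent_encode("a b/c") -> "a%20b%2Fc": spaces are escaped, "/" is
    # forced to %2F, and "~" is kept literal.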
return quote(val.encode("utf8", errors="replace")).replace("%7E", "~").replace("/", "%2F")<|fim▁hole|>def get_query_hash(uri, method, query_params=None):
# see
# https://developer.atlassian.com/static/connect/docs/latest/concepts/understanding-jwt.html#qsh
uri = uri.rstrip("/")
method = method.upper()
if query_params is None:
query_params = {}
sorted_query = []
for k, v in sorted(query_params.items()):
# don't include jwt query param
if k != "jwt":
if isinstance(v, list):
                param_val = ",".join(percent_encode(val) for val in v)
else:
param_val = percent_encode(v)
sorted_query.append("%s=%s" % (percent_encode(k), param_val))
query_string = "%s&%s&%s" % (method, uri, "&".join(sorted_query))
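    # e.g. (hypothetical request) a GET of /rest/api/issue?fields=summary
    # canonicalises to "GET&/rest/api/issue&fields=summary" before hashing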
return hashlib.sha256(query_string.encode("utf8")).hexdigest()
def get_jira_auth_from_request(request):
# https://developer.atlassian.com/static/connect/docs/latest/concepts/authentication.html
# Extract the JWT token from the request's jwt query
# parameter or the authorization header.
token = request.GET.get("jwt")
if token is None:
raise ApiError("No token parameter")
# Decode the JWT token, without verification. This gives
# you a header JSON object, a claims JSON object, and a signature.
decoded = jwt.decode(token, verify=False)
# Extract the issuer ('iss') claim from the decoded, unverified
# claims object. This is the clientKey for the tenant - an identifier
# for the Atlassian application making the call
issuer = decoded["iss"]
# Look up the sharedSecret for the clientKey, as stored
# by the add-on during the installation handshake
from sentry_plugins.jira_ac.models import JiraTenant
jira_auth = JiraTenant.objects.get(client_key=issuer)
# Verify the signature with the sharedSecret and
# the algorithm specified in the header's alg field.
decoded_verified = jwt.decode(token, jira_auth.secret)
# Verify the query has not been tampered by Creating a Query Hash
# and comparing it against the qsh claim on the verified token.
# TODO: probably shouldn't need to hardcode get... for post maybe
# the secret should just be a hidden field in the form ?
qsh = get_query_hash(request.path, "GET", request.GET)
# qsh = get_query_hash(request.path, request.method, request.GET)
if qsh != decoded_verified["qsh"]:
raise ApiError("Query hash mismatch")
return jira_auth<|fim▁end|>
| |
<|file_name|>sujiko.rs<|end_file_name|><|fim▁begin|>//! Sujiko.
//!
//! https://en.wikipedia.org/wiki/Sujiko
//! https://www.simetric.co.uk/sujiko/index.htm
extern crate puzzle_solver;
use puzzle_solver::{Puzzle,Solution,Val,VarToken};
const SIZE: usize = 3;
type Board = [[Val; SIZE]; SIZE];
fn make_sujiko(board: &Board, tl: Val, tr: Val, bl: Val, br: Val)
-> (Puzzle, Vec<Vec<VarToken>>) {
let mut sys = Puzzle::new();
let vars = sys.new_vars_with_candidates_2d(3, 3, &[1,2,3,4,5,6,7,8,9]);
sys.all_different(vars.iter().flat_map(|it| it));
<|fim▁hole|> sys.equals(br, vars[1][1] + vars[1][2] + vars[2][1] + vars[2][2]);
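    // Derived constraint: summing the four quadrant sums counts every cell
    // once, the edge midpoints twice and the centre four times, and the nine
    // all-different digits total 45, hence
    //   tl+tr+bl+br - 45 = v[0][1] + v[1][0] + 3*v[1][1] + v[1][2] + v[2][1].
    // Presumably stated explicitly to give the solver extra pruning power.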
sys.equals(tl + tr + bl + br - (1..(9 + 1)).sum::<Val>(),
vars[0][1] + vars[1][0] + 3 * vars[1][1] + vars[1][2] + vars[2][1]);
for y in 0..SIZE {
for x in 0..SIZE {
let value = board[y][x];
if value != 0 {
sys.set_value(vars[y][x], value);
}
}
}
(sys, vars)
}
fn print_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>) {
for y in 0..SIZE {
for x in 0..SIZE {
print!(" {}", dict[vars[y][x]]);
}
println!();
}
}
fn verify_sujiko(dict: &Solution, vars: &Vec<Vec<VarToken>>, expected: &Board) {
for y in 0..SIZE {
for x in 0..SIZE {
assert_eq!(dict[vars[y][x]], expected[y][x]);
}
}
}
#[test]
fn sujiko_simetric() {
let puzzle = [ [ 6,0,9 ], [ 0,0,0 ], [ 5,0,0 ] ];
let expected = [ [ 6,2,9 ], [ 8,1,3 ], [ 5,4,7 ] ];
let (mut sys, vars) = make_sujiko(&puzzle, 17, 15, 18, 15);
let dict = sys.solve_unique().expect("solution");
print_sujiko(&dict, &vars);
verify_sujiko(&dict, &vars, &expected);
println!("sujiko_simetric: {} guesses", sys.num_guesses());
}<|fim▁end|>
|
sys.equals(tl, vars[0][0] + vars[0][1] + vars[1][0] + vars[1][1]);
sys.equals(tr, vars[0][1] + vars[0][2] + vars[1][1] + vars[1][2]);
sys.equals(bl, vars[1][0] + vars[1][1] + vars[2][0] + vars[2][1]);
|
<|file_name|>test_math_core.py<|end_file_name|><|fim▁begin|>"""Test methods for `zcode/math/math_core.py`.
Can be run with:
$ nosetests math/tests/test_math_core.py
$ nosetests math/tests/test_math_core.py:TestMathCore.test_around
$ python math/tests/test_math_core.py
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from numpy.testing import run_module_suite
import scipy as sp
import scipy.stats # noqa
from nose.tools import assert_true, assert_false, assert_equal, assert_raises, assert_almost_equal
from zcode.math import math_core, interpolate
class TestMathCore(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.SIZE = 100
cls.r1 = np.random.random(cls.SIZE)
cls.r2 = np.random.uniform(-1.0, 1.0, size=cls.SIZE)
def test_argnearest_ordered(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = [0, 0, 1, 2, 4, 5, 7, 9, 10, 10]
retval = argnearest(edges, vals, assume_sorted=True)
assert_true(np.all(correct == retval))
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
return
def test_argnearest_ordered_left_right(self):
from zcode.math.math_core import argnearest
# 0 1 2 3 4 5 6 7 8 9 10
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([-1, -1 , 1, 2 , 3, 4, 7, 8, 9 , 10])
print("LEFT")
retval = argnearest(edges, vals, assume_sorted=True, side='left')
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
print(correct == retval)
print(np.all(correct == retval))
assert_true(np.all(correct == retval))
correct += 1
for ee in edges:
correct[vals == ee] += 1
print("RIGHT")
retval = argnearest(edges, vals, assume_sorted=True, side='right')
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_argnearest_unordered_x(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([2, 2, 8, 7, 0, 6, 9, 3, 4, 4])
# ix = np.random.permutation(edges.size)
ix = np.array([4, 3, 0, 9, 10, 8, 5, 2, 1, 7, 6])
edges = edges[ix]
retval = argnearest(edges, vals, assume_sorted=False)
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("nearest = {}".format(edges[retval]))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_argnearest_unordered_xy(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([0, 7, 6, 3, 4, 9, 2, 4, 2, 8])
# ix = np.random.permutation(edges.size)
ix = np.array([4, 3, 0, 9, 10, 8, 5, 2, 1, 7, 6])
edges = edges[ix]
iy = np.array([4, 3, 5, 7, 9, 6, 0, 8, 1, 2])
vals = vals[iy]
retval = argnearest(edges, vals, assume_sorted=False)
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("nearest = {}".format(edges[retval]))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_spacing(self):
from zcode.math.math_core import spacing
# Linear Spacing
ref1 = np.linspace(0.0, 1.0, num=20)
spc1 = spacing([0.0, 1.0], scale='lin', num=20)
assert_true(np.allclose(ref1, spc1))
# Logarithmic Spacing
ref2 = np.logspace(0.0, 2.5, num=20)
spc2 = spacing([np.power(10.0, 0.0), np.power(10.0, 2.5)], scale='log', num=20)
assert_true(np.allclose(ref2, spc2))
# Automatically selects appropriate Range
ref3 = np.logspace(1.0, 2.0, num=13)
spc3 = spacing([-10.0, 100.0, 0.0, 10.0], scale='log', num=13)
assert_true(np.allclose(ref3, spc3))
        # Manually selects appropriate range
ref4 = np.linspace(-5.0, -2.5, num=27)<|fim▁hole|>
# Only integral (whole number) values
# log spacing
vals = [2.34, 365.23]
res = np.array([2., 3., 4., 5., 6., 7., 8., 9., 10.,
20., 30., 40., 50., 60., 70., 80., 90., 100.,
200., 300., 400.])
retvals = spacing(vals, 'log', integers=True)
print("integers, log\n", vals, "\n\t", res, "\n\t", retvals)
print(retvals)
print(np.allclose(retvals, res))
assert_true(np.allclose(retvals, res))
# lin spacing
vals = [2.34, 11.23]
res = np.arange(2, 13)
retvals = spacing(vals, 'lin', integers=True)
print("integers, lin\n", vals, "\n\t", res, "\n\t", retvals)
print(np.allclose(retvals, res))
assert_true(np.allclose(retvals, res))
return
def test_mono(self):
arr_g = [-1.0, 1.0, 2.0, 3.0]
arr_ge = [-1.0, 1.0, 1.0, 2.0, 2.5]
arr_l = [11.5, 9.2, -2.0, -301.0]
arr_le = [11.5, 9.2, -2.0, -2.0, -301.0]
arr_e = 11*[1.0]
assert_true(math_core.mono(arr_g, 'g'))
assert_true(math_core.mono(arr_ge, 'ge'))
assert_true(math_core.mono(arr_g, 'ge'))
assert_false(math_core.mono(arr_ge, 'g'))
assert_true(math_core.mono(arr_l, 'l'))
assert_true(math_core.mono(arr_le, 'le'))
assert_true(math_core.mono(arr_l, 'le'))
assert_false(math_core.mono(arr_le, 'l'))
assert_true(math_core.mono(arr_e, 'e'))
assert_false(math_core.mono(arr_le, 'e'))
def test_ordered_groups(self):
arr = np.array([99, 77, 14, 21, 71, 64, 98, 38, 66, 25])
sinds = np.argsort(arr)
targets = [40, 77]
print("arr = {}, targets = {}, sorted arr = {}".format(arr, targets, arr[sinds]))
# Group into elements below targets
# Exclusively
print("Below, exclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='b', include=False)
assert_true(np.all(sinds == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] < tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] < tt]))
# Inclusively
print("Below, inclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='b', include=True)
assert_true(np.all(sinds == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] <= tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] <= tt]))
# Group into elements above targets
# Exclusive
print("Above, exclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='a', include=False)
assert_true(np.all(sinds[::-1] == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] > tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] > tt]))
# Exclusive
print("Above, inclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='a', include=True)
assert_true(np.all(sinds[::-1] == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] >= tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] >= tt]))
# Should raise error for unsorted `targets`
assert_raises(ValueError, math_core.ordered_groups, arr, targets[::-1])
# Should raise error for `dir` not starting with 'a' or 'b'
assert_raises(ValueError, math_core.ordered_groups, arr, targets, None, 'c')
return
def test_really1d(self):
from zcode.math import really1d
assert_true(really1d([1, 2, 3]))
assert_true(really1d([1]))
assert_true(really1d([]))
assert_true(really1d(np.arange(10)))
assert_false(really1d(1))
assert_false(really1d([[1]]))
assert_false(really1d([[1, 2], [2, 3]]))
assert_false(really1d([[1, 2, 3], [4, 5]]))
assert_false(really1d(np.random.random((4, 3))))
assert_false(really1d([[]]))
def test_argextrema(self):
# Basic usage without filtering
assert_equal(math_core.argextrema([-1, -5, 2, 10], 'min'), 1)
assert_equal(math_core.argextrema([-1, -5, 2, 10], 'max'), 3)
# Filtering
# min
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'min', 'g'), 2)
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'min', 'ge'), 4)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'min', 'l'), 1)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'min', 'le'), 1)
# max
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'max', 'g'), 3)
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'max', 'ge'), 3)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'max', 'l'), 0)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'max', 'le'), 2)
# Raises appropriate errors
# Incorrect shape input array
assert_raises(ValueError, math_core.argextrema, np.arange(4).reshape(2, 2), 'max')
assert_raises(ValueError, math_core.argextrema, 0.0, 'max')
# Invalid `type` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'mex')
# Invalid `filter` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'max', 'e')
# Invalid `filter` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'max', 'greater')
def test_asBinEdges_1d(self):
print("TestMathCore.test_asBinEdges_1d")
from zcode.math import asBinEdges, spacing
data_1d = np.random.random(40)
bins_1d = np.arange(20)
# Preserves valid bins
assert_true(np.allclose(bins_1d, asBinEdges(bins_1d, data_1d)))
# Constructs valid bins
# lin
lin_1d = spacing(data_1d, scale='lin', num=8+1)
lin_edges_1d = asBinEdges(8, data_1d, scale='lin')
assert_true(np.allclose(lin_1d, lin_edges_1d))
# log
log_1d = spacing(data_1d, scale='log', num=7+1)
log_edges_1d = asBinEdges(7, data_1d, scale='log')
assert_true(np.allclose(log_1d, log_edges_1d))
# Raises appropriate errors
data_2d = data_1d.reshape(8, 5)
bins_2d = bins_1d.reshape(4, 5)
# 1D bins, 2D data
assert_raises(ValueError, asBinEdges, bins_1d, data_2d)
# 2D bins, 1D data
assert_raises(ValueError, asBinEdges, bins_2d, data_1d)
def test_asBinEdges_nd(self):
print("TestMathCore.test_asBinEdges_nd")
from zcode.math import asBinEdges
data_2d = np.random.random((8, 2))
bins_2d = np.arange(8).reshape(2, 4)
bins_2d2 = [[0.0, 1.0], [0.0, 0.5, 1.0]]
# Preserves valid bins
edges_2d = asBinEdges(bins_2d, data_2d)
assert_true(np.allclose(bins_2d, edges_2d))
edges_2d2 = asBinEdges(bins_2d2, data_2d)
assert_true(np.allclose(bins_2d2[0], edges_2d2[0]))
assert_true(np.allclose(bins_2d2[1], edges_2d2[1]))
# Constructs valid bins
# lin
lin_2d1 = sp.stats.binned_statistic_dd(data_2d, None, 'count', bins=4).bin_edges
lin_edges_2d1 = asBinEdges(4, data_2d, scale='lin')
assert_true(np.allclose(lin_2d1, lin_edges_2d1))
lin_2d2 = sp.stats.binned_statistic_dd(data_2d, None, 'count', bins=[4, 3]).bin_edges
lin_edges_2d2 = asBinEdges([4, 3], data_2d, scale='lin')
assert_true(np.allclose(lin_2d2[0], lin_edges_2d2[0]))
assert_true(np.allclose(lin_2d2[1], lin_edges_2d2[1]))
# Raises appropriate errors
# 1D bins, 2D data
assert_raises(ValueError, asBinEdges, [4], data_2d)
# 2D bins, 1D data
assert_raises(ValueError, asBinEdges, [4, 3, 2], data_2d)
def test_comparison_function(self):
from zcode.math.math_core import _comparison_function
comp = ['g', '>']
arr = [0.5, 1.5, -0.5, 0.0]
res = [True, True, False, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['ge', '>=']
arr = [0.5, 1.5, -0.5, 0.0]
res = [True, True, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['l', '<']
arr = [-10.5, -1.5, 0.5, 0.0]
res = [True, True, False, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['le', '<=']
arr = [-10.5, -1.5, 0.5, 0.0]
res = [True, True, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['e', '=', '==']
arr = [-10.5, 0.5, 0.0]
res = [False, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['ne', '!=']
arr = [-10.5, 0.5, 0.0]
res = [True, True, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
return
def test_comparison_filter(self):
from zcode.math.math_core import comparison_filter
comp = ['g', '>']
arr = [0.5, -1.0, 1.5, -0.5, 0.0]
res = [0.5, 1.5]
inds = [0, 2]
arr = np.array(arr)
for cc in comp:
vals = comparison_filter(arr, cc, value=0.0)
assert_true(np.all(np.equal(vals, res)))
val_inds = comparison_filter(arr, cc, inds=True, value=0.0)
assert_true(np.all(np.equal(arr[val_inds], arr[inds])))
comp = ['le', '<=']
arr = [0.5, -1.0, 1.5, -0.5, 0.0]
res = [-1.0, -0.5, 0.0]
inds = [1, 3, 4]
arr = np.array(arr)
for cc in comp:
vals = comparison_filter(arr, cc, value=0.0)
assert_true(np.all(np.equal(vals, res)))
vals = comparison_filter(arr, cc, inds=True, value=0.0)
assert_true(np.all(np.equal(arr[vals], arr[inds])))
return
def test_around(self):
from zcode.math.math_core import around
vals = [
# Nearest
# linear
[[123.4678, 0, 'lin', 'near'], 123.00],
[[123.4678, 1, 'linear', 'nearest'], 123.50],
[[123.4678, 2, 'lin', 'n'], 123.47],
# logarithmic
[[123.4678, 0, 'log', 'nearest'], 100.0],
[[123.4678, 1, 'logarithmic', 'nearest'], 120.0],
[[123.4678, 2, 'log', 'nearest'], 123.0],
[[123.4678, 3, 'log', 'nearest'], 123.5],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'nearest'], 100.0],
# Ceiling (up)
# linear
[[123.4678, 0, 'lin', 'c'], 124.0],
[[123.4678, 1, 'linear', 'ceiling'], 123.5],
[[123.4678, 2, 'lin', 'ceil'], 123.47],
# logarithmic
[[123.4678, 0, 'log', 'c'], 200.0],
[[123.4678, 1, 'logarithmic', 'c'], 130.0],
[[123.4678, 2, 'log', 'c'], 124.0],
[[123.4678, 3, 'log', 'c'], 123.5],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'c'], 1000.0],
# Floor (down)
# linear
[[123.4678, 0, 'lin', 'f'], 123.0],
[[123.4678, 1, 'linear', 'fl'], 123.4],
[[123.4678, 2, 'lin', 'floor'], 123.46],
# logarithmic
[[123.4678, 0, 'log', 'f'], 100.0],
[[123.4678, 1, 'logarithmic', 'f'], 120.0],
[[123.4678, 2, 'log', 'f'], 123.0],
[[123.4678, 3, 'log', 'f'], 123.4],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'f'], 100.0],
]
for vv in vals:
print(vv)
res = around(*vv[0])
print("\t", res)
assert_true(np.isclose(vv[1], res))
# Invalid 'scaling'
assert_raises(ValueError, around, 1234.567, 1, 'symlog', 'n')
# Invalid 'dir'ection
assert_raises(ValueError, around, 1234.567, 1, 'log', 'm')
return
def test_str_array(self):
from zcode.math.math_core import str_array
print("TestMathCore.test_str_array()")
arr = np.linspace(0, 10.0, 6)
correct = '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]'
sa = str_array(arr)
print("'({})' ==> '{}', should be '{}'".format(arr, sa, correct))
assert_true(sa == correct)
sa = str_array(arr, (2, 2))
print("'({}, (2, 2))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00... 8.00, 10.00]')
sa = str_array(arr, None)
print("'({}, None)' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]')
sa = str_array(arr, 1)
print("'({}, 1)' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00... 10.00]')
sa = str_array(arr, (1, 3))
print("'({}, (1, 3))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00... 6.00, 8.00, 10.00]')
sa = str_array(arr, (12, 10))
print("'({}, (12, 10))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]')
sa = str_array(arr, (2, 1), delim=' ')
print("'({}, (2, 1), delim=' ')' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00 2.00... 10.00]')
sa = str_array(arr, (2, 1), format=':.1e')
print("'({}, (2, 1), format=':.1e')' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.0e+00, 2.0e+00... 1.0e+01]')
return
def test_broadcast(self):
from zcode.math.math_core import broadcast
def check_in_ot(din, check):
dot = broadcast(*din)
print("input: {}".format(din))
print("output: {} ({})".format(dot, check))
assert_true(np.all([dd == cc for dd, cc in zip(dot, check)]))
assert_true(np.all([np.shape(dd) == np.shape(cc) for dd, cc in zip(dot, check)]))
return
# Normal broadcast (1,) (2,) ==> (2,) (2,)
din = [[1.0], [2.0, 3.0]]
check = [[[1.0, 1.0]], [[2.0, 3.0]]]
check_in_ot(din, check)
# Scalar-only broadcast () () ==> () ()
din = [1.0, 2.0]
check = din
check_in_ot(din, check)
# Mixed scalar and array
din = [1.5, [1.0, 2.0], [1.0, 2.0, 3.0]]
check = [
[[1.5, 1.5, 1.5], [1.5, 1.5, 1.5]],
[[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]],
[[1.0, 2.0, 3.0], [1.0, 2.0, 3.0]]
]
check_in_ot(din, check)
din = [[1.0], [2.0, 3.0]]
check = [[[1.0, 1.0]], [[2.0, 3.0]]]
dot = broadcast(*din)
check_in_ot(din, check)
sh_in = np.random.randint(1, 5, 3)
sh_ot = [sh_in for ii in range(len(sh_in))]
din = [np.random.normal(size=sh) for sh in sh_in]
dot = broadcast(*din)
print("Input shapes: '{}'".format(sh_in))
print("Output shapes: '{}' ({})".format([dd.shape for dd in dot], sh_ot))
assert_true(np.all([dd.shape == sh for dd, sh in zip(dot, sh_ot)]))
return
class Test_Interp(object):
def test_interp_lin_lin(self):
print("\ntest_interp_lin_lin()")
kw = dict(xlog=False, ylog=False, valid=False, left=np.nan, right=100.0)
xo = [1.0, 2.0, 3.0]
yo = [10.0, 20.0, 30.0]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [15.0, 25.0, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_lin_log(self):
print("\ntest_interp_lin_log()")
kw = dict(xlog=False, ylog=True, valid=False, left=100.0, right=np.nan)
xo = [1.0, 2.0, 3.0]
yo = [1.0e1, 1.0e3, 1.0e5]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [1.0e2, 1.0e4, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_log_lin(self):
print("\ntest_interp_log_lin()")
kw = dict(xlog=True, ylog=False, valid=False, left=100.0, right=np.nan)
xo = [2.0e-5, 2.0e-3, 2.0e-1]
yo = [-10.0, -20.0, -30.0]
tests = [2.0e-4, 2.0e-2, 1.0e-8, 1.0e8]
truth = [-15.0, -25.0, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_log_log(self):
print("\ntest_interp_log_log()")
kw = dict(xlog=True, ylog=True, valid=False, left=np.nan, right=100.0)
xo = [1.0e-1, 1.0e1, 1.0e5]
yo = [3.0e0, 3.0e-2, 3.0e6]
tests = [1.0, 1.0e3, 1.0e-8, 1.0e8]
truth = [3.0e-1, 3.0e2, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
class Test_Interp_Func_Linear(object):
KW = dict(kind='linear', bounds_error=False)
def test_interp_func(self):
print("\n|test_interp_func()|")
options = [True, False]
TRIES = 10
SAMPS = 40
TESTS = 100
LOG_RANGE = [-8.0, 8.0]
for xlog in options:
for ylog in options:
kw = dict(xlog=xlog, ylog=ylog)
print("xlog = {}, ylog = {}".format(xlog, ylog))
for kk in range(TRIES):
xo = np.random.uniform(*LOG_RANGE, SAMPS)
xo = np.sort(xo)
yo = np.random.uniform(*LOG_RANGE, SAMPS)
xx = np.random.uniform(*math_core.minmax(xo), TESTS)
if xlog:
xo = np.power(10.0, xo)
xx = np.power(10.0, xx)
if ylog:
yo = np.power(10.0, yo)
y1 = interpolate.interp(xx, xo, yo, valid=False, **kw)
y2 = interpolate.interp_func(xo, yo, kind='linear', bounds_error=False, **kw)(xx)
assert_true(np.allclose(y1, y2))
return
def test_interp_func_lin_lin(self):
print("\n|test_interp_func_lin_lin()|")
kw = dict(xlog=False, ylog=False, fill_value=(np.nan, 100.0))
kw.update(self.KW)
xo = [1.0, 2.0, 3.0]
yo = [10.0, 20.0, 30.0]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [15.0, 25.0, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_lin_log(self):
print("\n|test_interp_func_lin_log()|")
kw = dict(xlog=False, ylog=True, fill_value=(100.0, np.nan))
kw.update(self.KW)
xo = [1.0, 2.0, 3.0]
yo = [1.0e1, 1.0e3, 1.0e5]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [1.0e2, 1.0e4, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_log_lin(self):
print("\n|test_interp_func_log_lin()|")
kw = dict(xlog=True, ylog=False, fill_value=(100.0, np.nan))
kw.update(self.KW)
xo = [2.0e-5, 2.0e-3, 2.0e-1]
yo = [-10.0, -20.0, -30.0]
tests = [2.0e-4, 2.0e-2, 1.0e-8, 1.0e8]
truth = [-15.0, -25.0, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_log_log(self):
print("\n|test_interp_func_log_log()|")
kw = dict(xlog=True, ylog=True, fill_value=(np.nan, 100.0))
kw.update(self.KW)
xo = [1.0e-1, 1.0e1, 1.0e5]
yo = [3.0e0, 3.0e-2, 3.0e6]
tests = [1.0, 1.0e3, 1.0e-8, 1.0e8]
truth = [3.0e-1, 3.0e2, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
class Test_Interp_Func_Mono(object):
KW = dict(kind='mono')
def test_interp_func(self):
print("\n|test_interp_func()|")
xo = [0.1, 1.0, 2.0, 3.0, 4.0, 5.0]
yo = [100.0, 100.0, 90.0, 0.1, 2.0, 2.0]
NUM = len(xo)
xn = np.linspace(xo[1], xo[-2], 1000)
def test_within(xx, yy):
vals = []
for ii in range(NUM-1):
xl = xo[ii]
xh = xo[ii+1]
yl = yo[ii]
yh = yo[ii+1]
inds = (xl <= xx) & (xx <= xh)
rv1 = math_core.within(yy[inds], [yl, yh], all=True, close=True)
rv2 = math_core.mono(yy[inds], 'ge') or math_core.mono(yy[inds], 'le')
rv = (rv1 and rv2)
vals.append(rv)
return np.all(vals)
options = [True, False]
for xlog in options:
for ylog in options:
func = interpolate.interp_func(xo, yo, xlog=xlog, ylog=ylog, kind='mono')
yn = func(xn)
print("xlog = {}, ylog = {}".format(xlog, ylog))
assert_true(test_within(xn, yn))
# 'cubic' should be NON-monotonic, make sure test shows that
func = interpolate.interp_func(xo, yo, xlog=xlog, ylog=ylog, kind='cubic')
yn = func(xn)
assert_false(test_within(xn, yn))
return
class Test_Edges_From_Cents(object):
def test_lin_spacing(self):
print("\n|test_lin_spacing()|")
edges_true = [
np.linspace(0.0, 1.0, 20),
np.linspace(1.0, 0.0, 20),
np.linspace(-100, 100, 100)
]
for true in edges_true:
cents = math_core.midpoints(true, log=False)
edges = math_core.edges_from_cents(cents, log=False)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true))
return
def test_log_spacing(self):
print("\n|test_log_spacing()|")
true_pars = [
[0.0, 1.0, 20],
[1.0, 0.0, 20],
[2.0, -2.0, 100]
]
for pars in true_pars:
true = np.logspace(*pars)
cents = math_core.midpoints(true, log=True)
edges = math_core.edges_from_cents(cents, log=True)
print("pars = ", pars)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true))
return
def test_irr_spacing(self):
print("\n|test_irr_spacing()|")
NUM = 10
xx = np.arange(NUM)
widths = 1.5 + 0.4*xx + 0.1*(xx**2)
true = np.zeros(NUM+1)
true[0] = 4.0
for ii in range(1, NUM+1):
true[ii] = true[ii-1] + widths[ii-1]
cents = math_core.midpoints(true, log=False)
edges = math_core.edges_from_cents(cents, log=False)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true, rtol=1e-1))
return
# Run all methods as if with `nosetests ...`
if __name__ == "__main__":
run_module_suite()<|fim▁end|>
|
spc4 = spacing([3.0, -2.5, -5.0, 0.0], scale='lin', num=27, filter='<')
assert_true(np.allclose(ref4, spc4))
|
<|file_name|>indexing.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* This controller handles trigger requests from the Cumulus system that fires
* when assets are updated, inserted or deleted.
*
* An example request is:
*
* req.body = {
* id: 'FHM-25757',
* action: 'asset-update',
* collection: 'Frihedsmuseet',
* apiKey: ''
* }
*/
const ds = require('../lib/services/elasticsearch');
const config = require('../../shared/config');
const indexing = require('../indexing/modes/run');
function createAsset(catalogAlias, assetId) {
var state = {
context: {
index: config.es.assetIndex,
geocoding: {
enabled: true
},
vision: {
enabled: true
}
},
mode: 'single',
reference: catalogAlias + '/' + assetId
};
return indexing(state);
}
// Re-index an asset by retrieving it from Cumulus and updating Elasticsearch.
function updateAsset(catalogAlias, assetId) {
var state = {
context: {
index: config.es.assetIndex,
geocoding: {
enabled: true
}
},
mode: 'single',
reference: catalogAlias + '/' + assetId
};
return indexing(state);
}
// Update an already indexed asset with partial data provided by the caller.
function updateAssetsFromData(partials) {
// Support both a single document and a list.
if (!Array.isArray(partials)) {
partials = [partials];
}
// Construct an array of bulk-updates. Each document update is prefixed with
// an update action object.
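  // For example, a (hypothetical) partial {collection: 'FHM', id: '25757',
  // title: 'New title'} becomes the two bulk lines:
  //   {update: {_index: config.es.assetIndex, _id: 'FHM-25757'}}
  //   {doc: {collection: 'FHM', id: '25757', title: 'New title'}}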
let items = [];
partials.forEach((partial) => {
const updateAction = {
'update': {
_index: config.es.assetIndex,
'_id': partial.collection + '-' + partial.id
}
};
items.push(updateAction);
items.push({doc: partial});
});
const query = {
body: items,
};
return ds.bulk(query).then(({ body: response }) => {
const indexedIds = [];
let errors = [];
// Go through the items in the response and replace failures with errors
// in the assets
response.items.forEach(item => {
if (item.update.status >= 200 && item.update.status < 300) {
indexedIds.push(item.update._id);
} else {
// TODO: Consider using the AssetIndexingError instead
errors.push({
trace: new Error('Failed update ' + item.update._id),
item,
});
}
});
console.log('Updated ', indexedIds.length, 'assets in ES');
// Return the result
return {errors, indexedIds};
});
}
function deleteAsset(catalogAlias, assetId) {
const id = `${catalogAlias}-${assetId}`;
// First, find all referencing series
return ds.search({
index: config.es.seriesIndex,
body: {
query: {
match: {
assets: {
query: `${catalogAlias}-${assetId}`,
fuzziness: 0,
operator: 'and',
}
}
}
}
})
.then(({body: response}) => {
const bulkOperations = [];
response.hits.hits.forEach(({ _id: seriesId, _source: series }) => {
      const assetIndex = series.assets.findIndex((seriesAssetId) => seriesAssetId === id);
series.assets.splice(assetIndex, 1);
const previewAssetIndex = series.previewAssets.findIndex((previewAsset) => `${previewAsset.collection}-${previewAsset.id}` === id);
if(previewAssetIndex !== -1) {
//TODO: Replace preview asset -- we need to look up a full new asset
// For now, we just remove the preview asset - editing any other asset should
// result in it being added here.
series.previewAssets.splice(previewAssetIndex, 1);
}
if(series.assets.length > 0) {
// If at least one asset remains in series, update it
bulkOperations.push({
'index' : {
'_index': config.es.seriesIndex,
'_id': seriesId
}
});
bulkOperations.push({...series});
}
else {
// If the serie is now empty, delete it
bulkOperations.push({delete: {_index: config.es.seriesIndex, _id: seriesId}});
}
});
bulkOperations.push({delete: {_index: config.es.assetIndex, _id: id}});
return ds.bulk({
body: bulkOperations,
}).then(({body: response}) => response);
});
}
module.exports.asset = function(req, res, next) {
if(req.body.apiKey !== config.kbhAccessKey) {
return res.sendStatus(401);
}
const action = req.body.action || null;
const catalogName = req.body.collection || null;
let id = req.body.id || '';
let catalogAlias = null;
console.log('Index asset called with body: ', JSON.stringify(req.body));
  // If the catalog alias is included in the ID
if(id.indexOf('-') > -1) {
[catalogAlias, id] = id.split('-');
} else if(catalogName) {
    // No dash in the id - the catalog should be read from .collection
catalogAlias = Object.keys(config.cip.catalogs)
.find((alias) => catalogName === config.cip.catalogs[alias]);
}
if (!catalogAlias) {
throw new Error('Failed to determine catalog alias');<|fim▁hole|> function success() {
res.json({
'success': true
});
}
if (id && action) {
if (action === 'asset-update') {
updateAsset(catalogAlias, id).then(success, next);
} else if (action === 'asset-create') {
createAsset(catalogAlias, id).then(success, next);
} else if (action === 'asset-delete') {
deleteAsset(catalogAlias, id).then(success, next);
} else {
next(new Error('Unexpected action from Cumulus: ' + action));
}
} else {
var requestBody = JSON.stringify(req.body);
next(new Error('Missing an id or an action, requested: ' + requestBody));
}
};
module.exports.createAsset = createAsset;
module.exports.updateAsset = updateAsset;
module.exports.updateAssetsFromData = updateAssetsFromData;
module.exports.deleteAsset = deleteAsset;<|fim▁end|>
|
}
|
<|file_name|>instr_cvtdq2ps.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn cvtdq2ps_1() {
run_test(&Instruction { mnemonic: Mnemonic::CVTDQ2PS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 91, 210], OperandSize::Dword)
}
#[test]
fn cvtdq2ps_2() {
run_test(&Instruction { mnemonic: Mnemonic::CVTDQ2PS, operand1: Some(Direct(XMM6)), operand2: Some(IndirectScaledDisplaced(ECX, Two, 829617393, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 91, 52, 77, 241, 244, 114, 49], OperandSize::Dword)<|fim▁hole|>#[test]
fn cvtdq2ps_3() {
run_test(&Instruction { mnemonic: Mnemonic::CVTDQ2PS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 91, 244], OperandSize::Qword)
}
#[test]
fn cvtdq2ps_4() {
run_test(&Instruction { mnemonic: Mnemonic::CVTDQ2PS, operand1: Some(Direct(XMM2)), operand2: Some(IndirectScaledDisplaced(RBX, Two, 1952076968, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 91, 20, 93, 168, 84, 90, 116], OperandSize::Qword)
}<|fim▁end|>
|
}
|
<|file_name|>unreachable-locals.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
#![allow(unused_variables)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
// No need to actually run the debugger, just make sure that the compiler can
// handle locals in unreachable code.
fn after_return() {
return;
let x = "0";
let (ref y,z) = (1i32, 2u32);
match (20i32, 'c') {
(a, ref b) => {}
}
for a in &[111i32] {}
let test = if some_predicate() { 1 } else { 2 };
while some_predicate() {
let abc = !some_predicate();
}
loop {
let abc = !some_predicate();
break;
}
// nested block
{
let abc = !some_predicate();
{
let def = !some_predicate();
}
}
}
fn after_panic() {
panic!();
let x = "0";
let (ref y,z) = (1i32, 2u32);
match (20i32, 'c') {
(a, ref b) => {}
}
for a in &[111i32] {}
let test = if some_predicate() { 1 } else { 2 };
while some_predicate() {
let abc = !some_predicate();
}
loop {<|fim▁hole|> // nested block
{
let abc = !some_predicate();
{
let def = !some_predicate();
}
}
}
fn after_diverging_function() {
diverge();
let x = "0";
let (ref y,z) = (1i32, 2u32);
match (20i32, 'c') {
(a, ref b) => {}
}
for a in &[111i32] {}
let test = if some_predicate() { 1 } else { 2 };
while some_predicate() {
let abc = !some_predicate();
}
loop {
let abc = !some_predicate();
break;
}
// nested block
{
let abc = !some_predicate();
{
let def = !some_predicate();
}
}
}
fn after_break() {
loop {
break;
let x = "0";
let (ref y,z) = (1i32, 2u32);
match (20i32, 'c') {
(a, ref b) => {}
}
for a in &[111i32] {}
let test = if some_predicate() { 1 } else { 2 };
while some_predicate() {
let abc = !some_predicate();
}
loop {
let abc = !some_predicate();
break;
}
// nested block
{
let abc = !some_predicate();
{
let def = !some_predicate();
}
}
}
}
fn after_continue() {
for _ in 0..10i32 {
continue;
let x = "0";
let (ref y,z) = (1i32, 2u32);
match (20i32, 'c') {
(a, ref b) => {}
}
for a in &[111i32] {}
let test = if some_predicate() { 1 } else { 2 };
while some_predicate() {
let abc = !some_predicate();
}
loop {
let abc = !some_predicate();
break;
}
// nested block
{
let abc = !some_predicate();
{
let def = !some_predicate();
}
}
}
}
fn main() {
after_return();
after_panic();
after_diverging_function();
after_break();
after_continue();
}
fn diverge() -> ! {
panic!();
}
fn some_predicate() -> bool { true || false }<|fim▁end|>
|
let abc = !some_predicate();
break;
}
|
<|file_name|>project.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from PyQt5.QtCore import Qt
from dgp.core.controllers.project_controllers import AirborneProjectController
from .base import WorkspaceTab
class ProjectTab(WorkspaceTab):<|fim▁hole|> self.project = project
@property
def title(self) -> str:
return f'{self.project.get_attr("name")}'
@property
def uid(self):
return self.project.uid<|fim▁end|>
|
def __init__(self, project: AirborneProjectController, parent=None):
super().__init__(parent=parent, flags=Qt.Widget)
|
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.prod')<|fim▁hole|><|fim▁end|>
|
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<|file_name|>reachable-unnameable-type-alias.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(staged_api)]
#![stable(feature = "a", since = "b")]
<|fim▁hole|>
#[stable(feature = "a", since = "b")]
pub fn f() -> inner_private_module::UnnameableTypeAlias {
0
}
fn main() {}<|fim▁end|>
|
mod inner_private_module {
// UnnameableTypeAlias isn't marked as reachable, so no stability annotation is required here
pub type UnnameableTypeAlias = u8;
}
|
<|file_name|>bilock.rs<|end_file_name|><|fim▁begin|>use std::boxed::Box;
use std::cell::UnsafeCell;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
use {Async, Future, Poll};
use task::{self, Task};
/// A type of futures-powered synchronization primitive which is a mutex between
/// two possible owners.
///
/// This primitive is not as generic as a full-blown mutex but is sufficient for
/// many use cases where there are only two possible owners of a resource. The
/// implementation of `BiLock` can be more optimized for just the two possible
/// owners.
///
/// Note that it's possible to use this lock through a poll-style interface with
/// the `poll_lock` method but you can also use it as a future with the `lock`
/// method that consumes a `BiLock` and returns a future that will resolve when
/// it's locked.
///
/// A `BiLock` is typically used for "split" operations where data which serves
/// two purposes wants to be split into two to be worked with separately. For
/// example a TCP stream could be both a reader and a writer or a framing layer
/// could be both a stream and a sink for messages. A `BiLock` enables splitting
/// these two and then using each independently in a futures-powered fashion.
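///
/// A minimal sketch (not from the original docs) of the poll-style use:
///
/// ```ignore
/// let (a, b) = BiLock::new(vec![1, 2, 3]);
/// // `a` and `b` may be sent to different tasks; within a task's poll:
/// if let Async::Ready(mut guard) = a.poll_lock() {
///     guard.push(4); // exclusive access until `guard` is dropped
/// }
/// ```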
pub struct BiLock<T> {
inner: Arc<Inner<T>>,
}
struct Inner<T> {
state: AtomicUsize,
inner: UnsafeCell<T>,
}
unsafe impl<T: Send> Send for Inner<T> {}
unsafe impl<T: Send> Sync for Inner<T> {}
impl<T> BiLock<T> {
/// Creates a new `BiLock` protecting the provided data.
///
/// Two handles to the lock are returned, and these are the only two handles
/// that will ever be available to the lock. These can then be sent to separate
/// tasks to be managed there.
pub fn new(t: T) -> (BiLock<T>, BiLock<T>) {
let inner = Arc::new(Inner {
state: AtomicUsize::new(0),
inner: UnsafeCell::new(t),
});
(BiLock { inner: inner.clone() }, BiLock { inner: inner })
}
/// Attempt to acquire this lock, returning `NotReady` if it can't be
/// acquired.
///
/// This function will acquire the lock in a nonblocking fashion, returning
/// immediately if the lock is already held. If the lock is successfully
/// acquired then `Async::Ready` is returned with a value that represents
/// the locked value (and can be used to access the protected data). The
/// lock is unlocked when the returned `BiLockGuard` is dropped.
///
/// If the lock is already held then this function will return
/// `Async::NotReady`. In this case the current task will also be scheduled
/// to receive a notification when the lock would otherwise become
/// available.
///
/// # Panics
///
/// This function will panic if called outside the context of a future's
/// task.
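    ///
    /// (State-word encoding, summarizing the code below: `0` is unlocked,
    /// `1` is locked with no waiter, and any other value is a raw pointer
    /// to a boxed, parked `Task`.)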
pub fn poll_lock(&self) -> Async<BiLockGuard<T>> {
loop {
match self.inner.state.swap(1, SeqCst) {
// Woohoo, we grabbed the lock!
0 => return Async::Ready(BiLockGuard { inner: self }),
// Oops, someone else has locked the lock
1 => {}
// A task was previously blocked on this lock, likely our task,
// so we need to update that task.
n => unsafe {
drop(Box::from_raw(n as *mut Task));
}
}
let me = Box::new(task::park());
let me = Box::into_raw(me) as usize;
match self.inner.state.compare_exchange(1, me, SeqCst, SeqCst) {
// The lock is still locked, but we've now parked ourselves, so
// just report that we're scheduled to receive a notification.
Ok(_) => return Async::NotReady,
// Oops, looks like the lock was unlocked after our swap above
// and before the compare_exchange. Deallocate what we just
// allocated and go through the loop again.
Err(0) => unsafe {
drop(Box::from_raw(me as *mut Task));
},
// The top of this loop set the previous state to 1, so if we
// failed the CAS above then it's because the previous value was
// *not* zero or one. This indicates that a task was blocked,
// but we're trying to acquire the lock and there's only one
// other reference of the lock, so it should be impossible for
// that task to ever block itself.
Err(n) => panic!("invalid state: {}", n),
}
}
}
/// Perform a "blocking lock" of this lock, consuming this lock handle and
/// returning a future to the acquired lock.
///
/// This function consumes the `BiLock<T>` and returns a sentinel future,
/// `BiLockAcquire<T>`. The returned future will resolve to
/// `BiLockAcquired<T>` which represents a locked lock similarly to
/// `BiLockGuard<T>`.
///
/// Note that the returned future will never resolve to an error.
pub fn lock(self) -> BiLockAcquire<T> {
BiLockAcquire {
inner: self,
}
}
fn unlock(&self) {
match self.inner.state.swap(0, SeqCst) {
// we've locked the lock, shouldn't be possible for us to see an
// unlocked lock.
0 => panic!("invalid unlocked state"),
// Ok, no one else tried to get the lock, we're done.
1 => {}
// Another task has parked themselves on this lock, let's wake them
// up as its now their turn.
n => unsafe {
Box::from_raw(n as *mut Task).unpark();
}
}
}
}
impl<T> Drop for Inner<T> {
fn drop(&mut self) {
assert_eq!(self.state.load(SeqCst), 0);
}
}
/// Returned RAII guard from the `poll_lock` method.
///
/// This structure acts as a sentinel to the data in the `BiLock<T>` itself,
/// implementing `Deref` and `DerefMut` to `T`. When dropped, the lock will be
/// unlocked.
pub struct BiLockGuard<'a, T: 'a> {
inner: &'a BiLock<T>,
}
impl<'a, T> Deref for BiLockGuard<'a, T> {
type Target = T;
fn deref(&self) -> &T {
unsafe { &*self.inner.inner.inner.get() }
}
}
impl<'a, T> DerefMut for BiLockGuard<'a, T> {
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.inner.inner.inner.get() }
}
}
impl<'a, T> Drop for BiLockGuard<'a, T> {
fn drop(&mut self) {
self.inner.unlock();
}
}
/// Future returned by `BiLock::lock` which will resolve when the lock is
/// acquired.
pub struct BiLockAcquire<T> {
inner: BiLock<T>,
}
impl<T> Future for BiLockAcquire<T> {
type Item = BiLockAcquired<T>;
type Error = ();
fn poll(&mut self) -> Poll<BiLockAcquired<T>, ()> {
match self.inner.poll_lock() {
Async::Ready(r) => {
mem::forget(r);
Ok(BiLockAcquired {
inner: BiLock { inner: self.inner.inner.clone() },<|fim▁hole|> Async::NotReady => Ok(Async::NotReady),
}
}
}
/// Resolved value of the `BiLockAcquire<T>` future.
///
/// This value, like `BiLockGuard<T>`, is a sentinel to the value `T` through
/// implementations of `Deref` and `DerefMut`. When dropped will unlock the
/// lock, and the original unlocked `BiLock<T>` can be recovered through the
/// `unlock` method.
pub struct BiLockAcquired<T> {
inner: BiLock<T>,
}
impl<T> BiLockAcquired<T> {
/// Recovers the original `BiLock<T>`, unlocking this lock.
pub fn unlock(self) -> BiLock<T> {
// note that unlocked is implemented in `Drop`, so we don't do anything
// here other than creating a new handle to return.
BiLock { inner: self.inner.inner.clone() }
}
}
impl<T> Deref for BiLockAcquired<T> {
type Target = T;
fn deref(&self) -> &T {
unsafe { &*self.inner.inner.inner.get() }
}
}
impl<T> DerefMut for BiLockAcquired<T> {
fn deref_mut(&mut self) -> &mut T {
unsafe { &mut *self.inner.inner.inner.get() }
}
}
impl<T> Drop for BiLockAcquired<T> {
fn drop(&mut self) {
self.inner.unlock();
}
}<|fim▁end|>
|
}.into())
}
|
<|file_name|>pp.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This pretty-printer is a direct reimplementation of Philip Karlton's
//! Mesa pretty-printer, as described in appendix A of<|fim▁hole|>//! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
//! Stanford Department of Computer Science, 1979.
//!
//! The algorithm's aim is to break a stream into as few lines as possible
//! while respecting the indentation-consistency requirements of the enclosing
//! block, and avoiding breaking at silly places on block boundaries, for
//! example, between "x" and ")" in "x)".
//!
//! I am implementing this algorithm because it comes with 20 pages of
//! documentation explaining its theory, and because it addresses the set of
//! concerns I've seen other pretty-printers fall down on. Weirdly. Even though
//! it's 32 years old. What can I say?
//!
//! Despite some redundancies and quirks in the way it's implemented in that
//! paper, I've opted to keep the implementation here as similar as I can,
//! changing only what was blatantly wrong, a typo, or sufficiently
//! non-idiomatic rust that it really stuck out.
//!
//! In particular you'll see a certain amount of churn related to INTEGER vs.
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
//! somewhat readily? In any case, I've used usize for indices-in-buffers and
//! ints for character-sizes-and-indentation-offsets. This respects the need
//! for ints to "go negative" while carrying a pending-calculation balance, and
//! helps differentiate all the numbers flying around internally (slightly).
//!
//! I also inverted the indentation arithmetic used in the print stack, since
//! the Mesa implementation (somewhat randomly) stores the offset on the print
//! stack in terms of margin-col rather than col itself. I store col.
//!
//! I also implemented a small change in the String token, in that I store an
//! explicit length for the string. For most tokens this is just the length of
//! the accompanying string. But it's necessary to permit it to differ, for
//! encoding things that are supposed to "go on their own line" -- certain
//! classes of comment and blank-line -- where relying on adjacent
//! hardbreak-like Break tokens with long blankness indication doesn't actually
//! work. To see why, consider when there is a "thing that should be on its own
//! line" between two long blocks, say functions. If you put a hardbreak after
//! each function (or before each) and the breaking algorithm decides to break
//! there anyways (because the functions themselves are long) you wind up with
//! extra blank lines. If you don't put hardbreaks you can wind up with the
//! "thing which should be on its own line" not getting its own line in the
//! rare case of "really small functions" or such. This re-occurs with comments
//! and explicit blank lines. So in those cases we use a string with a payload
//! we want isolated to a line and an explicit length that's huge, surrounded
//! by two zero-length breaks. The algorithm will try its best to fit it on a
//! line (which it can't) and so naturally place the content on its own line to
//! avoid combining it with other lines and making matters even worse.
use std::old_io;
use std::string;
use std::iter::repeat;
#[derive(Clone, Copy, PartialEq)]
pub enum Breaks {
Consistent,
Inconsistent,
}
#[derive(Clone, Copy)]
pub struct BreakToken {
offset: isize,
blank_space: isize
}
#[derive(Clone, Copy)]
pub struct BeginToken {
offset: isize,
breaks: Breaks
}
#[derive(Clone)]
pub enum Token {
String(String, isize),
Break(BreakToken),
Begin(BeginToken),
End,
Eof,
}
impl Token {
pub fn is_eof(&self) -> bool {
match *self {
Token::Eof => true,
_ => false,
}
}
pub fn is_hardbreak_tok(&self) -> bool {
match *self {
Token::Break(BreakToken {
offset: 0,
blank_space: bs
}) if bs == SIZE_INFINITY =>
true,
_ =>
false
}
}
}
pub fn tok_str(token: &Token) -> String {
match *token {
Token::String(ref s, len) => format!("STR({},{})", s, len),
Token::Break(_) => "BREAK".to_string(),
Token::Begin(_) => "BEGIN".to_string(),
Token::End => "END".to_string(),
Token::Eof => "EOF".to_string()
}
}
pub fn buf_str(toks: &[Token],
szs: &[isize],
left: usize,
right: usize,
lim: usize)
-> String {
let n = toks.len();
assert_eq!(n, szs.len());
let mut i = left;
let mut l = lim;
let mut s = string::String::from_str("[");
while i != right && l != 0 {
l -= 1;
if i != left {
s.push_str(", ");
}
s.push_str(&format!("{}={}",
szs[i],
tok_str(&toks[i]))[]);
i += 1;
i %= n;
}
s.push(']');
s
}
#[derive(Copy)]
pub enum PrintStackBreak {
Fits,
Broken(Breaks),
}
#[derive(Copy)]
pub struct PrintStackElem {
offset: isize,
pbreak: PrintStackBreak
}
static SIZE_INFINITY: isize = 0xffff;
pub fn mk_printer(out: Box<old_io::Writer+'static>, linewidth: usize) -> Printer {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
let n: usize = 3 * linewidth;
debug!("mk_printer {}", linewidth);
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
let size: Vec<isize> = repeat(0).take(n).collect();
let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
Printer {
out: out,
buf_len: n,
margin: linewidth as isize,
space: linewidth as isize,
left: 0,
right: 0,
token: token,
size: size,
left_total: 0,
right_total: 0,
scan_stack: scan_stack,
scan_stack_empty: true,
top: 0,
bottom: 0,
print_stack: Vec::new(),
pending_indentation: 0
}
}
/// In case you do not have the paper, here is an explanation of what's going
/// on.
///
/// There is a stream of input tokens flowing through this printer.
///
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
/// case every token worth buffering is 1 char long, so it's ok.
///
/// Tokens are String, Break, and Begin/End to delimit blocks.
///
/// Begin tokens can carry an offset, saying "how far to indent when you break
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
/// breaking. Consistent breaking means that after the first break, no attempt
/// will be made to flow subsequent breaks together onto lines. Inconsistent
/// is the opposite. An example of inconsistent breaking would be, say:
///
/// foo(hello, there, good, friends)
///
/// breaking inconsistently to become
///
///   foo(hello, there,
/// good, friends);
///
/// whereas a consistent breaking would yield:
///
/// foo(hello,
///       there,
/// good,
/// friends);
///
/// That is, in the consistent-break blocks we value vertical alignment
/// more than the ability to cram stuff onto a line. But in all cases if it
/// can make a block a one-liner, it'll do so.
///
/// Carrying on with high-level logic:
///
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
/// 'right' indices denote the active portion of the ring buffer as well as
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
/// and point-in-infinite-stream senses freely.
///
/// There is a parallel ring buffer, 'size', that holds the calculated size of
/// each token. Why calculated? Because for Begin/End pairs, the "size"
/// includes everything between the pair. That is, the "size" of Begin is
/// actually the sum of the sizes of everything between Begin and the paired
/// End that follows. Since that is arbitrarily far in the future, 'size' is
/// being rewritten regularly while the printer runs; in fact most of the
/// machinery is here to work out 'size' entries on the fly (and give up when
/// they're so obviously over-long that "infinity" is a good enough
/// approximation for purposes of line breaking).
///
/// The "input side" of the printer is managed as an abstract process called
/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
/// manage calculating 'size'. SCAN is, in other words, the process of
/// calculating 'size' entries.
///
/// The "output side" of the printer is managed by an abstract process called
/// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
/// do with each token/size pair it consumes as it goes. It's trying to consume
/// the entire buffered window, but can't output anything until the size is >=
/// 0 (sizes are set to negative while they're pending calculation).
///
/// So SCAN takes input and buffers tokens and pending calculations, while
/// PRINT gobbles up completed calculations and tokens from the buffer. The
/// theory is that the two can never get more than 3N tokens apart, because
/// once there's "obviously" too much data to fit on a line, in a size
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
/// it.
///
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
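///
/// A rough usage sketch (illustrative only; the convenience functions are
/// defined at the bottom of this module, and `out` is any boxed
/// `old_io::Writer`):
///
///     let mut p = mk_printer(out, 78);
///     try!(cbox(&mut p, 4));
///     try!(word(&mut p, "foo("));
///     try!(word(&mut p, "hello,"));
///     try!(space(&mut p));
///     try!(word(&mut p, "friends)"));
///     try!(end(&mut p));
///     try!(eof(&mut p));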
pub struct Printer {
pub out: Box<old_io::Writer+'static>,
buf_len: usize,
/// Width of lines we're constrained to
margin: isize,
/// Number of spaces left on line
space: isize,
/// Index of left side of input stream
left: usize,
/// Index of right side of input stream
right: usize,
/// Ring-buffer stream goes through
token: Vec<Token> ,
/// Ring-buffer of calculated sizes
size: Vec<isize> ,
/// Running size of stream "...left"
left_total: isize,
/// Running size of stream "...right"
right_total: isize,
/// Pseudo-stack, really a ring too. Holds the
/// primary-ring-buffers index of the Begin that started the
/// current block, possibly with the most recent Break after that
/// Begin (if there is any) on top of it. Stuff is flushed off the
/// bottom as it becomes irrelevant due to the primary ring-buffer
/// advancing.
scan_stack: Vec<usize> ,
/// Top==bottom disambiguator
scan_stack_empty: bool,
/// Index of top of scan_stack
top: usize,
/// Index of bottom of scan_stack
bottom: usize,
/// Stack of blocks-in-progress being flushed by print
print_stack: Vec<PrintStackElem> ,
/// Buffered indentation to avoid writing trailing whitespace
pending_indentation: isize,
}
impl Printer {
pub fn last_token(&mut self) -> Token {
self.token[self.right].clone()
}
// be very careful with this!
pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t;
}
pub fn pretty_print(&mut self, token: Token) -> old_io::IoResult<()> {
debug!("pp ~[{},{}]", self.left, self.right);
match token {
Token::Eof => {
if !self.scan_stack_empty {
self.check_stack(0);
try!(self.advance_left());
}
self.indent(0);
Ok(())
}
Token::Begin(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0;
self.right = 0;
} else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
let right = self.right;
self.scan_push(right);
Ok(())
}
Token::End => {
if self.scan_stack_empty {
debug!("pp End/print ~[{},{}]", self.left, self.right);
self.print(token, 0)
} else {
debug!("pp End/buffer ~[{},{}]", self.left, self.right);
self.advance_right();
self.token[self.right] = token;
self.size[self.right] = -1;
let right = self.right;
self.scan_push(right);
Ok(())
}
}
Token::Break(b) => {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
self.left = 0;
self.right = 0;
} else { self.advance_right(); }
debug!("pp Break({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
self.check_stack(0);
let right = self.right;
self.scan_push(right);
self.token[self.right] = token;
self.size[self.right] = -self.right_total;
self.right_total += b.blank_space;
Ok(())
}
Token::String(s, len) => {
if self.scan_stack_empty {
debug!("pp String('{}')/print ~[{},{}]",
s, self.left, self.right);
self.print(Token::String(s, len), len)
} else {
debug!("pp String('{}')/buffer ~[{},{}]",
s, self.left, self.right);
self.advance_right();
self.token[self.right] = Token::String(s, len);
self.size[self.right] = len;
self.right_total += len;
self.check_stream()
}
}
}
}
pub fn check_stream(&mut self) -> old_io::IoResult<()> {
debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
debug!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space);
if !self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] {
debug!("setting {} to infinity and popping", self.left);
let scanned = self.scan_pop_bottom();
self.size[scanned] = SIZE_INFINITY;
}
}
try!(self.advance_left());
if self.left != self.right {
try!(self.check_stream());
}
}
Ok(())
}
pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
self.top += 1;
self.top %= self.buf_len;
assert!((self.top != self.bottom));
}
self.scan_stack[self.top] = x;
}
pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.top += self.buf_len - 1; self.top %= self.buf_len;
}
return x;
}
pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
self.bottom += 1; self.bottom %= self.buf_len;
}
return x;
}
pub fn advance_right(&mut self) {
self.right += 1;
self.right %= self.buf_len;
assert!((self.right != self.left));
}
pub fn advance_left(&mut self) -> old_io::IoResult<()> {
debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, self.size[self.left]);
let mut left_size = self.size[self.left];
while left_size >= 0 {
let left = self.token[self.left].clone();
let len = match left {
Token::Break(b) => b.blank_space,
Token::String(_, len) => {
assert_eq!(len, left_size);
len
}
_ => 0
};
try!(self.print(left, left_size));
self.left_total += len;
if self.left == self.right {
break;
}
self.left += 1;
self.left %= self.buf_len;
left_size = self.size[self.left];
}
Ok(())
}
pub fn check_stack(&mut self, k: isize) {
if !self.scan_stack_empty {
let x = self.scan_top();
match self.token[x] {
Token::Begin(_) => {
if k > 0 {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
self.check_stack(k - 1);
}
}
Token::End => {
// paper says + not =, but that makes no sense.
let popped = self.scan_pop();
self.size[popped] = 1;
self.check_stack(k + 1);
}
_ => {
let popped = self.scan_pop();
self.size[popped] = self.size[x] + self.right_total;
if k > 0 {
self.check_stack(k);
}
}
}
}
}
pub fn print_newline(&mut self, amount: isize) -> old_io::IoResult<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
self.indent(amount);
return ret;
}
pub fn indent(&mut self, amount: isize) {
debug!("INDENT {}", amount);
self.pending_indentation += amount;
}
pub fn get_top(&mut self) -> PrintStackElem {
let print_stack = &mut self.print_stack;
let n = print_stack.len();
if n != 0 {
(*print_stack)[n - 1]
} else {
PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
}
}
}
pub fn print_str(&mut self, s: &str) -> old_io::IoResult<()> {
while self.pending_indentation > 0 {
try!(write!(self.out, " "));
self.pending_indentation -= 1;
}
write!(self.out, "{}", s)
}
pub fn print(&mut self, token: Token, l: isize) -> old_io::IoResult<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token[],
&self.size[],
self.left,
self.right,
6));
match token {
Token::Begin(b) => {
if l > self.space {
let col = self.margin - self.space + b.offset;
debug!("print Begin -> push broken block at col {}", col);
self.print_stack.push(PrintStackElem {
offset: col,
pbreak: PrintStackBreak::Broken(b.breaks)
});
} else {
debug!("print Begin -> push fitting block");
self.print_stack.push(PrintStackElem {
offset: 0,
pbreak: PrintStackBreak::Fits
});
}
Ok(())
}
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
assert!((print_stack.len() != 0));
print_stack.pop().unwrap();
Ok(())
}
Token::Break(b) => {
let top = self.get_top();
match top.pbreak {
PrintStackBreak::Fits => {
debug!("print Break({}) in fitting block", b.blank_space);
self.space -= b.blank_space;
self.indent(b.blank_space);
Ok(())
}
PrintStackBreak::Broken(Breaks::Consistent) => {
debug!("print Break({}+{}) in consistent block",
top.offset, b.offset);
let ret = self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
ret
}
PrintStackBreak::Broken(Breaks::Inconsistent) => {
if l > self.space {
debug!("print Break({}+{}) w/ newline in inconsistent",
top.offset, b.offset);
let ret = self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
ret
} else {
debug!("print Break({}) w/o newline in inconsistent",
b.blank_space);
self.indent(b.blank_space);
self.space -= b.blank_space;
Ok(())
}
}
}
}
Token::String(s, len) => {
debug!("print String({})", s);
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
self.print_str(&s[])
}
Token::Eof => {
// Eof should never get here.
panic!();
}
}
}
}
// Convenience functions to talk to the printer.
//
// "raw box"
pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> old_io::IoResult<()> {
p.pretty_print(Token::Begin(BeginToken {
offset: indent as isize,
breaks: b
}))
}
pub fn ibox(p: &mut Printer, indent: usize) -> old_io::IoResult<()> {
rbox(p, indent, Breaks::Inconsistent)
}
pub fn cbox(p: &mut Printer, indent: usize) -> old_io::IoResult<()> {
rbox(p, indent, Breaks::Consistent)
}
pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> old_io::IoResult<()> {
p.pretty_print(Token::Break(BreakToken {
offset: off,
blank_space: n as isize
}))
}
pub fn end(p: &mut Printer) -> old_io::IoResult<()> {
p.pretty_print(Token::End)
}
pub fn eof(p: &mut Printer) -> old_io::IoResult<()> {
p.pretty_print(Token::Eof)
}
pub fn word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize))
}
pub fn huge_word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), SIZE_INFINITY))
}
pub fn zero_word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
}
pub fn spaces(p: &mut Printer, n: usize) -> old_io::IoResult<()> {
break_offset(p, n, 0)
}
pub fn zerobreak(p: &mut Printer) -> old_io::IoResult<()> {
spaces(p, 0)
}
pub fn space(p: &mut Printer) -> old_io::IoResult<()> {
spaces(p, 1)
}
pub fn hardbreak(p: &mut Printer) -> old_io::IoResult<()> {
spaces(p, SIZE_INFINITY as usize)
}
pub fn hardbreak_tok_offset(off: isize) -> Token {
Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY})
}
pub fn hardbreak_tok() -> Token {
hardbreak_tok_offset(0)
}<|fim▁end|>
|
//!
|
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package ssm
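// Illustrative only (not part of the generated constants below): callers
// typically compare these codes against the Code() of an awserr.Error, e.g.
//
//	if aerr, ok := err.(awserr.Error); ok {
//		switch aerr.Code() {
//		case ErrCodeInvalidDocument:
//			// the referenced document does not exist
//		}
//	}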
const (
// ErrCodeAlreadyExistsException for service response error code
// "AlreadyExistsException".
//
// Error returned if an attempt is made to register a patch group with a patch
// baseline that is already registered with a different patch baseline.
ErrCodeAlreadyExistsException = "AlreadyExistsException"
// ErrCodeAssociatedInstances for service response error code
// "AssociatedInstances".
//
// You must disassociate a document from all instances before you can delete
// it.
ErrCodeAssociatedInstances = "AssociatedInstances"
// ErrCodeAssociationAlreadyExists for service response error code
// "AssociationAlreadyExists".
//
// The specified association already exists.
ErrCodeAssociationAlreadyExists = "AssociationAlreadyExists"
// ErrCodeAssociationDoesNotExist for service response error code
// "AssociationDoesNotExist".
//
// The specified association does not exist.
ErrCodeAssociationDoesNotExist = "AssociationDoesNotExist"
// ErrCodeAssociationLimitExceeded for service response error code
// "AssociationLimitExceeded".
//
// You can have at most 2,000 active associations.
ErrCodeAssociationLimitExceeded = "AssociationLimitExceeded"
// ErrCodeAssociationVersionLimitExceeded for service response error code
// "AssociationVersionLimitExceeded".
//
// You have reached the maximum number versions allowed for an association.
// Each association has a limit of 1,000 versions.
ErrCodeAssociationVersionLimitExceeded = "AssociationVersionLimitExceeded"
// ErrCodeAutomationDefinitionNotFoundException for service response error code
// "AutomationDefinitionNotFoundException".
//
// An Automation document with the specified name could not be found.
ErrCodeAutomationDefinitionNotFoundException = "AutomationDefinitionNotFoundException"
// ErrCodeAutomationDefinitionVersionNotFoundException for service response error code
// "AutomationDefinitionVersionNotFoundException".
//
// An Automation document with the specified name and version could not be found.
ErrCodeAutomationDefinitionVersionNotFoundException = "AutomationDefinitionVersionNotFoundException"
// ErrCodeAutomationExecutionLimitExceededException for service response error code
// "AutomationExecutionLimitExceededException".
//
// The number of simultaneously running Automation executions exceeded the allowable
// limit.
ErrCodeAutomationExecutionLimitExceededException = "AutomationExecutionLimitExceededException"
// ErrCodeAutomationExecutionNotFoundException for service response error code
// "AutomationExecutionNotFoundException".
//
// There is no automation execution information for the requested automation
// execution ID.
ErrCodeAutomationExecutionNotFoundException = "AutomationExecutionNotFoundException"
// ErrCodeAutomationStepNotFoundException for service response error code
// "AutomationStepNotFoundException".
//
// The specified step name and execution ID don't exist. Verify the information
// and try again.
ErrCodeAutomationStepNotFoundException = "AutomationStepNotFoundException"
// ErrCodeComplianceTypeCountLimitExceededException for service response error code
// "ComplianceTypeCountLimitExceededException".
//
// You specified too many custom compliance types. You can specify a maximum
// of 10 different types.
ErrCodeComplianceTypeCountLimitExceededException = "ComplianceTypeCountLimitExceededException"
// ErrCodeCustomSchemaCountLimitExceededException for service response error code
// "CustomSchemaCountLimitExceededException".
//
// You have exceeded the limit for custom schemas. Delete one or more custom
// schemas and try again.
ErrCodeCustomSchemaCountLimitExceededException = "CustomSchemaCountLimitExceededException"
// ErrCodeDocumentAlreadyExists for service response error code
// "DocumentAlreadyExists".
//
// The specified document already exists.
ErrCodeDocumentAlreadyExists = "DocumentAlreadyExists"
// ErrCodeDocumentLimitExceeded for service response error code
// "DocumentLimitExceeded".
//
// You can have at most 200 active Systems Manager documents.
ErrCodeDocumentLimitExceeded = "DocumentLimitExceeded"
// ErrCodeDocumentPermissionLimit for service response error code
// "DocumentPermissionLimit".
//
// The document cannot be shared with more AWS user accounts. You can share
// a document with a maximum of 20 accounts. You can publicly share up to five
// documents. If you need to increase this limit, contact AWS Support.
ErrCodeDocumentPermissionLimit = "DocumentPermissionLimit"
// ErrCodeDocumentVersionLimitExceeded for service response error code
// "DocumentVersionLimitExceeded".
//
// The document has too many versions. Delete one or more document versions
// and try again.
ErrCodeDocumentVersionLimitExceeded = "DocumentVersionLimitExceeded"
// ErrCodeDoesNotExistException for service response error code
// "DoesNotExistException".
//
// Error returned when the ID specified for a resource, such as a Maintenance
// Window or Patch baseline, doesn't exist.
//
// For information about resource limits in Systems Manager, see AWS Systems
// Manager Limits (http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_ssm).
ErrCodeDoesNotExistException = "DoesNotExistException"
// ErrCodeDuplicateDocumentContent for service response error code
// "DuplicateDocumentContent".
//
// The content of the association document matches another document. Change
// the content of the document and try again.
ErrCodeDuplicateDocumentContent = "DuplicateDocumentContent"
// ErrCodeDuplicateInstanceId for service response error code
// "DuplicateInstanceId".
//
// You cannot specify an instance ID in more than one association.
ErrCodeDuplicateInstanceId = "DuplicateInstanceId"
// ErrCodeFeatureNotAvailableException for service response error code
// "FeatureNotAvailableException".
//
// You attempted to register a LAMBDA or STEP_FUNCTION task in a region where
// the corresponding service is not available.
ErrCodeFeatureNotAvailableException = "FeatureNotAvailableException"
// ErrCodeHierarchyLevelLimitExceededException for service response error code
// "HierarchyLevelLimitExceededException".
//
// A hierarchy can have a maximum of 15 levels. For more information, see Working
// with Systems Manager Parameters (http://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-paramstore-working.html).
ErrCodeHierarchyLevelLimitExceededException = "HierarchyLevelLimitExceededException"
// ErrCodeHierarchyTypeMismatchException for service response error code
// "HierarchyTypeMismatchException".
//
// Parameter Store does not support changing a parameter type in a hierarchy.
// For example, you can't change a parameter from a String type to a SecureString
// type. You must create a new, unique parameter.
ErrCodeHierarchyTypeMismatchException = "HierarchyTypeMismatchException"
// ErrCodeIdempotentParameterMismatch for service response error code
// "IdempotentParameterMismatch".
//
// Error returned when an idempotent operation is retried and the parameters
// don't match the original call to the API with the same idempotency token.
ErrCodeIdempotentParameterMismatch = "IdempotentParameterMismatch"
// ErrCodeInternalServerError for service response error code
// "InternalServerError".
//
// An error occurred on the server side.
ErrCodeInternalServerError = "InternalServerError"
// ErrCodeInvalidActivation for service response error code
// "InvalidActivation".
//
// The activation is not valid. The activation might have been deleted, or the
// ActivationId and the ActivationCode do not match.
ErrCodeInvalidActivation = "InvalidActivation"
// ErrCodeInvalidActivationId for service response error code
// "InvalidActivationId".
//
// The activation ID is not valid. Verify that you entered the correct ActivationId
// or ActivationCode and try again.
ErrCodeInvalidActivationId = "InvalidActivationId"
// ErrCodeInvalidAllowedPatternException for service response error code
// "InvalidAllowedPatternException".
//
// The request does not meet the regular expression requirement.
ErrCodeInvalidAllowedPatternException = "InvalidAllowedPatternException"
// ErrCodeInvalidAssociationVersion for service response error code
// "InvalidAssociationVersion".
//
// The version you specified is not valid. Use ListAssociationVersions to view
// all versions of an association according to the association ID. Or, use the
// $LATEST parameter to view the latest version of the association.
ErrCodeInvalidAssociationVersion = "InvalidAssociationVersion"
// ErrCodeInvalidAutomationExecutionParametersException for service response error code
// "InvalidAutomationExecutionParametersException".
//
// The supplied parameters for invoking the specified Automation document are
// incorrect. For example, they may not match the set of parameters permitted
// for the specified Automation document.
ErrCodeInvalidAutomationExecutionParametersException = "InvalidAutomationExecutionParametersException"
// ErrCodeInvalidAutomationSignalException for service response error code
// "InvalidAutomationSignalException".
//
// The signal is not valid for the current Automation execution.
ErrCodeInvalidAutomationSignalException = "InvalidAutomationSignalException"
// ErrCodeInvalidAutomationStatusUpdateException for service response error code
// "InvalidAutomationStatusUpdateException".
//
// The specified update status operation is not valid.
ErrCodeInvalidAutomationStatusUpdateException = "InvalidAutomationStatusUpdateException"
// ErrCodeInvalidCommandId for service response error code
// "InvalidCommandId".
ErrCodeInvalidCommandId = "InvalidCommandId"
// ErrCodeInvalidDeleteInventoryParametersException for service response error code
// "InvalidDeleteInventoryParametersException".
//
// One or more of the parameters specified for the delete operation is not valid.
// Verify all parameters and try again.
ErrCodeInvalidDeleteInventoryParametersException = "InvalidDeleteInventoryParametersException"
// ErrCodeInvalidDeletionIdException for service response error code
// "InvalidDeletionIdException".
//
// The ID specified for the delete operation does not exist or is not valid.
// Verify the ID and try again.
ErrCodeInvalidDeletionIdException = "InvalidDeletionIdException"
// ErrCodeInvalidDocument for service response error code
// "InvalidDocument".
//
// The specified document does not exist.
ErrCodeInvalidDocument = "InvalidDocument"
// ErrCodeInvalidDocumentContent for service response error code
// "InvalidDocumentContent".
//
// The content for the document is not valid.
ErrCodeInvalidDocumentContent = "InvalidDocumentContent"
// ErrCodeInvalidDocumentOperation for service response error code
// "InvalidDocumentOperation".
//
// You attempted to delete a document while it is still shared. You must stop
// sharing the document before you can delete it.
ErrCodeInvalidDocumentOperation = "InvalidDocumentOperation"
// ErrCodeInvalidDocumentSchemaVersion for service response error code
// "InvalidDocumentSchemaVersion".
//
// The version of the document schema is not supported.
ErrCodeInvalidDocumentSchemaVersion = "InvalidDocumentSchemaVersion"
// ErrCodeInvalidDocumentVersion for service response error code
// "InvalidDocumentVersion".
//
// The document version is not valid or does not exist.
ErrCodeInvalidDocumentVersion = "InvalidDocumentVersion"
// ErrCodeInvalidFilter for service response error code
// "InvalidFilter".
//
// The filter name is not valid. Verify that you entered the correct name and
// try again.
ErrCodeInvalidFilter = "InvalidFilter"
// ErrCodeInvalidFilterKey for service response error code
// "InvalidFilterKey".
//
// The specified key is not valid.
ErrCodeInvalidFilterKey = "InvalidFilterKey"
// ErrCodeInvalidFilterOption for service response error code
// "InvalidFilterOption".
//
// The specified filter option is not valid. Valid options are Equals and BeginsWith.
// For Path filter, valid options are Recursive and OneLevel.
ErrCodeInvalidFilterOption = "InvalidFilterOption"
// ErrCodeInvalidFilterValue for service response error code
// "InvalidFilterValue".
//
// The filter value is not valid. Verify the value and try again.
ErrCodeInvalidFilterValue = "InvalidFilterValue"
// ErrCodeInvalidInstanceId for service response error code
// "InvalidInstanceId".
//
// The following problems can cause this exception:
//
// You do not have permission to access the instance.
//
// The SSM Agent is not running. On managed instances and Linux instances, verify
// that the SSM Agent is running. On EC2 Windows instances, verify that the
// EC2Config service is running.
//
// The SSM Agent or EC2Config service is not registered to the SSM endpoint.
// Try reinstalling the SSM Agent or EC2Config service.
//
// The instance is not in a valid state. Valid states are: Running, Pending, Stopped,
// Stopping. Invalid states are: Shutting-down and Terminated.
ErrCodeInvalidInstanceId = "InvalidInstanceId"
// ErrCodeInvalidInstanceInformationFilterValue for service response error code
// "InvalidInstanceInformationFilterValue".
//
// The specified filter value is not valid.
ErrCodeInvalidInstanceInformationFilterValue = "InvalidInstanceInformationFilterValue"
// ErrCodeInvalidInventoryItemContextException for service response error code
// "InvalidInventoryItemContextException".
//
// You specified invalid keys or values in the Context attribute for InventoryItem.
// Verify the keys and values, and try again.
ErrCodeInvalidInventoryItemContextException = "InvalidInventoryItemContextException"
// ErrCodeInvalidInventoryRequestException for service response error code
// "InvalidInventoryRequestException".
//
// The request is not valid.
ErrCodeInvalidInventoryRequestException = "InvalidInventoryRequestException"
// ErrCodeInvalidItemContentException for service response error code
// "InvalidItemContentException".
//
// One or more content items are not valid.
ErrCodeInvalidItemContentException = "InvalidItemContentException"
// ErrCodeInvalidKeyId for service response error code
// "InvalidKeyId".
//
// The query key ID is not valid.
ErrCodeInvalidKeyId = "InvalidKeyId"
// ErrCodeInvalidNextToken for service response error code
// "InvalidNextToken".
//
// The specified token is not valid.
ErrCodeInvalidNextToken = "InvalidNextToken"
// ErrCodeInvalidNotificationConfig for service response error code
// "InvalidNotificationConfig".
//
// One or more configuration items are not valid. Verify that a valid Amazon
// Resource Name (ARN) was provided for an Amazon SNS topic.
ErrCodeInvalidNotificationConfig = "InvalidNotificationConfig"
// ErrCodeInvalidOptionException for service response error code
// "InvalidOptionException".
//
// The delete inventory option specified is not valid. Verify the option and
// try again.
ErrCodeInvalidOptionException = "InvalidOptionException"
// ErrCodeInvalidOutputFolder for service response error code
// "InvalidOutputFolder".
//
// The S3 bucket does not exist.
ErrCodeInvalidOutputFolder = "InvalidOutputFolder"
// ErrCodeInvalidOutputLocation for service response error code
// "InvalidOutputLocation".
//
// The output location is not valid or does not exist.
ErrCodeInvalidOutputLocation = "InvalidOutputLocation"
// ErrCodeInvalidParameters for service response error code
// "InvalidParameters".
//
// You must specify values for all required parameters in the Systems Manager
// document. You can only supply values to parameters defined in the Systems
// Manager document.
ErrCodeInvalidParameters = "InvalidParameters"
// ErrCodeInvalidPermissionType for service response error code
// "InvalidPermissionType".
//
// The permission type is not supported. Share is the only supported permission
// type.
ErrCodeInvalidPermissionType = "InvalidPermissionType"
// ErrCodeInvalidPluginName for service response error code
// "InvalidPluginName".
//
// The plugin name is not valid.
ErrCodeInvalidPluginName = "InvalidPluginName"
// ErrCodeInvalidResourceId for service response error code
// "InvalidResourceId".
//
// The resource ID is not valid. Verify that you entered the correct ID and
// try again.
ErrCodeInvalidResourceId = "InvalidResourceId"
// ErrCodeInvalidResourceType for service response error code
// "InvalidResourceType".
//
// The resource type is not valid. For example, if you are attempting to tag
// an instance, the instance must be a registered, managed instance.
ErrCodeInvalidResourceType = "InvalidResourceType"
// ErrCodeInvalidResultAttributeException for service response error code
// "InvalidResultAttributeException".
//
// The specified inventory item result attribute is not valid.
ErrCodeInvalidResultAttributeException = "InvalidResultAttributeException"
// ErrCodeInvalidRole for service response error code
// "InvalidRole".
//
// The role name can't contain invalid characters. Also verify that you specified
// an IAM role for notifications that includes the required trust policy. For
// information about configuring the IAM role for Run Command notifications,
// see Configuring Amazon SNS Notifications for Run Command (http://docs.aws.amazon.com/systems-manager/latest/userguide/rc-sns-notifications.html)
// in the AWS Systems Manager User Guide.
ErrCodeInvalidRole = "InvalidRole"
// ErrCodeInvalidSchedule for service response error code
// "InvalidSchedule".
//
// The schedule is invalid. Verify your cron or rate expression and try again.
ErrCodeInvalidSchedule = "InvalidSchedule"
// ErrCodeInvalidTarget for service response error code
// "InvalidTarget".
//
// The target is not valid or does not exist. It might not be configured for
// EC2 Systems Manager or you might not have permission to perform the operation.
ErrCodeInvalidTarget = "InvalidTarget"
// ErrCodeInvalidTypeNameException for service response error code
// "InvalidTypeNameException".
//
// The parameter type name is not valid.
ErrCodeInvalidTypeNameException = "InvalidTypeNameException"
// ErrCodeInvalidUpdate for service response error code
// "InvalidUpdate".
//
// The update is not valid.
ErrCodeInvalidUpdate = "InvalidUpdate"
// ErrCodeInvocationDoesNotExist for service response error code
// "InvocationDoesNotExist".
//
// The command ID and instance ID you specified did not match any invocations.
// Verify the command ID and the instance ID and try again.
ErrCodeInvocationDoesNotExist = "InvocationDoesNotExist"
// ErrCodeItemContentMismatchException for service response error code
// "ItemContentMismatchException".
//
// The inventory item has invalid content.
ErrCodeItemContentMismatchException = "ItemContentMismatchException"
// ErrCodeItemSizeLimitExceededException for service response error code
// "ItemSizeLimitExceededException".
//
// The inventory item size has exceeded the size limit.
ErrCodeItemSizeLimitExceededException = "ItemSizeLimitExceededException"
// ErrCodeMaxDocumentSizeExceeded for service response error code
// "MaxDocumentSizeExceeded".
//
// The size limit of a document is 64 KB.
ErrCodeMaxDocumentSizeExceeded = "MaxDocumentSizeExceeded"
// ErrCodeParameterAlreadyExists for service response error code
// "ParameterAlreadyExists".
//
// The parameter already exists. You can't create duplicate parameters.
ErrCodeParameterAlreadyExists = "ParameterAlreadyExists"
// ErrCodeParameterLimitExceeded for service response error code
// "ParameterLimitExceeded".
//
// You have exceeded the number of parameters for this AWS account. Delete one
// or more parameters and try again.
ErrCodeParameterLimitExceeded = "ParameterLimitExceeded"
// ErrCodeParameterMaxVersionLimitExceeded for service response error code
// "ParameterMaxVersionLimitExceeded".
//
// The parameter exceeded the maximum number of allowed versions.
ErrCodeParameterMaxVersionLimitExceeded = "ParameterMaxVersionLimitExceeded"
// ErrCodeParameterNotFound for service response error code
// "ParameterNotFound".
//
// The parameter could not be found. Verify the name and try again.
ErrCodeParameterNotFound = "ParameterNotFound"
// ErrCodeParameterPatternMismatchException for service response error code
// "ParameterPatternMismatchException".
//
// The parameter name is not valid.
ErrCodeParameterPatternMismatchException = "ParameterPatternMismatchException"
// ErrCodeParameterVersionNotFound for service response error code
// "ParameterVersionNotFound".
//
// The specified parameter version was not found. Verify the parameter name
// and version, and try again.
ErrCodeParameterVersionNotFound = "ParameterVersionNotFound"
// ErrCodeResourceDataSyncAlreadyExistsException for service response error code
// "ResourceDataSyncAlreadyExistsException".
//
// A sync configuration with the same name already exists.
ErrCodeResourceDataSyncAlreadyExistsException = "ResourceDataSyncAlreadyExistsException"
// ErrCodeResourceDataSyncCountExceededException for service response error code
// "ResourceDataSyncCountExceededException".
//
// You have exceeded the allowed maximum sync configurations.
ErrCodeResourceDataSyncCountExceededException = "ResourceDataSyncCountExceededException"
// ErrCodeResourceDataSyncInvalidConfigurationException for service response error code
// "ResourceDataSyncInvalidConfigurationException".
//
// The specified sync configuration is invalid.
ErrCodeResourceDataSyncInvalidConfigurationException = "ResourceDataSyncInvalidConfigurationException"
// ErrCodeResourceDataSyncNotFoundException for service response error code
// "ResourceDataSyncNotFoundException".
//
// The specified sync name was not found.
ErrCodeResourceDataSyncNotFoundException = "ResourceDataSyncNotFoundException"
// ErrCodeResourceInUseException for service response error code
// "ResourceInUseException".
//
// Error returned if an attempt is made to delete a patch baseline that is registered
// for a patch group.
ErrCodeResourceInUseException = "ResourceInUseException"
// ErrCodeResourceLimitExceededException for service response error code
// "ResourceLimitExceededException".
//
// Error returned when the caller has exceeded the default resource limits.
// For example, too many Maintenance Windows or Patch baselines have been created.
//
// For information about resource limits in Systems Manager, see AWS Systems
// Manager Limits (http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_ssm).
ErrCodeResourceLimitExceededException = "ResourceLimitExceededException"
// ErrCodeStatusUnchanged for service response error code<|fim▁hole|> // The updated status is the same as the current status.
ErrCodeStatusUnchanged = "StatusUnchanged"
// ErrCodeSubTypeCountLimitExceededException for service response error code
// "SubTypeCountLimitExceededException".
//
// The sub-type count exceeded the limit for the inventory type.
ErrCodeSubTypeCountLimitExceededException = "SubTypeCountLimitExceededException"
// ErrCodeTargetInUseException for service response error code
// "TargetInUseException".
//
// You specified the Safe option for the DeregisterTargetFromMaintenanceWindow
// operation, but the target is still referenced in a task.
ErrCodeTargetInUseException = "TargetInUseException"
// ErrCodeTooManyTagsError for service response error code
// "TooManyTagsError".
//
// The Targets parameter includes too many tags. Remove one or more tags and
// try the command again.
ErrCodeTooManyTagsError = "TooManyTagsError"
// ErrCodeTooManyUpdates for service response error code
// "TooManyUpdates".
//
// There are concurrent updates for a resource that supports one update at a
// time.
ErrCodeTooManyUpdates = "TooManyUpdates"
// ErrCodeTotalSizeLimitExceededException for service response error code
// "TotalSizeLimitExceededException".
//
// The size of inventory data has exceeded the total size limit for the resource.
ErrCodeTotalSizeLimitExceededException = "TotalSizeLimitExceededException"
// ErrCodeUnsupportedInventoryItemContextException for service response error code
// "UnsupportedInventoryItemContextException".
//
// The Context attribute that you specified for the InventoryItem is not allowed
// for this inventory type. You can only use the Context attribute with inventory
// types like AWS:ComplianceItem.
ErrCodeUnsupportedInventoryItemContextException = "UnsupportedInventoryItemContextException"
// ErrCodeUnsupportedInventorySchemaVersionException for service response error code
// "UnsupportedInventorySchemaVersionException".
//
// Inventory item type schema version has to match supported versions in the
// service. Check output of GetInventorySchema to see the available schema version
// for each type.
ErrCodeUnsupportedInventorySchemaVersionException = "UnsupportedInventorySchemaVersionException"
// ErrCodeUnsupportedOperatingSystem for service response error code
// "UnsupportedOperatingSystem".
//
// The operating system you specified is not supported, or the operation is
// not supported for the operating system. Valid operating systems include:
// Windows, AmazonLinux, RedhatEnterpriseLinux, and Ubuntu.
ErrCodeUnsupportedOperatingSystem = "UnsupportedOperatingSystem"
// ErrCodeUnsupportedParameterType for service response error code
// "UnsupportedParameterType".
//
// The parameter type is not supported.
ErrCodeUnsupportedParameterType = "UnsupportedParameterType"
// ErrCodeUnsupportedPlatformType for service response error code
// "UnsupportedPlatformType".
//
// The document does not support the platform type of the given instance ID(s).
// For example, you sent a document for a Windows instance to a Linux instance.
ErrCodeUnsupportedPlatformType = "UnsupportedPlatformType"
)<|fim▁end|>
|
// "StatusUnchanged".
//
|
<|file_name|>exercicio1.py<|end_file_name|><|fim▁begin|>resultado = 0
contador = 0
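# Checks whether the number is a product of three consecutive integers,
# e.g. 504 = 7 x 8 x 9.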
<|fim▁hole|> if resultado == numero:
print('O numero %d e triangular. %d x %d x %d = %d' % (numero, (contador -2), (contador - 1), contador, numero))
break
contador += 1
else:
    print('O numero %d nao e triangular' % numero)<|fim▁end|>
|
numero = int(input('Digite um numero: '))
while resultado < numero:
resultado = (contador -2) * (contador -1) * contador
|
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as<|fim▁hole|># License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Bangkok Rubber Module',
'version': '0.1',
'category': 'Tools',
'description': """
""",
'author': 'Mr.Tititab Srisookco',
'website': 'http://www.ineco.co.th',
'summary': '',
'depends': ['account','purchase','sale','stock','product'],
'data': [ ],
'update_xml': [
'security.xml',
'stock_view.xml',
'adempier_view.xml',
],
'images': [],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
# published by the Free Software Foundation, either version 3 of the
|
<|file_name|>NPC.cpp<|end_file_name|><|fim▁begin|>#include "NPC.h"
#include "Application.h"
#include "ModuleEntityManager.h"
#include "Player.h"
#include "ModuleCamera.h"
#include "ModuleAudio.h"
#include <random>
NPC::NPC(Entity::Types entityType, iPoint iniPos, short int hp, Direction facing)
: Creature(entityType, iniPos, hp, facing)
{
	this->facing = LEFT; // assign the member; the parameter shadows it
}
NPC::~NPC()
{}
bool NPC::Start()
{
if (NPC::LoadConfigFromJSON(CONFIG_FILE) == false)
return false;
return Creature::Start();
}
bool NPC::LoadConfigFromJSON(const char* fileName)
{
JSON_Value* root_value;
JSON_Object* moduleObject;
root_value = json_parse_file(fileName);
if (root_value != nullptr)
moduleObject = json_object(root_value);
else return false;
if (soundFxNPCHit == 0)
soundFxNPCHit = App->audio->LoadFx(json_object_dotget_string(moduleObject, "NPC.soundFxNPCHit"));
if (soundFxNPCDie == 0)
soundFxNPCDie = App->audio->LoadFx(json_object_dotget_string(moduleObject, "NPC.soundFxNPCDie"));
json_value_free(root_value);
return true;
}
// Update: draw background
update_status NPC::Update()
{
behaviour();
Creature::Update();
return UPDATE_CONTINUE;
}
void NPC::hit(Creature* c2) {
App->audio->PlayFx(soundFxNPCHit);
Creature::hit(c2);
}
void NPC::die() {
App->audio->PlayFx(soundFxNPCDie);
Creature::die();
}
void NPC::behaviour() {
// If the Player is in attack range then attack: attack or retreat
// else if the Player is close be aggressive: attack, chase, retreat
// else take it easy: patrol, wait
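	// Rough sketch of the transitions implemented by the switch below:
	//   APPROACH -> (player in range) -> ATTACK | WAIT
	//   ATTACK   -> ATTACK_RECOVER -> (hit window elapsed) -> ATTACK | WAIT
	//   WAIT     -> (waitingTime elapsed) -> APPROACH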
switch (action) {
case APPROACH:
if (position.x > App->entities->player->position.x) velocity.x = -1.0f;
else velocity.x = 1.0f;
if (depth > App->entities->player->depth) --depth;
else if (depth < App->entities->player->depth) ++depth;
if (getDistanceToPlayer() < 50 && abs(depth - App->entities->player->depth) <= DEPTH_THRESHOLD + 2) {
if (canBeAttacked()) {
NPCTimer.reset();
action = ATTACK;
}
else
{
NPCTimer.reset();
action = WAIT;
}
}
break;
case WAIT:
velocity = { 0.0f, 0.0f };
if (status != UNAVAILABLE && NPCTimer.getDelta() > waitingTime) {
action = APPROACH;
}
break;
case ATTACK:
velocity = { 0.0f, 0.0f };
status = ATTACKING;
setCurrentAnimation(&attack);
attackTimer.reset();
NPCTimer.reset();
action = ATTACK_RECOVER;
break;
case ATTACK_RECOVER:
// if the hit has landed continue to attack
// else retreat
if (NPCTimer.getDelta() > 300)
if (getDistanceToPlayer() < 50 && abs(depth - App->entities->player->depth) <= DEPTH_THRESHOLD + 2) {
if (canBeAttacked()) {
NPCTimer.reset();
action = ATTACK;
}
else
{
NPCTimer.reset();
action = WAIT;
}
}
break;
}
}
void NPC::chooseNextAction() {
/* if (getDistanceToPlayer() < 50) {
if (rollDice(2) == 1) {
action = ATTACK;
attack();
}
else {
action = RETREAT;
retreat();
}
}
else if (getDistanceToPlayer() < 150) {
// action = CHASE;
chase();
}
else {
if (rollDice(2) == 1) {
action = PATROL;
switch (rollDice(8)) {
case 1: velocity = { 0.0f, -1.0f }; break;
case 2: velocity = { 1.0f, -1.0f }; break;
case 3: velocity = { 1.0f, 0.0f }; break;
case 4: velocity = { 1.0f, 1.0f }; break;
case 5: velocity = { 0.0f, 1.0f }; break;
case 6: velocity = { -1.0f, 1.0f }; break;
case 7: velocity = { -1.0f, 0.0f }; break;
case 8: velocity = { -1.0f, -1.0f }; break;
}
}
else {
action = WAIT;
velocity = { 0.0f, 0.0f };
waitingTime = 500 + 100 * rollDice(5);
}
}
NPCTimer.reset();*/
}
float NPC::getDistanceToPlayer() const {
short int depthDist = this->depth - App->entities->player->depth;
short int xDist = this->position.x - App->entities->player->position.x;
return (float)(sqrt(pow(depthDist, 2) + pow(xDist, 2)));
}
const unsigned short int NPC::rollDice(unsigned short int nrDiceFaces) const {
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(1, nrDiceFaces);
return dis(gen);
}
void NPC::retreat() {
if (getDistanceToPlayer() < 100)
getAwayFromPlayer();
}
void NPC::getAwayFromPlayer() {
if (App->entities->player->position.x + App->camera->coord.x > (int)(SCREEN_WIDTH / 2))
this->velocity.x = -2.0f;
else
this->velocity.x = 2.0f;
if (App->entities->player->depth > (int)(53 / 2))
this->velocity.y = 1.0f;
else this->velocity.y = -1.0f;
}
void NPC::chase() {
if (App->entities->player->position.x + App->camera->coord.x > (int)(SCREEN_WIDTH / 2))
this->velocity.x = +2.0f;
else
this->velocity.x = -2.0f;
if (App->entities->player->depth > (int)(53 / 2))
this->velocity.y = -1.0f;
else this->velocity.y = +1.0f;
}
/*void NPC::doAttack() {<|fim▁hole|> status = ATTACKING;
setCurrentAnimation(&chop);
}*/<|fim▁end|>
| |
<|file_name|>handler.py<|end_file_name|><|fim▁begin|># Copyright 2016-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cloud-Custodian AWS Lambda Entry Point
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import logging
import json
from c7n.config import Config
from c7n.structure import StructureParser
from c7n.resources import load_resources
from c7n.policy import PolicyCollection
from c7n.utils import format_event, get_account_id_from_sts, local_session
import boto3
logging.root.setLevel(logging.DEBUG)
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
log = logging.getLogger('custodian.lambda')
##########################################
#
# Env var AWS Lambda specific configuration options, these are part of
# our "public" interface and hence are subject to compatiblity constraints.
#
# Control whether custodian lambda policy skips events that represent errors.
# We default to skipping events which denote they have errors.
# Set with `export C7N_SKIP_EVTERR=no` to process error events
C7N_SKIP_EVTERR = True
# Control whether the triggering event is logged.
# Set with `export C7N_DEBUG_EVENT=no` to disable event logging.
C7N_DEBUG_EVENT = True
# Control whether a policy failure will result in a lambda execution failure.
# Lambda on error will report error metrics and depending on event source
# automatically retry.
# Set with `export C7N_CATCH_ERR=yes`
C7N_CATCH_ERR = False
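# For example (hypothetical deployment settings, shell syntax): to process
# events that denote errors and to swallow policy exceptions instead of
# failing the invocation, set:
#   export C7N_SKIP_ERR_EVENT=no
#   export C7N_CATCH_ERR=yes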
##########################################
#
# Internal global variables
#
# config.json policy data dict
policy_data = None
# execution options for the policy
policy_config = None
def init_env_globals():
"""Set module level values from environment variables.
Encapsulated here to enable better testing.
"""
global C7N_SKIP_EVTERR, C7N_DEBUG_EVENT, C7N_CATCH_ERR
C7N_SKIP_EVTERR = os.environ.get(
'C7N_SKIP_ERR_EVENT', 'yes') == 'yes' and True or False
C7N_DEBUG_EVENT = os.environ.get(
'C7N_DEBUG_EVENT', 'yes') == 'yes' and True or False
C7N_CATCH_ERR = os.environ.get(
'C7N_CATCH_ERR', 'no').strip().lower() == 'yes' and True or False
def init_config(policy_config):
"""Get policy lambda execution configuration.
cli parameters are serialized into the policy lambda config,
we merge those with any policy specific execution options.
    --assume-role and -s output directory get special handling, so as
to disambiguate any cli context.
account id is sourced from the config options or from api call
and cached as a global.
Todo: this should get refactored out to mu.py as part of the
    write-out of configuration, instead of being processed at runtime.
"""
exec_options = policy_config.get('execution-options', {})
# Remove some configuration options that don't make sense to translate from
# cli to lambda automatically.
# - assume role on cli doesn't translate, it is the default lambda role and
# used to provision the lambda.
    # - profile doesn't translate to lambda; it is `home` dir setup dependent
# - dryrun doesn't translate (and shouldn't be present)
# - region doesn't translate from cli (the lambda is bound to a region), and
# on the cli represents the region the lambda is provisioned in.
for k in ('assume_role', 'profile', 'region', 'dryrun', 'cache'):
exec_options.pop(k, None)
# a cli local directory doesn't translate to lambda
if not exec_options.get('output_dir', '').startswith('s3'):
exec_options['output_dir'] = '/tmp'
account_id = None
# we can source account id from the cli parameters to avoid the sts call
if exec_options.get('account_id'):
account_id = exec_options['account_id']
# merge with policy specific configuration
exec_options.update(
policy_config['policies'][0].get('mode', {}).get('execution-options', {}))
# if using assume role in lambda ensure that the correct
# execution account is captured in options.
if 'assume_role' in exec_options:
account_id = exec_options['assume_role'].split(':')[4]
elif account_id is None:
session = local_session(boto3.Session)
account_id = get_account_id_from_sts(session)
exec_options['account_id'] = account_id
# Historical compatibility with manually set execution options
# previously this was a boolean, its now a string value with the
# boolean flag triggering a string value of 'aws'
if 'metrics_enabled' in exec_options \
and isinstance(exec_options['metrics_enabled'], bool) \
and exec_options['metrics_enabled']:
exec_options['metrics_enabled'] = 'aws'
return Config.empty(**exec_options)
# One-time initialization of global environment settings
init_env_globals()
def dispatch_event(event, context):
error = event.get('detail', {}).get('errorCode')
if error and C7N_SKIP_EVTERR:
log.debug("Skipping failed operation: %s" % error)
return
<|fim▁hole|> global policy_config, policy_data
if policy_config is None:
with open('config.json') as f:
policy_data = json.load(f)
policy_config = init_config(policy_data)
load_resources(StructureParser().get_resource_types(policy_data))
if C7N_DEBUG_EVENT:
event['debug'] = True
log.info("Processing event\n %s", format_event(event))
if not policy_data or not policy_data.get('policies'):
return False
policies = PolicyCollection.from_data(policy_data, policy_config)
for p in policies:
try:
# validation provides for an initialization point for
# some filters/actions.
p.validate()
p.push(event, context)
except Exception:
log.exception("error during policy execution")
if C7N_CATCH_ERR:
continue
raise
return True<|fim▁end|>
|
# one time initialization for cold starts.
|
<|file_name|>keychain.py<|end_file_name|><|fim▁begin|>import base64
import json
import os
import pickle
from Crypto import Random
from Crypto.Cipher import AES
from cumulusci.core.config import BaseConfig
from cumulusci.core.config import ConnectedAppOAuthConfig
from cumulusci.core.config import OrgConfig
from cumulusci.core.config import ScratchOrgConfig
from cumulusci.core.config import ServiceConfig
from cumulusci.core.exceptions import OrgNotFound
from cumulusci.core.exceptions import ServiceNotConfigured
from cumulusci.core.exceptions import ServiceNotValid
from cumulusci.core.exceptions import KeychainConnectedAppNotFound
class BaseProjectKeychain(BaseConfig):
encrypted = False
def __init__(self, project_config, key):
super(BaseProjectKeychain, self).__init__()
self.config = {
'orgs': {},
'app': None,
'services': {},
}
self.project_config = project_config
self.key = key
self._load_keychain()
def _load_keychain(self):
""" Subclasses can override to implement logic to load the keychain """
pass
def change_key(self, key):
""" re-encrypt stored services, orgs, and the connected_app
with the new key """
connected_app = self.get_connected_app()
services = {}
for service_name in self.list_services():
services[service_name] = self.get_service(service_name)
orgs = {}
for org_name in self.list_orgs():
orgs[org_name] = self.get_org(org_name)
self.key = key
if connected_app:
self.set_connected_app(connected_app)
if orgs:
for org_name, org_config in orgs.items():
self.set_org(org_name, org_config)
if services:
for service_name, service_config in services.items():
self.set_service(service_name, service_config)
def set_connected_app(self, app_config, project=False):
""" store a connected_app configuration """
self._set_connected_app(app_config, project)
self._load_keychain()
def _set_connected_app(self, app_config, project):
self.app = app_config
def get_connected_app(self):
""" retrieve the connected app configuration """
return self._get_connected_app()
def _get_connected_app(self):
return self.app
def set_org(self, name, org_config, global_org=False):
if isinstance(org_config, ScratchOrgConfig):
org_config.config['scratch'] = True
self._set_org(name, org_config, global_org)
self._load_keychain()
def _set_org(self, name, org_config, global_org):
self.orgs[name] = org_config
def get_default_org(self):
""" retrieve the name and configuration of the default org """
for org in self.list_orgs():
org_config = self.get_org(org)
if org_config.default:
return org, org_config
return None, None
def set_default_org(self, name):
""" set the default org for tasks by name key """
org = self.get_org(name)
self.unset_default_org()
org.config['default'] = True
self.set_org(name, org)
def unset_default_org(self):
""" unset the default orgs for tasks """
for org in self.list_orgs():
org_config = self.get_org(org)
if org_config.default:
del org_config.config['default']
self.set_org(org, org_config)
def get_org(self, name):
""" retrieve an org configuration by name key """
if name not in self.orgs:
self._raise_org_not_found(name)
return self._get_org(name)
def _get_org(self, name):
return self.orgs.get(name)
def _raise_org_not_found(self, name):
raise OrgNotFound('Org named {} was not found in keychain'.format(name))
def list_orgs(self):
""" list the orgs configured in the keychain """
orgs = self.orgs.keys()
orgs.sort()
return orgs
def set_service(self, name, service_config, project=False):
""" Store a ServiceConfig in the keychain """
if name not in self.project_config.services:
self._raise_service_not_valid(name)
self._validate_service(name, service_config)
self._set_service(name, service_config, project)
self._load_keychain()
def _set_service(self, name, service_config, project):
self.services[name] = service_config
def get_service(self, name):
""" Retrieve a stored ServiceConfig from the keychain or exception
:param name: the service name to retrieve
:type name: str
:rtype ServiceConfig
:return the configured Service
"""
if name not in self.project_config.services:
self._raise_service_not_valid(name)
if name not in self.services:
self._raise_service_not_configured(name)
return self._get_service(name)
def _get_service(self, name):
return self.services.get(name)
def _validate_service(self, name, service_config):
missing_required = []
attr_key = 'services__{0}__attributes'.format(name)
for atr, config in getattr(self.project_config, attr_key).iteritems():
if config.get('required') is True and not getattr(service_config, atr):
missing_required.append(atr)
if missing_required:
self._raise_service_not_valid(name)
def _raise_service_not_configured(self, name):
raise ServiceNotConfigured(
'Service named {} is not configured for this project'.format(name)
)
def _raise_service_not_valid(self, name):
raise ServiceNotValid('Service named {} is not valid for this project'.format(name))
def list_services(self):
""" list the services configured in the keychain """
services = self.services.keys()
services.sort()
return services
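# Illustrative usage sketch (assumes a loaded project_config and a suitable
# key; the org name and values here are hypothetical):
#   keychain = BaseProjectKeychain(project_config, key)
#   keychain.set_org('dev', OrgConfig({'username': 'user@example.com'}))
#   name, org_config = keychain.get_default_org()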
class EnvironmentProjectKeychain(BaseProjectKeychain):
""" A project keychain that stores org credentials in environment variables """
encrypted = False
org_var_prefix = 'CUMULUSCI_ORG_'
app_var = 'CUMULUSCI_CONNECTED_APP'
service_var_prefix = 'CUMULUSCI_SERVICE_'
def _load_keychain(self):
self._load_keychain_app()
self._load_keychain_orgs()
self._load_keychain_services()
def _load_keychain_app(self):
app = os.environ.get(self.app_var)
if app:
self.app = ConnectedAppOAuthConfig(json.loads(app))
def _load_keychain_orgs(self):
for key, value in os.environ.items():
if key.startswith(self.org_var_prefix):
org_config = json.loads(value)
if org_config.get('scratch'):
self.orgs[key[len(self.org_var_prefix):]] = ScratchOrgConfig(json.loads(value))
else:
self.orgs[key[len(self.org_var_prefix):]] = OrgConfig(json.loads(value))
def _load_keychain_services(self):
for key, value in os.environ.items():
if key.startswith(self.service_var_prefix):
self.services[key[len(self.service_var_prefix):]] = ServiceConfig(json.loads(value))
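# PKCS#7-style padding helpers used by the encrypted keychains below: AES-CBC
# works on 16-byte blocks, so pad() appends N copies of chr(N) to fill the
# final block and unpad() strips them off again after decryption.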
BS = 16
pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS)
unpad = lambda s : s[0:-ord(s[-1])]
class BaseEncryptedProjectKeychain(BaseProjectKeychain):
""" Base class for building project keychains that use AES encryption for securing stored org credentials """
encrypted = True
def _set_connected_app(self, app_config, project):
encrypted = self._encrypt_config(app_config)
self._set_encrypted_connected_app(encrypted, project)
def _set_encrypted_connected_app(self, encrypted, project):
self.app = encrypted
def _get_connected_app(self):
if self.app:
return self._decrypt_config(ConnectedAppOAuthConfig, self.app)
def _get_service(self, name):
return self._decrypt_config(ServiceConfig, self.services[name])
def _set_service(self, service, service_config, project):
encrypted = self._encrypt_config(service_config)
self._set_encrypted_service(service, encrypted, project)
def _set_encrypted_service(self, service, encrypted, project):
self.services[service] = encrypted
def _set_org(self, name, org_config, global_org):
encrypted = self._encrypt_config(org_config)
self._set_encrypted_org(name, encrypted, global_org)
def _set_encrypted_org(self, name, encrypted, global_org):
self.orgs[name] = encrypted
def _get_org(self, name):
return self._decrypt_config(OrgConfig, self.orgs[name])
def _get_cipher(self, iv=None):
if iv is None:
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return cipher, iv
def _encrypt_config(self, config):
pickled = pickle.dumps(config.config)
pickled = pad(pickled)
cipher, iv = self._get_cipher()
encrypted = base64.b64encode(iv + cipher.encrypt(pickled))
return encrypted
def _decrypt_config(self, config_class, encrypted_config):
if not encrypted_config:
return config_class()
encrypted_config = base64.b64decode(encrypted_config)
iv = encrypted_config[:16]
cipher, iv = self._get_cipher(iv)
        pickled = unpad(cipher.decrypt(encrypted_config[16:]))
        config_dict = pickle.loads(pickled)
        if config_dict.get('scratch'):
            config_class = ScratchOrgConfig
        return config_class(config_dict)
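# Round-trip sketch for the encrypted keychain above (key value hypothetical):
#
#     keychain = BaseEncryptedProjectKeychain(project_config, 'sixteen byte key')
#     token = keychain._encrypt_config(OrgConfig({'username': 'test'}))
#     org = keychain._decrypt_config(OrgConfig, token)  # recovers the dict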
class EncryptedFileProjectKeychain(BaseEncryptedProjectKeychain):
""" An encrypted project keychain that stores in the project's local directory """
@property
def config_local_dir(self):
return os.path.join(
os.path.expanduser('~'),
self.project_config.global_config_obj.config_local_dir,
)
@property
def project_local_dir(self):
return self.project_config.project_local_dir
def _load_keychain(self):<|fim▁hole|> if item.endswith('.org'):
with open(os.path.join(dirname, item), 'r') as f_item:
org_config = f_item.read()
org_name = item.replace('.org', '')
self.config['orgs'][org_name] = org_config
elif item.endswith('.service'):
with open(os.path.join(dirname, item), 'r') as f_item:
service_config = f_item.read()
service_name = item.replace('.service', '')
self.config['services'][service_name] = service_config
elif item == 'connected.app':
with open(os.path.join(dirname, item), 'r') as f_item:
app_config = f_item.read()
self.config['app'] = app_config
load_files(self.config_local_dir)
if not self.project_local_dir:
return
load_files(self.project_local_dir)
def _set_encrypted_connected_app(self, encrypted, project):
if project:
filename = os.path.join(self.project_local_dir, 'connected.app')
else:
filename = os.path.join(self.config_local_dir, 'connected.app')
with open(filename, 'w') as f_org:
f_org.write(encrypted)
self.app = encrypted
def _set_encrypted_org(self, name, encrypted, global_org):
if global_org:
filename = os.path.join(self.config_local_dir, '{}.org'.format(name))
else:
filename = os.path.join(self.project_local_dir, '{}.org'.format(name))
with open(filename, 'w') as f_org:
f_org.write(encrypted)
def _set_encrypted_service(self, name, encrypted, project):
if project:
filename = os.path.join(self.project_local_dir, '{}.service'.format(name))
else:
filename = os.path.join(self.config_local_dir, '{}.service'.format(name))
with open(filename, 'w') as f_service:
f_service.write(encrypted)
def _raise_org_not_found(self, name):
raise OrgNotFound(
'Org information could not be found. Expected to find encrypted file at {}/{}.org'.format(
self.project_local_dir,
name
)
)
def _raise_service_not_configured(self, name):
raise ServiceNotConfigured(
            'Service configuration could not be found. Expected to find encrypted file at {}/{}.service'.format(
self.project_local_dir,
name
)
)<|fim▁end|>
|
def load_files(dirname):
for item in os.listdir(dirname):
|
<|file_name|>credentials.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
import functools
import logging
import os
from botocore.compat import six
from six.moves import configparser
from dateutil.parser import parse
from dateutil.tz import tzlocal
import botocore.config
import botocore.compat
from botocore.compat import total_seconds
from botocore.exceptions import UnknownCredentialError
from botocore.exceptions import PartialCredentialsError
from botocore.exceptions import ConfigNotFound
from botocore.utils import InstanceMetadataFetcher, parse_key_val_file
logger = logging.getLogger(__name__)
def create_credential_resolver(session):
"""Create a default credential resolver.
This creates a pre-configured credential resolver
that includes the default lookup chain for
credentials.
"""
profile_name = session.get_config_variable('profile') or 'default'
credential_file = session.get_config_variable('credentials_file')
config_file = session.get_config_variable('config_file')
metadata_timeout = session.get_config_variable('metadata_service_timeout')
num_attempts = session.get_config_variable('metadata_service_num_attempts')
env_provider = EnvProvider()
providers = [
env_provider,
SharedCredentialProvider(
creds_filename=credential_file,
profile_name=profile_name
),
# The new config file has precedence over the legacy
# config file.
ConfigProvider(config_filename=config_file, profile_name=profile_name),
OriginalEC2Provider(),
BotoProvider(),
InstanceMetadataProvider(
iam_role_fetcher=InstanceMetadataFetcher(
timeout=metadata_timeout,
num_attempts=num_attempts)
)
]
explicit_profile = session.get_config_variable('profile',
methods=('instance',))
    if explicit_profile is not None:
        # An explicitly provided profile will negate an EnvProvider.
        # We will defer to providers that understand the "profile"
        # concept to retrieve credentials.
        # The one edge case is if all three values are provided via
        # env vars:
        # export AWS_ACCESS_KEY_ID=foo
        # export AWS_SECRET_ACCESS_KEY=bar
        # export AWS_PROFILE=baz
        # Removing the EnvProvider here means the explicit profile still
        # wins in that case, because only providers that understand the
        # "profile" concept are left in the chain to be consulted.
        providers.remove(env_provider)
        logger.debug('Skipping environment variable credential check'
                     ' because profile name was explicitly set.')
resolver = CredentialResolver(providers=providers)
return resolver
def get_credentials(session):
resolver = create_credential_resolver(session)
return resolver.load_credentials()
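# Typical use, as a sketch (assumes a botocore ``Session`` instance):
#
#     session = botocore.session.get_session()
#     credentials = get_credentials(session)
#     if credentials is not None:
#         logger.debug('Loaded credentials via %s', credentials.method)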
def _local_now():
return datetime.datetime.now(tzlocal())
class Credentials(object):
"""
Holds the credentials needed to authenticate requests.
:ivar access_key: The access key part of the credentials.
:ivar secret_key: The secret key part of the credentials.
:ivar token: The security token, valid only for session credentials.
:ivar method: A string which identifies where the credentials
were found.
"""
def __init__(self, access_key, secret_key, token=None,
method=None):
self.access_key = access_key
self.secret_key = secret_key
self.token = token
if method is None:
method = 'explicit'
self.method = method
self._normalize()
def _normalize(self):
# Keys would sometimes (accidentally) contain non-ascii characters.
# It would cause a confusing UnicodeDecodeError in Python 2.
# We explicitly convert them into unicode to avoid such error.
#
# Eventually the service will decide whether to accept the credential.
# This also complies with the behavior in Python 3.
self.access_key = botocore.compat.ensure_unicode(self.access_key)
self.secret_key = botocore.compat.ensure_unicode(self.secret_key)
class RefreshableCredentials(Credentials):
"""
Holds the credentials needed to authenticate requests. In addition, it
knows how to refresh itself.
:ivar refresh_timeout: How long a given set of credentials are valid for.
Useful for credentials fetched over the network.
:ivar access_key: The access key part of the credentials.
:ivar secret_key: The secret key part of the credentials.
:ivar token: The security token, valid only for session credentials.
:ivar method: A string which identifies where the credentials
were found.
:ivar session: The ``Session`` the credentials were created for. Useful for
subclasses.
"""
refresh_timeout = 15 * 60
def __init__(self, access_key, secret_key, token,
expiry_time, refresh_using, method,
time_fetcher=_local_now):
self._refresh_using = refresh_using
self._access_key = access_key
self._secret_key = secret_key
self._token = token
self._expiry_time = expiry_time
self._time_fetcher = time_fetcher
self.method = method
self._normalize()
def _normalize(self):
self._access_key = botocore.compat.ensure_unicode(self._access_key)
self._secret_key = botocore.compat.ensure_unicode(self._secret_key)
@classmethod
def create_from_metadata(cls, metadata, refresh_using, method):
instance = cls(
access_key=metadata['access_key'],
secret_key=metadata['secret_key'],
token=metadata['token'],
expiry_time=cls._expiry_datetime(metadata['expiry_time']),
method=method,
refresh_using=refresh_using
)
return instance
@property
def access_key(self):
self._refresh()
return self._access_key
@access_key.setter
def access_key(self, value):
self._access_key = value
@property
def secret_key(self):
self._refresh()
return self._secret_key
@secret_key.setter
def secret_key(self, value):
self._secret_key = value
@property
def token(self):
self._refresh()
return self._token
@token.setter
def token(self, value):
self._token = value
def _seconds_remaining(self):
delta = self._expiry_time - self._time_fetcher()
return total_seconds(delta)
def refresh_needed(self):
if self._expiry_time is None:
# No expiration, so assume we don't need to refresh.
return False
# The credentials should be refreshed if they're going to expire
# in less than 5 minutes.
if self._seconds_remaining() >= self.refresh_timeout:
# There's enough time left. Don't refresh.
return False
# Assume the worst & refresh.
logger.debug("Credentials need to be refreshed.")
return True
def _refresh(self):
if not self.refresh_needed():
return
metadata = self._refresh_using()
self._set_from_data(metadata)
@staticmethod
def _expiry_datetime(time_str):
return parse(time_str)
def _set_from_data(self, data):
self.access_key = data['access_key']
self.secret_key = data['secret_key']
self.token = data['token']
self._expiry_time = parse(data['expiry_time'])
logger.debug("Retrieved credentials will expire at: %s", self._expiry_time)
self._normalize()
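    # Sketch: building refreshable credentials from a metadata dict. Field
    # names follow create_from_metadata() above; the fetcher is hypothetical.
    #
    #     creds = RefreshableCredentials.create_from_metadata(
    #         metadata=fetcher.retrieve_iam_role_credentials(),
    #         refresh_using=fetcher.retrieve_iam_role_credentials,
    #         method='iam-role')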
class CredentialProvider(object):
# Implementations must provide a method.
METHOD = None
def __init__(self, session=None):
self.session = session
def load(self):
"""
Loads the credentials from their source & sets them on the object.
Subclasses should implement this method (by reading from disk, the
environment, the network or wherever), returning ``True`` if they were
found & loaded.
        If not found, this method should return ``False``, indicating that the
``CredentialResolver`` should fall back to the next available method.
The default implementation does nothing, assuming the user has set the
``access_key/secret_key/token`` themselves.
:returns: Whether credentials were found & set
:rtype: boolean
"""
return True
def _extract_creds_from_mapping(self, mapping, *key_names):
found = []
for key_name in key_names:
try:
found.append(mapping[key_name])
except KeyError:
raise PartialCredentialsError(provider=self.METHOD,
cred_var=key_name)
return found
class InstanceMetadataProvider(CredentialProvider):
METHOD = 'iam-role'
def __init__(self, iam_role_fetcher):
self._role_fetcher = iam_role_fetcher
def load(self):
fetcher = self._role_fetcher
# We do the first request, to see if we get useful data back.
# If not, we'll pass & move on to whatever's next in the credential
# chain.
metadata = fetcher.retrieve_iam_role_credentials()
if not metadata:
return None
logger.info('Found credentials from IAM Role: %s', metadata['role_name'])
# We manually set the data here, since we already made the request &
# have it. When the expiry is hit, the credentials will auto-refresh
# themselves.
creds = RefreshableCredentials.create_from_metadata(
metadata,
method=self.METHOD,
refresh_using=fetcher.retrieve_iam_role_credentials,
)
return creds
class EnvProvider(CredentialProvider):
METHOD = 'env'
ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
    # The token can come from either of these env vars.
# AWS_SESSION_TOKEN is what other AWS SDKs have standardized on.
TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']
def __init__(self, environ=None, mapping=None):
"""
:param environ: The environment variables (defaults to
``os.environ`` if no value is provided).
:param mapping: An optional mapping of variable names to
environment variable names. Use this if you want to
change the mapping of access_key->AWS_ACCESS_KEY_ID, etc.
The dict can have up to 3 keys: ``access_key``, ``secret_key``,
``session_token``.
"""
if environ is None:
environ = os.environ
self.environ = environ
self._mapping = self._build_mapping(mapping)
def _build_mapping(self, mapping):
# Mapping of variable name to env var name.
var_mapping = {}
if mapping is None:
# Use the class var default.
var_mapping['access_key'] = self.ACCESS_KEY
var_mapping['secret_key'] = self.SECRET_KEY
var_mapping['token'] = self.TOKENS
else:
var_mapping['access_key'] = mapping.get(
'access_key', self.ACCESS_KEY)
var_mapping['secret_key'] = mapping.get(
'secret_key', self.SECRET_KEY)
var_mapping['token'] = mapping.get(
'token', self.TOKENS)
if not isinstance(var_mapping['token'], list):
var_mapping['token'] = [var_mapping['token']]
return var_mapping
def load(self):
"""
Search for credentials in explicit environment variables.
"""
if self._mapping['access_key'] in self.environ:
logger.info('Found credentials in environment variables.')
access_key, secret_key = self._extract_creds_from_mapping(
self.environ, self._mapping['access_key'],
self._mapping['secret_key'])
token = self._get_session_token()
return Credentials(access_key, secret_key, token,
method=self.METHOD)
else:
return None
def _get_session_token(self):
for token_envvar in self._mapping['token']:
if token_envvar in self.environ:
return self.environ[token_envvar]
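    # Sketch: remapping the environment variable names EnvProvider reads
    # (the MY_* variable names are illustrative):
    #
    #     provider = EnvProvider(mapping={'access_key': 'MY_ACCESS_KEY',
    #                                     'secret_key': 'MY_SECRET_KEY'})
    #     creds = provider.load()  # None unless MY_ACCESS_KEY is set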
class OriginalEC2Provider(CredentialProvider):
METHOD = 'ec2-credentials-file'
CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE'
ACCESS_KEY = 'AWSAccessKeyId'
SECRET_KEY = 'AWSSecretKey'
def __init__(self, environ=None, parser=None):
if environ is None:
environ = os.environ
if parser is None:
parser = parse_key_val_file
self._environ = environ
self._parser = parser
def load(self):
"""
Search for a credential file used by original EC2 CLI tools.
"""
        if self.CRED_FILE_ENV in self._environ:
            full_path = os.path.expanduser(self._environ[self.CRED_FILE_ENV])
creds = self._parser(full_path)
if self.ACCESS_KEY in creds:
logger.info('Found credentials in AWS_CREDENTIAL_FILE.')
access_key = creds[self.ACCESS_KEY]
secret_key = creds[self.SECRET_KEY]
# EC2 creds file doesn't support session tokens.
return Credentials(access_key, secret_key, method=self.METHOD)
else:
return None
class SharedCredentialProvider(CredentialProvider):
METHOD = 'shared-credentials-file'
ACCESS_KEY = 'aws_access_key_id'
SECRET_KEY = 'aws_secret_access_key'
# Same deal as the EnvProvider above. Botocore originally supported
# aws_security_token, but the SDKs are standardizing on aws_session_token
# so we support both.
TOKENS = ['aws_security_token', 'aws_session_token']
def __init__(self, creds_filename, profile_name=None, ini_parser=None):<|fim▁hole|> self._profile_name = profile_name
if ini_parser is None:
ini_parser = botocore.config.raw_config_parse
self._ini_parser = ini_parser
def load(self):
try:
available_creds = self._ini_parser(self._creds_filename)
except ConfigNotFound:
return None
if self._profile_name in available_creds:
config = available_creds[self._profile_name]
if self.ACCESS_KEY in config:
logger.info("Found credentials in shared credentials file: %s",
self._creds_filename)
access_key, secret_key = self._extract_creds_from_mapping(
config, self.ACCESS_KEY, self.SECRET_KEY)
token = self._get_session_token(config)
return Credentials(access_key, secret_key, token,
method=self.METHOD)
def _get_session_token(self, config):
for token_envvar in self.TOKENS:
if token_envvar in config:
return config[token_envvar]
class ConfigProvider(CredentialProvider):
"""INI based config provider with profile sections."""
METHOD = 'config-file'
ACCESS_KEY = 'aws_access_key_id'
SECRET_KEY = 'aws_secret_access_key'
# Same deal as the EnvProvider above. Botocore originally supported
# aws_security_token, but the SDKs are standardizing on aws_session_token
# so we support both.
TOKENS = ['aws_security_token', 'aws_session_token']
def __init__(self, config_filename, profile_name, config_parser=None):
"""
:param config_filename: The session configuration scoped to the current
profile. This is available via ``session.config``.
:param profile_name: The name of the current profile.
:param config_parser: A config parser callable.
"""
self._config_filename = config_filename
self._profile_name = profile_name
if config_parser is None:
config_parser = botocore.config.load_config
self._config_parser = config_parser
def load(self):
"""
        If there are credentials in the configuration associated with
the session, use those.
"""
try:
full_config = self._config_parser(self._config_filename)
except ConfigNotFound:
return None
if self._profile_name in full_config['profiles']:
profile_config = full_config['profiles'][self._profile_name]
if self.ACCESS_KEY in profile_config:
logger.info("Credentials found in config file: %s",
self._config_filename)
access_key, secret_key = self._extract_creds_from_mapping(
profile_config, self.ACCESS_KEY, self.SECRET_KEY)
token = self._get_session_token(profile_config)
return Credentials(access_key, secret_key, token,
method=self.METHOD)
else:
return None
def _get_session_token(self, profile_config):
for token_name in self.TOKENS:
if token_name in profile_config:
return profile_config[token_name]
class BotoProvider(CredentialProvider):
METHOD = 'boto-config'
BOTO_CONFIG_ENV = 'BOTO_CONFIG'
DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto']
ACCESS_KEY = 'aws_access_key_id'
SECRET_KEY = 'aws_secret_access_key'
def __init__(self, environ=None, ini_parser=None):
if environ is None:
environ = os.environ
if ini_parser is None:
ini_parser = botocore.config.raw_config_parse
self._environ = environ
self._ini_parser = ini_parser
def load(self):
"""
Look for credentials in boto config file.
"""
if self.BOTO_CONFIG_ENV in self._environ:
potential_locations = [self._environ[self.BOTO_CONFIG_ENV]]
else:
potential_locations = self.DEFAULT_CONFIG_FILENAMES
for filename in potential_locations:
try:
config = self._ini_parser(filename)
except ConfigNotFound:
# Move on to the next potential config file name.
continue
if 'Credentials' in config:
credentials = config['Credentials']
if self.ACCESS_KEY in credentials:
logger.info("Found credentials in boto config file: %s",
filename)
access_key, secret_key = self._extract_creds_from_mapping(
credentials, self.ACCESS_KEY, self.SECRET_KEY)
return Credentials(access_key, secret_key,
method=self.METHOD)
class CredentialResolver(object):
def __init__(self, providers):
"""
:param providers: A list of ``CredentialProvider`` instances.
"""
self.providers = providers
def insert_before(self, name, credential_provider):
"""
Inserts a new instance of ``CredentialProvider`` into the chain that will
be tried before an existing one.
:param name: The short name of the credentials you'd like to insert the
new credentials before. (ex. ``env`` or ``config``). Existing names
& ordering can be discovered via ``self.available_methods``.
:type name: string
:param cred_instance: An instance of the new ``Credentials`` object
you'd like to add to the chain.
:type cred_instance: A subclass of ``Credentials``
"""
try:
offset = [p.METHOD for p in self.providers].index(name)
except ValueError:
raise UnknownCredentialError(name=name)
self.providers.insert(offset, credential_provider)
def insert_after(self, name, credential_provider):
"""
Inserts a new type of ``Credentials`` instance into the chain that will
be tried after an existing one.
:param name: The short name of the credentials you'd like to insert the
new credentials after. (ex. ``env`` or ``config``). Existing names
& ordering can be discovered via ``self.available_methods``.
:type name: string
:param cred_instance: An instance of the new ``Credentials`` object
you'd like to add to the chain.
:type cred_instance: A subclass of ``Credentials``
"""
try:
offset = [p.METHOD for p in self.providers].index(name)
except ValueError:
raise UnknownCredentialError(name=name)
self.providers.insert(offset + 1, credential_provider)
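    # Sketch: giving a custom provider priority over the environment
    # provider ('env' is EnvProvider.METHOD; MyProvider is hypothetical):
    #
    #     resolver = create_credential_resolver(session)
    #     resolver.insert_before('env', MyProvider())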
def remove(self, name):
"""
Removes a given ``Credentials`` instance from the chain.
:param name: The short name of the credentials instance to remove.
:type name: string
"""
available_methods = [p.METHOD for p in self.providers]
        if name not in available_methods:
# It's not present. Fail silently.
return
offset = available_methods.index(name)
self.providers.pop(offset)
def load_credentials(self):
"""
Goes through the credentials chain, returning the first ``Credentials``
that could be loaded.
"""
# First provider to return a non-None response wins.
for provider in self.providers:
logger.debug("Looking for credentials via: %s", provider.METHOD)
creds = provider.load()
if creds is not None:
return creds
# If we got here, no credentials could be found.
# This feels like it should be an exception, but historically, ``None``
# is returned.
#
# +1
# -js
return None<|fim▁end|>
|
self._creds_filename = creds_filename
if profile_name is None:
profile_name = 'default'
|
<|file_name|>material.module.ts<|end_file_name|><|fim▁begin|>import {NgModule} from '@angular/core';
import {MatButtonModule} from '@angular/material/button';
import {MatCardModule} from '@angular/material/card';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatGridListModule} from '@angular/material/grid-list';
import {MatIconModule} from '@angular/material/icon';
import {MatInputModule} from '@angular/material/input';
import {MatListModule} from '@angular/material/list';
import {MatMenuModule} from '@angular/material/menu';
import {MatPaginatorModule} from '@angular/material/paginator';<|fim▁hole|>import {MatTableModule} from '@angular/material/table';
import {MatToolbarModule} from '@angular/material/toolbar';
import {MatTooltipModule} from '@angular/material/tooltip';
const matModules = [
MatButtonModule, MatCardModule, MatFormFieldModule, MatIconModule, MatInputModule, MatListModule,
MatToolbarModule, MatSidenavModule, MatRadioModule, MatSelectModule, MatGridListModule,
MatMenuModule, MatTableModule, MatPaginatorModule, MatTooltipModule
];
@NgModule({
imports: matModules,
exports: matModules,
})
export class MaterialModule {
}<|fim▁end|>
|
import {MatRadioModule} from '@angular/material/radio';
import {MatSelectModule} from '@angular/material/select';
import {MatSidenavModule} from '@angular/material/sidenav';
|
<|file_name|>gensky.py<|end_file_name|><|fim▁begin|># coding=utf-8
from _commandbase import RadianceCommand
from ..datatype import RadiancePath, RadianceTuple
from ..parameters.gensky import GenskyParameters
import os
class Gensky(RadianceCommand):
u"""
    gensky - Generate a RADIANCE sky description for the CIE standard sky types.
The attributes for this class and their data descriptors are given below.
Please note that the first two inputs for each descriptor are for internal
naming purposes only.
Attributes:
outputName: An optional name for output file name (Default: 'untitled').
monthDayHour: A tuple containing inputs for month, day and hour.
genskyParameters: Radiance parameters for gensky. If None Default
parameters will be set. You can use self.genskyParameters to view,
add or remove the parameters before executing the command.
Usage:
from honeybee.radiance.parameters.gensky import GenSkyParameters
from honeybee.radiance.command.gensky import GenSky
# create and modify genskyParameters. In this case a sunny with no sun
# will be generated.
gnskyParam = GenSkyParameters()
gnskyParam.sunnySkyNoSun = True
# create the gensky Command.
gnsky = GenSky(monthDayHour=(1,1,11), genskyParameters=gnskyParam,
outputName = r'd:/sunnyWSun_010111.sky' )
# run gensky
gnsky.execute()
"""
monthDayHour = RadianceTuple('monthDayHour', 'month day hour', tupleSize=3,
testType=False)
outputFile = RadiancePath('outputFile', descriptiveName='output sky file',
relativePath=None, checkExists=False)
def __init__(self, outputName='untitled', monthDayHour=None,
genskyParameters=None):
"""Init command."""
RadianceCommand.__init__(self)
self.outputFile = outputName if outputName.lower().endswith(".sky") \
else outputName + ".sky"
"""results file for sky (Default: untitled)"""
self.monthDayHour = monthDayHour
self.genskyParameters = genskyParameters
@classmethod
def fromSkyType(cls, outputName='untitled', monthDayHour=(1, 21, 12),
skyType=0, latitude=None, longitude=None, meridian=None):
"""Create a sky by sky type.
Args:
outputName: An optional name for output file name (Default: 'untitled').
monthDayHour: A tuple containing inputs for month, day and hour.
            skyType: An integer between 0-5 for CIE sky type.<|fim▁hole|>
            latitude: [-a] A float number to indicate site latitude. Negative
angle indicates south latitude.
            longitude: [-o] A float number to indicate site longitude. Negative
angle indicates east longitude.
meridian: [-m] A float number to indicate site meridian west of
Greenwich.
"""
_skyParameters = GenskyParameters(latitude=latitude, longitude=longitude,
meridian=meridian)
# modify parameters based on sky type
try:
skyType = int(skyType)
        except (TypeError, ValueError):
            raise ValueError("skyType should be an integer between 0-5.")
assert 0 <= skyType <= 5, "Sky type should be an integer between 0-5."
if skyType == 0:
_skyParameters.sunnySky = True
elif skyType == 1:
_skyParameters.sunnySky = False
elif skyType == 2:
_skyParameters.intermSky = True
elif skyType == 3:
_skyParameters.intermSky = False
elif skyType == 4:
_skyParameters.cloudySky = True
elif skyType == 5:
_skyParameters.uniformCloudySky = True
return cls(outputName=outputName, monthDayHour=monthDayHour,
genskyParameters=_skyParameters)
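        # Sketch: an intermediate sky with sun for a site at 38.7N 9.1W (per
        # the sign convention above, west longitude is a positive number):
        #
        #     sky = Gensky.fromSkyType(outputName='junSky',
        #                              monthDayHour=(6, 21, 14), skyType=2,
        #                              latitude=38.7, longitude=9.1)
        #     print(sky.toRadString())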
@classmethod
def createUniformSkyfromIlluminanceValue(cls, outputName="untitled",
illuminanceValue=10000):
"""Uniform CIE sky based on illuminance value.
Attributes:
outputName: An optional name for output file name (Default: 'untitled').
illuminanceValue: Desired illuminance value in lux
"""
        assert float(illuminanceValue) >= 0, "Illuminance value can't be negative."
_skyParameters = GenskyParameters(zenithBrightHorzDiff=illuminanceValue / 179.0)
return cls(outputName=outputName, genskyParameters=_skyParameters)
@classmethod
def fromRadiationValues(cls):
"""Create a sky based on sky radiation values."""
raise NotImplementedError()
@property
def genskyParameters(self):
"""Get and set genskyParameters."""
return self.__genskyParameters
@genskyParameters.setter
def genskyParameters(self, genskyParam):
self.__genskyParameters = genskyParam if genskyParam is not None \
else GenskyParameters()
assert hasattr(self.genskyParameters, "isRadianceParameters"), \
"input genskyParameters is not a valid parameters type."
def toRadString(self, relativePath=False):
"""Return full command as a string."""
        # assemble the full gensky command line from its parts
radString = "%s %s %s > %s" % (
self.normspace(os.path.join(self.radbinPath, 'gensky')),
self.monthDayHour.toRadString().replace("-monthDayHour ", ""),
self.genskyParameters.toRadString(),
self.normspace(self.outputFile.toRadString())
)
return radString
@property
def inputFiles(self):
"""Input files for this command."""
return None<|fim▁end|>
|
0: [+s] Sunny with sun, 1: [-s] Sunny without sun,
2: [+i] Intermediate with sun, 3: [-i] Intermediate with no sun,
4: [-c] Cloudy overcast sky, 5: [-u] Uniform cloudy sky
|
<|file_name|>inheritance_integrity.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use rustc::lint::{LateContext, LintPass, LintArray, Level, LateLintPass, LintContext};<|fim▁hole|>
declare_lint!(INHERITANCE_INTEGRITY, Deny,
"Ensures that struct fields are properly laid out for inheritance to work");
/// Lint for ensuring proper layout of DOM structs
///
/// A DOM struct must have one Reflector field or one field
/// which itself is a DOM struct (in which case it must be the first field).
pub struct InheritancePass;
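// Sketch of a field layout this lint accepts (type names are illustrative):
//
//     #[dom_struct]
//     struct HTMLFooElement {
//         htmlelement: HTMLElement, // parent DOM struct must be first
//         extra_state: u32,
//     }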
impl LintPass for InheritancePass {
fn get_lints(&self) -> LintArray {
lint_array!(INHERITANCE_INTEGRITY)
}
}
impl LateLintPass for InheritancePass {
fn check_struct_def(&mut self, cx: &LateContext, def: &hir::VariantData, _n: ast::Name,
_gen: &hir::Generics, id: ast::NodeId) {
// Lints are run post expansion, so it's fine to use
// #[_dom_struct_marker] here without also checking for #[dom_struct]
if cx.tcx.has_attr(cx.tcx.map.local_def_id(id), "_dom_struct_marker") {
// Find the reflector, if any
let reflector_span = def.fields().iter().enumerate()
.find(|&(ctr, f)| {
if match_lang_ty(cx, &*f.node.ty, "reflector") {
if ctr > 0 {
cx.span_lint(INHERITANCE_INTEGRITY, f.span,
"The Reflector should be the first field of the DOM \
struct");
}
return true;
}
false
})
.map(|(_, f)| f.span);
// Find all #[dom_struct] fields
let dom_spans: Vec<_> = def.fields().iter().enumerate().filter_map(|(ctr, f)| {
if let hir::TyPath(..) = f.node.ty.node {
if let Some(&def::PathResolution { base_def: def, .. }) =
cx.tcx.def_map.borrow().get(&f.node.ty.id) {
if let def::Def::PrimTy(_) = def {
return None;
}
if cx.tcx.has_attr(def.def_id(), "_dom_struct_marker") {
// If the field is not the first, it's probably
// being misused (a)
if ctr > 0 {
cx.span_lint(INHERITANCE_INTEGRITY, f.span,
"Bare DOM structs should only be used as the first field of a \
DOM struct. Consider using JS<T> instead.");
}
return Some(f.span)
}
}
}
None
}).collect();
// We should not have both a reflector and a dom struct field
if let Some(sp) = reflector_span {
if dom_spans.len() > 0 {
let mut db = cx.struct_span_lint(INHERITANCE_INTEGRITY,
cx.tcx.map.expect_item(id).span,
"This DOM struct has both Reflector \
and bare DOM struct members");
if cx.current_level(INHERITANCE_INTEGRITY) != Level::Allow {
db.span_note(sp, "Reflector found here");
for span in &dom_spans {
db.span_note(*span, "Bare DOM struct found here");
}
}
}
// Nor should we have more than one dom struct field
} else if dom_spans.len() > 1 {
let mut db = cx.struct_span_lint(INHERITANCE_INTEGRITY,
cx.tcx.map.expect_item(id).span,
"This DOM struct has multiple \
DOM struct members, only one is allowed");
if cx.current_level(INHERITANCE_INTEGRITY) != Level::Allow {
for span in &dom_spans {
db.span_note(*span, "Bare DOM struct found here");
}
}
} else if dom_spans.is_empty() {
cx.span_lint(INHERITANCE_INTEGRITY, cx.tcx.map.expect_item(id).span,
"This DOM struct has no reflector or parent DOM struct");
}
}
}
}<|fim▁end|>
|
use rustc::middle::def;
use rustc_front::hir;
use syntax::ast;
use utils::match_lang_ty;
|
<|file_name|>AbstractEvent2.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.serialization;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.lightbend.lagom.javadsl.immutable.ImmutableStyle;
import org.immutables.value.Value;
import org.immutables.value.Value.Parameter;
@Value.Immutable
@ImmutableStyle<|fim▁hole|>@JsonDeserialize(as = Event2.class)
public interface AbstractEvent2 extends Jsonable {
@Parameter
String getField1V2(); // renamed from field1
@Parameter
int getField2(); // new mandatory field
}<|fim▁end|>
| |
<|file_name|>average6.py<|end_file_name|><|fim▁begin|># Program to find the average of numbers in a file
def main():
#Get the filename with the numbers
fileName = input("What file are the numbers in? ")
#var to contain all the content of the file
infile = open(fileName, 'r')
#var to keep track of the sum of those numbers
sum = 0.0
#var to keep track of the sum
count = 0
#var with the first line of the file
line = infile.readline()
#iterate through all lines in the document
while line != "":
        #add this line's number to the running total
        sum = sum + eval(line)
#increment the count var by 1
count = count + 1<|fim▁hole|> #read in the next line of the file
line = infile.readline()
print("\nThe average of the numbers is", sum / count)
main()<|fim▁end|>
| |
<|file_name|>IOSystem.java<|end_file_name|><|fim▁begin|>package jchess.game;
import jchess.eventbus.events.*;
import net.engio.mbassy.listener.Handler;
/**
* Bridge between GL and User Input.
*
* Created by andreas on 06.12.14.
*
* @trace [$REQ07]
*/
@SuppressWarnings("UnusedDeclaration")
public interface IOSystem {
@Handler
void handleSelectEvent(SelectEvent selectEvent);
<|fim▁hole|>
@Handler
void handlePossibleMovesEvent(PossibleMovesEvent possibleMovesEvent);
@Handler
void handlePossiblePromotionsEvent(PossiblePromotionsEvent possiblePromotionsEvent);
@Handler
void handlePromotionSelectEvent(PromotionSelectEvent promotionSelectEvent);
@Handler
void handleUpdateStatusMessageEvent(UpdateStatusMessageEvent updateStatusMessageEvent);
void setPlayer(Player player);
}<|fim▁end|>
|
@Handler
void handleUpdateBoardEvent(UpdateBoardEvent updateBoardEvent);
|
<|file_name|>chiptune-sample.rs<|end_file_name|><|fim▁begin|>extern crate chiptune;
extern crate sdl2;
use std::thread;
use std::time::Duration;
use std::io;
use std::io::prelude::*;
use std::fs::File;
fn play_sound(player: &mut chiptune::Chiptune, path: String) -> Result<chiptune::ChiptuneSound, chiptune::ChiptuneError> {
let sound = player.load_sound(path);
match sound {
Ok(mut chip_sound) => {
println!("Playing sound");
player.play_sound(&mut chip_sound, -1, 13312, chiptune::CYD_PAN_CENTER, 50);
}
Err(e) => println!("ERROR {:?}", e),
}
sound
}
fn play_sound_from_memory(player: &mut chiptune::Chiptune, path: String) -> Result<chiptune::ChiptuneSound, chiptune::ChiptuneError> {
let mut data = Vec::new();
let mut f = File::open(path.as_str()).unwrap();
f.read_to_end(&mut data).unwrap();
println!("DATA = {:?}", data.len());
let sound = player.load_sound_from_memory(data);
match sound {
Ok(mut chip_sound) => {
println!("Playing sound");
player.play_sound(&mut chip_sound, -1, 13312, chiptune::CYD_PAN_CENTER, 50);
}
Err(e) => println!("ERROR {:?}", e),
}
sound
}
fn main() {
    // Keep the SDL context alive for the duration of main; discarding the
    // return value of init() would drop the context immediately.
    let _sdl_context = sdl2::init();
let mut player = chiptune::Chiptune::new();
println!("Play music");
let song = player.load_music("./src/chiptune/libksnd-source/src/assets/ringmod.kt".to_string());
match song {
Ok(mut chip_song) => {
player.play_music(&mut chip_song, 0);
            // println!("NUM INSTRUMENTS = {:?}", player.get_num_instruments(chip_song));
for i in 0..player.get_num_instruments(&mut chip_song) {
let instru = player.get_instrument(&mut chip_song, i).unwrap();
println!("INSTRU {:?} {:?}", i, player.get_name(instru));
}
}
Err(e) => println!("ERROR {:?}", e),
}
thread::sleep(Duration::from_secs(1));
println!("SOUND POSITION = {:?}", player.get_sound_position(0));
println!("Play sound");
/*let sound = play_sound(&mut player, "./src/chiptune/libksnd-source/src/assets/sounds/major.ki".to_string());
match sound {
Ok(mut chip_sound) => {<|fim▁hole|> for i in 0..32 {
println!("Program[{:?}] {:X}", i, program[i]);
match chiptune::get_instruction(program[i] as i32) {
Ok(v) => {
//println!("{:?}", v);
match chiptune::notename(program[i] as i32, player.get_base_note(chip_sound)) {
Ok(name) => println!("NAME {:?}", name),
Err(_) => (),
}
},
Err(e) => println!("Error {:?}", e),
}
}
}
Err(e) => println!("ERROR {:?}", e),
}
*/
thread::sleep(Duration::from_secs(3));
println!("CLAP");
play_sound_from_memory(&mut player, "./src/chiptune/libksnd-source/src/assets/sounds/clap.ki".to_string());
println!("SOUND POSITION = {:?}", player.get_sound_position(0));
println!("MUSIC POSITION = {:?}", player.get_music_position());
player.set_volume(64);
thread::sleep(Duration::from_secs(10));
println!("MUSIC POSITION = {:?}", player.get_music_position());
}<|fim▁end|>
|
let program = player.get_sound_program(chip_sound);
|
<|file_name|>test_block_device.py<|end_file_name|><|fim▁begin|># Copyright 2013 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from nova.cells import rpcapi as cells_rpcapi
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_objects
class _TestBlockDeviceMappingObject(object):
def fake_bdm(self, instance=None):
instance = instance or {}
fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
'id': 123,
'instance_uuid': instance.get('uuid') or 'fake-instance',
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'boot_index': -1
})
if instance:
fake_bdm['instance'] = instance
return fake_bdm
def _test_save(self, cell_type=None):
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
fake_bdm = self.fake_bdm()
with test.nested(
mock.patch.object(
db, 'block_device_mapping_update', return_value=fake_bdm),
mock.patch.object(
cells_rpcapi.CellsAPI, 'bdm_update_or_create_at_top')
) as (bdm_update_mock, cells_update_mock):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.id = 123
bdm_object.volume_id = 'fake_volume_id'
bdm_object.save()
bdm_update_mock.assert_called_once_with(
self.context, 123, {'volume_id': 'fake_volume_id'},
legacy=False)
if cell_type != 'compute':
self.assertFalse(cells_update_mock.called)
else:
self.assertEqual(1, cells_update_mock.call_count)
self.assertTrue(len(cells_update_mock.call_args[0]) > 1)
self.assertIsInstance(cells_update_mock.call_args[0][1],
block_device_obj.BlockDeviceMapping)
self.assertEqual(cells_update_mock.call_args[1], {})
def test_save_nocells(self):
self._test_save()
def test_save_apicell(self):
self._test_save(cell_type='api')
def test_save_computecell(self):
self._test_save(cell_type='compute')
def test_save_instance_changed(self):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.instance = objects.Instance()
self.assertRaises(exception.ObjectActionError,
bdm_object.save)
@mock.patch.object(db, 'block_device_mapping_update', return_value=None)
def test_save_not_found(self, bdm_update):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.id = 123
self.assertRaises(exception.BDMNotFound, bdm_object.save)
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id(self, get_by_vol_id):
get_by_vol_id.return_value = self.fake_bdm()
vol_bdm = objects.BlockDeviceMapping.get_by_volume_id(
self.context, 'fake-volume-id')
for attr in block_device_obj.BLOCK_DEVICE_OPTIONAL_ATTRS:
self.assertFalse(vol_bdm.obj_attr_is_set(attr))
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id_not_found(self, get_by_vol_id):
get_by_vol_id.return_value = None
self.assertRaises(exception.VolumeBDMNotFound,
objects.BlockDeviceMapping.get_by_volume_id,
self.context, 'fake-volume-id')
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_instance_uuid_missmatch(self, get_by_vol_id):
fake_bdm_vol = self.fake_bdm(instance={'uuid': 'other-fake-instance'})
get_by_vol_id.return_value = fake_bdm_vol
self.assertRaises(exception.InvalidVolume,
objects.BlockDeviceMapping.get_by_volume_id,
self.context, 'fake-volume-id',
instance_uuid='fake-instance')
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id_with_expected(self, get_by_vol_id):
get_by_vol_id.return_value = self.fake_bdm(
fake_instance.fake_db_instance())
vol_bdm = objects.BlockDeviceMapping.get_by_volume_id(
self.context, 'fake-volume-id', expected_attrs=['instance'])
for attr in block_device_obj.BLOCK_DEVICE_OPTIONAL_ATTRS:
self.assertTrue(vol_bdm.obj_attr_is_set(attr))
get_by_vol_id.assert_called_once_with(self.context, 'fake-volume-id',
['instance'])
def _test_create_mocked(self, cell_type=None, update_or_create=False,
device_name=None):
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
if device_name:
values['device_name'] = device_name
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(values)
with test.nested(
mock.patch.object(
db, 'block_device_mapping_create', return_value=fake_bdm),
mock.patch.object(
db, 'block_device_mapping_update_or_create',
return_value=fake_bdm),
mock.patch.object(cells_rpcapi.CellsAPI,
'bdm_update_or_create_at_top')
) as (bdm_create_mock, bdm_update_or_create_mock, cells_update_mock):
bdm = objects.BlockDeviceMapping(context=self.context, **values)
if update_or_create:
method = bdm.update_or_create
else:
method = bdm.create
if cell_type == 'api':
self.assertRaises(exception.ObjectActionError,
method)
else:
method()
if update_or_create:
bdm_update_or_create_mock.assert_called_once_with(
self.context, values, legacy=False)
else:
bdm_create_mock.assert_called_once_with(
self.context, values, legacy=False)
if cell_type == 'compute' and 'device_name' in values:
self.assertEqual(1, cells_update_mock.call_count)
self.assertTrue(len(cells_update_mock.call_args[0]) > 1)
self.assertEqual(cells_update_mock.call_args[0][0],
self.context)
self.assertIsInstance(cells_update_mock.call_args[0][1],
block_device_obj.BlockDeviceMapping)
self.assertEqual(cells_update_mock.call_args[1],
{'create': update_or_create or None})
else:
self.assertFalse(cells_update_mock.called)
def test_create_nocells(self):
self._test_create_mocked()
def test_update_or_create(self):
self._test_create_mocked(update_or_create=True)
def test_create_apicell(self):
self._test_create_mocked(cell_type='api')
def test_update_or_create_apicell(self):
self._test_create_mocked(cell_type='api', update_or_create=True)
def test_create_computecell(self):
self._test_create_mocked(cell_type='compute')
def test_update_or_create_computecell(self):
self._test_create_mocked(cell_type='compute', update_or_create=True)
def test_device_name_compute_cell(self):
self._test_create_mocked(cell_type='compute', device_name='/dev/xvdb')
def test_create(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
with mock.patch.object(cells_rpcapi.CellsAPI,
'bdm_update_or_create_at_top'):
bdm.create()
for k, v in six.iteritems(values):<|fim▁hole|> self.assertEqual(v, getattr(bdm, k))
def test_create_fails(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
bdm.create()
self.assertRaises(exception.ObjectActionError,
bdm.create)
def test_create_fails_instance(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance',
'instance': objects.Instance()}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
self.assertRaises(exception.ObjectActionError,
bdm.create)
def _test_destroy_mocked(self, cell_type=None):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume', 'id': 1,
'instance_uuid': 'fake-instance', 'device_name': 'fake'}
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
with test.nested(
mock.patch.object(db, 'block_device_mapping_destroy'),
mock.patch.object(cells_rpcapi.CellsAPI, 'bdm_destroy_at_top')
) as (bdm_del, cells_destroy):
bdm = objects.BlockDeviceMapping(context=self.context, **values)
bdm.destroy()
bdm_del.assert_called_once_with(self.context, values['id'])
if cell_type != 'compute':
self.assertFalse(cells_destroy.called)
else:
cells_destroy.assert_called_once_with(
self.context, values['instance_uuid'],
device_name=values['device_name'],
volume_id=values['volume_id'])
def test_destroy_nocells(self):
self._test_destroy_mocked()
def test_destroy_apicell(self):
self._test_destroy_mocked(cell_type='api')
def test_destroy_computecell(self):
self._test_destroy_mocked(cell_type='compute')
def test_is_image_true(self):
bdm = objects.BlockDeviceMapping(context=self.context,
source_type='image')
self.assertTrue(bdm.is_image)
def test_is_image_false(self):
bdm = objects.BlockDeviceMapping(context=self.context,
source_type='snapshot')
self.assertFalse(bdm.is_image)
def test_is_volume_true(self):
bdm = objects.BlockDeviceMapping(context=self.context,
destination_type='volume')
self.assertTrue(bdm.is_volume)
def test_is_volume_false(self):
bdm = objects.BlockDeviceMapping(context=self.context,
destination_type='local')
self.assertFalse(bdm.is_volume)
class TestBlockDeviceMappingObject(test_objects._LocalTest,
_TestBlockDeviceMappingObject):
pass
class TestRemoteBlockDeviceMappingObject(test_objects._RemoteTest,
_TestBlockDeviceMappingObject):
pass
class _TestBlockDeviceMappingListObject(object):
def fake_bdm(self, bdm_id):
fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
'id': bdm_id, 'instance_uuid': 'fake-instance',
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'boot_index': -1,
})
return fake_bdm
@mock.patch.object(db, 'block_device_mapping_get_all_by_instance')
def test_get_by_instance_uuid(self, get_all_by_inst):
fakes = [self.fake_bdm(123), self.fake_bdm(456)]
get_all_by_inst.return_value = fakes
bdm_list = (
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, 'fake_instance_uuid'))
for faked, got in zip(fakes, bdm_list):
self.assertIsInstance(got, objects.BlockDeviceMapping)
self.assertEqual(faked['id'], got.id)
@mock.patch.object(db, 'block_device_mapping_get_all_by_instance')
def test_get_by_instance_uuid_no_result(self, get_all_by_inst):
get_all_by_inst.return_value = None
bdm_list = (
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, 'fake_instance_uuid'))
self.assertEqual(0, len(bdm_list))
def test_root_volume_metadata(self):
fake_volume = {
'volume_image_metadata': {'vol_test_key': 'vol_test_value'}}
class FakeVolumeApi(object):
def get(*args, **kwargs):
return fake_volume
block_device_mapping = block_device_obj.block_device_make_list(None, [
fake_block_device.FakeDbBlockDeviceDict(
{'id': 1,
'boot_index': 0,
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake_volume_id',
'delete_on_termination': False})])
volume_meta = block_device_mapping.root_metadata(
self.context, None, FakeVolumeApi())
self.assertEqual(fake_volume['volume_image_metadata'], volume_meta)
def test_root_image_metadata(self):
fake_image = {'properties': {'img_test_key': 'img_test_value'}}
class FakeImageApi(object):
def show(*args, **kwargs):
return fake_image
block_device_mapping = block_device_obj.block_device_make_list(None, [
fake_block_device.FakeDbBlockDeviceDict(
{'id': 1,
'boot_index': 0,
'source_type': 'image',
'destination_type': 'local',
'image_id': "fake-image",
'delete_on_termination': True})])
image_meta = block_device_mapping.root_metadata(
self.context, FakeImageApi(), None)
self.assertEqual(fake_image['properties'], image_meta)
class TestBlockDeviceMappingListObject(test_objects._LocalTest,
_TestBlockDeviceMappingListObject):
pass
class TestRemoteBlockDeviceMappingListObject(
test_objects._RemoteTest, _TestBlockDeviceMappingListObject):
pass
class TestBlockDeviceUtils(test.NoDBTestCase):
def test_make_list_from_dicts(self):
ctx = context.get_admin_context()
dicts = [{'id': 1}, {'id': 2}]
objs = block_device_obj.block_device_make_list_from_dicts(ctx,
dicts)
self.assertIsInstance(objs, block_device_obj.BlockDeviceMappingList)
self.assertEqual(2, len(objs))
self.assertEqual(1, objs[0].id)
self.assertEqual(2, objs[1].id)
def test_make_list_from_dicts_empty(self):
ctx = context.get_admin_context()
objs = block_device_obj.block_device_make_list_from_dicts(ctx, [])
self.assertIsInstance(objs, block_device_obj.BlockDeviceMappingList)
self.assertEqual(0, len(objs))<|fim▁end|>
| |
<|file_name|>eventdetails.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
script.matchcenter - Football information for Kodi
A program addon that can be mapped to a key on your remote to display football information.
Livescores, Event details, Line-ups, League tables, next and previous matches by team. Follow what
others are saying about the match in twitter.
Copyright (C) 2016 enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import xbmcgui
import xbmc
import sys
import thesportsdb
import random
import threading
import pytz
import re
import ignoreleagues
from resources.lib.utilities import positions
from resources.lib.utilities import ssutils
from resources.lib.utilities.addonfileio import FileIO
from resources.lib.utilities.common_addon import *
api = thesportsdb.Api("7723457519235")
class detailsDialog(xbmcgui.WindowXMLDialog):
def __init__( self, *args, **kwargs ):
self.isRunning = True
self.match = kwargs["item"]
self.controls = []
def onInit(self):
self.setEventDetails()
def setEventDetails(self):
xbmc.executebuiltin("ClearProperty(has_lineups,Home)")
xbmc.executebuiltin("SetProperty(has_details,1,home)")
#livematch
if 'idEvent' not in self.match.__dict__.keys():
header = self.match.League + " - " + translate(32017) + " " + str(self.match.Round)
matchTime = ssutils.translatematch(self.match.Time)
matchHomeGoals = self.match.HomeGoals
matchAwayGoals = self.match.AwayGoals
matchpercent = 0.0
#match time
if "'" in self.match.Time.lower():
try:
matchpercent = float(int((float(self.match.Time.replace("'",""))/90)*100))
except: pass
else:
if self.match.Time.lower() == "halftime":
matchpercent = 50.0
elif self.match.Time.lower() == "postponed" or self.match.Time.lower() == "not started":
matchpercent = 0.0
elif self.match.Time.lower() == "finished":
matchpercent = 100.0
#match status
if self.match.Time.lower() == "finished": status = os.path.join(addon_path,"resources","img","redstatus.png")
elif "'" in self.match.Time.lower(): status = os.path.join(addon_path,"resources","img","greenstatus.png")
else: status = os.path.join(addon_path,"resources","img","yellowstatus.png")
stadium = self.match.Stadium
matchReferee = self.match.Referee
matchSpectators = self.match.Spectators
matchHomeGoalDetails = self.match.HomeGoalDetails
matchHomeTeamRedCardDetails = self.match.HomeTeamRedCardDetails
matchHomeTeamYellowCardDetails = self.match.HomeTeamYellowCardDetails
matchHomeSubDetails = self.match.HomeSubDetails
matchAwayGoalDetails = self.match.AwayGoalDetails
matchAwayTeamRedCardDetails = self.match.AwayTeamRedCardDetails
matchAwayTeamYellowCardDetails = self.match.AwayTeamYellowCardDetails
matchAwaySubDetails = self.match.AwaySubDetails
#past match
else:
header = self.match.strLeague + " - " + translate(32017) + " " + str(self.match.intRound)
matchTime = ssutils.translatematch("Finished")
matchHomeGoals = self.match.intHomeScore
matchAwayGoals = self.match.intAwayScore
status = os.path.join(addon_path,"resources","img","redstatus.png")
matchpercent = 100.0
stadium = self.match.HomeTeamObj.strStadium
matchReferee = ""
matchSpectators = self.match.intSpectators
matchHomeGoalDetails = self.match.strHomeGoalDetails
matchHomeTeamRedCardDetails = self.match.strHomeRedCards
matchHomeTeamYellowCardDetails = self.match.strHomeYellowCards
matchHomeSubDetails = ""
matchAwayGoalDetails = self.match.strAwayGoalDetails
matchAwayTeamRedCardDetails = self.match.strAwayRedCards
matchAwayTeamYellowCardDetails = self.match.strAwayYellowCards
matchAwaySubDetails = ""
self.getControl(32500).setLabel(header)
if self.match.HomeTeamObj:
if self.match.HomeTeamObj.strTeamBadge:
self.getControl(32501).setImage(self.match.HomeTeamObj.strTeamBadge)
else:
self.getControl(32501).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
if self.match.HomeTeamObj.strTeamJersey:
self.getControl(32502).setImage(self.match.HomeTeamObj.strTeamJersey)
else:
self.getControl(32502).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
else:
self.getControl(32501).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
self.getControl(32502).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
#Default values for team names. It depends if it is a live object or simple a past event
if ("HomeTeam" in self.match.__dict__.keys() and "AwayTeam" in self.match.__dict__.keys()):
self.getControl(32503).setLabel(self.match.HomeTeam)
self.getControl(32506).setLabel(self.match.AwayTeam)
else:
self.getControl(32503).setLabel(self.match.strHomeTeam)
self.getControl(32506).setLabel(self.match.strAwayTeam)
if show_alternative == "true":
if self.match.HomeTeamObj: self.getControl(32503).setLabel(self.match.HomeTeamObj.AlternativeNameFirst)
if self.match.AwayTeamObj: self.getControl(32506).setLabel(self.match.AwayTeamObj.AlternativeNameFirst)
if self.match.AwayTeamObj:
if self.match.AwayTeamObj.strTeamBadge:
self.getControl(32504).setImage(self.match.AwayTeamObj.strTeamBadge)
else:
self.getControl(32504).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
if self.match.AwayTeamObj.strTeamJersey:
self.getControl(32505).setImage(self.match.AwayTeamObj.strTeamJersey)
else:
self.getControl(32505).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
else:
self.getControl(32504).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
self.getControl(32505).setImage(os.path.join(addon_path,"resources","img","nokit_placeholder.png"))
if matchHomeGoals and matchAwayGoals:
self.getControl(32507).setLabel(str(matchHomeGoals)+"-"+str(matchAwayGoals))
if matchTime:
self.getControl(32508).setLabel(matchTime)
#Match Status (yellow,green,red)
self.getControl(32509).setImage(status)
#Match progress bar
self.getControl(32510).setPercent(matchpercent)
#Stadium and location
self.getControl(32511).setLabel(stadium)
#Spectators and Referee
if matchReferee:
self.getControl(32512).setLabel("[COLOR selected]" + translate(32023) + ": [/COLOR]" + matchReferee)
if matchSpectators:
self.getControl(32513).setLabel(matchSpectators + " " + translate(32024))
#Home Team Event Details
vars = [("goal",matchHomeGoalDetails),("redcard",matchHomeTeamRedCardDetails),("yellowcard",matchHomeTeamYellowCardDetails),("sub",matchHomeSubDetails)]
hometeamevents = {}
home_subs = {}
for key,var in vars:
if key and var:
if ";" in var:
events = var.split(";")
if events:
for event in events:
stringregex = re.findall(r"(\d+)':(.*)", event)
if stringregex:
for time,strevent in stringregex:
if key == "sub":
if time in home_subs.keys():
if strevent.strip().startswith("in"):
home_subs[time]["in"] = strevent
if "out" in home_subs[time].keys():
if not int(time) in hometeamevents.keys():
hometeamevents[int(time)] = [(key,home_subs[time]["out"] + " |" + home_subs[time]["in"])]
else:
hometeamevents[int(time)].append((key,home_subs[time]["out"] + " |" + home_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
home_subs.pop(time, None)
elif strevent.strip().startswith("out"):
home_subs[time]["out"] = strevent
if "in" in home_subs[time].keys():
if not int(time) in hometeamevents.keys():
hometeamevents[int(time)] = [(key,home_subs[time]["out"] + " |" + home_subs[time]["in"])]
else:
hometeamevents[int(time)].append((key,home_subs[time]["out"] + " |" + home_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
home_subs.pop(time, None)
else:
home_subs[time] = {}
if strevent.strip().startswith("in"):
home_subs[time]["in"] = strevent
elif strevent.strip().startswith("out"):
home_subs[time]["out"] = strevent
else:
if not strevent: strevent = translate(32025)
if not int(time) in hometeamevents.keys():
hometeamevents[int(time)] = [(key,strevent.strip())]
else:
hometeamevents[int(time)].append((key,strevent.strip()))
#Away Team Event Details
vars = [("goal",matchAwayGoalDetails),("redcard",matchAwayTeamRedCardDetails),("yellowcard",matchAwayTeamYellowCardDetails),("sub",matchAwaySubDetails)]
awayteamevents = {}
away_subs = {}
for key,var in vars:
if key and var:
if ";" in var:
events = var.split(";")
if events:
for event in events:
stringregex = re.findall(r"(\d+)':(.*)", event)
if stringregex:
for time,strevent in stringregex:
if key == "sub":
if time in away_subs.keys():
if strevent.strip().startswith("in"):
away_subs[time]["in"] = strevent
if "out" in away_subs[time].keys():
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,away_subs[time]["out"] + " |" + away_subs[time]["in"])]
else:
awayteamevents[int(time)].append((key,away_subs[time]["out"] + " |" + away_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
away_subs.pop(time, None)
elif strevent.strip().startswith("out"):
away_subs[time]["out"] = strevent
if "in" in away_subs[time].keys():
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,away_subs[time]["out"] + " |" + away_subs[time]["in"])]
else:
awayteamevents[int(time)].append((key,away_subs[time]["out"] + " |" + away_subs[time]["in"]))
#Remove item from dict (we might have more than one sub associated to a given minute)
away_subs.pop(time, None)
else:
away_subs[time] = {}
if strevent.strip().startswith("in"):
away_subs[time]["in"] = strevent
elif strevent.strip().startswith("out"):
away_subs[time]["out"] = strevent
else:
if not strevent: strevent = translate(32025)
if not int(time) in awayteamevents.keys():
awayteamevents[int(time)] = [(key,strevent.strip())]
else:
awayteamevents[int(time)].append((key,strevent.strip()))
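#A minimal sketch of the parsing above, with a hypothetical feed string
#(the real feed format may differ): entries look like "46': in J. Doe",
#are split on ";" and keyed by minute, so the "in"/"out" halves of a
#substitution can be paired once both have been seen.
#  >>> re.findall(r"(\d+)':(.*)", "46': in J. Doe")
#  [('46', ' in J. Doe')]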
#set home event details
self.getControl(32516).reset()
if hometeamevents:
items = []
ordered_times = reversed(sorted(hometeamevents.keys()))
for time in ordered_times:
eventlist = hometeamevents[time]
for eventtype,eventlabel in eventlist:
item = xbmcgui.ListItem(str(eventtype) + str(eventlabel))
item.setProperty("eventlabel",eventlabel)
item.setProperty("eventimg",os.path.join(addon_path,"resources","img",str(eventtype)+".png"))
item.setProperty("eventtime",str(time) + "':")
items.append(item)
if items:
self.getControl(32516).addItems(items)
#set away event details
self.getControl(32517).reset()
if awayteamevents:
items = []
ordered_times = reversed(sorted(awayteamevents.keys()))
for time in ordered_times:
eventlist = awayteamevents[time]
for eventtype,eventlabel in eventlist:
item = xbmcgui.ListItem(str(eventtype) + str(eventlabel))
item.setProperty("eventlabel",eventlabel)
item.setProperty("eventimg",os.path.join(addon_path,"resources","img",str(eventtype)+".png"))
item.setProperty("eventtime",str(time) + "':")
items.append(item)
if items:
self.getControl(32517).addItems(items)
self.setFocusId(32514)
def setLineUps(self,team):
xbmc.executebuiltin("ClearProperty(has_details,Home)")
self.getControl(32519).setImage(os.path.join(addon_path,"resources","img","pitch.png"))
xbmc.executebuiltin("SetProperty(has_lineups,1,home)")
self.current_lineup = team
if team == "home":
if 'idEvent' not in self.match.__dict__.keys():
if self.match.HomeTeamObj: self.LineUpTeamObj = self.match.HomeTeamObj
else: self.LineUpTeamObj = None
self.teamname = self.match.HomeTeam
self.formationlabel = self.match.HomeTeamFormation
self.lineupgoalkeeper = self.match.HomeLineupGoalkeeper
self.lineupdefenders = self.match.HomeLineupDefense
self.lineupmidfielders = self.match.HomeLineupMidfield
self.lineupforwarders = self.match.HomeLineupForward
self.lineupsubs = self.match.HomeLineupSubstitutes
if self.match.HomeLineupCoach:
self.lineupcoach = self.match.HomeLineupCoach.replace(";","")
else: self.lineupcoach = ""
else:
self.teamname = self.match.strHomeTeam
self.LineUpTeamObj = self.match.HomeTeamObj
self.formationlabel = self.match.strHomeFormation
self.lineupgoalkeeper = self.match.strHomeLineupGoalkeeper
self.lineupdefenders = self.match.strHomeLineupDefense
self.lineupmidfielders = self.match.strHomeLineupMidfield
self.lineupforwarders = self.match.strHomeLineupForward
self.lineupsubs = self.match.strHomeLineupSubstitutes
self.lineupcoach = ""
self.getControl(32527).setLabel(translate(32027))
else:
if 'idEvent' not in self.match.__dict__.keys():
if self.match.AwayTeamObj: self.LineUpTeamObj = self.match.AwayTeamObj
else: self.LineUpTeamObj = None
self.teamname = self.match.AwayTeam
self.formationlabel = self.match.AwayTeamFormation
self.lineupgoalkeeper = self.match.AwayLineupGoalkeeper
self.lineupdefenders = self.match.AwayLineupDefense
self.lineupmidfielders = self.match.AwayLineupMidfield
self.lineupforwarders = self.match.AwayLineupForward
self.lineupsubs = self.match.AwayLineupSubstitutes
if self.match.AwayLineupCoach:
self.lineupcoach = self.match.AwayLineupCoach.replace(";","")
else: self.lineupcoach = ""
else:
self.teamname = self.match.strAwayTeam
self.LineUpTeamObj = self.match.AwayTeamObj
self.formationlabel = self.match.strAwayFormation
self.lineupgoalkeeper = self.match.strAwayLineupGoalkeeper
self.lineupdefenders = self.match.strAwayLineupDefense
self.lineupmidfielders = self.match.strAwayLineupMidfield
self.lineupforwarders = self.match.strAwayLineupForward
self.lineupsubs = self.match.strAwayLineupSubstitutes
self.lineupcoach = ""
self.getControl(32527).setLabel(translate(32028))
#Set Labels for the panel
self.getControl(32522).setLabel(translate(32029) + ":")
self.getControl(32523).setLabel(translate(32030) + ":")
#Set team information
#Name
self.getControl(32521).setLabel(self.teamname)
if self.LineUpTeamObj:
if show_alternative == "true":
self.getControl(32521).setLabel(self.LineUpTeamObj.AlternativeNameFirst)
#Set team Badge
if self.LineUpTeamObj.strTeamBadge:
self.getControl(32520).setImage(self.LineUpTeamObj.strTeamBadge)
else:
self.getControl(32520).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))
else:
self.getControl(32520).setImage(os.path.join(addon_path,"resources","img","nobadge_placeholder.png"))<|fim▁hole|> if self.formationlabel:
self.getControl(32518).setLabel(self.formationlabel)
#Set coach
if self.lineupcoach:
self.getControl(32526).setLabel("[COLOR selected]" + translate(32026) + ":[/COLOR] " + self.lineupcoach)
#Set Lineup
starters = []
if self.lineupgoalkeeper:
self.lineupgoalkeeper = self.lineupgoalkeeper.replace(";","")
starters.append(self.lineupgoalkeeper)
defenders = []
if self.lineupdefenders:
for player in self.lineupdefenders.split(";"):
if player:
defenders.append(player.strip())
starters.append(player.strip())
self.lineupdefenders = defenders
del defenders
midfielders = []
if self.lineupmidfielders:
for player in self.lineupmidfielders.split(";"):
if player:
midfielders.append(player.strip())
starters.append(player.strip())
self.lineupmidfielders = midfielders
del midfielders
forwarders = []
if self.lineupforwarders:
for player in self.lineupforwarders.split(";"):
if player:
forwarders.append(player.strip())
starters.append(player.strip())
self.getControl(32524).reset()
self.getControl(32524).addItems(starters)
self.lineupforwarders = forwarders
#Set Subs
subs = []
if self.lineupsubs:
for player in self.lineupsubs.split(";"):
if player: subs.append(player.strip())
self.getControl(32525).reset()
self.getControl(32525).addItems(subs)
#Players on pitch
pitch = self.getControl(32519)
pitchPosition = pitch.getPosition()
pitchHeight = pitch.getHeight()
pitchWidth = pitch.getWidth()
if self.formationlabel:
formationsjson = eval(FileIO.fileread(json_formations))
formation = formationsjson[self.formationlabel]
else:
formation = None
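#A hypothetical formations entry, illustrating the normalized (x, y)
#coordinates consumed below (the actual json_formations contents may differ):
#  {"4-4-2": {"goalkeeper": [0.44, 0.82],
#             "defenders": [[0.1, 0.6], [0.35, 0.65], [0.6, 0.65], [0.85, 0.6]],
#             "midfielders": [...], "forwarders": [...]}}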
if formation:
#goalkeeper
goalkeeper = formation["goalkeeper"]
image_size = positions.getShirtHeight(pitchHeight,goalkeeper[1])
image_x = int(goalkeeper[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(goalkeeper[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey )
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image, "[B]" + self.lineupgoalkeeper + "[/B]")
self.controls.append(label)
#defenders
defenders = formation["defenders"]
if defenders:
i = 0
for defender in defenders:
image_size = positions.getShirtHeight(pitchHeight,defender[1])
image_x = int(defender[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(defender[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupdefenders[i] + "[/B]")
self.controls.append(label)
i += 1
#midfielders
midfielders = formation["midfielders"]
if midfielders:
i = 0
for midfielder in midfielders:
image_size = positions.getShirtHeight(pitchHeight,midfielder[1])
image_x = int(midfielder[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(midfielder[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupmidfielders[i] + "[/B]")
self.controls.append(label)
i += 1
#forwarders
forwarders = formation["forwarders"]
if forwarders:
i = 0
for forwarder in forwarders:
image_size = positions.getShirtHeight(pitchHeight,forwarder[1])
image_x = int(forwarder[0]*float(pitchWidth))+int(0.15*image_size)
image_y = int(forwarder[1]*float(pitchHeight))+int(0.15*image_size)
if self.LineUpTeamObj and self.LineUpTeamObj.strTeamJersey:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, self.LineUpTeamObj.strTeamJersey)
self.controls.append(image)
else:
image = xbmcgui.ControlImage(image_x,image_y,image_size,image_size, os.path.join(addon_path,"resources","img","nokit_placeholder.png") )
self.controls.append(image)
label = positions.getLabel(image,"[B]" + self.lineupforwarders[i] + "[/B]")
self.controls.append(label)
i += 1
self.addControls(self.controls)
self.setFocusId(32527)
def resetControls(self):
self.removeControls(self.controls)
self.controls = []
def stopRunning(self):
self.isRunning = False
xbmc.executebuiltin("ClearProperty(has_lineups,Home)")
xbmc.executebuiltin("ClearProperty(has_details,Home)")
self.close()
def onAction(self,action):
if action.getId() == 92 or action.getId() == 10:
self.stopRunning()
def onClick(self,controlId):
if controlId == 32514:
if self.controls:
self.resetControls()
self.setLineUps("home")
elif controlId == 32515:
if self.controls:
self.resetControls()
self.setLineUps("away")
elif controlId == 32528:
if self.controls:
self.resetControls()
self.setEventDetails()
elif controlId == 32527:
if self.controls:
self.resetControls()
if self.current_lineup == "home":
self.setLineUps("away")
else:
self.setLineUps("home")
def showDetails(match, matchid = None):
if not match and matchid:
match = api.Lookups().Event(eventid=matchid)
if match:
match = match[0]
match.setHomeTeamObj(api.Lookups().Team(teamid=match.idHomeTeam)[0])
match.setAwayTeamObj(api.Lookups().Team(teamid=match.idAwayTeam)[0])
else:
xbmcgui.Dialog().ok(translate(32000), translate(32064))
sys.exit(0)
main = detailsDialog('script-matchcenter-EventDetails.xml', addon_path,getskinfolder(),'', item=match )
main.doModal()
del main<|fim▁end|>
|
#Set team formation label
|
<|file_name|>DirectionalMovementIndicator.java<|end_file_name|><|fim▁begin|>/**
* The MIT License (MIT)
*
* Copyright (c) 2014-2017 Marc de Verdelhan & respective authors (see AUTHORS)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,<|fim▁hole|> * subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package eu.verdelhan.ta4j.indicators.trackers;
import eu.verdelhan.ta4j.Decimal;
import eu.verdelhan.ta4j.TimeSeries;
import eu.verdelhan.ta4j.indicators.CachedIndicator;
import eu.verdelhan.ta4j.indicators.helpers.DirectionalDownIndicator;
import eu.verdelhan.ta4j.indicators.helpers.DirectionalUpIndicator;
/**
* Directional movement indicator.
* <p>
*/
public class DirectionalMovementIndicator extends CachedIndicator<Decimal>{
private final int timeFrame;
private final DirectionalUpIndicator dup;
private final DirectionalDownIndicator ddown;
public DirectionalMovementIndicator(TimeSeries series, int timeFrame) {
super(series);
this.timeFrame = timeFrame;
dup = new DirectionalUpIndicator(series, timeFrame);
ddown = new DirectionalDownIndicator(series, timeFrame);
}
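/**
* A note on the math in {@code calculate()} below: it computes the standard
* directional index, DX = 100 * |+DI - -DI| / (+DI + -DI). For example,
* with +DI = 30 and -DI = 10, DX = 100 * 20 / 40 = 50.
*/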
@Override
protected Decimal calculate(int index) {
Decimal dupValue = dup.getValue(index);
Decimal ddownValue = ddown.getValue(index);
Decimal difference = dupValue.minus(ddownValue);
return difference.abs().dividedBy(dupValue.plus(ddownValue)).multipliedBy(Decimal.HUNDRED);
}
@Override
public String toString() {
return getClass().getSimpleName() + " timeFrame: " + timeFrame;
}
}<|fim▁end|>
| |
<|file_name|>httpEncodings.go<|end_file_name|><|fim▁begin|>package encoding
import (
"compress/gzip"
"compress/lzw"
"compress/zlib"
"io"
"github.com/JOTPOT-UK/JOTPOT-Server/http/http1/encoding/chunked"
"github.com/JOTPOT-UK/JOTPOT-Server/jps/pipe"
)
//ChunkedEncoding is the Encoding for HTTP chunked encoding.
var ChunkedEncoding = Encoding{
Name: "chunked",
Reader: pipe.ReaderPipeGenerator{Generator: chunked.NewPipe},
}
//NewLzwReader wraps lzw.NewReader as an io.ReadCloser-returning generator.
//LSB order with litWidth 8 is assumed here; the original passed (0, 0),
//but compress/lzw requires litWidth in [2,8].
func NewLzwReader(r io.Reader) (io.ReadCloser, error) {
return lzw.NewReader(r, lzw.LSB, 8), nil
}
//NewLzwWriter wraps lzw.NewWriter as an io.WriteCloser-returning generator.
//LSB order with litWidth 8 is assumed, mirroring NewLzwReader above.
func NewLzwWriter(w io.Writer) (io.WriteCloser, error) {
return lzw.NewWriter(w, lzw.LSB, 8), nil
}
//CompressEncoding is the HTTP "compress" encoding
var CompressEncoding = Encoding{
Name: "compress",
Reader: pipe.ReaderPipeGenerator{Generator: NewLzwReader},
Writer: pipe.WriterPipeGenerator{Generator: NewLzwWriter},
}
//XCompressEncoding is identicle to CompressEncoding, as per the HTTP Spec
var XCompressEncoding = Encoding{
Name: "x-compress",
Reader: pipe.ReaderPipeGenerator{Generator: NewLzwReader},
Writer: pipe.WriterPipeGenerator{Generator: NewLzwWriter},
}
//DeflateEncoding implements zlib compression, which is the "deflate" HTTP transfer encoding.
var DeflateEncoding = Encoding{
Name: "deflate",
Reader: pipe.ReaderPipeGenerator{Generator: zlib.NewReader},
Writer: pipe.WriterPipeGenerator{Generator: func(w io.Writer) (io.WriteCloser, error) {
return zlib.NewWriter(w), nil
}},
}
//NewGzipReader calls gzip.NewReader, but returns the result as an interface
func NewGzipReader(r io.Reader) (io.ReadCloser, error) {
return gzip.NewReader(r)
}
//NewGzipWriter calls gzip.NewWriter, but returns the result as an interface
func NewGzipWriter(w io.Writer) io.WriteCloser {
return gzip.NewWriter(w)
}
//GzipEncoding implements gzip compression
var GzipEncoding = Encoding{
Name: "gzip",
Reader: pipe.ReaderPipeGenerator{Generator: NewGzipReader},
Writer: pipe.WriterPipeGenerator{Generator: func(w io.Writer) (io.WriteCloser, error) {
return NewGzipWriter(w), nil
}},
}
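//A minimal round-trip sketch (not part of the original file) of what the
//gzip generators above wrap, using only the standard library:
//
//	var buf bytes.Buffer
//	zw := gzip.NewWriter(&buf)
//	zw.Write([]byte("hello"))
//	zw.Close()
//	zr, _ := gzip.NewReader(&buf)
//	// reading zr now yields the original "hello" bytes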
//XGzipEncoding is identical to GzipEncoding, apart from the name<|fim▁hole|>
var XGzipEncoding = Encoding{
Name: "x-gzip",
Reader: pipe.ReaderPipeGenerator{Generator: NewGzipReader},
Writer: pipe.WriterPipeGenerator{Generator: func(w io.Writer) (io.WriteCloser, error) {
return NewGzipWriter(w), nil
}},
}<|fim▁end|>
| |
<|file_name|>Fonts.py<|end_file_name|><|fim▁begin|>#
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Pygame.UI.
#
# Pygame.UI is free software; you can redistribute it and/or modify
# it under the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Pygame.UI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with Pygame.UI; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from pygame.font import Font
from types import StringType, UnicodeType
__all__ = ['initFont', 'renderText', 'getTextSize', 'getLineSize']
fontFaces = {}
fontCache = {}
misses = 0
hits = 0
def initFont(name, ttfFile, size, bold = 0, italic = 0, underline = 0):
global fontFaces
if name in fontFaces:
del fontFaces[name]
font = Font(ttfFile, size)
font.set_bold(bold)
font.set_italic(italic)
font.set_underline(underline)
fontFaces[name] = font
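# A hypothetical usage sketch (the font file path and face name are
# illustrative only, not part of the original module):
#   initFont('default', 'fonts/Vera.ttf', 12, bold = 1)
#   surface = renderText('default', 'Hello', 1, (255, 255, 255))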
def renderText(name, text, antialias, fg, bg = None):
<|fim▁hole|> antialias = 1
tType = type(text)
if tType != StringType and tType != UnicodeType:
text = str(text)
if len(text) == 0:
# TODO return very small surface
text = " "
#@print "->", text, "<-", type(text)
global misses, hits, fontCache
surface = fontCache.get((name, text, antialias, fg, bg), None)
if not surface:
misses += 1
if bg:
surface = fontFaces[name].render(text, antialias, fg, bg)
else:
surface = fontFaces[name].render(text, antialias, fg)
fontCache[name, text, antialias, fg, bg] = surface
else:
hits += 1
# clean up cache if size is > 2000
if misses > 2000:
print 'FONT CACHE STATS:', misses, hits, hits / float(misses + hits)
misses = 0
fontCache.clear()
return surface
def renderSmartText(surface, x, y, name, text, antialias, fg, bg = None):
# TODO
pass
def getTextSize(name, text):
#return renderText(name, text, 1, (0x00, 0x00, 0x00)).get_size()
return fontFaces[name].size(text)
def getLineSize(name):
return fontFaces[name].get_linesize()<|fim▁end|>
| |
<|file_name|>testqgsgrassprovider.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
testqgsgrassprovider.cpp
--------------------------------------
Date : April 2015
Copyright : (C) 2015 by Radim Blazek
Email : radim dot blazek at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include <cmath>
#include <QApplication>
#include <QDir>
#include <QObject>
#include <QString>
#include <QStringList>
#include <QTemporaryFile>
#include <QtTest/QtTest>
#include <qgsapplication.h>
#include <qgscoordinatereferencesystem.h>
#include <qgsgrass.h>
#include <qgsgrassimport.h>
#include <qgsproviderregistry.h>
#include <qgsrasterbandstats.h>
#include <qgsrasterlayer.h>
#include <qgsvectordataprovider.h>
extern "C"
{
#ifndef _MSC_VER
#include <unistd.h>
#endif
#include <grass/version.h>
}
#define TINY_VALUE std::numeric_limits<double>::epsilon() * 20
/** \ingroup UnitTests
* This is a unit test for the QGIS GRASS provider.
*/
class TestQgsGrassProvider: public QObject
{
Q_OBJECT
private slots:
void initTestCase();// will be called before the first testfunction is executed.
void cleanupTestCase();// will be called after the last testfunction was executed.
void init() {} // will be called before each testfunction is executed.
void cleanup() {} // will be called after every testfunction.
void fatalError();
void locations();
void mapsets();
void maps();
void vectorLayers();
void region();
void info();
void rasterImport();
void vectorImport();
private:
void reportRow( QString message );
void reportHeader( QString message );
// verify result and report result
bool verify( bool ok );
// compare expected and got string and set ok to false if not equal
bool compare( QString expected, QString got, bool& ok );
// lists are considered equal if contains the same values regardless order
// set ok to false if not equal
bool compare( QStringList expected, QStringList got, bool& ok );
// compare with tolerance
bool compare( double expected, double got, bool& ok );
bool createTmpLocation( QString& tmpGisdbase, QString& tmpLocation, QString& tmpMapset );
QString mGisdbase;
QString mLocation;
QString mReport;
QString mBuildMapset;
};
#define GVERIFY(x) QVERIFY( verify(x) )
void TestQgsGrassProvider::reportRow( QString message )
{
mReport += message + "<br>\n";
}
void TestQgsGrassProvider::reportHeader( QString message )
{
mReport += "<h2>" + message + "</h2>\n";
}
//runs before all tests
void TestQgsGrassProvider::initTestCase()
{
// init QGIS's paths - true means that all paths will be initialized from the prefix
QgsApplication::init();
// QgsApplication::initQgis() calls QgsProviderRegistry::instance() which registers providers.
// Because providers are linked in build directory with rpath, it would also try to load GRASS providers
// in version different form which we are testing here and it would also load GRASS libs in different version
// and result in segfault when __do_global_dtors_aux() is called.
// => we must set QGIS_PROVIDER_FILE before QgsApplication::initQgis() to avoid loading GRASS provider in different version
QgsGrass::putEnv( "QGIS_PROVIDER_FILE", "gdal|ogr" );
QgsApplication::initQgis();
QString mySettings = QgsApplication::showSettings();
mySettings = mySettings.replace( "\n", "<br />\n" );
mReport += QString( "<h1>GRASS %1 provider tests</h1>\n" ).arg( GRASS_BUILD_VERSION );
mReport += "<p>" + mySettings + "</p>\n";
#ifndef Q_OS_WIN
reportRow( "LD_LIBRARY_PATH: " + QString( getenv( "LD_LIBRARY_PATH" ) ) );
#else
reportRow( "PATH: " + QString( getenv( "PATH" ) ) );
#endif
QgsGrass::init();
//create some objects that will be used in all tests...
mGisdbase = QString( TEST_DATA_DIR ) + "/grass";
mLocation = "wgs84";
mBuildMapset = QString( "test%1" ).arg( GRASS_BUILD_VERSION );
reportRow( "mGisdbase: " + mGisdbase );
reportRow( "mLocation: " + mLocation );
reportRow( "mBuildMapset: " + mBuildMapset );
qDebug() << "mGisdbase = " << mGisdbase << " mLocation = " << mLocation;
}
//runs after all tests
void TestQgsGrassProvider::cleanupTestCase()
{
QString myReportFile = QDir::tempPath() + "/qgistest.html";
QFile myFile( myReportFile );
if ( myFile.open( QIODevice::WriteOnly | QIODevice::Append ) )
{
QTextStream myQTextStream( &myFile );
myQTextStream << mReport;
myFile.close();
}
//QgsApplication::exitQgis();
}
bool TestQgsGrassProvider::verify( bool ok )
{
reportRow( "" );
reportRow( QString( "Test result: " ) + ( ok ? "ok" : "error" ) );
return ok;
}
bool TestQgsGrassProvider::compare( QString expected, QString got, bool &ok )
{
if ( expected != got )
{
ok = false;
return false;
}
return true;
}
bool TestQgsGrassProvider::compare( QStringList expected, QStringList got, bool &ok )
{
QStringList e = expected;
QStringList g = got;
e.sort();
g.sort();
if ( e != g )
{
ok = false;
return false;
}
return true;
}
bool TestQgsGrassProvider::compare( double expected, double got, bool& ok )
{
if ( qAbs( got - expected ) > TINY_VALUE )
{
ok = false;
return false;
}
return true;
}
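// A quick illustration of why the tolerance above matters (hypothetical
// values): 0.1 + 0.2 differs from 0.3 by about 5.6e-17 in IEEE 754 doubles,
// which is well below TINY_VALUE, so compare( 0.3, 0.1 + 0.2, ok ) still
// passes.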
// G_fatal_error() handling
void TestQgsGrassProvider::fatalError()
{
reportHeader( "TestQgsGrassProvider::fatalError" );
bool ok = true;
QString errorMessage = "test fatal error";
G_TRY
{
G_fatal_error( "%s", errorMessage.toAscii().data() );
ok = false; // should not be reached
reportRow( "G_fatal_error() did not throw exception" );
}
G_CATCH( QgsGrass::Exception &e )
{
reportRow( QString( "Exception thrown and caught correctly" ) );
reportRow( "expected error message: " + errorMessage );
reportRow( "got error message: " + QString( e.what() ) );
compare( errorMessage, e.what(), ok );
}
compare( errorMessage, QgsGrass::errorMessage(), ok );
GVERIFY( ok );
}
void TestQgsGrassProvider::locations()
{
reportHeader( "TestQgsGrassProvider::locations" );
bool ok = true;
QStringList expectedLocations;
expectedLocations << "wgs84";
QStringList locations = QgsGrass::locations( mGisdbase );
reportRow( "expectedLocations: " + expectedLocations.join( ", " ) );
reportRow( "locations: " + locations.join( ", " ) );
compare( expectedLocations, locations, ok );
GVERIFY( ok );
}
void TestQgsGrassProvider::mapsets()
{
reportHeader( "TestQgsGrassProvider::mapsets" );
bool ok = true;
QStringList expectedMapsets;
expectedMapsets << "PERMANENT" << "test" << "test6" << "test7";
QStringList mapsets = QgsGrass::mapsets( mGisdbase, mLocation );
reportRow( "expectedMapsets: " + expectedMapsets.join( ", " ) );
reportRow( "mapsets: " + mapsets.join( ", " ) );
compare( expectedMapsets, mapsets, ok );
QgsGrass::setLocation( mGisdbase, mLocation ); // for G_is_mapset_in_search_path
foreach ( QString expectedMapset, expectedMapsets )
{
if ( G_is_mapset_in_search_path( expectedMapset.toAscii().data() ) != 1 )
{
reportRow( "mapset " + expectedMapset + " not in search path" );
ok = false;
}
}
// open/close mapset try twice to be sure that lock was not left etc.
for ( int i = 1; i < 3; i++ )
{
reportRow( "" );
reportRow( "Open/close mapset " + mBuildMapset + " for the " + QString::number( i ) + ". time" );
QString error = QgsGrass::openMapset( mGisdbase, mLocation, mBuildMapset );
if ( !error.isEmpty() )
{
reportRow( "QgsGrass::openMapset() failed: " + error );
ok = false;
}
else
{
reportRow( "mapset successfully opened" );
if ( !QgsGrass::activeMode() )
{
reportRow( "QgsGrass::activeMode() returns false after openMapset()" );
ok = false;
}
error = QgsGrass::closeMapset();
if ( !error.isEmpty() )
{
reportRow( "QgsGrass::close() failed: " + error );
ok = false;
}
else
{
reportRow( "mapset successfully closed" );
}
if ( QgsGrass::activeMode() )
{
reportRow( "QgsGrass::activeMode() returns true after closeMapset()" );
ok = false;
}
}
}
GVERIFY( ok );
}
void TestQgsGrassProvider::maps()
{
reportHeader( "TestQgsGrassProvider::maps" );
bool ok = true;
QStringList expectedVectors;
expectedVectors << "test";
QStringList vectors = QgsGrass::vectors( mGisdbase, mLocation, mBuildMapset );
reportRow( "expectedVectors: " + expectedVectors.join( ", " ) );
reportRow( "vectors: " + vectors.join( ", " ) );
compare( expectedVectors, vectors, ok );
reportRow( "" );
QStringList expectedRasters;
expectedRasters << "cell" << "dcell" << "fcell";
QStringList rasters = QgsGrass::rasters( mGisdbase, mLocation, "test" );
reportRow( "expectedRasters: " + expectedRasters.join( ", " ) );
reportRow( "rasters: " + rasters.join( ", " ) );
compare( expectedRasters, rasters, ok );
GVERIFY( ok );
}
void TestQgsGrassProvider::vectorLayers()
{
reportHeader( "TestQgsGrassProvider::vectorLayers" );
QString mapset = mBuildMapset;
QString mapName = "test";
QStringList expectedLayers;
expectedLayers << "1_point" << "2_line" << "3_polygon";
reportRow( "mapset: " + mapset );
reportRow( "mapName: " + mapName );
reportRow( "expectedLayers: " + expectedLayers.join( ", " ) );
bool ok = true;
G_TRY
{
QStringList layers = QgsGrass::vectorLayers( mGisdbase, mLocation, mapset, mapName );
reportRow( "layers: " + layers.join( ", " ) );
compare( expectedLayers, layers, ok );
}
G_CATCH( QgsGrass::Exception &e )
{
ok = false;
reportRow( QString( "ERROR: %1" ).arg( e.what() ) );
}
GVERIFY( ok );
}
void TestQgsGrassProvider::region()
{
reportHeader( "TestQgsGrassProvider::region" );
struct Cell_head window;
struct Cell_head windowCopy;
bool ok = true;
try
{
QgsGrass::region( mGisdbase, mLocation, "PERMANENT", &window );
}
catch ( QgsGrass::Exception &e )
{
Q_UNUSED( e );
reportRow( "QgsGrass::region() failed" );
ok = false;
}
if ( ok )
{
QString expectedRegion = "proj:3;zone:0;north:90N;south:90S;east:180E;west:180W;cols:1000;rows:500;e-w resol:0:21:36;n-s resol:0:21:36;";
QString region = QgsGrass::regionString( &window );
reportRow( "expectedRegion: " + expectedRegion );
reportRow( "region: " + region );
compare( expectedRegion, region, ok );
windowCopy.proj = window.proj;
windowCopy.zone = window.zone;
windowCopy.rows = window.rows;
windowCopy.cols = window.cols;
QgsGrass::copyRegionExtent( &window, &windowCopy );
QgsGrass::copyRegionResolution( &window, &windowCopy );
QString regionCopy = QgsGrass::regionString( &windowCopy );
reportRow( "regionCopy: " + regionCopy );
compare( expectedRegion, regionCopy, ok );
}
GVERIFY( ok );
}
void TestQgsGrassProvider::info()
{
// info() -> getInfo() -> runModule() -> startModule()
reportHeader( "TestQgsGrassProvider::info" );
bool ok = true;
QgsRectangle expectedExtent( -5, -5, 5, 5 );
QMap<QString, QgsRasterBandStats> expectedStats;
QgsRasterBandStats es;
es.minimumValue = -20;
es.maximumValue = 20;
expectedStats.insert( "cell", es );
es.minimumValue = -20.25;
es.maximumValue = 20.25;
expectedStats.insert( "dcell", es );
es.minimumValue = -20.25;
es.maximumValue = 20.25;
expectedStats.insert( "fcell", es );
foreach ( QString map, expectedStats.keys() )
{
es = expectedStats.value( map );
// TODO: QgsGrass::info() may open dialog window on error which blocks tests
QHash<QString, QString> info = QgsGrass::info( mGisdbase, mLocation, "test", map, QgsGrassObject::Raster, "stats",
expectedExtent, 10, 10, 5000, false );
reportRow( "map: " + map );
QgsRasterBandStats s;
s.minimumValue = info["MIN"].toDouble();
s.maximumValue = info["MAX"].toDouble();
reportRow( QString( "expectedStats: min = %1 max = %2" ).arg( es.minimumValue ).arg( es.maximumValue ) ) ;
reportRow( QString( "stats: min = %1 max = %2" ).arg( s.minimumValue ).arg( s.maximumValue ) ) ;
compare( es.minimumValue, s.minimumValue, ok );
compare( es.maximumValue, s.maximumValue, ok );
QgsRectangle extent = QgsGrass::extent( mGisdbase, mLocation, "test", map, QgsGrassObject::Raster, false );
reportRow( "expectedExtent: " + expectedExtent.toString() );
reportRow( "extent: " + extent.toString() );
if ( extent != expectedExtent )
{
ok = false;
}
}
reportRow( "" );
QgsCoordinateReferenceSystem expectedCrs;
expectedCrs.createFromOgcWmsCrs( "EPSG:4326" );
QgsCoordinateReferenceSystem crs = QgsGrass::crs( mGisdbase, mLocation );
reportRow( "expectedCrs: " + expectedCrs.toWkt() );
reportRow( "crs: " + crs.toWkt() );
if ( crs != expectedCrs )
{
ok = false;
}
GVERIFY( ok );
}
// create temporary output location
bool TestQgsGrassProvider::createTmpLocation( QString& tmpGisdbase, QString& tmpLocation, QString& tmpMapset )
{
// use QTemporaryFile to generate name (QTemporaryDir since 5.0)
QTemporaryFile* tmpFile = new QTemporaryFile( QDir::tempPath() + "/qgis-grass-test" );
tmpFile->open();
tmpGisdbase = tmpFile->fileName();
delete tmpFile;
reportRow( "tmpGisdbase: " + tmpGisdbase );
tmpLocation = "test";
tmpMapset = "PERMANENT";
QString tmpMapsetPath = tmpGisdbase + "/" + tmpLocation + "/" + tmpMapset;
reportRow( "tmpMapsetPath: " + tmpMapsetPath );
QDir tmpDir = QDir::temp();
if ( !tmpDir.mkpath( tmpMapsetPath ) )
{
reportRow( "cannot create " + tmpMapsetPath );
return false;
}
QStringList cpFiles;
cpFiles << "DEFAULT_WIND" << "WIND" << "PROJ_INFO" << "PROJ_UNITS";
QString templateMapsetPath = mGisdbase + "/" + mLocation + "/PERMANENT";
foreach ( QString cpFile, cpFiles )
{
if ( !QFile::copy( templateMapsetPath + "/" + cpFile, tmpMapsetPath + "/" + cpFile ) )
{
reportRow( "cannot copy " + cpFile );
return false;
}
}
return true;
}
void TestQgsGrassProvider::rasterImport()
{
reportHeader( "TestQgsGrassProvider::rasterImport" );
bool ok = true;
QString tmpGisdbase;
QString tmpLocation;
QString tmpMapset;
if ( !createTmpLocation( tmpGisdbase, tmpLocation, tmpMapset ) )
{
reportRow( "cannot create temporary location" );
GVERIFY( false );
return;
}
QStringList rasterFiles;
rasterFiles << "tenbytenraster.asc" << "landsat.tif" << "raster/band1_byte_ct_epsg4326.tif" << "raster/band1_int16_noct_epsg4326.tif";
rasterFiles << "raster/band1_float32_noct_epsg4326.tif" << "raster/band3_int16_noct_epsg4326.tif";
QgsCoordinateReferenceSystem mapsetCrs = QgsGrass::crsDirect( mGisdbase, mLocation );
foreach ( QString rasterFile, rasterFiles )
{
QString uri = QString( TEST_DATA_DIR ) + "/" + rasterFile;
QString name = QFileInfo( uri ).baseName();
reportRow( "input raster: " + uri );
QgsRasterDataProvider* provider = qobject_cast<QgsRasterDataProvider*>( QgsProviderRegistry::instance()->provider( "gdal", uri ) );
if ( !provider )
{
reportRow( "Cannot create provider " + uri );
ok = false;
continue;
}
if ( !provider->isValid() )
{
reportRow( "Provider is not valid " + uri );
ok = false;
continue;
}
QgsRectangle newExtent = provider->extent();
int newXSize = provider->xSize();
int newYSize = provider->ySize();
QgsRasterPipe* pipe = new QgsRasterPipe();
pipe->set( provider );
QgsCoordinateReferenceSystem providerCrs = provider->crs();
if ( providerCrs.isValid() && mapsetCrs.isValid() && providerCrs != mapsetCrs )
{
QgsRasterProjector * projector = new QgsRasterProjector;
projector->setCRS( providerCrs, mapsetCrs );
projector->destExtentSize( provider->extent(), provider->xSize(), provider->ySize(),
newExtent, newXSize, newYSize );
pipe->set( projector );
}
QgsGrassObject rasterObject( tmpGisdbase, tmpLocation, tmpMapset, name, QgsGrassObject::Raster );
QgsGrassRasterImport *import = new QgsGrassRasterImport( pipe, rasterObject,
newExtent, newXSize, newYSize );
if ( !import->import() )
{
reportRow( "import failed: " + import->error() );
ok = false;
}
delete import;
}
GVERIFY( ok );
}
void TestQgsGrassProvider::vectorImport()
{
reportHeader( "TestQgsGrassProvider::vectorImport" );
bool ok = true;
QString tmpGisdbase;
QString tmpLocation;
QString tmpMapset;
if ( !createTmpLocation( tmpGisdbase, tmpLocation, tmpMapset ) )
{<|fim▁hole|>
QStringList files;
files << "points.shp" << "multipoint.shp" << "lines.shp" << "polys.shp";
files << "polys_overlapping.shp" << "bug5598.shp";
QgsCoordinateReferenceSystem mapsetCrs = QgsGrass::crsDirect( mGisdbase, mLocation );
foreach ( QString file, files )
{
QString uri = QString( TEST_DATA_DIR ) + "/" + file;
QString name = QFileInfo( uri ).baseName();
reportRow( "input vector: " + uri );
QgsVectorDataProvider* provider = qobject_cast<QgsVectorDataProvider*>( QgsProviderRegistry::instance()->provider( "ogr", uri ) );
if ( !provider )
{
reportRow( "Cannot create provider " + uri );
ok = false;
continue;
}
if ( !provider->isValid() )
{
reportRow( "Provider is not valid " + uri );
ok = false;
continue;
}
QgsGrassObject vectorObject( tmpGisdbase, tmpLocation, tmpMapset, name, QgsGrassObject::Vector );
QgsGrassVectorImport *import = new QgsGrassVectorImport( provider, vectorObject );
if ( !import->import() )
{
reportRow( "import failed: " + import->error() );
ok = false;
}
delete import;
QStringList layers = QgsGrass::vectorLayers( tmpGisdbase, tmpLocation, tmpMapset, name );
reportRow( "created layers: " + layers.join( "," ) );
}
GVERIFY( ok );
}
QTEST_MAIN( TestQgsGrassProvider )
#include "testqgsgrassprovider.moc"<|fim▁end|>
|
reportRow( "cannot create temporary location" );
GVERIFY( false );
return;
}
|
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Gengo Translator',
'category': 'Website/Website',
'summary': 'Translate website in one-click',
'description': """
This module allows to send website content to Gengo translation service in a single click. Gengo then gives back the translated terms in the destination language.
""",
'depends': [
'website',
'base_gengo'
],
'data': [<|fim▁hole|> 'views/website_gengo_templates.xml',
]
}<|fim▁end|>
| |
<|file_name|>rnn_cell_impl.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module implementing RNN Cells.
This module provides a number of basic commonly used RNN cells, such as LSTM
(Long Short Term Memory) or GRU (Gated Recurrent Unit), and a number of
operators that allow adding dropouts, projections, or embeddings for inputs.
Constructing multi-layer cells is supported by the class `MultiRNNCell`, or by
calling the `rnn` ops several times.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import hashlib
import numbers
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import activations
from tensorflow.python.keras import initializers
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.layers import base as base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.util import nest
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export
_BIAS_VARIABLE_NAME = "bias"
_WEIGHTS_VARIABLE_NAME = "kernel"
# This can be used with self.assertRaisesRegexp for assert_like_rnncell.
ASSERT_LIKE_RNNCELL_ERROR_REGEXP = "is not an RNNCell"
def assert_like_rnncell(cell_name, cell):
"""Raises a TypeError if cell is not like an RNNCell.
NOTE: Do not rely on the error message (in particular in tests) which can be
subject to change to increase readability. Use
ASSERT_LIKE_RNNCELL_ERROR_REGEXP.
Args:
cell_name: A string to give a meaningful error referencing the name
of the function argument.
cell: The object which should behave like an RNNCell.
Raises:
TypeError: A human-friendly exception.
"""
conditions = [
hasattr(cell, "output_size"),
hasattr(cell, "state_size"),
hasattr(cell, "get_initial_state") or hasattr(cell, "zero_state"),
callable(cell),
]
errors = [
"'output_size' property is missing",
"'state_size' property is missing",
"either 'zero_state' or 'get_initial_state' method is required",
"is not callable"
]
if not all(conditions):
errors = [error for error, cond in zip(errors, conditions) if not cond]
raise TypeError("The argument {!r} ({}) is not an RNNCell: {}.".format(
cell_name, cell, ", ".join(errors)))
def _concat(prefix, suffix, static=False):
"""Concat that enables int, Tensor, or TensorShape values.
This function takes a size specification, which can be an integer, a
TensorShape, or a Tensor, and converts it into a concatenated Tensor
(if static = False) or a list of integers (if static = True).
Args:
prefix: The prefix; usually the batch size (and/or time step size).
(TensorShape, int, or Tensor.)
suffix: TensorShape, int, or Tensor.
static: If `True`, return a python list with possibly unknown dimensions.
Otherwise return a `Tensor`.
Returns:
shape: the concatenation of prefix and suffix.
Raises:
ValueError: if `suffix` is not a scalar or vector (or TensorShape).
ValueError: if prefix or suffix was `None` and asked for dynamic
Tensors out.
"""
if isinstance(prefix, ops.Tensor):
p = prefix
p_static = tensor_util.constant_value(prefix)
if p.shape.ndims == 0:
p = array_ops.expand_dims(p, 0)
elif p.shape.ndims != 1:
raise ValueError("prefix tensor must be either a scalar or vector, "
"but saw tensor: %s" % p)
else:
p = tensor_shape.as_shape(prefix)
p_static = p.as_list() if p.ndims is not None else None
p = (constant_op.constant(p.as_list(), dtype=dtypes.int32)
if p.is_fully_defined() else None)
if isinstance(suffix, ops.Tensor):
s = suffix
s_static = tensor_util.constant_value(suffix)
if s.shape.ndims == 0:
s = array_ops.expand_dims(s, 0)
elif s.shape.ndims != 1:
raise ValueError("suffix tensor must be either a scalar or vector, "
"but saw tensor: %s" % s)
else:
s = tensor_shape.as_shape(suffix)
s_static = s.as_list() if s.ndims is not None else None
s = (constant_op.constant(s.as_list(), dtype=dtypes.int32)
if s.is_fully_defined() else None)
if static:
shape = tensor_shape.as_shape(p_static).concatenate(s_static)
shape = shape.as_list() if shape.ndims is not None else None
else:
if p is None or s is None:
raise ValueError("Provided a prefix or suffix of None: %s and %s"
% (prefix, suffix))
shape = array_ops.concat((p, s), 0)
return shape
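# For example (a sketch of the two modes): _concat(32,
# tensor_shape.TensorShape([4]), static=True) returns the python list
# [32, 4], while static=False returns a 1-D int32 Tensor holding the
# same values.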
def _zero_state_tensors(state_size, batch_size, dtype):
"""Create tensors of zeros based on state_size, batch_size, and dtype."""
def get_state_shape(s):
"""Combine s with batch_size to get a proper tensor shape."""
c = _concat(batch_size, s)
size = array_ops.zeros(c, dtype=dtype)
if not context.executing_eagerly():
c_static = _concat(batch_size, s, static=True)
size.set_shape(c_static)
return size
return nest.map_structure(get_state_shape, state_size)
@tf_export("nn.rnn_cell.RNNCell")
class RNNCell(base_layer.Layer):
"""Abstract object representing an RNN cell.
Every `RNNCell` must have the properties below and implement `call` with
the signature `(output, next_state) = call(input, state)`. The optional
third input argument, `scope`, is allowed for backwards compatibility
purposes; but should be left off for new subclasses.
This definition of cell differs from the definition used in the literature.
In the literature, 'cell' refers to an object with a single scalar output.
This definition refers to a horizontal array of such units.
An RNN cell, in the most abstract setting, is anything that has
a state and performs some operation that takes a matrix of inputs.
This operation results in an output matrix with `self.output_size` columns.
If `self.state_size` is an integer, this operation also results in a new
state matrix with `self.state_size` columns. If `self.state_size` is a
(possibly nested tuple of) TensorShape object(s), then it should return a
matching structure of Tensors having shape `[batch_size].concatenate(s)`
for each `s` in `self.state_size`.<|fim▁hole|>
 super(RNNCell, self).__init__(
trainable=trainable, name=name, dtype=dtype, **kwargs)
# Attribute that indicates whether the cell is a TF RNN cell, due the slight
# difference between TF and Keras RNN cell.
self._is_tf_rnn_cell = True
def __call__(self, inputs, state, scope=None):
"""Run this RNN cell on inputs, starting from the given state.
Args:
inputs: `2-D` tensor with shape `[batch_size, input_size]`.
state: if `self.state_size` is an integer, this should be a `2-D Tensor`
with shape `[batch_size, self.state_size]`. Otherwise, if
`self.state_size` is a tuple of integers, this should be a tuple
with shapes `[batch_size, s] for s in self.state_size`.
scope: VariableScope for the created subgraph; defaults to class name.
Returns:
A pair containing:
- Output: A `2-D` tensor with shape `[batch_size, self.output_size]`.
- New state: Either a single `2-D` tensor, or a tuple of tensors matching
the arity and shapes of `state`.
"""
if scope is not None:
with vs.variable_scope(scope,
custom_getter=self._rnn_get_variable) as scope:
return super(RNNCell, self).__call__(inputs, state, scope=scope)
else:
scope_attrname = "rnncell_scope"
scope = getattr(self, scope_attrname, None)
if scope is None:
scope = vs.variable_scope(vs.get_variable_scope(),
custom_getter=self._rnn_get_variable)
setattr(self, scope_attrname, scope)
with scope:
return super(RNNCell, self).__call__(inputs, state)
def _rnn_get_variable(self, getter, *args, **kwargs):
variable = getter(*args, **kwargs)
if context.executing_eagerly():
trainable = variable._trainable # pylint: disable=protected-access
else:
trainable = (
variable in tf_variables.trainable_variables() or
(isinstance(variable, tf_variables.PartitionedVariable) and
list(variable)[0] in tf_variables.trainable_variables()))
if trainable and variable not in self._trainable_weights:
self._trainable_weights.append(variable)
elif not trainable and variable not in self._non_trainable_weights:
self._non_trainable_weights.append(variable)
return variable
@property
def state_size(self):
"""size(s) of state(s) used by this cell.
It can be represented by an Integer, a TensorShape or a tuple of Integers
or TensorShapes.
"""
raise NotImplementedError("Abstract method")
@property
def output_size(self):
"""Integer or TensorShape: size of outputs produced by this cell."""
raise NotImplementedError("Abstract method")
def build(self, _):
# This tells the parent Layer object that it's OK to call
# self.add_variable() inside the call() method.
pass
def get_initial_state(self, inputs=None, batch_size=None, dtype=None):
if inputs is not None:
# Validate the given batch_size and dtype against inputs if provided.
inputs = ops.convert_to_tensor(inputs, name="inputs")
if batch_size is not None:
if tensor_util.is_tensor(batch_size):
static_batch_size = tensor_util.constant_value(
batch_size, partial=True)
else:
static_batch_size = batch_size
if inputs.shape.dims[0].value != static_batch_size:
raise ValueError(
"batch size from input tensor is different from the "
"input param. Input tensor batch: {}, batch_size: {}".format(
inputs.shape.dims[0].value, batch_size))
if dtype is not None and inputs.dtype != dtype:
raise ValueError(
"dtype from input tensor is different from the "
"input param. Input tensor dtype: {}, dtype: {}".format(
inputs.dtype, dtype))
batch_size = inputs.shape.dims[0].value or array_ops.shape(inputs)[0]
dtype = inputs.dtype
if None in [batch_size, dtype]:
raise ValueError(
"batch_size and dtype cannot be None while constructing initial "
"state: batch_size={}, dtype={}".format(batch_size, dtype))
return self.zero_state(batch_size, dtype)
def zero_state(self, batch_size, dtype):
"""Return zero-filled state tensor(s).
Args:
batch_size: int, float, or unit Tensor representing the batch size.
dtype: the data type to use for the state.
Returns:
If `state_size` is an int or TensorShape, then the return value is a
`N-D` tensor of shape `[batch_size, state_size]` filled with zeros.
If `state_size` is a nested list or tuple, then the return value is
a nested list or tuple (of the same structure) of `2-D` tensors with
the shapes `[batch_size, s]` for each s in `state_size`.
"""
# Try to use the last cached zero_state. This is done to avoid recreating
# zeros, especially when eager execution is enabled.
state_size = self.state_size
is_eager = context.executing_eagerly()
if is_eager and hasattr(self, "_last_zero_state"):
(last_state_size, last_batch_size, last_dtype,
last_output) = getattr(self, "_last_zero_state")
if (last_batch_size == batch_size and
last_dtype == dtype and
last_state_size == state_size):
return last_output
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
output = _zero_state_tensors(state_size, batch_size, dtype)
if is_eager:
self._last_zero_state = (state_size, batch_size, dtype, output)
return output
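# A minimal sketch of the contract documented above (illustrative only;
# BasicRNNCell below is a concrete implementation, and `inputs` is assumed
# to be a [batch_size, input_size] Tensor):
#
#   cell = BasicRNNCell(num_units=4)
#   state = cell.zero_state(batch_size=2, dtype=dtypes.float32)
#   output, next_state = cell(inputs, state)  # both shaped [2, 4]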
class LayerRNNCell(RNNCell):
"""Subclass of RNNCells that act like proper `tf.Layer` objects.
For backwards compatibility purposes, most `RNNCell` instances allow their
`call` methods to instantiate variables via `tf.get_variable`. The underlying
variable scope thus keeps track of any variables, and returns cached
versions. This is atypical of `tf.layer` objects, which separate this
part of layer building into a `build` method that is only called once.
Here we provide a subclass for `RNNCell` objects that act exactly as
`Layer` objects do. They must provide a `build` method and their
`call` methods do not access Variables `tf.get_variable`.
"""
def __call__(self, inputs, state, scope=None, *args, **kwargs):
"""Run this RNN cell on inputs, starting from the given state.
Args:
inputs: `2-D` tensor with shape `[batch_size, input_size]`.
state: if `self.state_size` is an integer, this should be a `2-D Tensor`
with shape `[batch_size, self.state_size]`. Otherwise, if
`self.state_size` is a tuple of integers, this should be a tuple
with shapes `[batch_size, s] for s in self.state_size`.
scope: optional cell scope.
*args: Additional positional arguments.
**kwargs: Additional keyword arguments.
Returns:
A pair containing:
- Output: A `2-D` tensor with shape `[batch_size, self.output_size]`.
- New state: Either a single `2-D` tensor, or a tuple of tensors matching
the arity and shapes of `state`.
"""
# Bypass RNNCell's variable capturing semantics for LayerRNNCell.
# Instead, it is up to subclasses to provide a proper build
# method. See the class docstring for more details.
return base_layer.Layer.__call__(self, inputs, state, scope=scope,
*args, **kwargs)
@tf_export(v1=["nn.rnn_cell.BasicRNNCell"])
class BasicRNNCell(LayerRNNCell):
"""The most basic RNN cell.
Note that this cell is not optimized for performance. Please use
`tf.contrib.cudnn_rnn.CudnnRNNTanh` for better performance on GPU.
Args:
num_units: int, The number of units in the RNN cell.
activation: Nonlinearity to use. Default: `tanh`. It could also be string
that is within Keras activation function names.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases.
dtype: Default dtype of the layer (default of `None` means use the type
of the first input). Required when `build` is called before `call`.
**kwargs: Dict, keyword named properties for common layer attributes, like
`trainable` etc when constructing the cell from configs of get_config().
"""
@deprecated(None, "This class is equivalent to tf.keras.layers.SimpleRNNCell,"
" and will be replaced by that in TensorFlow 2.0.")
def __init__(self,
num_units,
activation=None,
reuse=None,
name=None,
dtype=None,
**kwargs):
super(BasicRNNCell, self).__init__(
_reuse=reuse, name=name, dtype=dtype, **kwargs)
if context.executing_eagerly() and context.num_gpus() > 0:
logging.warn("%s: Note that this cell is not optimized for performance. "
"Please use tf.contrib.cudnn_rnn.CudnnRNNTanh for better "
"performance on GPU.", self)
# Inputs must be 2-dimensional.
self.input_spec = input_spec.InputSpec(ndim=2)
self._num_units = num_units
if activation:
self._activation = activations.get(activation)
else:
self._activation = math_ops.tanh
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@tf_utils.shape_type_conversion
def build(self, inputs_shape):
if inputs_shape[-1] is None:
raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
% str(inputs_shape))
input_depth = inputs_shape[-1]
self._kernel = self.add_variable(
_WEIGHTS_VARIABLE_NAME,
shape=[input_depth + self._num_units, self._num_units])
self._bias = self.add_variable(
_BIAS_VARIABLE_NAME,
shape=[self._num_units],
initializer=init_ops.zeros_initializer(dtype=self.dtype))
self.built = True
def call(self, inputs, state):
"""Most basic RNN: output = new_state = act(W * input + U * state + B)."""
gate_inputs = math_ops.matmul(
array_ops.concat([inputs, state], 1), self._kernel)
gate_inputs = nn_ops.bias_add(gate_inputs, self._bias)
output = self._activation(gate_inputs)
return output, output
def get_config(self):
config = {
"num_units": self._num_units,
"activation": activations.serialize(self._activation),
"reuse": self._reuse,
}
base_config = super(BasicRNNCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@tf_export(v1=["nn.rnn_cell.GRUCell"])
class GRUCell(LayerRNNCell):
"""Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078).
Note that this cell is not optimized for performance. Please use
`tf.contrib.cudnn_rnn.CudnnGRU` for better performance on GPU, or
`tf.contrib.rnn.GRUBlockCellV2` for better performance on CPU.
Args:
num_units: int, The number of units in the GRU cell.
activation: Nonlinearity to use. Default: `tanh`.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
kernel_initializer: (optional) The initializer to use for the weight and
projection matrices.
bias_initializer: (optional) The initializer to use for the bias.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases.
dtype: Default dtype of the layer (default of `None` means use the type
of the first input). Required when `build` is called before `call`.
**kwargs: Dict, keyword named properties for common layer attributes, like
`trainable` etc when constructing the cell from configs of get_config().
"""
@deprecated(None, "This class is equivalent as tf.keras.layers.GRUCell,"
" and will be replaced by that in Tensorflow 2.0.")
def __init__(self,
num_units,
activation=None,
reuse=None,
kernel_initializer=None,
bias_initializer=None,
name=None,
dtype=None,
**kwargs):
super(GRUCell, self).__init__(
_reuse=reuse, name=name, dtype=dtype, **kwargs)
if context.executing_eagerly() and context.num_gpus() > 0:
logging.warn("%s: Note that this cell is not optimized for performance. "
"Please use tf.contrib.cudnn_rnn.CudnnGRU for better "
"performance on GPU.", self)
# Inputs must be 2-dimensional.
self.input_spec = input_spec.InputSpec(ndim=2)
self._num_units = num_units
if activation:
self._activation = activations.get(activation)
else:
self._activation = math_ops.tanh
self._kernel_initializer = initializers.get(kernel_initializer)
self._bias_initializer = initializers.get(bias_initializer)
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
@tf_utils.shape_type_conversion
def build(self, inputs_shape):
if inputs_shape[-1] is None:
raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
% str(inputs_shape))
input_depth = inputs_shape[-1]
self._gate_kernel = self.add_variable(
"gates/%s" % _WEIGHTS_VARIABLE_NAME,
shape=[input_depth + self._num_units, 2 * self._num_units],
initializer=self._kernel_initializer)
self._gate_bias = self.add_variable(
"gates/%s" % _BIAS_VARIABLE_NAME,
shape=[2 * self._num_units],
initializer=(
self._bias_initializer
if self._bias_initializer is not None
else init_ops.constant_initializer(1.0, dtype=self.dtype)))
self._candidate_kernel = self.add_variable(
"candidate/%s" % _WEIGHTS_VARIABLE_NAME,
shape=[input_depth + self._num_units, self._num_units],
initializer=self._kernel_initializer)
self._candidate_bias = self.add_variable(
"candidate/%s" % _BIAS_VARIABLE_NAME,
shape=[self._num_units],
initializer=(
self._bias_initializer
if self._bias_initializer is not None
else init_ops.zeros_initializer(dtype=self.dtype)))
self.built = True
def call(self, inputs, state):
"""Gated recurrent unit (GRU) with nunits cells."""
gate_inputs = math_ops.matmul(
array_ops.concat([inputs, state], 1), self._gate_kernel)
gate_inputs = nn_ops.bias_add(gate_inputs, self._gate_bias)
value = math_ops.sigmoid(gate_inputs)
r, u = array_ops.split(value=value, num_or_size_splits=2, axis=1)
r_state = r * state
candidate = math_ops.matmul(
array_ops.concat([inputs, r_state], 1), self._candidate_kernel)
candidate = nn_ops.bias_add(candidate, self._candidate_bias)
c = self._activation(candidate)
new_h = u * state + (1 - u) * c
return new_h, new_h
def get_config(self):
config = {
"num_units": self._num_units,
"kernel_initializer": initializers.serialize(self._kernel_initializer),
"bias_initializer": initializers.serialize(self._bias_initializer),
"activation": activations.serialize(self._activation),
"reuse": self._reuse,
}
base_config = super(GRUCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
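# Illustrative sketch (not part of the original module): a NumPy transcription
# of GRUCell.call above, handy for unit-testing the reset/update-gate math.
# Kernel layouts follow build(): gate_kernel maps concat([x, h]) to
# 2 * num_units, and cand_kernel maps concat([x, r * h]) to num_units.
def _numpy_gru_step(x, h, gate_kernel, gate_bias, cand_kernel, cand_bias):
  import numpy as np
  sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
  value = sigmoid(np.concatenate([x, h], axis=1).dot(gate_kernel) + gate_bias)
  r, u = np.split(value, 2, axis=1)  # reset and update gates
  c = np.tanh(np.concatenate([x, r * h], axis=1).dot(cand_kernel) + cand_bias)
  return u * h + (1 - u) * c  # new hidden state (also the cell output)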
_LSTMStateTuple = collections.namedtuple("LSTMStateTuple", ("c", "h"))
@tf_export("nn.rnn_cell.LSTMStateTuple")
class LSTMStateTuple(_LSTMStateTuple):
"""Tuple used by LSTM Cells for `state_size`, `zero_state`, and output state.
  Stores two elements: `(c, h)`, in that order, where `c` is the cell
  (memory) state and `h` is the output (hidden) state.
Only used when `state_is_tuple=True`.
"""
__slots__ = ()
@property
def dtype(self):
(c, h) = self
if c.dtype != h.dtype:
raise TypeError("Inconsistent internal state: %s vs %s" %
(str(c.dtype), str(h.dtype)))
return c.dtype
@tf_export(v1=["nn.rnn_cell.BasicLSTMCell"])
class BasicLSTMCell(LayerRNNCell):
"""DEPRECATED: Please use `tf.nn.rnn_cell.LSTMCell` instead.
Basic LSTM recurrent network cell.
The implementation is based on: http://arxiv.org/abs/1409.2329.
  We add forget_bias (default: 1) to the biases of the forget gate in order
  to reduce the scale of forgetting at the beginning of training.
It does not allow cell clipping, a projection layer, and does not
use peep-hole connections: it is the basic baseline.
For advanced models, please use the full `tf.nn.rnn_cell.LSTMCell`
that follows.
Note that this cell is not optimized for performance. Please use
`tf.contrib.cudnn_rnn.CudnnLSTM` for better performance on GPU, or
`tf.contrib.rnn.LSTMBlockCell` and `tf.contrib.rnn.LSTMBlockFusedCell` for
better performance on CPU.
"""
@deprecated(None, "This class is equivalent as tf.keras.layers.LSTMCell,"
" and will be replaced by that in Tensorflow 2.0.")
def __init__(self,
num_units,
forget_bias=1.0,
state_is_tuple=True,
activation=None,
reuse=None,
name=None,
dtype=None,
**kwargs):
"""Initialize the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
Must set to `0.0` manually when restoring from CudnnLSTM-trained
checkpoints.
state_is_tuple: If True, accepted and returned states are 2-tuples of
the `c_state` and `m_state`. If False, they are concatenated
along the column axis. The latter behavior will soon be deprecated.
      activation: Activation function of the inner states. Default: `tanh`.
        It can also be a string matching a Keras activation function name.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases.
dtype: Default dtype of the layer (default of `None` means use the type
of the first input). Required when `build` is called before `call`.
**kwargs: Dict, keyword named properties for common layer attributes, like
`trainable` etc when constructing the cell from configs of get_config().
When restoring from CudnnLSTM-trained checkpoints, must use
`CudnnCompatibleLSTMCell` instead.
"""
super(BasicLSTMCell, self).__init__(
_reuse=reuse, name=name, dtype=dtype, **kwargs)
if not state_is_tuple:
logging.warn("%s: Using a concatenated state is slower and will soon be "
"deprecated. Use state_is_tuple=True.", self)
if context.executing_eagerly() and context.num_gpus() > 0:
logging.warn("%s: Note that this cell is not optimized for performance. "
"Please use tf.contrib.cudnn_rnn.CudnnLSTM for better "
"performance on GPU.", self)
# Inputs must be 2-dimensional.
self.input_spec = input_spec.InputSpec(ndim=2)
self._num_units = num_units
self._forget_bias = forget_bias
self._state_is_tuple = state_is_tuple
if activation:
self._activation = activations.get(activation)
else:
self._activation = math_ops.tanh
@property
def state_size(self):
return (LSTMStateTuple(self._num_units, self._num_units)
if self._state_is_tuple else 2 * self._num_units)
@property
def output_size(self):
return self._num_units
@tf_utils.shape_type_conversion
def build(self, inputs_shape):
if inputs_shape[-1] is None:
raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
% str(inputs_shape))
input_depth = inputs_shape[-1]
h_depth = self._num_units
self._kernel = self.add_variable(
_WEIGHTS_VARIABLE_NAME,
shape=[input_depth + h_depth, 4 * self._num_units])
self._bias = self.add_variable(
_BIAS_VARIABLE_NAME,
shape=[4 * self._num_units],
initializer=init_ops.zeros_initializer(dtype=self.dtype))
self.built = True
def call(self, inputs, state):
"""Long short-term memory cell (LSTM).
Args:
inputs: `2-D` tensor with shape `[batch_size, input_size]`.
state: An `LSTMStateTuple` of state tensors, each shaped
`[batch_size, num_units]`, if `state_is_tuple` has been set to
`True`. Otherwise, a `Tensor` shaped
`[batch_size, 2 * num_units]`.
Returns:
A pair containing the new hidden state, and the new state (either a
`LSTMStateTuple` or a concatenated state, depending on
`state_is_tuple`).
"""
sigmoid = math_ops.sigmoid
one = constant_op.constant(1, dtype=dtypes.int32)
# Parameters of gates are concatenated into one multiply for efficiency.
if self._state_is_tuple:
c, h = state
else:
c, h = array_ops.split(value=state, num_or_size_splits=2, axis=one)
gate_inputs = math_ops.matmul(
array_ops.concat([inputs, h], 1), self._kernel)
gate_inputs = nn_ops.bias_add(gate_inputs, self._bias)
# i = input_gate, j = new_input, f = forget_gate, o = output_gate
i, j, f, o = array_ops.split(
value=gate_inputs, num_or_size_splits=4, axis=one)
forget_bias_tensor = constant_op.constant(self._forget_bias, dtype=f.dtype)
# Note that using `add` and `multiply` instead of `+` and `*` gives a
# performance improvement. So using those at the cost of readability.
add = math_ops.add
multiply = math_ops.multiply
new_c = add(multiply(c, sigmoid(add(f, forget_bias_tensor))),
multiply(sigmoid(i), self._activation(j)))
new_h = multiply(self._activation(new_c), sigmoid(o))
if self._state_is_tuple:
new_state = LSTMStateTuple(new_c, new_h)
else:
new_state = array_ops.concat([new_c, new_h], 1)
return new_h, new_state
def get_config(self):
config = {
"num_units": self._num_units,
"forget_bias": self._forget_bias,
"state_is_tuple": self._state_is_tuple,
"activation": activations.serialize(self._activation),
"reuse": self._reuse,
}
base_config = super(BasicLSTMCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
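# Illustrative sketch (not part of the original module): the gate algebra of
# BasicLSTMCell.call above in NumPy form. The i/j/f/o split and the
# forget-bias handling mirror the TensorFlow code path.
def _numpy_basic_lstm_step(x, c, h, kernel, bias, forget_bias=1.0):
  import numpy as np
  sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
  gates = np.concatenate([x, h], axis=1).dot(kernel) + bias
  i, j, f, o = np.split(gates, 4, axis=1)
  new_c = c * sigmoid(f + forget_bias) + sigmoid(i) * np.tanh(j)
  new_h = np.tanh(new_c) * sigmoid(o)
  return new_c, new_h  # new cell state and new hidden state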
@tf_export(v1=["nn.rnn_cell.LSTMCell"])
class LSTMCell(LayerRNNCell):
"""Long short-term memory unit (LSTM) recurrent network cell.
The default non-peephole implementation is based on:
https://pdfs.semanticscholar.org/1154/0131eae85b2e11d53df7f1360eeb6476e7f4.pdf
Felix Gers, Jurgen Schmidhuber, and Fred Cummins.
"Learning to forget: Continual prediction with LSTM." IET, 850-855, 1999.
The peephole implementation is based on:
https://research.google.com/pubs/archive/43905.pdf
Hasim Sak, Andrew Senior, and Francoise Beaufays.
"Long short-term memory recurrent neural network architectures for
large scale acoustic modeling." INTERSPEECH, 2014.
The class uses optional peep-hole connections, optional cell clipping, and
an optional projection layer.
Note that this cell is not optimized for performance. Please use
`tf.contrib.cudnn_rnn.CudnnLSTM` for better performance on GPU, or
`tf.contrib.rnn.LSTMBlockCell` and `tf.contrib.rnn.LSTMBlockFusedCell` for
better performance on CPU.
"""
@deprecated(None, "This class is equivalent as tf.keras.layers.LSTMCell,"
" and will be replaced by that in Tensorflow 2.0.")
def __init__(self, num_units,
use_peepholes=False, cell_clip=None,
initializer=None, num_proj=None, proj_clip=None,
num_unit_shards=None, num_proj_shards=None,
forget_bias=1.0, state_is_tuple=True,
activation=None, reuse=None, name=None, dtype=None, **kwargs):
"""Initialize the parameters for an LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
use_peepholes: bool, set True to enable diagonal/peephole connections.
cell_clip: (optional) A float value, if provided the cell state is clipped
by this value prior to the cell output activation.
initializer: (optional) The initializer to use for the weight and
projection matrices.
num_proj: (optional) int, The output dimensionality for the projection
matrices. If None, no projection is performed.
proj_clip: (optional) A float value. If `num_proj > 0` and `proj_clip` is
provided, then the projected values are clipped elementwise to within
`[-proj_clip, proj_clip]`.
num_unit_shards: Deprecated, will be removed by Jan. 2017.
Use a variable_scope partitioner instead.
num_proj_shards: Deprecated, will be removed by Jan. 2017.
Use a variable_scope partitioner instead.
forget_bias: Biases of the forget gate are initialized by default to 1
in order to reduce the scale of forgetting at the beginning of
the training. Must set it manually to `0.0` when restoring from
CudnnLSTM trained checkpoints.
state_is_tuple: If True, accepted and returned states are 2-tuples of
the `c_state` and `m_state`. If False, they are concatenated
along the column axis. This latter behavior will soon be deprecated.
      activation: Activation function of the inner states. Default: `tanh`.
        It can also be a string matching a Keras activation function name.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such
cases.
dtype: Default dtype of the layer (default of `None` means use the type
of the first input). Required when `build` is called before `call`.
**kwargs: Dict, keyword named properties for common layer attributes, like
`trainable` etc when constructing the cell from configs of get_config().
When restoring from CudnnLSTM-trained checkpoints, use
`CudnnCompatibleLSTMCell` instead.
"""
super(LSTMCell, self).__init__(
_reuse=reuse, name=name, dtype=dtype, **kwargs)
if not state_is_tuple:
logging.warn("%s: Using a concatenated state is slower and will soon be "
"deprecated. Use state_is_tuple=True.", self)
if num_unit_shards is not None or num_proj_shards is not None:
logging.warn(
"%s: The num_unit_shards and proj_unit_shards parameters are "
"deprecated and will be removed in Jan 2017. "
"Use a variable scope with a partitioner instead.", self)
if context.executing_eagerly() and context.num_gpus() > 0:
logging.warn("%s: Note that this cell is not optimized for performance. "
"Please use tf.contrib.cudnn_rnn.CudnnLSTM for better "
"performance on GPU.", self)
# Inputs must be 2-dimensional.
self.input_spec = input_spec.InputSpec(ndim=2)
self._num_units = num_units
self._use_peepholes = use_peepholes
self._cell_clip = cell_clip
self._initializer = initializers.get(initializer)
self._num_proj = num_proj
self._proj_clip = proj_clip
self._num_unit_shards = num_unit_shards
self._num_proj_shards = num_proj_shards
self._forget_bias = forget_bias
self._state_is_tuple = state_is_tuple
if activation:
self._activation = activations.get(activation)
else:
self._activation = math_ops.tanh
if num_proj:
self._state_size = (
LSTMStateTuple(num_units, num_proj)
if state_is_tuple else num_units + num_proj)
self._output_size = num_proj
else:
self._state_size = (
LSTMStateTuple(num_units, num_units)
if state_is_tuple else 2 * num_units)
self._output_size = num_units
@property
def state_size(self):
return self._state_size
@property
def output_size(self):
return self._output_size
@tf_utils.shape_type_conversion
def build(self, inputs_shape):
if inputs_shape[-1] is None:
raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
% str(inputs_shape))
input_depth = inputs_shape[-1]
h_depth = self._num_units if self._num_proj is None else self._num_proj
maybe_partitioner = (
partitioned_variables.fixed_size_partitioner(self._num_unit_shards)
if self._num_unit_shards is not None
else None)
self._kernel = self.add_variable(
_WEIGHTS_VARIABLE_NAME,
shape=[input_depth + h_depth, 4 * self._num_units],
initializer=self._initializer,
partitioner=maybe_partitioner)
if self.dtype is None:
initializer = init_ops.zeros_initializer
else:
initializer = init_ops.zeros_initializer(dtype=self.dtype)
self._bias = self.add_variable(
_BIAS_VARIABLE_NAME,
shape=[4 * self._num_units],
initializer=initializer)
if self._use_peepholes:
self._w_f_diag = self.add_variable("w_f_diag", shape=[self._num_units],
initializer=self._initializer)
self._w_i_diag = self.add_variable("w_i_diag", shape=[self._num_units],
initializer=self._initializer)
self._w_o_diag = self.add_variable("w_o_diag", shape=[self._num_units],
initializer=self._initializer)
if self._num_proj is not None:
maybe_proj_partitioner = (
partitioned_variables.fixed_size_partitioner(self._num_proj_shards)
if self._num_proj_shards is not None
else None)
self._proj_kernel = self.add_variable(
"projection/%s" % _WEIGHTS_VARIABLE_NAME,
shape=[self._num_units, self._num_proj],
initializer=self._initializer,
partitioner=maybe_proj_partitioner)
self.built = True
def call(self, inputs, state):
"""Run one step of LSTM.
Args:
inputs: input Tensor, must be 2-D, `[batch, input_size]`.
state: if `state_is_tuple` is False, this must be a state Tensor,
`2-D, [batch, state_size]`. If `state_is_tuple` is True, this must be a
tuple of state Tensors, both `2-D`, with column sizes `c_state` and
`m_state`.
Returns:
A tuple containing:
- A `2-D, [batch, output_dim]`, Tensor representing the output of the
LSTM after reading `inputs` when previous state was `state`.
Here output_dim is:
num_proj if num_proj was set,
num_units otherwise.
- Tensor(s) representing the new state of LSTM after reading `inputs` when
the previous state was `state`. Same type and shape(s) as `state`.
Raises:
ValueError: If input size cannot be inferred from inputs via
static shape inference.
"""
num_proj = self._num_units if self._num_proj is None else self._num_proj
sigmoid = math_ops.sigmoid
if self._state_is_tuple:
(c_prev, m_prev) = state
else:
c_prev = array_ops.slice(state, [0, 0], [-1, self._num_units])
m_prev = array_ops.slice(state, [0, self._num_units], [-1, num_proj])
input_size = inputs.get_shape().with_rank(2).dims[1].value
if input_size is None:
raise ValueError("Could not infer input size from inputs.get_shape()[-1]")
# i = input_gate, j = new_input, f = forget_gate, o = output_gate
lstm_matrix = math_ops.matmul(
array_ops.concat([inputs, m_prev], 1), self._kernel)
lstm_matrix = nn_ops.bias_add(lstm_matrix, self._bias)
i, j, f, o = array_ops.split(
value=lstm_matrix, num_or_size_splits=4, axis=1)
# Diagonal connections
if self._use_peepholes:
c = (sigmoid(f + self._forget_bias + self._w_f_diag * c_prev) * c_prev +
sigmoid(i + self._w_i_diag * c_prev) * self._activation(j))
else:
c = (sigmoid(f + self._forget_bias) * c_prev + sigmoid(i) *
self._activation(j))
if self._cell_clip is not None:
# pylint: disable=invalid-unary-operand-type
c = clip_ops.clip_by_value(c, -self._cell_clip, self._cell_clip)
# pylint: enable=invalid-unary-operand-type
if self._use_peepholes:
m = sigmoid(o + self._w_o_diag * c) * self._activation(c)
else:
m = sigmoid(o) * self._activation(c)
if self._num_proj is not None:
m = math_ops.matmul(m, self._proj_kernel)
if self._proj_clip is not None:
# pylint: disable=invalid-unary-operand-type
m = clip_ops.clip_by_value(m, -self._proj_clip, self._proj_clip)
# pylint: enable=invalid-unary-operand-type
new_state = (LSTMStateTuple(c, m) if self._state_is_tuple else
array_ops.concat([c, m], 1))
return m, new_state
def get_config(self):
config = {
"num_units": self._num_units,
"use_peepholes": self._use_peepholes,
"cell_clip": self._cell_clip,
"initializer": initializers.serialize(self._initializer),
"num_proj": self._num_proj,
"proj_clip": self._proj_clip,
"num_unit_shards": self._num_unit_shards,
"num_proj_shards": self._num_proj_shards,
"forget_bias": self._forget_bias,
"state_is_tuple": self._state_is_tuple,
"activation": activations.serialize(self._activation),
"reuse": self._reuse,
}
base_config = super(LSTMCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
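# Illustrative sketch (not part of the original module): the peephole path in
# LSTMCell.call lets each gate also see the cell state through a learned
# per-unit ("diagonal") weight vector. Assumes w_i/w_f/w_o are 1-D vectors of
# length num_units, matching w_i_diag/w_f_diag/w_o_diag in build().
def _numpy_peephole_gates(i, f, o, c_prev, c_new, w_i, w_f, w_o, forget_bias):
  import numpy as np
  sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
  f_gate = sigmoid(f + forget_bias + w_f * c_prev)  # forget gate peeks at c_prev
  i_gate = sigmoid(i + w_i * c_prev)  # input gate peeks at c_prev
  o_gate = sigmoid(o + w_o * c_new)  # output gate peeks at the new cell state
  return i_gate, f_gate, o_gate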
def _enumerated_map_structure_up_to(shallow_structure, map_fn, *args, **kwargs):
ix = [0]
def enumerated_fn(*inner_args, **inner_kwargs):
r = map_fn(ix[0], *inner_args, **inner_kwargs)
ix[0] += 1
return r
return nest.map_structure_up_to(shallow_structure,
enumerated_fn, *args, **kwargs)
def _default_dropout_state_filter_visitor(substate):
if isinstance(substate, LSTMStateTuple):
# Do not perform dropout on the memory state.
return LSTMStateTuple(c=False, h=True)
elif isinstance(substate, tensor_array_ops.TensorArray):
return False
return True
@tf_export("nn.rnn_cell.DropoutWrapper")
class DropoutWrapper(RNNCell):
"""Operator adding dropout to inputs and outputs of the given cell."""
def __init__(self, cell, input_keep_prob=1.0, output_keep_prob=1.0,
state_keep_prob=1.0, variational_recurrent=False,
input_size=None, dtype=None, seed=None,
dropout_state_filter_visitor=None):
"""Create a cell with added input, state, and/or output dropout.
If `variational_recurrent` is set to `True` (**NOT** the default behavior),
then the same dropout mask is applied at every step, as described in:
Y. Gal, Z Ghahramani. "A Theoretically Grounded Application of Dropout in
Recurrent Neural Networks". https://arxiv.org/abs/1512.05287
Otherwise a different dropout mask is applied at every time step.
Note, by default (unless a custom `dropout_state_filter` is provided),
the memory state (`c` component of any `LSTMStateTuple`) passing through
a `DropoutWrapper` is never modified. This behavior is described in the
above article.
Args:
      cell: an RNNCell, the cell to wrap and apply dropout to.
input_keep_prob: unit Tensor or float between 0 and 1, input keep
probability; if it is constant and 1, no input dropout will be added.
output_keep_prob: unit Tensor or float between 0 and 1, output keep
probability; if it is constant and 1, no output dropout will be added.
      state_keep_prob: unit Tensor or float between 0 and 1, state keep
        probability; if it is constant and 1, no state dropout will be added.
State dropout is performed on the outgoing states of the cell.
**Note** the state components to which dropout is applied when
`state_keep_prob` is in `(0, 1)` are also determined by
the argument `dropout_state_filter_visitor` (e.g. by default dropout
is never applied to the `c` component of an `LSTMStateTuple`).
variational_recurrent: Python bool. If `True`, then the same
dropout pattern is applied across all time steps per run call.
If this parameter is set, `input_size` **must** be provided.
input_size: (optional) (possibly nested tuple of) `TensorShape` objects
containing the depth(s) of the input tensors expected to be passed in to
the `DropoutWrapper`. Required and used **iff**
`variational_recurrent = True` and `input_keep_prob < 1`.
dtype: (optional) The `dtype` of the input, state, and output tensors.
Required and used **iff** `variational_recurrent = True`.
seed: (optional) integer, the randomness seed.
dropout_state_filter_visitor: (optional), default: (see below). Function
that takes any hierarchical level of the state and returns
a scalar or depth=1 structure of Python booleans describing
which terms in the state should be dropped out. In addition, if the
function returns `True`, dropout is applied across this sublevel. If
the function returns `False`, dropout is not applied across this entire
sublevel.
Default behavior: perform dropout on all terms except the memory (`c`)
        state of `LSTMStateTuple` objects, and don't try to apply dropout to
`TensorArray` objects:
```
def dropout_state_filter_visitor(s):
          if isinstance(s, LSTMStateTuple):
            # Never perform dropout on the c state.
            return LSTMStateTuple(c=False, h=True)
elif isinstance(s, TensorArray):
return False
return True
```
Raises:
      TypeError: if `cell` is not an `RNNCell`, or
        `dropout_state_filter_visitor` is provided but not `callable`.
ValueError: if any of the keep_probs are not between 0 and 1.
"""
super(DropoutWrapper, self).__init__()
assert_like_rnncell("cell", cell)
if (dropout_state_filter_visitor is not None
and not callable(dropout_state_filter_visitor)):
raise TypeError("dropout_state_filter_visitor must be callable")
self._dropout_state_filter = (
dropout_state_filter_visitor or _default_dropout_state_filter_visitor)
with ops.name_scope("DropoutWrapperInit"):
def tensor_and_const_value(v):
tensor_value = ops.convert_to_tensor(v)
const_value = tensor_util.constant_value(tensor_value)
return (tensor_value, const_value)
for prob, attr in [(input_keep_prob, "input_keep_prob"),
(state_keep_prob, "state_keep_prob"),
(output_keep_prob, "output_keep_prob")]:
tensor_prob, const_prob = tensor_and_const_value(prob)
if const_prob is not None:
if const_prob < 0 or const_prob > 1:
raise ValueError("Parameter %s must be between 0 and 1: %d"
% (attr, const_prob))
setattr(self, "_%s" % attr, float(const_prob))
else:
setattr(self, "_%s" % attr, tensor_prob)
# Set cell, variational_recurrent, seed before running the code below
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
self._track_checkpointable(self._cell, name="cell")
self._variational_recurrent = variational_recurrent
self._seed = seed
self._recurrent_input_noise = None
self._recurrent_state_noise = None
self._recurrent_output_noise = None
if variational_recurrent:
if dtype is None:
raise ValueError(
"When variational_recurrent=True, dtype must be provided")
def convert_to_batch_shape(s):
# Prepend a 1 for the batch dimension; for recurrent
# variational dropout we use the same dropout mask for all
# batch elements.
return array_ops.concat(
([1], tensor_shape.TensorShape(s).as_list()), 0)
def batch_noise(s, inner_seed):
shape = convert_to_batch_shape(s)
return random_ops.random_uniform(shape, seed=inner_seed, dtype=dtype)
if (not isinstance(self._input_keep_prob, numbers.Real) or
self._input_keep_prob < 1.0):
if input_size is None:
raise ValueError(
"When variational_recurrent=True and input_keep_prob < 1.0 or "
"is unknown, input_size must be provided")
self._recurrent_input_noise = _enumerated_map_structure_up_to(
input_size,
lambda i, s: batch_noise(s, inner_seed=self._gen_seed("input", i)),
input_size)
self._recurrent_state_noise = _enumerated_map_structure_up_to(
cell.state_size,
lambda i, s: batch_noise(s, inner_seed=self._gen_seed("state", i)),
cell.state_size)
self._recurrent_output_noise = _enumerated_map_structure_up_to(
cell.output_size,
lambda i, s: batch_noise(s, inner_seed=self._gen_seed("output", i)),
cell.output_size)
def _gen_seed(self, salt_prefix, index):
if self._seed is None:
return None
salt = "%s_%d" % (salt_prefix, index)
string = (str(self._seed) + salt).encode("utf-8")
return int(hashlib.md5(string).hexdigest()[:8], 16) & 0x7FFFFFFF
@property
def wrapped_cell(self):
return self._cell
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
return self._cell.zero_state(batch_size, dtype)
def _variational_recurrent_dropout_value(
self, index, value, noise, keep_prob):
"""Performs dropout given the pre-calculated noise tensor."""
# uniform [keep_prob, 1.0 + keep_prob)
random_tensor = keep_prob + noise
# 0. if [keep_prob, 1.0) and 1. if [1.0, 1.0 + keep_prob)
binary_tensor = math_ops.floor(random_tensor)
ret = math_ops.div(value, keep_prob) * binary_tensor
ret.set_shape(value.get_shape())
return ret
def _dropout(self, values, salt_prefix, recurrent_noise, keep_prob,
shallow_filtered_substructure=None):
"""Decides whether to perform standard dropout or recurrent dropout."""
if shallow_filtered_substructure is None:
# Put something so we traverse the entire structure; inside the
      # dropout function we check to see if the leaves of this are bool or not.
shallow_filtered_substructure = values
if not self._variational_recurrent:
def dropout(i, do_dropout, v):
if not isinstance(do_dropout, bool) or do_dropout:
return nn_ops.dropout(
v, keep_prob=keep_prob, seed=self._gen_seed(salt_prefix, i))
else:
return v
return _enumerated_map_structure_up_to(
shallow_filtered_substructure, dropout,
*[shallow_filtered_substructure, values])
else:
def dropout(i, do_dropout, v, n):
if not isinstance(do_dropout, bool) or do_dropout:
return self._variational_recurrent_dropout_value(i, v, n, keep_prob)
else:
return v
return _enumerated_map_structure_up_to(
shallow_filtered_substructure, dropout,
*[shallow_filtered_substructure, values, recurrent_noise])
def __call__(self, inputs, state, scope=None):
"""Run the cell with the declared dropouts."""
def _should_dropout(p):
return (not isinstance(p, float)) or p < 1
if _should_dropout(self._input_keep_prob):
inputs = self._dropout(inputs, "input",
self._recurrent_input_noise,
self._input_keep_prob)
output, new_state = self._cell(inputs, state, scope=scope)
if _should_dropout(self._state_keep_prob):
# Identify which subsets of the state to perform dropout on and
# which ones to keep.
shallow_filtered_substructure = nest.get_traverse_shallow_structure(
self._dropout_state_filter, new_state)
new_state = self._dropout(new_state, "state",
self._recurrent_state_noise,
self._state_keep_prob,
shallow_filtered_substructure)
if _should_dropout(self._output_keep_prob):
output = self._dropout(output, "output",
self._recurrent_output_noise,
self._output_keep_prob)
return output, new_state
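# Illustrative sketch (not part of the original module): the variational
# dropout value above is standard "inverted" dropout driven by a precomputed
# uniform noise tensor. floor(keep_prob + U[0, 1)) gives a 0/1 mask that is 1
# with probability keep_prob; dividing by keep_prob preserves the expectation.
def _numpy_variational_dropout(value, noise, keep_prob):
  import numpy as np
  binary_mask = np.floor(keep_prob + noise)  # same mask reused across steps
  return value / keep_prob * binary_mask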
@tf_export("nn.rnn_cell.ResidualWrapper")
class ResidualWrapper(RNNCell):
"""RNNCell wrapper that ensures cell inputs are added to the outputs."""
def __init__(self, cell, residual_fn=None):
"""Constructs a `ResidualWrapper` for `cell`.
Args:
cell: An instance of `RNNCell`.
residual_fn: (Optional) The function to map raw cell inputs and raw cell
outputs to the actual cell outputs of the residual network.
        Defaults to calling `nest.map_structure(lambda i, o: i + o, inputs,
        outputs)`.
"""
super(ResidualWrapper, self).__init__()
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
self._track_checkpointable(self._cell, name="cell")
self._residual_fn = residual_fn
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
return self._cell.zero_state(batch_size, dtype)
def __call__(self, inputs, state, scope=None):
"""Run the cell and then apply the residual_fn on its inputs to its outputs.
Args:
inputs: cell inputs.
state: cell state.
scope: optional cell scope.
Returns:
Tuple of cell outputs and new state.
Raises:
TypeError: If cell inputs and outputs have different structure (type).
ValueError: If cell inputs and outputs have different structure (value).
"""
outputs, new_state = self._cell(inputs, state, scope=scope)
# Ensure shapes match
def assert_shape_match(inp, out):
inp.get_shape().assert_is_compatible_with(out.get_shape())
def default_residual_fn(inputs, outputs):
nest.assert_same_structure(inputs, outputs)
nest.map_structure(assert_shape_match, inputs, outputs)
return nest.map_structure(lambda inp, out: inp + out, inputs, outputs)
res_outputs = (self._residual_fn or default_residual_fn)(inputs, outputs)
return (res_outputs, new_state)
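# Illustrative sketch (not part of the original module): a custom residual_fn
# only needs to accept (inputs, outputs) and return the combined outputs. The
# helper below is hypothetical -- it averages instead of summing.
def _example_scaled_residual_fn(inputs, outputs):
  """Hypothetical residual_fn: average inputs and outputs instead of adding."""
  return nest.map_structure(lambda i, o: 0.5 * (i + o), inputs, outputs)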
@tf_export("nn.rnn_cell.DeviceWrapper")
class DeviceWrapper(RNNCell):
"""Operator that ensures an RNNCell runs on a particular device."""
def __init__(self, cell, device):
"""Construct a `DeviceWrapper` for `cell` with device `device`.
Ensures the wrapped `cell` is called with `tf.device(device)`.
Args:
cell: An instance of `RNNCell`.
device: A device string or function, for passing to `tf.device`.
"""
super(DeviceWrapper, self).__init__()
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
self._track_checkpointable(self._cell, name="cell")
self._device = device
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
with ops.device(self._device):
return self._cell.zero_state(batch_size, dtype)
def __call__(self, inputs, state, scope=None):
"""Run the cell on specified device."""
with ops.device(self._device):
return self._cell(inputs, state, scope=scope)
@tf_export(v1=["nn.rnn_cell.MultiRNNCell"])
class MultiRNNCell(RNNCell):
"""RNN cell composed sequentially of multiple simple cells.
Example:
```python
num_units = [128, 64]
cells = [BasicLSTMCell(num_units=n) for n in num_units]
stacked_rnn_cell = MultiRNNCell(cells)
```
"""
@deprecated(None, "This class is equivalent as "
"tf.keras.layers.StackedRNNCells, and will be replaced by "
"that in Tensorflow 2.0.")
def __init__(self, cells, state_is_tuple=True):
"""Create a RNN cell composed sequentially of a number of RNNCells.
Args:
cells: list of RNNCells that will be composed in this order.
state_is_tuple: If True, accepted and returned states are n-tuples, where
`n = len(cells)`. If False, the states are all
concatenated along the column axis. This latter behavior will soon be
deprecated.
Raises:
ValueError: if cells is empty (not allowed), or at least one of the cells
returns a state tuple but the flag `state_is_tuple` is `False`.
"""
super(MultiRNNCell, self).__init__()
if not cells:
raise ValueError("Must specify at least one cell for MultiRNNCell.")
if not nest.is_sequence(cells):
raise TypeError(
"cells must be a list or tuple, but saw: %s." % cells)
if len(set([id(cell) for cell in cells])) < len(cells):
logging.log_first_n(logging.WARN,
"At least two cells provided to MultiRNNCell "
"are the same object and will share weights.", 1)
self._cells = cells
for cell_number, cell in enumerate(self._cells):
# Add Checkpointable dependencies on these cells so their variables get
# saved with this object when using object-based saving.
if isinstance(cell, checkpointable.CheckpointableBase):
# TODO(allenl): Track down non-Checkpointable callers.
self._track_checkpointable(cell, name="cell-%d" % (cell_number,))
self._state_is_tuple = state_is_tuple
if not state_is_tuple:
if any(nest.is_sequence(c.state_size) for c in self._cells):
raise ValueError("Some cells return tuples of states, but the flag "
"state_is_tuple is not set. State sizes are: %s"
% str([c.state_size for c in self._cells]))
@property
def state_size(self):
if self._state_is_tuple:
return tuple(cell.state_size for cell in self._cells)
else:
return sum(cell.state_size for cell in self._cells)
@property
def output_size(self):
return self._cells[-1].output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
if self._state_is_tuple:
return tuple(cell.zero_state(batch_size, dtype) for cell in self._cells)
else:
# We know here that state_size of each cell is not a tuple and
# presumably does not contain TensorArrays or anything else fancy
return super(MultiRNNCell, self).zero_state(batch_size, dtype)
@property
def trainable_weights(self):
if not self.trainable:
return []
weights = []
for cell in self._cells:
if isinstance(cell, base_layer.Layer):
weights += cell.trainable_weights
return weights
@property
def non_trainable_weights(self):
weights = []
for cell in self._cells:
if isinstance(cell, base_layer.Layer):
weights += cell.non_trainable_weights
if not self.trainable:
trainable_weights = []
for cell in self._cells:
if isinstance(cell, base_layer.Layer):
trainable_weights += cell.trainable_weights
return trainable_weights + weights
return weights
def call(self, inputs, state):
"""Run this multi-layer cell on inputs, starting from state."""
cur_state_pos = 0
cur_inp = inputs
new_states = []
for i, cell in enumerate(self._cells):
with vs.variable_scope("cell_%d" % i):
if self._state_is_tuple:
if not nest.is_sequence(state):
raise ValueError(
"Expected state to be a tuple of length %d, but received: %s" %
(len(self.state_size), state))
cur_state = state[i]
else:
cur_state = array_ops.slice(state, [0, cur_state_pos],
[-1, cell.state_size])
cur_state_pos += cell.state_size
cur_inp, new_state = cell(cur_inp, cur_state)
new_states.append(new_state)
new_states = (tuple(new_states) if self._state_is_tuple else
array_ops.concat(new_states, 1))
return cur_inp, new_states<|fim▁end|>
|
"""
def __init__(self, trainable=True, name=None, dtype=None, **kwargs):
|
<|file_name|>test_sup.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Encapsulate running the `hab-sup` executable for tests.
use std::collections::HashSet;
use std::env;
use std::net::TcpListener;
use std::path::{Path, PathBuf};
use std::process::{Child, Command, Stdio};
use std::string::ToString;
use std::sync::Mutex;
use std::thread;
use std::time::Duration;
use hcore::url::BLDR_URL_ENVVAR;
use rand;
use rand::distributions::{IndependentSample, Range};
use super::test_butterfly;
lazy_static! {
/// Keep track of all TCP ports currently being used by TestSup
/// instances. Allows us to run tests in parallel without fear of
/// port conflicts between them.
static ref CLAIMED_PORTS: Mutex<HashSet<u16>> = {
Mutex::new(HashSet::new())
};
}
pub struct TestSup {
pub hab_root: PathBuf,
pub origin_name: String,
pub package_name: String,
pub service_group: String,
pub http_port: u16,
pub butterfly_port: u16,
pub butterfly_client: test_butterfly::Client,
pub cmd: Command,
pub process: Option<Child>,
}
/// Return a free TCP port number. We test to see that the system has
/// not already bound the port, while also tracking which ports are
/// being used by other test supervisors that may be running alongside
/// this one.
///
/// Once you receive a port number from this function, you can be
/// reasonably sure that you're the only one that will be using
/// it. There could be a race condition if the machine the tests are
/// running on just happens to claim the same port number for
/// something between the time we check and the time the TestSup
/// claims it. If that happens to you, you should probably buy lottery
/// tickets, though.
///
/// This function will recursively call itself with a decremented
/// value for `tries` if it happens to pick a port that's already in
/// use. Once all tries are used up, it panics! Yay!
fn unclaimed_port(tries: u16) -> u16 {
if tries == 0 {
panic!("Couldn't find an unclaimed port for the test Supervisor!")
}
let p = random_port();
match TcpListener::bind(format!("127.0.0.1:{}", p)) {
Ok(_listener) => {
// The system hasn't bound it. Now we make sure none of
// our other tests have bound it.
let mut ports = CLAIMED_PORTS.lock().unwrap();
if ports.contains(&p) {
// Oops, another test is using it, try again
thread::sleep(Duration::from_millis(500));
unclaimed_port(tries - 1)
} else {
// Nobody was using it. Return the port; the TcpListener
// that is currently bound to the port will be dropped,
// thus freeing the port for our use.
ports.insert(p);
p
}
}
Err(_) => {
// port already in use, try again
unclaimed_port(tries - 1)
}
}
}
/// Return a random unprivileged, unregistered TCP port number.
fn random_port() -> u16 {
// IANA port registrations go to 49151
let between = Range::new(49152, ::std::u16::MAX);
let mut rng = rand::thread_rng();
between.ind_sample(&mut rng)
}
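// Illustrative sketch (not part of the original file): `unclaimed_port` hands
// out ports that are both free on the host and untaken by any other TestSup
// in this process, so two consecutive claims can never collide. The test name
// below is hypothetical.
#[test]
fn claimed_ports_are_unique() {
    let a = unclaimed_port(10);
    let b = unclaimed_port(10);
    assert_ne!(a, b); // both ports were recorded in CLAIMED_PORTS
}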
/// Find an executable relative to the current integration testing
/// executable.
///
/// Thus if the current executable is
///
/// /home/me/habitat/target/debug/deps/compilation-ccaf2f45c24e3840
///
/// and we look for `hab-sup`, we'll find it at
///
/// /home/me/habitat/target/debug/hab-sup
///
fn find_exe<B>(binary_name: B) -> PathBuf
where
B: AsRef<Path>,
{
let exe_root = env::current_exe()
.unwrap()
.parent() // deps
.unwrap()
.parent() // debug
.unwrap()
.to_path_buf();
let bin = exe_root.join(binary_name.as_ref());
assert!(
bin.exists(),
format!(
"Expected to find a {:?} executable at {:?}",
binary_name.as_ref(),
bin
)
);
bin
}
/// Return whether or not the tests are being run with the `--nocapture` flag, meaning we want to
/// see more output.
fn nocapture_set() -> bool {
if env::args().any(|arg| arg == "--nocapture") {
return true;
} else {
match env::var("RUST_TEST_NOCAPTURE") {
Ok(val) => &val != "0",
Err(_) => false,
}
}
}
impl TestSup {
/// Create a new `TestSup` that will listen on randomly-selected
/// ports for both gossip and HTTP requests so tests run in
/// parallel don't step on each other.
///
/// See also `new`.
pub fn new_with_random_ports<R, O, P, S>(
fs_root: R,
origin: O,
pkg_name: P,
service_group: S,
) -> TestSup
where
R: AsRef<Path>,
O: ToString,
P: ToString,
S: ToString,
{
// We'll give 10 tries to find a free port number
let http_port = unclaimed_port(10);
let butterfly_port = unclaimed_port(10);
TestSup::new(
fs_root,
origin,
pkg_name,
service_group,
http_port,
butterfly_port,
)
}
/// Bundle up a Habitat Supervisor process along with an
/// associated Butterfly client for injecting new configuration
/// values. The Supervisor executable is the one that has been
/// compiled for the current `cargo test` invocation.
///
/// The Supervisor is configured to run a single package for a
/// test. This package is assumed to have already been installed
/// relative to `fs_root` (i.e., the `FS_ROOT` environment
/// variable, which in our tests will be a randomly-named
/// temporary directory that this Supervisor will view as `/`.).
///
/// A Butterfly client is also created for interacting with this
/// Supervisor and package. It is properly configured according to
/// the value provided for `butterfly_port`. To use it, see the
/// `apply_config` function.
///
/// (No HTTP interaction with the Supervisor is currently called
/// for, so we don't have a HTTP client.)
pub fn new<R, O, P, S>(
fs_root: R,
origin: O,
pkg_name: P,
service_group: S,
http_port: u16,
butterfly_port: u16,
) -> TestSup
where
R: AsRef<Path>,
O: ToString,
P: ToString,
S: ToString,
{
let sup_exe = find_exe("hab-sup");
let launcher_exe = find_exe("hab-launch");
let mut cmd = Command::new(&launcher_exe);
let listen_host = "0.0.0.0";
let origin = origin.to_string();
let pkg_name = pkg_name.to_string();
let service_group = service_group.to_string();
cmd.env(
"TESTING_FS_ROOT",
fs_root.as_ref().to_string_lossy().as_ref(),
).env("HAB_SUP_BINARY", &sup_exe)
.env(BLDR_URL_ENVVAR, "http://hab.sup.test")
.arg("start")
.arg("--listen-gossip")
.arg(format!("{}:{}", listen_host, butterfly_port))
.arg("--listen-http")
.arg(format!("{}:{}", listen_host, http_port))
.arg(format!("{}/{}", origin, pkg_name))
.stdin(Stdio::null());
if !nocapture_set() {
cmd.stdout(Stdio::null());
cmd.stderr(Stdio::null());
}
let bc = test_butterfly::Client::new(&pkg_name, &service_group, butterfly_port);
TestSup {
hab_root: fs_root.as_ref().to_path_buf(),
origin_name: origin,
package_name: pkg_name,
service_group: service_group.to_string(),
http_port: http_port,
butterfly_port: butterfly_port,
butterfly_client: bc,
cmd: cmd,
process: None,
}
}
/// Spawn a process actually running the Supervisor.
pub fn start(&mut self) {
let child = self.cmd.spawn().expect("Couldn't start the Supervisor!");<|fim▁hole|>
/// The equivalent of performing `hab apply` with the given
/// configuration.
pub fn apply_config<T>(&mut self, toml_config: T)
where
T: ToString,
{
self.butterfly_client.apply(toml_config.to_string())
}
}
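// Illustrative sketch (not part of the original file): a typical test
// lifecycle. The fs root, origin, package, and service group below are
// hypothetical; the package is assumed to be pre-installed under the fs root.
#[allow(dead_code)]
fn example_test_sup_lifecycle() {
    let mut sup = TestSup::new_with_random_ports(
        "/tmp/hab-test-root",
        "core",
        "redis",
        "redis.default",
    );
    sup.start();
    sup.apply_config("tcp-backlog = 128");
    // Dropping `sup` kills the Supervisor and releases its claimed ports.
}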
// We kill the Supervisor so you don't have to! We also free up the
// ports used by this Supervisor so other tests can use them.
impl Drop for TestSup {
fn drop(&mut self) {
let mut ports = CLAIMED_PORTS.lock().unwrap();
ports.remove(&self.http_port);
ports.remove(&self.butterfly_port);
self.process
.take()
.expect("No process to kill!")
.kill()
.expect("Tried to kill Supervisor!");
}
}<|fim▁end|>
|
self.process = Some(child);
}
|
<|file_name|>org.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
if False:
from gluon import current, URL, SQLFORM, redirect
from gluon import IS_NOT_EMPTY, Field, IS_EMAIL
from gluon import IS_NOT_IN_DB
request = current.request
response = current.response
session = current.session
cache = current.cache
T = current.T
from db import db, auth
@auth.requires_login()
def index():
"""
Show the user the organizations he/she can access
"""
query = (db.organization.id > 0)
query &= (
auth.accessible_query('read', db.organization) |
auth.accessible_query('update', db.organization))
orgs = db(query).select(db.organization.ALL)
return locals()
@auth.requires(
auth.has_permission('read', db.organization, request.args(0)) or
auth.has_permission('update', db.organization, request.args(0))
)
def view():
"""
Show the list of desks in this org
"""
org = db.organization(request.args(0))
session.org_id = org.id
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def edit():
org = db.organization(request.args(0))
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requires = [IS_NOT_EMPTY()]
# edit form
form = SQLFORM(db.organization, record=org, showid=False)
if form.process().accepted:
redirect(URL('view', args=[org.id]))
return locals()
@auth.requires(auth.has_permission('update', db.organization, request.args(0)))
def members():
org = db.organization(request.args(0))
if not request.args(1):
fld_email = Field('email', 'string', label=T("Email"))
fld_email.requires = IS_EMAIL()
form = SQLFORM.factory(
fld_email,
formstyle='bootstrap3_inline',
submit_button=T("Add user"),
table_name='members')
if form.process().accepted:
u = db.auth_user(email=form.vars.email)
if u is not None:
# create new share
if u.id in org.users:
form.errors.email = T(
"The user is already in the organization")
else:<|fim▁hole|> user_list.insert(0, u.id)
org.update_record(users=user_list)
g_id = auth.user_group(u.id)
auth.add_permission(g_id, 'read', db.organization, org.id)
else:
# no user with that email
response.flash = ""
form.errors.email = T("The user don't exists on this system")
elif request.args(1) == 'delete':
# remove the user on args(2) from the org members list
# TODO: remove else any perms on the org desks
user_to_remove = db.auth_user(request.args(2))
if user_to_remove is not None:
user_list = org.users
user_list.remove(user_to_remove.id)
org.update_record(users=user_list)
# remove perms over the org
auth.del_permission(
auth.user_group(user_to_remove.id),
'read',
db.organization,
org.id)
# remove, also, all rights over the desks in the org.
desk_perms = [
'read_desk', 'update_items', 'push_items', 'update_desk']
for desk_id in org.desks:
for perm in desk_perms:
auth.del_permission(
auth.user_group(user_to_remove.id),
perm,
db.desk,
desk_id
)
redirect(URL('org', 'members', args=[org.id]))
return locals()
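# Illustrative sketch (not part of the original controller): the revocation
# loop above pairs every desk with every desk-level permission; the same
# pattern can be factored into a helper. `perm_names` is hypothetical.
def _revoke_desk_perms(user_id, desk_ids,
                       perm_names=('read_desk', 'update_items',
                                   'push_items', 'update_desk')):
    """Remove every desk-level permission `user_id` holds on `desk_ids`."""
    g_id = auth.user_group(user_id)
    for desk_id in desk_ids:
        for perm in perm_names:
            auth.del_permission(g_id, perm, db.desk, desk_id)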
@auth.requires_login()
def create():
"""Create a new organization"""
tbl = db.organization
tbl.users.readable = False
tbl.users.writable = False
tbl.desks.readable = False
tbl.desks.writable = False
tbl.name.requires = [
IS_NOT_EMPTY(
error_message=T("Cannot be empty")
),
IS_NOT_IN_DB(
db,
'organization.name',
error_message=T(
"An Organization witch that name is allready in nStock"))]
form = SQLFORM(tbl)
form.add_button(T('Cancel'), URL('index'))
if form.process().accepted:
# add the new organization
g_id = auth.user_group(auth.user.id)
# give the user all perms over this org
auth.add_permission(g_id, 'update', tbl, form.vars.id)
auth.add_permission(g_id, 'read', tbl, form.vars.id)
auth.add_permission(g_id, 'delete', tbl, form.vars.id)
redirect(URL('index'))
return locals()<|fim▁end|>
|
user_list = org.users
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>class Blood(object):<|fim▁hole|> """
Most characters will have ordinary blood but some could have acidic blood or with other properties.
"""<|fim▁end|>
|
uid = "blood"
name = "Blood"
|
<|file_name|>jni_generator_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for jni_generator.py.
This test suite contains various tests for the JNI generator.
It exercises the low-level parser all the way up to the
code generator and ensures the output matches a golden
file.
"""
import difflib
import inspect
import optparse
import os
import sys
import unittest
import jni_generator
from jni_generator import CalledByNative, JniParams, NativeMethod, Param
SCRIPT_NAME = 'base/android/jni_generator/jni_generator.py'
INCLUDES = (
'base/android/jni_generator/jni_generator_helper.h'
)
# Set this environment variable in order to regenerate the golden text
# files.
REBASELINE_ENV = 'REBASELINE'
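# Illustrative usage (not part of the original test): to regenerate goldens,
# run the suite with the variable set; the exact invocation may vary by
# checkout, e.g.:
#
#   REBASELINE=1 base/android/jni_generator/jni_generator_tests.py
#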
class TestOptions(object):
"""The mock options object which is passed to the jni_generator.py script."""
def __init__(self):
self.namespace = None
self.script_name = SCRIPT_NAME
self.includes = INCLUDES
self.ptr_type = 'long'
self.cpp = 'cpp'
self.javap = 'javap'
self.native_exports = False
self.native_exports_optional = False
class TestGenerator(unittest.TestCase):
def assertObjEquals(self, first, second):
dict_first = first.__dict__
dict_second = second.__dict__
self.assertEquals(dict_first.keys(), dict_second.keys())
for key, value in dict_first.iteritems():
if (type(value) is list and len(value) and
isinstance(type(value[0]), object)):
self.assertListEquals(value, second.__getattribute__(key))
else:
actual = second.__getattribute__(key)
self.assertEquals(value, actual,
'Key ' + key + ': ' + str(value) + '!=' + str(actual))
def assertListEquals(self, first, second):
self.assertEquals(len(first), len(second))
for i in xrange(len(first)):
if isinstance(first[i], object):
self.assertObjEquals(first[i], second[i])
else:
self.assertEquals(first[i], second[i])
def assertTextEquals(self, golden_text, generated_text):
if not self.compareText(golden_text, generated_text):
self.fail('Golden text mismatch.')
def compareText(self, golden_text, generated_text):
def FilterText(text):
return [
l.strip() for l in text.split('\n')
if not l.startswith('// Copyright')
]
stripped_golden = FilterText(golden_text)
stripped_generated = FilterText(generated_text)
if stripped_golden == stripped_generated:
return True
print self.id()
for line in difflib.context_diff(stripped_golden, stripped_generated):
print line
print '\n\nGenerated'
print '=' * 80
print generated_text
print '=' * 80
print 'Run with:'
print 'REBASELINE=1', sys.argv[0]
print 'to regenerate the data files.'
<|fim▁hole|> return None
with file(golden_file, 'r') as f:
return f.read()
def assertGoldenTextEquals(self, generated_text):
script_dir = os.path.dirname(sys.argv[0])
# This is the caller test method.
caller = inspect.stack()[1][3]
self.assertTrue(caller.startswith('test'),
'assertGoldenTextEquals can only be called from a '
'test* method, not %s' % caller)
golden_file = os.path.join(script_dir, caller + '.golden')
golden_text = self._ReadGoldenFile(golden_file)
if os.environ.get(REBASELINE_ENV):
if golden_text != generated_text:
with file(golden_file, 'w') as f:
f.write(generated_text)
return
self.assertTextEquals(golden_text, generated_text)
def testInspectCaller(self):
def willRaise():
# This function can only be called from a test* method.
self.assertGoldenTextEquals('')
self.assertRaises(AssertionError, willRaise)
def testNatives(self):
test_data = """"
interface OnFrameAvailableListener {}
private native int nativeInit();
private native void nativeDestroy(int nativeChromeBrowserProvider);
private native long nativeAddBookmark(
int nativeChromeBrowserProvider,
String url, String title, boolean isFolder, long parentId);
private static native String nativeGetDomainAndRegistry(String url);
private static native void nativeCreateHistoricalTabFromState(
byte[] state, int tab_index);
private native byte[] nativeGetStateAsByteArray(View view);
private static native String[] nativeGetAutofillProfileGUIDs();
private native void nativeSetRecognitionResults(
int sessionId, String[] results);
private native long nativeAddBookmarkFromAPI(
int nativeChromeBrowserProvider,
String url, Long created, Boolean isBookmark,
Long date, byte[] favicon, String title, Integer visits);
native int nativeFindAll(String find);
private static native OnFrameAvailableListener nativeGetInnerClass();
private native Bitmap nativeQueryBitmap(
int nativeChromeBrowserProvider,
String[] projection, String selection,
String[] selectionArgs, String sortOrder);
private native void nativeGotOrientation(
int nativeDataFetcherImplAndroid,
double alpha, double beta, double gamma);
private static native Throwable nativeMessWithJavaException(Throwable e);
"""
jni_generator.JniParams.SetFullyQualifiedClass(
'org/chromium/example/jni_generator/SampleForTests')
jni_generator.JniParams.ExtractImportsAndInnerClasses(test_data)
natives = jni_generator.ExtractNatives(test_data, 'int')
golden_natives = [
NativeMethod(return_type='int', static=False,
name='Init',
params=[],
java_class_name=None,
type='function'),
NativeMethod(return_type='void', static=False, name='Destroy',
params=[Param(datatype='int',
name='nativeChromeBrowserProvider')],
java_class_name=None,
type='method',
p0_type='ChromeBrowserProvider'),
NativeMethod(return_type='long', static=False, name='AddBookmark',
params=[Param(datatype='int',
name='nativeChromeBrowserProvider'),
Param(datatype='String',
name='url'),
Param(datatype='String',
name='title'),
Param(datatype='boolean',
name='isFolder'),
Param(datatype='long',
name='parentId')],
java_class_name=None,
type='method',
p0_type='ChromeBrowserProvider'),
NativeMethod(return_type='String', static=True,
name='GetDomainAndRegistry',
params=[Param(datatype='String',
name='url')],
java_class_name=None,
type='function'),
NativeMethod(return_type='void', static=True,
name='CreateHistoricalTabFromState',
params=[Param(datatype='byte[]',
name='state'),
Param(datatype='int',
name='tab_index')],
java_class_name=None,
type='function'),
NativeMethod(return_type='byte[]', static=False,
name='GetStateAsByteArray',
params=[Param(datatype='View', name='view')],
java_class_name=None,
type='function'),
NativeMethod(return_type='String[]', static=True,
name='GetAutofillProfileGUIDs', params=[],
java_class_name=None,
type='function'),
NativeMethod(return_type='void', static=False,
name='SetRecognitionResults',
params=[Param(datatype='int', name='sessionId'),
Param(datatype='String[]', name='results')],
java_class_name=None,
type='function'),
NativeMethod(return_type='long', static=False,
name='AddBookmarkFromAPI',
params=[Param(datatype='int',
name='nativeChromeBrowserProvider'),
Param(datatype='String',
name='url'),
Param(datatype='Long',
name='created'),
Param(datatype='Boolean',
name='isBookmark'),
Param(datatype='Long',
name='date'),
Param(datatype='byte[]',
name='favicon'),
Param(datatype='String',
name='title'),
Param(datatype='Integer',
name='visits')],
java_class_name=None,
type='method',
p0_type='ChromeBrowserProvider'),
NativeMethod(return_type='int', static=False,
name='FindAll',
params=[Param(datatype='String',
name='find')],
java_class_name=None,
type='function'),
NativeMethod(return_type='OnFrameAvailableListener', static=True,
name='GetInnerClass',
params=[],
java_class_name=None,
type='function'),
NativeMethod(return_type='Bitmap',
static=False,
name='QueryBitmap',
params=[Param(datatype='int',
name='nativeChromeBrowserProvider'),
Param(datatype='String[]',
name='projection'),
Param(datatype='String',
name='selection'),
Param(datatype='String[]',
name='selectionArgs'),
Param(datatype='String',
name='sortOrder'),
],
java_class_name=None,
type='method',
p0_type='ChromeBrowserProvider'),
NativeMethod(return_type='void', static=False,
name='GotOrientation',
params=[Param(datatype='int',
name='nativeDataFetcherImplAndroid'),
Param(datatype='double',
name='alpha'),
Param(datatype='double',
name='beta'),
Param(datatype='double',
name='gamma'),
],
java_class_name=None,
type='method',
p0_type='content::DataFetcherImplAndroid'),
NativeMethod(return_type='Throwable', static=True,
name='MessWithJavaException',
params=[Param(datatype='Throwable', name='e')],
java_class_name=None,
type='function')
]
self.assertListEquals(golden_natives, natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
natives, [], [], TestOptions())
self.assertGoldenTextEquals(h.GetContent())
def testInnerClassNatives(self):
test_data = """
class MyInnerClass {
@NativeCall("MyInnerClass")
private native int nativeInit();
}
"""
natives = jni_generator.ExtractNatives(test_data, 'int')
golden_natives = [
NativeMethod(return_type='int', static=False,
name='Init', params=[],
java_class_name='MyInnerClass',
type='function')
]
self.assertListEquals(golden_natives, natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
natives, [], [], TestOptions())
self.assertGoldenTextEquals(h.GetContent())
def testInnerClassNativesMultiple(self):
test_data = """
class MyInnerClass {
@NativeCall("MyInnerClass")
private native int nativeInit();
}
class MyOtherInnerClass {
@NativeCall("MyOtherInnerClass")
private native int nativeInit();
}
"""
natives = jni_generator.ExtractNatives(test_data, 'int')
golden_natives = [
NativeMethod(return_type='int', static=False,
name='Init', params=[],
java_class_name='MyInnerClass',
type='function'),
NativeMethod(return_type='int', static=False,
name='Init', params=[],
java_class_name='MyOtherInnerClass',
type='function')
]
self.assertListEquals(golden_natives, natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
natives, [], [], TestOptions())
self.assertGoldenTextEquals(h.GetContent())
def testInnerClassNativesBothInnerAndOuter(self):
test_data = """
class MyOuterClass {
private native int nativeInit();
class MyOtherInnerClass {
@NativeCall("MyOtherInnerClass")
private native int nativeInit();
}
}
"""
natives = jni_generator.ExtractNatives(test_data, 'int')
golden_natives = [
NativeMethod(return_type='int', static=False,
name='Init', params=[],
java_class_name=None,
type='function'),
NativeMethod(return_type='int', static=False,
name='Init', params=[],
java_class_name='MyOtherInnerClass',
type='function')
]
self.assertListEquals(golden_natives, natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
natives, [], [], TestOptions())
self.assertGoldenTextEquals(h.GetContent())
def testCalledByNatives(self):
test_data = """"
import android.graphics.Bitmap;
import android.view.View;
import java.io.InputStream;
import java.util.List;
class InnerClass {}
@CalledByNative
InnerClass showConfirmInfoBar(int nativeInfoBar,
String buttonOk, String buttonCancel, String title, Bitmap icon) {
InfoBar infobar = new ConfirmInfoBar(nativeInfoBar, mContext,
buttonOk, buttonCancel,
title, icon);
return infobar;
}
@CalledByNative
InnerClass showAutoLoginInfoBar(int nativeInfoBar,
String realm, String account, String args) {
AutoLoginInfoBar infobar = new AutoLoginInfoBar(nativeInfoBar, mContext,
realm, account, args);
if (infobar.displayedAccountCount() == 0)
infobar = null;
return infobar;
}
@CalledByNative("InfoBar")
void dismiss();
@SuppressWarnings("unused")
@CalledByNative
private static boolean shouldShowAutoLogin(View view,
String realm, String account, String args) {
AccountManagerContainer accountManagerContainer =
new AccountManagerContainer((Activity)contentView.getContext(),
realm, account, args);
String[] logins = accountManagerContainer.getAccountLogins(null);
return logins.length != 0;
}
@CalledByNative
static InputStream openUrl(String url) {
return null;
}
@CalledByNative
private void activateHardwareAcceleration(final boolean activated,
final int iPid, final int iType,
final int iPrimaryID, final int iSecondaryID) {
if (!activated) {
return
}
}
@CalledByNativeUnchecked
private void uncheckedCall(int iParam);
@CalledByNative
public byte[] returnByteArray();
@CalledByNative
public boolean[] returnBooleanArray();
@CalledByNative
public char[] returnCharArray();
@CalledByNative
public short[] returnShortArray();
@CalledByNative
public int[] returnIntArray();
@CalledByNative
public long[] returnLongArray();
@CalledByNative
public double[] returnDoubleArray();
@CalledByNative
public Object[] returnObjectArray();
@CalledByNative
public byte[][] returnArrayOfByteArray();
@CalledByNative
public Bitmap.CompressFormat getCompressFormat();
@CalledByNative
public List<Bitmap.CompressFormat> getCompressFormatList();
"""
jni_generator.JniParams.SetFullyQualifiedClass('org/chromium/Foo')
jni_generator.JniParams.ExtractImportsAndInnerClasses(test_data)
called_by_natives = jni_generator.ExtractCalledByNatives(test_data)
golden_called_by_natives = [
CalledByNative(
return_type='InnerClass',
system_class=False,
static=False,
name='showConfirmInfoBar',
method_id_var_name='showConfirmInfoBar',
java_class_name='',
params=[Param(datatype='int', name='nativeInfoBar'),
Param(datatype='String', name='buttonOk'),
Param(datatype='String', name='buttonCancel'),
Param(datatype='String', name='title'),
Param(datatype='Bitmap', name='icon')],
env_call=('Object', ''),
unchecked=False,
),
CalledByNative(
return_type='InnerClass',
system_class=False,
static=False,
name='showAutoLoginInfoBar',
method_id_var_name='showAutoLoginInfoBar',
java_class_name='',
params=[Param(datatype='int', name='nativeInfoBar'),
Param(datatype='String', name='realm'),
Param(datatype='String', name='account'),
Param(datatype='String', name='args')],
env_call=('Object', ''),
unchecked=False,
),
CalledByNative(
return_type='void',
system_class=False,
static=False,
name='dismiss',
method_id_var_name='dismiss',
java_class_name='InfoBar',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='boolean',
system_class=False,
static=True,
name='shouldShowAutoLogin',
method_id_var_name='shouldShowAutoLogin',
java_class_name='',
params=[Param(datatype='View', name='view'),
Param(datatype='String', name='realm'),
Param(datatype='String', name='account'),
Param(datatype='String', name='args')],
env_call=('Boolean', ''),
unchecked=False,
),
CalledByNative(
return_type='InputStream',
system_class=False,
static=True,
name='openUrl',
method_id_var_name='openUrl',
java_class_name='',
params=[Param(datatype='String', name='url')],
env_call=('Object', ''),
unchecked=False,
),
CalledByNative(
return_type='void',
system_class=False,
static=False,
name='activateHardwareAcceleration',
method_id_var_name='activateHardwareAcceleration',
java_class_name='',
params=[Param(datatype='boolean', name='activated'),
Param(datatype='int', name='iPid'),
Param(datatype='int', name='iType'),
Param(datatype='int', name='iPrimaryID'),
Param(datatype='int', name='iSecondaryID'),
],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='void',
system_class=False,
static=False,
name='uncheckedCall',
method_id_var_name='uncheckedCall',
java_class_name='',
params=[Param(datatype='int', name='iParam')],
env_call=('Void', ''),
unchecked=True,
),
CalledByNative(
return_type='byte[]',
system_class=False,
static=False,
name='returnByteArray',
method_id_var_name='returnByteArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='boolean[]',
system_class=False,
static=False,
name='returnBooleanArray',
method_id_var_name='returnBooleanArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='char[]',
system_class=False,
static=False,
name='returnCharArray',
method_id_var_name='returnCharArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='short[]',
system_class=False,
static=False,
name='returnShortArray',
method_id_var_name='returnShortArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='int[]',
system_class=False,
static=False,
name='returnIntArray',
method_id_var_name='returnIntArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='long[]',
system_class=False,
static=False,
name='returnLongArray',
method_id_var_name='returnLongArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='double[]',
system_class=False,
static=False,
name='returnDoubleArray',
method_id_var_name='returnDoubleArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='Object[]',
system_class=False,
static=False,
name='returnObjectArray',
method_id_var_name='returnObjectArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='byte[][]',
system_class=False,
static=False,
name='returnArrayOfByteArray',
method_id_var_name='returnArrayOfByteArray',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='Bitmap.CompressFormat',
system_class=False,
static=False,
name='getCompressFormat',
method_id_var_name='getCompressFormat',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
CalledByNative(
return_type='List<Bitmap.CompressFormat>',
system_class=False,
static=False,
name='getCompressFormatList',
method_id_var_name='getCompressFormatList',
java_class_name='',
params=[],
env_call=('Void', ''),
unchecked=False,
),
]
self.assertListEquals(golden_called_by_natives, called_by_natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
[], called_by_natives, [],
TestOptions())
self.assertGoldenTextEquals(h.GetContent())
def testCalledByNativeParseError(self):
try:
jni_generator.ExtractCalledByNatives("""
@CalledByNative
public static int foo(); // This one is fine
@CalledByNative
scooby doo
""")
self.fail('Expected a ParseError')
except jni_generator.ParseError, e:
self.assertEquals(('@CalledByNative', 'scooby doo'), e.context_lines)
def testFullyQualifiedClassName(self):
contents = """
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import org.chromium.base.BuildInfo;
"""
self.assertEquals('org/chromium/content/browser/Foo',
jni_generator.ExtractFullyQualifiedJavaClassName(
'org/chromium/content/browser/Foo.java', contents))
self.assertEquals('org/chromium/content/browser/Foo',
jni_generator.ExtractFullyQualifiedJavaClassName(
'frameworks/Foo.java', contents))
self.assertRaises(SyntaxError,
jni_generator.ExtractFullyQualifiedJavaClassName,
'com/foo/Bar', 'no PACKAGE line')
def testMethodNameMangling(self):
self.assertEquals('closeV',
jni_generator.GetMangledMethodName('close', [], 'void'))
self.assertEquals('readI_AB_I_I',
jni_generator.GetMangledMethodName('read',
[Param(name='p1',
datatype='byte[]'),
Param(name='p2',
datatype='int'),
Param(name='p3',
datatype='int'),],
'int'))
self.assertEquals('openJIIS_JLS',
jni_generator.GetMangledMethodName('open',
[Param(name='p1',
datatype='java/lang/String'),],
'java/io/InputStream'))
def testFromJavaPGenerics(self):
contents = """
public abstract class java.util.HashSet<T> extends java.util.AbstractSet<E>
implements java.util.Set<E>, java.lang.Cloneable, java.io.Serializable {
public void dummy();
Signature: ()V
}
"""
jni_from_javap = jni_generator.JNIFromJavaP(contents.split('\n'),
TestOptions())
self.assertEquals(1, len(jni_from_javap.called_by_natives))
self.assertGoldenTextEquals(jni_from_javap.GetContent())
  def testSnippetJavap6_7_8(self):
content_javap6 = """
public class java.util.HashSet {
public boolean add(java.lang.Object);
Signature: (Ljava/lang/Object;)Z
}
"""
content_javap7 = """
public class java.util.HashSet {
public boolean add(E);
Signature: (Ljava/lang/Object;)Z
}
"""
content_javap8 = """
public class java.util.HashSet {
public boolean add(E);
descriptor: (Ljava/lang/Object;)Z
}
"""
jni_from_javap6 = jni_generator.JNIFromJavaP(content_javap6.split('\n'),
TestOptions())
jni_from_javap7 = jni_generator.JNIFromJavaP(content_javap7.split('\n'),
TestOptions())
jni_from_javap8 = jni_generator.JNIFromJavaP(content_javap8.split('\n'),
TestOptions())
self.assertTrue(jni_from_javap6.GetContent())
self.assertTrue(jni_from_javap7.GetContent())
self.assertTrue(jni_from_javap8.GetContent())
# Ensure the javap7 is correctly parsed and uses the Signature field rather
# than the "E" parameter.
self.assertTextEquals(jni_from_javap6.GetContent(),
jni_from_javap7.GetContent())
# Ensure the javap8 is correctly parsed and uses the descriptor field.
self.assertTextEquals(jni_from_javap7.GetContent(),
jni_from_javap8.GetContent())
def testFromJavaP(self):
contents = self._ReadGoldenFile(os.path.join(os.path.dirname(sys.argv[0]),
'testInputStream.javap'))
jni_from_javap = jni_generator.JNIFromJavaP(contents.split('\n'),
TestOptions())
self.assertEquals(10, len(jni_from_javap.called_by_natives))
self.assertGoldenTextEquals(jni_from_javap.GetContent())
def testConstantsFromJavaP(self):
for f in ['testMotionEvent.javap', 'testMotionEvent.javap7']:
contents = self._ReadGoldenFile(os.path.join(os.path.dirname(sys.argv[0]),
f))
jni_from_javap = jni_generator.JNIFromJavaP(contents.split('\n'),
TestOptions())
self.assertEquals(86, len(jni_from_javap.called_by_natives))
self.assertGoldenTextEquals(jni_from_javap.GetContent())
def testREForNatives(self):
# We should not match "native SyncSetupFlow" inside the comment.
test_data = """
/**
* Invoked when the setup process is complete so we can disconnect from the
* native-side SyncSetupFlowHandler.
*/
public void destroy() {
Log.v(TAG, "Destroying native SyncSetupFlow");
if (mNativeSyncSetupFlow != 0) {
nativeSyncSetupEnded(mNativeSyncSetupFlow);
mNativeSyncSetupFlow = 0;
}
}
private native void nativeSyncSetupEnded(
int nativeAndroidSyncSetupFlowHandler);
"""
jni_from_java = jni_generator.JNIFromJavaSource(
test_data, 'foo/bar', TestOptions())
def testRaisesOnNonJNIMethod(self):
test_data = """
class MyInnerClass {
private int Foo(int p0) {
}
}
"""
self.assertRaises(SyntaxError,
jni_generator.JNIFromJavaSource,
test_data, 'foo/bar', TestOptions())
def testJniSelfDocumentingExample(self):
script_dir = os.path.dirname(sys.argv[0])
content = file(os.path.join(script_dir,
'java/src/org/chromium/example/jni_generator/SampleForTests.java')
).read()
golden_file = os.path.join(script_dir, 'golden_sample_for_tests_jni.h')
golden_content = file(golden_file).read()
jni_from_java = jni_generator.JNIFromJavaSource(
content, 'org/chromium/example/jni_generator/SampleForTests',
TestOptions())
generated_text = jni_from_java.GetContent()
if not self.compareText(golden_content, generated_text):
if os.environ.get(REBASELINE_ENV):
with file(golden_file, 'w') as f:
f.write(generated_text)
return
self.fail('testJniSelfDocumentingExample')
def testNoWrappingPreprocessorLines(self):
test_data = """
package com.google.lookhowextremelylongiam.snarf.icankeepthisupallday;
class ReallyLongClassNamesAreAllTheRage {
private static native int nativeTest();
}
"""
jni_from_java = jni_generator.JNIFromJavaSource(
test_data, ('com/google/lookhowextremelylongiam/snarf/'
'icankeepthisupallday/ReallyLongClassNamesAreAllTheRage'),
TestOptions())
jni_lines = jni_from_java.GetContent().split('\n')
line = filter(lambda line: line.lstrip().startswith('#ifndef'),
jni_lines)[0]
self.assertTrue(len(line) > 80,
('Expected #ifndef line to be > 80 chars: ', line))
def testImports(self):
import_header = """
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.app;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
import android.os.Process;
import android.os.RemoteException;
import android.util.Log;
import android.view.Surface;
import java.util.ArrayList;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.content.app.ContentMain;
import org.chromium.content.browser.SandboxedProcessConnection;
import org.chromium.content.common.ISandboxedProcessCallback;
import org.chromium.content.common.ISandboxedProcessService;
import org.chromium.content.common.WillNotRaise.AnException;
import org.chromium.content.common.WillRaise.AnException;
import static org.chromium.Bar.Zoo;
class Foo {
public static class BookmarkNode implements Parcelable {
}
public interface PasswordListObserver {
}
}
"""
jni_generator.JniParams.SetFullyQualifiedClass(
'org/chromium/content/app/Foo')
jni_generator.JniParams.ExtractImportsAndInnerClasses(import_header)
self.assertTrue('Lorg/chromium/content/common/ISandboxedProcessService' in
jni_generator.JniParams._imports)
self.assertTrue('Lorg/chromium/Bar/Zoo' in
jni_generator.JniParams._imports)
self.assertTrue('Lorg/chromium/content/app/Foo$BookmarkNode' in
jni_generator.JniParams._inner_classes)
self.assertTrue('Lorg/chromium/content/app/Foo$PasswordListObserver' in
jni_generator.JniParams._inner_classes)
self.assertEquals('Lorg/chromium/content/app/ContentMain$Inner;',
jni_generator.JniParams.JavaToJni('ContentMain.Inner'))
self.assertRaises(SyntaxError,
jni_generator.JniParams.JavaToJni,
'AnException')
def testJniParamsJavaToJni(self):
self.assertTextEquals('I', JniParams.JavaToJni('int'))
self.assertTextEquals('[B', JniParams.JavaToJni('byte[]'))
self.assertTextEquals(
'[Ljava/nio/ByteBuffer;', JniParams.JavaToJni('java/nio/ByteBuffer[]'))
def testNativesLong(self):
test_options = TestOptions()
test_options.ptr_type = 'long'
test_data = """"
private native void nativeDestroy(long nativeChromeBrowserProvider);
"""
jni_generator.JniParams.ExtractImportsAndInnerClasses(test_data)
natives = jni_generator.ExtractNatives(test_data, test_options.ptr_type)
golden_natives = [
NativeMethod(return_type='void', static=False, name='Destroy',
params=[Param(datatype='long',
name='nativeChromeBrowserProvider')],
java_class_name=None,
type='method',
p0_type='ChromeBrowserProvider',
ptr_type=test_options.ptr_type),
]
self.assertListEquals(golden_natives, natives)
h = jni_generator.InlHeaderFileGenerator('', 'org/chromium/TestJni',
natives, [], [], test_options)
self.assertGoldenTextEquals(h.GetContent())
def runNativeExportsOption(self, optional):
test_data = """
package org.chromium.example.jni_generator;
/** The pointer to the native Test. */
long nativeTest;
class Test {
private static native int nativeStaticMethod(long nativeTest, int arg1);
private native int nativeMethod(long nativeTest, int arg1);
@CalledByNative
private void testMethodWithParam(int iParam);
@CalledByNative
private String testMethodWithParamAndReturn(int iParam);
@CalledByNative
private static int testStaticMethodWithParam(int iParam);
@CalledByNative
private static double testMethodWithNoParam();
@CalledByNative
private static String testStaticMethodWithNoParam();
class MyInnerClass {
@NativeCall("MyInnerClass")
private native int nativeInit();
}
class MyOtherInnerClass {
@NativeCall("MyOtherInnerClass")
private native int nativeInit();
}
}
"""
options = TestOptions()
options.native_exports = True
options.native_exports_optional = optional
jni_from_java = jni_generator.JNIFromJavaSource(
test_data, 'org/chromium/example/jni_generator/SampleForTests', options)
return jni_from_java.GetContent()
def testNativeExportsOption(self):
content = self.runNativeExportsOption(False)
self.assertGoldenTextEquals(content)
def testNativeExportsOptionalOption(self):
content = self.runNativeExportsOption(True)
self.assertGoldenTextEquals(content)
def testOuterInnerRaises(self):
test_data = """
package org.chromium.media;
@CalledByNative
static int getCaptureFormatWidth(VideoCapture.CaptureFormat format) {
return format.getWidth();
}
"""
def willRaise():
jni_generator.JNIFromJavaSource(
test_data,
'org/chromium/media/VideoCaptureFactory',
TestOptions())
self.assertRaises(SyntaxError, willRaise)
def testSingleJNIAdditionalImport(self):
test_data = """
package org.chromium.foo;
@JNIAdditionalImport(Bar.class)
class Foo {
@CalledByNative
private static void calledByNative(Bar.Callback callback) {
}
private static native void nativeDoSomething(Bar.Callback callback);
}
"""
jni_from_java = jni_generator.JNIFromJavaSource(test_data,
'org/chromium/foo/Foo',
TestOptions())
self.assertGoldenTextEquals(jni_from_java.GetContent())
def testMultipleJNIAdditionalImport(self):
test_data = """
package org.chromium.foo;
@JNIAdditionalImport({Bar1.class, Bar2.class})
class Foo {
@CalledByNative
private static void calledByNative(Bar1.Callback callback1,
Bar2.Callback callback2) {
}
private static native void nativeDoSomething(Bar1.Callback callback1,
Bar2.Callback callback2);
}
"""
jni_from_java = jni_generator.JNIFromJavaSource(test_data,
'org/chromium/foo/Foo',
TestOptions())
self.assertGoldenTextEquals(jni_from_java.GetContent())
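# Touches |stamp_path| (creating parent directories as needed) so that build
# systems can use the stamp file's mtime to record a successful test run.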
def TouchStamp(stamp_path):
dir_name = os.path.dirname(stamp_path)
if not os.path.isdir(dir_name):
    os.makedirs(dir_name)
with open(stamp_path, 'a'):
os.utime(stamp_path, None)
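# Runs the test suite, touching --stamp on success; returns a nonzero exit
# code if any test failed.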
def main(argv):
parser = optparse.OptionParser()
parser.add_option('--stamp', help='Path to touch on success.')
options, _ = parser.parse_args(argv[1:])
test_result = unittest.main(argv=argv[0:1], exit=False)
if test_result.result.wasSuccessful() and options.stamp:
TouchStamp(options.stamp)
return not test_result.result.wasSuccessful()
if __name__ == '__main__':
sys.exit(main(sys.argv))<|fim▁end|>
|
  def _ReadGoldenFile(self, golden_file):
    if not os.path.exists(golden_file):
      return None
    with file(golden_file, 'r') as f:
      return f.read()
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var crypto = require('crypto');
var Canvas = require('canvas');
var _ = require('lodash');
var bu = require('./bufutil');
var fmt = require('util').format;
var unpack = require('./unpack');
var bright = require('./bright');
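// Derive a len-byte fingerprint: split the 64-byte SHA-512 digest of buf
// into len-byte groups and XOR-fold the groups together.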
function fprint(buf, len) {
if (len > 64)
throw new Error(fmt("sha512 can only generate 64B of data: %dB requested", len));
return _(crypto.createHash('sha512').update(buf).digest())
.groupBy(function (x, k) { return Math.floor(k/len); })
.reduce(bu.xor);
}
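// Search for a single-byte nonce appended to str whose fingerprint yields a
// pixel fill ratio strictly between minFill and maxFill; returns the two
// derived colors (sorted by brightness) plus the n pixel bits.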
function idhash(str, n, minFill, maxFill) {
var buf = new Buffer(str.length + 1);
buf.write(str);
for (var i=0; i<0x100; i++) {
buf[buf.length - 1] = i;
var f = fprint(buf, Math.ceil(n/8)+6);
var pixels = _(f.slice(6))
.map(function (x) { return unpack(x); })
.flatten().take(n);
var setPixels = pixels.filter().size();
var c = [ f.slice(0, 3), f.slice(3, 6)];
c.sort(bright.cmp);
if (setPixels > (minFill * n) && setPixels < (maxFill * n))
return {
colors: c.map(function (x) { return x.toString('hex'); }),
pixels: pixels.value()<|fim▁hole|> }
throw new Error(fmt("String '''%s''' unhashable in single-byte search space.", str));
}
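// Mirror the half-width pixel data horizontally into a full
// dimension x dimension grid (the middle column is shared when dimension is odd).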
function reflect(id, dimension) {
var mid = Math.ceil(dimension / 2);
var odd = Boolean(dimension % 2);
var pic = [];
for (var row=0; row<dimension; row++) {
pic[row] = [];
for (var col=0; col<dimension; col++) {
var p = (row * mid) + col;
if (col>=mid) {
var d = mid - (odd ? 1 : 0) - col;
var ad = Math.abs(d);
p = (row * mid) + mid - 1 - ad;
}
pic[row][col] = id.pixels[p];
// console.error(fmt("looking for %d, of %d for %d,%d", p, id.pixels.length, row, col))
}
}
return pic;
}
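// Render the identicon for str onto a node-canvas Canvas using the merged
// options; pixel cells are either filled with a color or punched out with
// clearRect when pixelColor is null.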
function retricon(str, opts) {
opts = _.merge({}, retricon.defaults, opts);
var dimension = opts.tiles;
var pixelSize = opts.pixelSize;
var border = opts.pixelPadding;
var mid = Math.ceil(dimension / 2);
var id = idhash(str, mid * dimension, opts.minFill, opts.maxFill);
var pic = reflect(id, dimension);
var csize = (pixelSize * dimension) + (opts.imagePadding * 2);
var c = Canvas.createCanvas(csize, csize);
var ctx = c.getContext('2d');
if (_.isString(opts.bgColor)) {
ctx.fillStyle = opts.bgColor;
} else if (_.isNumber(opts.bgColor)) {
ctx.fillStyle = '#' + id.colors[opts.bgColor];
}
if (! _.isNull(opts.bgColor))
ctx.fillRect(0, 0, csize, csize);
var drawOp = ctx.fillRect.bind(ctx);
if (_.isString(opts.pixelColor)) {
ctx.fillStyle = opts.pixelColor;
} else if (_.isNumber(opts.pixelColor)) {
ctx.fillStyle = '#' + id.colors[opts.pixelColor];
} else {
drawOp = ctx.clearRect.bind(ctx);
}
for (var x=0; x<dimension; x++)
for (var y=0; y<dimension; y++)
if (pic[y][x])
drawOp((x*pixelSize) + border + opts.imagePadding,
(y*pixelSize) + border + opts.imagePadding,
pixelSize - (border * 2),
pixelSize - (border * 2));
return c;
}
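// Default options. bgColor/pixelColor accept a CSS color string, an index
// (0 or 1) into the two hash-derived colors, or null.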
retricon.defaults = {
pixelSize: 10,
bgColor: null,
pixelPadding: 0,
imagePadding: 0,
tiles: 5,
minFill: 0.3,
maxFill: 0.90,
pixelColor: 0
};
retricon.style = {
github: {
pixelSize: 70,
bgColor: '#F0F0F0',
pixelPadding: -1,
imagePadding: 35,
tiles: 5
},
gravatar: {
tiles: 8,
bgColor: 1
},
mono: {
bgColor: '#F0F0F0',
pixelColor: '#000000',
tiles: 6,
pixelSize: 12,
pixelPadding: -1,
imagePadding: 6
},
mosaic: {
imagePadding: 2,
pixelPadding: 1,
pixelSize: 16,
bgColor: '#F0F0F0'
},
mini: {
pixelSize: 10,
pixelPadding: 1,
tiles: 3,
bgColor: 0,
pixelColor: 1
},
window: {
pixelColor: null,
bgColor: 0,
imagePadding: 2,
pixelPadding: 1,
pixelSize: 16
}
};
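// Example usage (illustrative; assumes node-canvas's createPNGStream API):
//   var fs = require('fs');
//   retricon('user@example.com', retricon.style.github)
//     .createPNGStream().pipe(fs.createWriteStream('avatar.png'));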
module.exports = retricon;<|fim▁end|>
|
};
|
<|file_name|>Solution.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
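// One-pass two-sum: record each value's index in a map and, for every
// element, check whether its complement (target - num) was seen earlier.
// LeetCode-style file: the `Solution` struct itself is supplied by the judge.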
impl Solution {
pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {
        let mut num_idx: HashMap<i32, i32> = HashMap::new();
for (i, num) in nums.iter().enumerate() {<|fim▁hole|> }
return vec![-1, -1];
}
}<|fim▁end|>
|
            if num_idx.contains_key(&(target - num)) {
                return vec![num_idx[&(target - num)], i as i32];
            }
            num_idx.insert(*num, i as i32);
|
<|file_name|>index.rs<|end_file_name|><|fim▁begin|>use super::Value;
use crate::map::Map;
use alloc::borrow::ToOwned;
use alloc::string::String;
use core::fmt::{self, Display};
use core::ops;
/// A type that can be used to index into a `serde_json::Value`.
///
/// The [`get`] and [`get_mut`] methods of `Value` accept any type that
/// implements `Index`, as does the [square-bracket indexing operator]. This
/// trait is implemented for strings which are used as the index into a JSON
/// map, and for `usize` which is used as the index into a JSON array.
///
/// [`get`]: ../enum.Value.html#method.get
/// [`get_mut`]: ../enum.Value.html#method.get_mut
/// [square-bracket indexing operator]: ../enum.Value.html#impl-Index%3CI%3E
///
/// This trait is sealed and cannot be implemented for types outside of
/// `serde_json`.
///
/// # Examples
///
/// ```
/// # use serde_json::json;
/// #
/// let data = json!({ "inner": [1, 2, 3] });
///
/// // Data is a JSON map so it can be indexed with a string.
/// let inner = &data["inner"];
///
/// // Inner is a JSON array so it can be indexed with an integer.
/// let first = &inner[0];
///
/// assert_eq!(first, 1);
/// ```
pub trait Index: private::Sealed {
/// Return None if the key is not already in the array or object.
#[doc(hidden)]
fn index_into<'v>(&self, v: &'v Value) -> Option<&'v Value>;
/// Return None if the key is not already in the array or object.
#[doc(hidden)]
fn index_into_mut<'v>(&self, v: &'v mut Value) -> Option<&'v mut Value>;
/// Panic if array index out of bounds. If key is not already in the object,
/// insert it with a value of null. Panic if Value is a type that cannot be
/// indexed into, except if Value is null then it can be treated as an empty
/// object.
#[doc(hidden)]
fn index_or_insert<'v>(&self, v: &'v mut Value) -> &'v mut Value;
}
impl Index for usize {
fn index_into<'v>(&self, v: &'v Value) -> Option<&'v Value> {
match *v {
Value::Array(ref vec) => vec.get(*self),
_ => None,
}
}
fn index_into_mut<'v>(&self, v: &'v mut Value) -> Option<&'v mut Value> {
match *v {
Value::Array(ref mut vec) => vec.get_mut(*self),
_ => None,
}
}
fn index_or_insert<'v>(&self, v: &'v mut Value) -> &'v mut Value {
match *v {
Value::Array(ref mut vec) => {
let len = vec.len();
vec.get_mut(*self).unwrap_or_else(|| {
panic!(
"cannot access index {} of JSON array of length {}",
self, len
)
})
}
_ => panic!("cannot access index {} of JSON {}", self, Type(v)),
}
}
}
impl Index for str {
fn index_into<'v>(&self, v: &'v Value) -> Option<&'v Value> {
match *v {
Value::Object(ref map) => map.get(self),
_ => None,
}
}
fn index_into_mut<'v>(&self, v: &'v mut Value) -> Option<&'v mut Value> {
match *v {
Value::Object(ref mut map) => map.get_mut(self),
_ => None,
}
}
fn index_or_insert<'v>(&self, v: &'v mut Value) -> &'v mut Value {
if let Value::Null = *v {
*v = Value::Object(Map::new());
}
match *v {
Value::Object(ref mut map) => map.entry(self.to_owned()).or_insert(Value::Null),
_ => panic!("cannot access key {:?} in JSON {}", self, Type(v)),
}
}
}
impl Index for String {
fn index_into<'v>(&self, v: &'v Value) -> Option<&'v Value> {
self[..].index_into(v)
}
fn index_into_mut<'v>(&self, v: &'v mut Value) -> Option<&'v mut Value> {<|fim▁hole|> fn index_or_insert<'v>(&self, v: &'v mut Value) -> &'v mut Value {
self[..].index_or_insert(v)
}
}
impl<'a, T> Index for &'a T
where
T: ?Sized + Index,
{
fn index_into<'v>(&self, v: &'v Value) -> Option<&'v Value> {
(**self).index_into(v)
}
fn index_into_mut<'v>(&self, v: &'v mut Value) -> Option<&'v mut Value> {
(**self).index_into_mut(v)
}
fn index_or_insert<'v>(&self, v: &'v mut Value) -> &'v mut Value {
(**self).index_or_insert(v)
}
}
// Prevent users from implementing the Index trait.
mod private {
pub trait Sealed {}
impl Sealed for usize {}
impl Sealed for str {}
impl Sealed for alloc::string::String {}
impl<'a, T> Sealed for &'a T where T: ?Sized + Sealed {}
}
/// Used in panic messages.
struct Type<'a>(&'a Value);
impl<'a> Display for Type<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match *self.0 {
Value::Null => formatter.write_str("null"),
Value::Bool(_) => formatter.write_str("boolean"),
Value::Number(_) => formatter.write_str("number"),
Value::String(_) => formatter.write_str("string"),
Value::Array(_) => formatter.write_str("array"),
Value::Object(_) => formatter.write_str("object"),
}
}
}
// The usual semantics of Index is to panic on invalid indexing.
//
// That said, the usual semantics are for things like Vec and BTreeMap which
// have different use cases than Value. If you are working with a Vec, you know
// that you are working with a Vec and you can get the len of the Vec and make
// sure your indices are within bounds. The Value use cases are more
// loosey-goosey. You got some JSON from an endpoint and you want to pull values
// out of it. Outside of this Index impl, you already have the option of using
// value.as_array() and working with the Vec directly, or matching on
// Value::Array and getting the Vec directly. The Index impl means you can skip
// that and index directly into the thing using a concise syntax. You don't have
// to check the type, you don't have to check the len, it is all about what you
// expect the Value to look like.
//
// Basically the use cases that would be well served by panicking here are
// better served by using one of the other approaches: get and get_mut,
// as_array, or match. The value of this impl is that it adds a way of working
// with Value that is not well served by the existing approaches: concise and
// careless and sometimes that is exactly what you want.
impl<I> ops::Index<I> for Value
where
I: Index,
{
type Output = Value;
/// Index into a `serde_json::Value` using the syntax `value[0]` or
/// `value["k"]`.
///
/// Returns `Value::Null` if the type of `self` does not match the type of
/// the index, for example if the index is a string and `self` is an array
/// or a number. Also returns `Value::Null` if the given key does not exist
/// in the map or the given index is not within the bounds of the array.
///
/// For retrieving deeply nested values, you should have a look at the
/// `Value::pointer` method.
///
/// # Examples
///
/// ```
/// # use serde_json::json;
/// #
/// let data = json!({
/// "x": {
/// "y": ["z", "zz"]
/// }
/// });
///
/// assert_eq!(data["x"]["y"], json!(["z", "zz"]));
/// assert_eq!(data["x"]["y"][0], json!("z"));
///
/// assert_eq!(data["a"], json!(null)); // returns null for undefined values
/// assert_eq!(data["a"]["b"], json!(null)); // does not panic
/// ```
fn index(&self, index: I) -> &Value {
static NULL: Value = Value::Null;
index.index_into(self).unwrap_or(&NULL)
}
}
impl<I> ops::IndexMut<I> for Value
where
I: Index,
{
/// Write into a `serde_json::Value` using the syntax `value[0] = ...` or
/// `value["k"] = ...`.
///
/// If the index is a number, the value must be an array of length bigger
/// than the index. Indexing into a value that is not an array or an array
/// that is too small will panic.
///
/// If the index is a string, the value must be an object or null which is
/// treated like an empty object. If the key is not already present in the
/// object, it will be inserted with a value of null. Indexing into a value
/// that is neither an object nor null will panic.
///
/// # Examples
///
/// ```
/// # use serde_json::json;
/// #
/// let mut data = json!({ "x": 0 });
///
/// // replace an existing key
/// data["x"] = json!(1);
///
/// // insert a new key
/// data["y"] = json!([false, false, false]);
///
/// // replace an array value
/// data["y"][0] = json!(true);
///
/// // inserted a deeply nested key
/// data["a"]["b"]["c"]["d"] = json!(true);
///
/// println!("{}", data);
/// ```
fn index_mut(&mut self, index: I) -> &mut Value {
index.index_or_insert(self)
}
}<|fim▁end|>
|
self[..].index_into_mut(v)
}
|
<|file_name|>test_qgslayoutview.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutView.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '05/07/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.core import (QgsProject,
QgsLayout,
QgsUnitTypes,
QgsLayoutItemPicture,
QgsLayoutPoint,
QgsLayoutSize,
QgsLayoutAligner)
from qgis.gui import QgsLayoutView
from qgis.PyQt.QtCore import QRectF
from qgis.PyQt.QtGui import QTransform
from qgis.PyQt.QtTest import QSignalSpy
from qgis.testing import start_app, unittest
start_app()
class TestQgsLayoutView(unittest.TestCase):
def testScaleSafe(self):
""" test scaleSafe method """
view = QgsLayoutView()
view.fitInView(QRectF(0, 0, 10, 10))
scale = view.transform().m11()
view.scaleSafe(2)
self.assertAlmostEqual(view.transform().m11(), 2)
view.scaleSafe(4)
self.assertAlmostEqual(view.transform().m11(), 8)
# try to zoom in heaps
view.scaleSafe(99999999)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(2)
self.assertAlmostEqual(view.transform().m11(), scale)
view.setTransform(QTransform.fromScale(1, 1))
self.assertAlmostEqual(view.transform().m11(), 1)
# test zooming out
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), 0.5)
view.scaleSafe(0.1)
self.assertAlmostEqual(view.transform().m11(), 0.05)
# try zooming out heaps
view.scaleSafe(0.000000001)
# assume we have hit the limit
scale = view.transform().m11()
view.scaleSafe(0.5)
self.assertAlmostEqual(view.transform().m11(), scale)
def testLayoutScalePixels(self):
p = QgsProject()
l = QgsLayout(p)
l.setUnits(QgsUnitTypes.LayoutPixels)
view = QgsLayoutView()
view.setCurrentLayout(l)
view.setZoomLevel(1)
# should be no transform, since 100% with pixel units should be pixel-pixel
self.assertEqual(view.transform().m11(), 1)
view.setZoomLevel(0.5)
self.assertEqual(view.transform().m11(), 0.5)
def testSelectAll(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.selectAll()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
view.selectAll()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
self.assertEqual(len(focused_item_spy), 1)
item3.setSelected(True) # locked item selection should be cleared
view.selectAll()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
def testDeselectAll(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.deselectAll()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
view.deselectAll()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 1)
item1.setSelected(True)
item2.setSelected(True)
item3.setSelected(True)
view.deselectAll()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
def testInvertSelection(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.invertSelection()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
view.invertSelection()
self.assertTrue(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
self.assertEqual(len(focused_item_spy), 1)
item3.setSelected(True) # locked item selection should be cleared
view.invertSelection()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected()) # locked
def testSelectNextByZOrder(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.setLocked(True)
l.addItem(item3)
view = QgsLayoutView()
# no layout, no crash
view.selectNextItemAbove()
view.selectNextItemBelow()
view.setCurrentLayout(l)
focused_item_spy = QSignalSpy(view.itemFocused)
# no selection
view.selectNextItemAbove()
view.selectNextItemBelow()
self.assertEqual(len(focused_item_spy), 0)
l.setSelectedItem(item1)
self.assertEqual(len(focused_item_spy), 1)
# already bottom most
view.selectNextItemBelow()
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 1)
view.selectNextItemAbove()
self.assertFalse(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 2)
view.selectNextItemAbove()
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertTrue(item3.isSelected())
self.assertEqual(len(focused_item_spy), 3)
view.selectNextItemAbove() # already top most
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertTrue(item3.isSelected())
self.assertEqual(len(focused_item_spy), 3)
view.selectNextItemBelow()
self.assertFalse(item1.isSelected())
self.assertTrue(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 4)
view.selectNextItemBelow()
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 5)
view.selectNextItemBelow() # back to bottom most
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
self.assertEqual(len(focused_item_spy), 5)
def testLockActions(self):
p = QgsProject()
l = QgsLayout(p)
view = QgsLayoutView()
view.setCurrentLayout(l)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
l.addItem(item3)
item1.setLocked(True)
item3.setLocked(True)
self.assertTrue(item1.isLocked())
self.assertFalse(item2.isLocked())
self.assertTrue(item3.isLocked())
view.unlockAllItems()
self.assertFalse(item1.isLocked())
self.assertFalse(item2.isLocked())
self.assertFalse(item3.isLocked())
self.assertTrue(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertTrue(item3.isSelected())
view.lockSelectedItems()
self.assertTrue(item1.isLocked())
self.assertFalse(item2.isLocked())
self.assertTrue(item3.isLocked())
self.assertFalse(item1.isSelected())
self.assertFalse(item2.isSelected())
self.assertFalse(item3.isSelected())
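    # zValue() ordering: larger values stack above smaller ones; the
    # raise/lower/top/bottom operations below reshuffle three pictures.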
def testStacking(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
l.addLayoutItem(item1)
item2 = QgsLayoutItemPicture(l)
l.addLayoutItem(item2)
item3 = QgsLayoutItemPicture(l)
l.addLayoutItem(item3)
view = QgsLayoutView()
view.setCurrentLayout(l)
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 2)
self.assertEqual(item3.zValue(), 3)
# no effect interactions
view.raiseSelectedItems()
view.lowerSelectedItems()
view.moveSelectedItemsToTop()
view.moveSelectedItemsToBottom()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 2)
self.assertEqual(item3.zValue(), 3)
# raising
item3.setSelected(True)
view.raiseSelectedItems()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 2)
self.assertEqual(item3.zValue(), 3)
item3.setSelected(False)
item2.setSelected(True)
view.raiseSelectedItems()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 2)
view.raiseSelectedItems()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 2)
item2.setSelected(False)
item1.setSelected(True)
view.raiseSelectedItems()
self.assertEqual(item1.zValue(), 2)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 1)
# lower
item1.setSelected(False)
item3.setSelected(True)
view.lowerSelectedItems()
self.assertEqual(item1.zValue(), 2)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 1)
item3.setSelected(False)
item2.setSelected(True)
view.lowerSelectedItems()
self.assertEqual(item1.zValue(), 3)
self.assertEqual(item2.zValue(), 2)
self.assertEqual(item3.zValue(), 1)
view.lowerSelectedItems()
self.assertEqual(item1.zValue(), 3)
self.assertEqual(item2.zValue(), 1)
self.assertEqual(item3.zValue(), 2)
# raise to top
item2.setSelected(False)
item1.setSelected(True)
view.moveSelectedItemsToTop()
self.assertEqual(item1.zValue(), 3)
self.assertEqual(item2.zValue(), 1)
self.assertEqual(item3.zValue(), 2)
item1.setSelected(False)
item3.setSelected(True)
view.moveSelectedItemsToTop()
self.assertEqual(item1.zValue(), 2)
self.assertEqual(item2.zValue(), 1)
self.assertEqual(item3.zValue(), 3)
item3.setSelected(False)
item2.setSelected(True)
view.moveSelectedItemsToTop()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 2)
# move to bottom
item2.setSelected(False)
item1.setSelected(True)
view.moveSelectedItemsToBottom()
self.assertEqual(item1.zValue(), 1)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 2)
item1.setSelected(False)
item3.setSelected(True)
view.moveSelectedItemsToBottom()
self.assertEqual(item1.zValue(), 2)
self.assertEqual(item2.zValue(), 3)
self.assertEqual(item3.zValue(), 1)
item3.setSelected(False)
item2.setSelected(True)
view.moveSelectedItemsToBottom()
self.assertEqual(item1.zValue(), 3)
self.assertEqual(item2.zValue(), 1)
self.assertEqual(item3.zValue(), 2)
def testAlign(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
item1.attemptMove(QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
item1.attemptResize(QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
item2.attemptMove(QgsLayoutPoint(6, 10, QgsUnitTypes.LayoutMillimeters))
item2.attemptResize(QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.attemptMove(QgsLayoutPoint(0.8, 1.2, QgsUnitTypes.LayoutCentimeters))
item3.attemptResize(QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
l.addItem(item3)
view = QgsLayoutView()
view.setCurrentLayout(l)
view.alignSelectedItems(QgsLayoutAligner.AlignLeft)
item1.setSelected(True)
item2.setSelected(True)
item3.setSelected(True)
view.alignSelectedItems(QgsLayoutAligner.AlignLeft)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(4, 10, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 1.2, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.alignSelectedItems(QgsLayoutAligner.AlignHCenter)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(8, 10, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 1.2, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.alignSelectedItems(QgsLayoutAligner.AlignRight)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(12, 10, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 1.2, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.alignSelectedItems(QgsLayoutAligner.AlignTop)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(12, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 0.8, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.alignSelectedItems(QgsLayoutAligner.AlignVCenter)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 10, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(12, 11.5, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 0.8, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.alignSelectedItems(QgsLayoutAligner.AlignBottom)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.positionWithUnits(), QgsLayoutPoint(12, 15, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.positionWithUnits(), QgsLayoutPoint(0.4, 0.8, QgsUnitTypes.LayoutCentimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
def testDistribute(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
item1.attemptMove(QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
item1.attemptResize(QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
item2.attemptMove(QgsLayoutPoint(7, 10, QgsUnitTypes.LayoutMillimeters))
item2.attemptResize(QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.attemptMove(QgsLayoutPoint(0.8, 1.2, QgsUnitTypes.LayoutCentimeters))
item3.attemptResize(QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
l.addItem(item3)
view = QgsLayoutView()
view.setCurrentLayout(l)
view.distributeSelectedItems(QgsLayoutAligner.DistributeLeft)
item1.setSelected(True)
item2.setSelected(True)
item3.setSelected(True)
view.distributeSelectedItems(QgsLayoutAligner.DistributeLeft)
self.assertEqual(item1.positionWithUnits(), QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().x(), 6.0, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().x(), 0.8, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.distributeSelectedItems(QgsLayoutAligner.DistributeHCenter)
self.assertAlmostEqual(item1.positionWithUnits().x(), 5.0, 3)
self.assertEqual(item1.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().x(), 6.0, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().x(), 0.8, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.distributeSelectedItems(QgsLayoutAligner.DistributeRight)
self.assertAlmostEqual(item1.positionWithUnits().x(), 3.0, 3)
self.assertEqual(item1.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().x(), 6.0, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().x(), 0.8, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.distributeSelectedItems(QgsLayoutAligner.DistributeTop)
self.assertAlmostEqual(item1.positionWithUnits().y(), 8.0, 3)
self.assertEqual(item1.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().y(), 10.0, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().y(), 1.2, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.distributeSelectedItems(QgsLayoutAligner.DistributeVCenter)
self.assertAlmostEqual(item1.positionWithUnits().y(), 8.0, 3)
self.assertEqual(item1.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().y(), 12.5, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().y(), 1.2, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
view.distributeSelectedItems(QgsLayoutAligner.DistributeBottom)
self.assertAlmostEqual(item1.positionWithUnits().y(), 8.0, 3)
self.assertEqual(item1.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item2.positionWithUnits().y(), 15.0, 3)
self.assertEqual(item2.positionWithUnits().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertAlmostEqual(item3.positionWithUnits().y(), 1.2, 3)
self.assertEqual(item3.positionWithUnits().units(), QgsUnitTypes.LayoutCentimeters)
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
def testResize(self):
p = QgsProject()
l = QgsLayout(p)
# add some items
item1 = QgsLayoutItemPicture(l)
item1.attemptMove(QgsLayoutPoint(4, 8, QgsUnitTypes.LayoutMillimeters))
item1.attemptResize(QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
l.addItem(item1)
item2 = QgsLayoutItemPicture(l)
item2.attemptMove(QgsLayoutPoint(7, 10, QgsUnitTypes.LayoutMillimeters))
item2.attemptResize(QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
l.addItem(item2)
item3 = QgsLayoutItemPicture(l)
item3.attemptMove(QgsLayoutPoint(0.8, 1.2, QgsUnitTypes.LayoutCentimeters))
item3.attemptResize(QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
l.addItem(item3)
view = QgsLayoutView()
view.setCurrentLayout(l)
view.resizeSelectedItems(QgsLayoutAligner.ResizeNarrowest)
item1.setSelected(True)
item2.setSelected(True)
item3.setSelected(True)
view.resizeSelectedItems(QgsLayoutAligner.ResizeNarrowest)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(10, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.0, 1.6, QgsUnitTypes.LayoutCentimeters))
l.undoStack().stack().undo()
view.resizeSelectedItems(QgsLayoutAligner.ResizeWidest)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 12, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(18, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
l.undoStack().stack().undo()
view.resizeSelectedItems(QgsLayoutAligner.ResizeShortest)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 9, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 0.9, QgsUnitTypes.LayoutCentimeters))
l.undoStack().stack().undo()
view.resizeSelectedItems(QgsLayoutAligner.ResizeTallest)<|fim▁hole|> l.undoStack().stack().undo()
item2.attemptResize(QgsLayoutSize(10, 19, QgsUnitTypes.LayoutMillimeters))
view.resizeSelectedItems(QgsLayoutAligner.ResizeToSquare)
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 18, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(19, 19, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.8, QgsUnitTypes.LayoutCentimeters))
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
self.assertEqual(item1.sizeWithUnits(), QgsLayoutSize(18, 16, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item2.sizeWithUnits(), QgsLayoutSize(10, 16, QgsUnitTypes.LayoutMillimeters))
self.assertEqual(item3.sizeWithUnits(), QgsLayoutSize(1.8, 1.6, QgsUnitTypes.LayoutCentimeters))
|
<|file_name|>climatechange.rsds.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
########################################
#Globale Karte fuer tests
# from Rabea Amther
########################################<|fim▁hole|>import pylab as pl
import Scientific.IO.NetCDF as IO
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import matplotlib.lines as lines
from mpl_toolkits.basemap import Basemap , addcyclic
from matplotlib.colors import LinearSegmentedColormap
import textwrap
pl.close('all')
########################## for CMIP5 charactors
DIR='~/climate/CMIP5/rcp85/SWIO'
VARIABLE='rsds'
PRODUCT='Amon'
ENSEMBLE='r1i1p1'
AbsTemp=273.15
RefTemp=5
CRUmean=8.148 #1900-2100 land
TargetModel=[\
'CanESM2',\
'CNRM-CM5',\
'CNRM-CM5',\
'CSIRO-Mk3-6-0',\
'EC-EARTH',\
'EC-EARTH',\
'EC-EARTH',\
'EC-EARTH',\
'IPSL-CM5A-MR',\
'MIROC5',\
'HadGEM2-ES',\
'HadGEM2-ES',\
'HadGEM2-ES',\
'MPI-ESM-LR',\
'MPI-ESM-LR',\
'NorESM1-M',\
'GFDL-ESM2M',\
]
RCMs=[
'rsds_AFR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_CNRM-CERFACS-CNRM-CM5_rcp85_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_200601-210012.nc',\
'rsds_AFR-44_CNRM-CERFACS-CNRM-CM5_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_CSIRO-QCCCE-CSIRO-Mk3-6-0_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_ICHEC-EC-EARTH_rcp85_r12i1p1_CLMcom-CCLM4-8-17_v1_mon_200601-210012.nc',\
'rsds_AFR-44_ICHEC-EC-EARTH_rcp85_r12i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_ICHEC-EC-EARTH_rcp85_r1i1p1_KNMI-RACMO22T_v1_mon_200601-210012.nc',\
'rsds_AFR-44_ICHEC-EC-EARTH_rcp85_r3i1p1_DMI-HIRHAM5_v2_mon_200601-210012.nc',\
'rsds_AFR-44_IPSL-IPSL-CM5A-MR_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MIROC-MIROC5_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MOHC-HadGEM2-ES_rcp85_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MOHC-HadGEM2-ES_rcp85_r1i1p1_KNMI-RACMO22T_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MOHC-HadGEM2-ES_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MPI-M-MPI-ESM-LR_rcp85_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_200601-210012.nc',\
'rsds_AFR-44_MPI-M-MPI-ESM-LR_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_NCC-NorESM1-M_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
'rsds_AFR-44_NOAA-GFDL-GFDL-ESM2M_rcp85_r1i1p1_SMHI-RCA4_v1_mon_200601-210012.nc',\
]
GCMs=[
'CanESM2',\
'CNRM-CM5',\
'CNRM-CM5',\
'CSIRO-Mk3-6-0',\
'EC-EARTH',\
'EC-EARTH',\
'EC-EARTH',\
'EC-EARTH',\
'IPSL-CM5A-MR',\
'MIROC5',\
'HadGEM2-ES',\
'HadGEM2-ES',\
'HadGEM2-ES',\
'MPI-ESM-LR',\
'MPI-ESM-LR',\
'NorESM1-M',\
'GFDL-ESM2M',\
]
COLORtar=['darkred','black','deeppink','orange',\
'orangered','yellow','gold','brown','chocolate',\
'green','yellowgreen','aqua','olive','teal',\
'blue','purple','darkmagenta','fuchsia','indigo',\
'dimgray','black','navy']
COLORall=['darkred','darkblue','darkgreen','deeppink',\
'red','blue','green','pink','gold',\
'lime','lightcyan','orchid','yellow','lightsalmon',\
'brown','khaki','aquamarine','yellowgreen','blueviolet',\
'snow','skyblue','slateblue','orangered','dimgray',\
'chocolate','teal','mediumvioletred','gray','cadetblue',\
'mediumorchid','bisque','tomato','hotpink','firebrick',\
'Chartreuse','purple','goldenrod',\
'black','orangered','cyan','magenta']
linestyles=['_', '_', '_', '-', '-',\
'-', '--','--','--', '--',\
'_', '_','_','_',\
'_', '_','_','_',\
'_', '-', '--', ':','_', '-', '--', ':','_', '-', '--', ':','_', '-', '--', ':']
#================================================ CMIP5 models
# for rcp8.5
#=================================================== define the Plot:
fig1=plt.figure(figsize=(16,9))
ax = fig1.add_subplot(111)
plt.xlabel('Year',fontsize=16)
plt.ylabel('Surface Downwelling shortwave flux Change (W m-2)',fontsize=16)
plt.title("Surface Downwelling shortwave flux Change (W m-2) in AFRICA simulated by CMIP5 models",\
fontsize=18)
plt.ylim(-5,5)
plt.xlim(1980,2100)
plt.grid()
plt.xticks(np.arange(1960, 2100+10, 20))
plt.tick_params(axis='both', which='major', labelsize=14)
plt.tick_params(axis='both', which='minor', labelsize=14)
# vertical at 2005
plt.axvline(x=2005.5,linewidth=2, color='gray')
plt.axhline(y=0,linewidth=2, color='gray')
#plt.plot(x,y,color="blue",linewidth=4)
########################## for rcp8.5:
########################## for rcp8.5:
print "========== for hist ==============="
GCMsDir='/Users/tang/climate/CMIP5/rcp85/AFRICA'
EXPERIMENT='rcp85'
TIME='200601-210012'
YEAR=range(2006,2101)
Nmonth=1140
SumTemp=np.zeros(Nmonth/12)
K=0
for Model in GCMs:
#define the K-th model input file:
K=K+1 # for average
infile1=DIR+'/'\
+VARIABLE+'_'+PRODUCT+'_'+Model+'_'+EXPERIMENT+'_r1i1p1'+'_'+TIME+'.nc.SWIO.nc'
#rsds_Amon_MPI-ESM-LR_rcp85_r1i1p1_200601-210012.nc.SWIO.nc
print('the file is == ' +infile1)
#open input files
infile=IO.NetCDFFile(infile1,'r')
# read the variable tas
TAS=infile.variables[VARIABLE][:,:,:].copy()
print 'the variable tas ===============: '
print TAS
# calculate the annual mean temp:
TEMP=range(0,Nmonth,12)
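    # preallocate one slot per year (Nmonth monthly values -> Nmonth/12 annual means)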
for j in range(0,Nmonth,12):
        TEMP[j/12]=np.mean(TAS[j:j+12,:,:])-AbsTemp  # mean over the full 12 months
print " temp ======================== absolut"
print TEMP
    # reference temp: mean of the first five years (2006-2010)
RefTemp=np.mean(TEMP[0:5])
if K==1:
ArrRefTemp=[RefTemp]
else:
ArrRefTemp=ArrRefTemp+[RefTemp]
print 'ArrRefTemp ========== ',ArrRefTemp
TEMP=[t-RefTemp for t in TEMP]
print " temp ======================== relative to mean of 1986-2005"
print TEMP
# get array of temp K*TimeStep
if K==1:
ArrTemp=[TEMP]
else:
ArrTemp=ArrTemp+[TEMP]
SumTemp=SumTemp+TEMP
#print SumTemp
#=================================================== to plot
print "======== to plot =========="
print len(TEMP)
print 'NO. of year:',len(YEAR)
#plot only target models
if Model in TargetModel:
plt.plot(YEAR,TEMP,label=Model,\
#linestyles[TargetModel.index(Model)],\
color=COLORtar[TargetModel.index(Model)],linewidth=2)
#if Model=='CanESM2':
#plt.plot(YEAR,TEMP,color="red",linewidth=1)
#if Model=='MPI-ESM-LR':
#plt.plot(YEAR,TEMP,color="blue",linewidth=1)
#if Model=='MPI-ESM-MR':
#plt.plot(YEAR,TEMP,color="green",linewidth=1)
#=================================================== for ensemble mean
AveTemp=[e/K for e in SumTemp]
ArrTemp=list(np.array(ArrTemp))
print 'shape of ArrTemp:',np.shape(ArrTemp)
StdTemp=np.std(np.array(ArrTemp),axis=0)
print 'shape of StdTemp:',np.shape(StdTemp)
print "ArrTemp ========================:"
print ArrTemp
print "StdTemp ========================:"
print StdTemp
# 5-95% range ( +-1.64 STD)
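# (1.64 is approximately the z-score bounding the central 90% of a normal distribution)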
StdTemp1=[AveTemp[i]+StdTemp[i]*1.64 for i in range(0,len(StdTemp))]
StdTemp2=[AveTemp[i]-StdTemp[i]*1.64 for i in range(0,len(StdTemp))]
print "Model number for historical is :",K
print "models for historical:";print modelist1
plt.plot(YEAR,AveTemp,label=' mean',color="red",linewidth=4)
plt.plot(YEAR,StdTemp1,color="black",linewidth=0.1)
plt.plot(YEAR,StdTemp2,color="black",linewidth=0.1)
plt.fill_between(YEAR,StdTemp1,StdTemp2,color='black',alpha=0.3)
# draw NO. of model used:
plt.text(1980,-2,str(K)+' models',size=16,rotation=0.,
ha="center",va="center",
#bbox = dict(boxstyle="round",
#ec=(1., 0.5, 0.5),
#fc=(1., 0.8, 0.8),
)
plt.legend(loc=2)
plt.show()
quit()
########################## for rcp8.5:
########################## for rcp8.5:<|fim▁end|>
|
# http://gfesuite.noaa.gov/developer/netCDFPythonInterface.html
import math
import numpy as np
|
<|file_name|>updatetickets.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging<|fim▁hole|>logger = logging.getLogger(__name__)
import requests
import os
from django.core.management.base import NoArgsCommand
from apps.subscribers.models import Ticket
class Command(NoArgsCommand):
help = 'Loops through all subscribers and marks each ticket appropriately.'
def handle_noargs(self, **options):
# Prepare our request.
headers = {
'Authorization': 'OAuth %s' % os.environ.get('TWITCH_OAUTH_TOKEN'),
'Accept': 'application/vnd.twitchtv.v3+json' }
url = 'https://api.twitch.tv/kraken/channels/avalonstar/subscriptions'
# Let's not invalidate anything unnecessarily. If we hit an exception
# with the first request, then bail.
try:
r = requests.get(url, headers=headers)
except requests.exceptions.RequestException as e:
logger.exception(e)
            return  # bail out; without a response there is nothing to process
        # Rather than mark active tickets as inactive, mark all tickets as
        # inactive. As we loop through the Twitch API below, we'll mark each
        # subscriber we find as active again.
Ticket.objects.invalidate_tickets()
count = r.json().get('_total') # Total number of tickets.
limit = 100 # Maximum number of tickets we can fetch at once.
while url:
# To keep our dyno-hour usage down, we have to make sure that
# requests aren't hung up. So try the request and if a `Timeout`
# is thrown, bail.
try:
response = requests.get(url, headers=headers, params={'limit': limit}, timeout=1)
except requests.exceptions.RequestException as e:
logger.exception(e)
break
data = response.json()
tickets = data['subscriptions']
# The Twitch API doesn't stop offering `next` URLs when no results
# are available. So if we don't have tickets, shut it down.
if not tickets:
break
# We have tickets. Let's check each ticket and mark if that person
# as active if their ticket still exists in Twitch's API.
for ticket in tickets:
name = ticket['user']['name']
updates = {
'display_name': ticket['user']['display_name'],
'is_active': True,
'updated': ticket['created_at'],
'twid': ticket['_id'] }
t, created = Ticket.objects.update_or_create(name=name, defaults=updates)
# Done. Grab `next` and keep looping.
url = data['_links']['next']<|fim▁end|>
| |
<|file_name|>table-state-storage.spec.ts<|end_file_name|><|fim▁begin|>import { fakeAsync, tick } from '@angular/core/testing';
import { expect } from 'chai';
import { TableState } from './table-state';
import { TableStateStorage } from './table-state-storage';
describe('TableStateStorage', () => {
let store: TableStateStorage;
beforeEach(() => {
store = new TableStateStorage();
});
describe('update', () => {
it('should return a new TableState based on the previous state', () => {
expect(store.update({ page: 4 })).to.deep.equal(new TableState({
page: 4,
pageSize: undefined,
filters: undefined,
sort: undefined
}));
expect(store.update({ pageSize: 20 })).to.deep.equal(new TableState({
page: 4,
pageSize: 20,
filters: undefined,
sort: undefined
}));
});
});
describe('change', () => {
it('should only emit distinct values', fakeAsync(() => {
const emitted: TableState[] = [];
store.change.subscribe((c) => emitted.push(c));
store.update({ pageSize: 10 });
store.update({});<|fim▁hole|>
tick();
expect(emitted).to.deep.equal([
new TableState({}),
new TableState({
pageSize: 10
}),
]);
}));
});
});<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Home Assistant Switcher Component."""
from asyncio import QueueEmpty, TimeoutError as Asyncio_TimeoutError, wait_for
from datetime import datetime, timedelta
import logging
from typing import Dict, Optional
from aioswitcher.api import SwitcherV2Api
from aioswitcher.bridge import SwitcherV2Bridge
from aioswitcher.consts import COMMAND_ON
import voluptuous as vol
from homeassistant.auth.permissions.const import POLICY_EDIT
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback, split_entity_id
from homeassistant.exceptions import Unauthorized, UnknownUser
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import async_listen_platform, async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import (
ContextType,
DiscoveryInfoType,
EventType,
HomeAssistantType,
ServiceCallType,
)
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
DOMAIN = "switcher_kis"
CONF_AUTO_OFF = "auto_off"
CONF_TIMER_MINUTES = "timer_minutes"
CONF_DEVICE_ID = "device_id"
CONF_DEVICE_PASSWORD = "device_password"
CONF_PHONE_ID = "phone_id"
DATA_DEVICE = "device"
SIGNAL_SWITCHER_DEVICE_UPDATE = "switcher_device_update"
ATTR_AUTO_OFF_SET = "auto_off_set"
ATTR_ELECTRIC_CURRENT = "electric_current"
ATTR_REMAINING_TIME = "remaining_time"
CONFIG_SCHEMA = vol.Schema(<|fim▁hole|> {
DOMAIN: vol.Schema(
{
vol.Required(CONF_PHONE_ID): cv.string,
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Required(CONF_DEVICE_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
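# Example configuration.yaml entry matching CONFIG_SCHEMA (sketch; the values
# are placeholders, not real credentials):
#
#   switcher_kis:
#     phone_id: "1234"
#     device_id: "ab1c2d"
#     device_password: "12345678"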
SERVICE_SET_AUTO_OFF_NAME = "set_auto_off"
SERVICE_SET_AUTO_OFF_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(CONF_AUTO_OFF): cv.time_period_str,
}
)
SERVICE_TURN_ON_WITH_TIMER_NAME = "turn_on_with_timer"
SERVICE_TURN_ON_WITH_TIMER_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TIMER_MINUTES): vol.All(
cv.positive_int, vol.Range(min=1, max=90)
),
}
)
@bind_hass
async def _validate_edit_permission(
hass: HomeAssistantType, context: ContextType, entity_id: str
) -> None:
"""Use for validating user control permissions."""
    domain, object_id = split_entity_id(entity_id)
    if domain != SWITCH_DOMAIN or not object_id.startswith(DOMAIN):
raise Unauthorized(context=context, entity_id=entity_id, permission=POLICY_EDIT)
user = await hass.auth.async_get_user(context.user_id)
if user is None:
raise UnknownUser(context=context, entity_id=entity_id, permission=POLICY_EDIT)
if not user.permissions.check_entity(entity_id, POLICY_EDIT):
raise Unauthorized(context=context, entity_id=entity_id, permission=POLICY_EDIT)
async def async_setup(hass: HomeAssistantType, config: Dict) -> bool:
"""Set up the switcher component."""
phone_id = config[DOMAIN][CONF_PHONE_ID]
device_id = config[DOMAIN][CONF_DEVICE_ID]
device_password = config[DOMAIN][CONF_DEVICE_PASSWORD]
v2bridge = SwitcherV2Bridge(hass.loop, phone_id, device_id, device_password)
await v2bridge.start()
async def async_stop_bridge(event: EventType) -> None:
"""On Home Assistant stop, gracefully stop the bridge if running."""
await v2bridge.stop()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_stop_bridge)
try:
device_data = await wait_for(v2bridge.queue.get(), timeout=10.0)
except (Asyncio_TimeoutError, RuntimeError):
_LOGGER.exception("Failed to get response from device")
await v2bridge.stop()
return False
hass.data[DOMAIN] = {DATA_DEVICE: device_data}
async def async_switch_platform_discovered(
platform: str, discovery_info: DiscoveryInfoType
) -> None:
"""Use for registering services after switch platform is discovered."""
if platform != DOMAIN:
return
async def async_set_auto_off_service(service: ServiceCallType) -> None:
"""Use for handling setting device auto-off service calls."""
await _validate_edit_permission(
hass, service.context, service.data[ATTR_ENTITY_ID]
)
async with SwitcherV2Api(
hass.loop, device_data.ip_addr, phone_id, device_id, device_password
) as swapi:
await swapi.set_auto_shutdown(service.data[CONF_AUTO_OFF])
async def async_turn_on_with_timer_service(service: ServiceCallType) -> None:
"""Use for handling turning device on with a timer service calls."""
await _validate_edit_permission(
hass, service.context, service.data[ATTR_ENTITY_ID]
)
async with SwitcherV2Api(
hass.loop, device_data.ip_addr, phone_id, device_id, device_password
) as swapi:
await swapi.control_device(COMMAND_ON, service.data[CONF_TIMER_MINUTES])
hass.services.async_register(
DOMAIN,
SERVICE_SET_AUTO_OFF_NAME,
async_set_auto_off_service,
schema=SERVICE_SET_AUTO_OFF_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_TURN_ON_WITH_TIMER_NAME,
async_turn_on_with_timer_service,
schema=SERVICE_TURN_ON_WITH_TIMER_SCHEMA,
)
async_listen_platform(hass, SWITCH_DOMAIN, async_switch_platform_discovered)
hass.async_create_task(async_load_platform(hass, SWITCH_DOMAIN, DOMAIN, {}, config))
@callback
def device_updates(timestamp: Optional[datetime]) -> None:
"""Use for updating the device data from the queue."""
if v2bridge.running:
try:
device_new_data = v2bridge.queue.get_nowait()
if device_new_data:
async_dispatcher_send(
hass, SIGNAL_SWITCHER_DEVICE_UPDATE, device_new_data
)
except QueueEmpty:
pass
async_track_time_interval(hass, device_updates, timedelta(seconds=4))
return True<|fim▁end|>
| |
<|file_name|>job.py<|end_file_name|><|fim▁begin|># Created By: Virgil Dupras
# Created On: 2004/12/20
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
class JobCancelled(Exception):
"The user has cancelled the job"
class JobInProgressError(Exception):
"A job is already being performed, you can't perform more than one at the same time."
class JobCountError(Exception):
"The number of jobs started have exceeded the number of jobs allowed"
class Job:
"""Manages a job's progression and return it's progression through a callback.
Note that this class is not foolproof. For example, you could call
start_subjob, and then call add_progress from the parent job, and nothing
    would stop you from doing it. However, it would mess up your progression,
    because it is the sub job that is supposed to drive the progression.
    Another example would be to start a subjob, then start another, and call
    add_progress from the old subjob. Once again, it would mess up your progression.
    There are no guards against this because they would remove the lightweight
    aspect of the class (a Job would need to have a parent instead of just a
    callback, and the parent could be None: a lot of checks for nothing).
Another one is that nothing stops you from calling add_progress right after
SkipJob.
"""
#---Magic functions
def __init__(self, job_proportions, callback):
"""Initialize the Job with 'jobcount' jobs. Start every job with
start_job(). Every time the job progress is updated, 'callback' is called
'callback' takes a 'progress' int param, and a optional 'desc'
parameter. Callback must return false if the job must be cancelled.
"""
if not hasattr(callback, '__call__'):
raise TypeError("'callback' MUST be set when creating a Job")
if isinstance(job_proportions, int):
job_proportions = [1] * job_proportions
self._job_proportions = list(job_proportions)
self._jobcount = sum(job_proportions)
self._callback = callback
self._current_job = 0
self._passed_jobs = 0
self._progress = 0
self._currmax = 1
#---Private
def _subjob_callback(self, progress, desc=''):
"""This is the callback passed to children jobs.
"""
self.set_progress(progress, desc)
return True #if JobCancelled has to be raised, it will be at the highest level
def _do_update(self, desc):
"""Calls the callback function with a % progress as a parameter.
The parameter is a int in the 0-100 range.
"""
if self._current_job:
passed_progress = self._passed_jobs * self._currmax
current_progress = self._current_job * self._progress
total_progress = self._jobcount * self._currmax
progress = ((passed_progress + current_progress) * 100) // total_progress
else:
progress = -1 # indeterminate
# It's possible that callback doesn't support a desc arg
result = self._callback(progress, desc) if desc else self._callback(progress)
if not result:
raise JobCancelled()
#---Public
def add_progress(self, progress=1, desc=''):
self.set_progress(self._progress + progress, desc)
def check_if_cancelled(self):
self._do_update('')
def iter_with_progress(self, sequence, desc_format=None, every=1):
''' Iterate through sequence while automatically adding progress.
'''
desc = ''
if desc_format:
desc = desc_format % (0, len(sequence))
self.start_job(len(sequence), desc)
for i, element in enumerate(sequence, start=1):
yield element
if i % every == 0:
if desc_format:
desc = desc_format % (i, len(sequence))
self.add_progress(progress=every, desc=desc)
if desc_format:
desc = desc_format % (len(sequence), len(sequence))
self.set_progress(100, desc)
def start_job(self, max_progress=100, desc=''):
"""Begin work on the next job. You must not call start_job more than
'jobcount' (in __init__) times.
        'max_progress' is the number of job units to be performed.
'desc' is the description of the job.<|fim▁hole|> try:
self._current_job = self._job_proportions.pop(0)
except IndexError:
raise JobCountError()
self._progress = 0
self._currmax = max(1, max_progress)
self._do_update(desc)
def start_subjob(self, job_proportions, desc=''):
"""Starts a sub job. Use this when you want to split a job into
multiple smaller jobs. Pretty handy when starting a process where you
know how many subjobs you will have, but don't know the work unit count
for every of them.
returns the Job object
"""
self.start_job(100, desc)
return Job(job_proportions, self._subjob_callback)
def set_progress(self, progress, desc=''):
"""Sets the progress of the current job to 'progress', and call the
callback
"""
self._progress = progress
if self._progress > self._currmax:
self._progress = self._currmax
if self._progress < 0:
self._progress = 0
self._do_update(desc)
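# Minimal usage sketch (illustrative only; 'report' and the 1:2 job split are
# made up for this example, not part of the module):
#
#   def report(progress, desc=''):
#       print(progress, desc)
#       return True  # a falsy return value raises JobCancelled
#
#   job = Job([1, 2], report)        # two jobs, weighted 1:2
#   job.start_job(10, 'first phase')
#   job.add_progress(5)              # overall progress so far: ~16%
#   job.start_job(100, 'second phase')
#   job.set_progress(100)            # overall progress: 100%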
class NullJob:
def __init__(self, *args, **kwargs):
pass
def add_progress(self, *args, **kwargs):
pass
def check_if_cancelled(self):
pass
def iter_with_progress(self, sequence, *args, **kwargs):
return iter(sequence)
def start_job(self, *args, **kwargs):
pass
def start_subjob(self, *args, **kwargs):
return NullJob()
def set_progress(self, *args, **kwargs):
pass
nulljob = NullJob()<|fim▁end|>
|
"""
self._passed_jobs += self._current_job
|
<|file_name|>CuotaServiceImpl.java<|end_file_name|><|fim▁begin|>/**
*
*/
package co.innovate.rentavoz.services.almacen.impl;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import co.innovate.rentavoz.model.Tercero;
import co.innovate.rentavoz.model.almacen.Cuota;
import co.innovate.rentavoz.model.almacen.venta.Venta;
import co.innovate.rentavoz.repositories.GenericRepository;
import co.innovate.rentavoz.repositories.almacen.CuotaDao;
import co.innovate.rentavoz.services.almacen.CuotaService;
import co.innovate.rentavoz.services.impl.GenericServiceImpl;
/**<|fim▁hole|> * @project rentavoz3
* @class CuotaServiceImpl
* @date 7/02/2014
*
*/
@Service("cuotaService")
public class CuotaServiceImpl extends GenericServiceImpl<Cuota, Integer> implements CuotaService,Serializable {
/**
* 7/02/2014
* @author <a href="mailto:[email protected]">Elmer Jose Diaz Lazo</a>
* serialVersionUID
*/
private static final long serialVersionUID = 1L;
@Autowired
private CuotaDao cuotaDao;
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.impl.GenericServiceImpl#getDao()
*/
@Override
public GenericRepository<Cuota, Integer> getDao() {
return cuotaDao;
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#buscarCuotasPendientesPorCliente(co.innovate.rentavoz.model.Tercero)
*/
@Override
public List<Cuota> buscarCuotasPendientesPorCliente(Tercero cliente) {
return cuotaDao.buscarCuotasPendientesPorCliente(cliente);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#buscarRutaDeCuotasPorCobrador(co.innovate.rentavoz.model.Tercero)
*/
@Override
public List<Cuota> buscarRutaDeCuotasPorCobrador(Tercero cobrador) {
return cuotaDao.buscarRutaDeCuotasPorCobrador(cobrador);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#findByVenta(co.innovate.rentavoz.model.almacen.venta.Venta)
*/
@Override
public List<Cuota> findByVenta(Venta venta) {
return cuotaDao.findByVenta(venta);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#findDeudoresMorosos(java.util.Date)
*/
@Override
public List<Tercero> findDeudoresMorosos(Date fechaCierre) {
return cuotaDao.findDeudoresMorosos(fechaCierre);
}
}<|fim▁end|>
|
* @author <a href="mailto:[email protected]">Elmer Jose Diaz Lazo</a>
|
<|file_name|>mavproxy_link.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''enable run-time addition and removal of master links, just like --master on the command line'''
''' TO USE:
link add 10.11.12.13:14550
link list
 link remove 3 # to remove the 3rd link
'''
from pymavlink import mavutil
import time, struct, math, sys, fnmatch, traceback, json, os
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
from MAVProxy.modules.lib.wx_addlink import MPMenulinkAddDialog
dataPackets = frozenset(['BAD_DATA','LOG_DATA'])
delayedPackets = frozenset([ 'MISSION_CURRENT', 'SYS_STATUS', 'VFR_HUD',
'GPS_RAW_INT', 'SCALED_PRESSURE', 'GLOBAL_POSITION_INT',
'NAV_CONTROLLER_OUTPUT' ])
activityPackets = frozenset([ 'HEARTBEAT', 'GPS_RAW_INT', 'GPS_RAW', 'GLOBAL_POSITION_INT', 'SYS_STATUS' ])
radioStatusPackets = frozenset([ 'RADIO', 'RADIO_STATUS'])
preferred_ports = [
'*FTDI*',
"*Arduino_Mega_2560*",
"*3D*",
"*USB_to_UART*",
'*Ardu*',
'*PX4*',
'*Hex_*',
'*Holybro_*',
'*mRo*',
'*FMU*',
'*Swift-Flyer*',
]
class LinkModule(mp_module.MPModule):
def __init__(self, mpstate):
super(LinkModule, self).__init__(mpstate, "link", "link control", public=True, multi_vehicle=True)
self.add_command('link', self.cmd_link, "link control",
["<list|ports|resetstats>",
'add (SERIALPORT)',
'attributes (LINK) (ATTRIBUTES)',
'remove (LINKS)',
'dataratelogging (DLSTATE)',
'hl (HLSTATE)'])
self.add_command('vehicle', self.cmd_vehicle, "vehicle control")
self.add_command('alllinks', self.cmd_alllinks, "send command on all links", ["(COMMAND)"])
self.no_fwd_types = set()
self.no_fwd_types.add("BAD_DATA")
self.add_completion_function('(SERIALPORT)', self.complete_serial_ports)
self.add_completion_function('(LINKS)', self.complete_links)
self.add_completion_function('(LINK)', self.complete_links)
self.add_completion_function('(HLSTATE)', self.complete_hl)
self.add_completion_function('(DLSTATE)', self.complete_dl)
self.last_altitude_announce = 0.0
self.vehicle_list = set()
self.high_latency = False
self.datarate_logging = False
self.datarate_logging_timer = mavutil.periodic_event(1)
self.old_streamrate = 0
self.old_streamrate2 = 0
self.menu_added_console = False
if mp_util.has_wxpython:
self.menu_rm = MPMenuSubMenu('Remove', items=[])
self.menu = MPMenuSubMenu('Link',
items=[MPMenuItem('Add...', 'Add...', '# link add ', handler=MPMenulinkAddDialog()),
self.menu_rm,
MPMenuItem('Ports', 'Ports', '# link ports'),
MPMenuItem('List', 'List', '# link list'),
MPMenuItem('Status', 'Status', '# link')])
self.last_menu_update = 0
def idle_task(self):
'''called on idle'''
if mp_util.has_wxpython:
if self.module('console') is not None:
if not self.menu_added_console:
self.menu_added_console = True
# we don't dynamically update these yet due to a wx bug
self.menu_rm.items = [ MPMenuItem(p, p, '# link remove %s' % p) for p in self.complete_links('') ]
self.module('console').add_menu(self.menu)
else:
self.menu_added_console = False
for m in self.mpstate.mav_master:
m.source_system = self.settings.source_system
m.mav.srcSystem = m.source_system
m.mav.srcComponent = self.settings.source_component
# don't let pending statustext wait forever for last chunk:
for src in self.status.statustexts_by_sysidcompid:
msgids = list(self.status.statustexts_by_sysidcompid[src].keys())
for msgid in msgids:
pending = self.status.statustexts_by_sysidcompid[src][msgid]
if time.time() - pending.last_chunk_time > 1:
self.emit_accumulated_statustext(src, msgid, pending)
# datarate logging if enabled, at 1 Hz
if self.datarate_logging_timer.trigger() and self.datarate_logging:
with open(self.datarate_logging, 'a') as logfile:
for master in self.mpstate.mav_master:
highest_msec_key = (self.target_system, self.target_component)
linkdelay = (self.status.highest_msec.get(highest_msec_key, 0) - master.highest_msec.get(highest_msec_key, 0))*1.0e-3
logfile.write(str(time.strftime("%H:%M:%S")) + "," +
str(self.link_label(master)) + "," +
str(master.linknum) + "," +
str(self.status.counters['MasterIn'][master.linknum]) + "," +
str(self.status.bytecounters['MasterIn'][master.linknum].total()) + "," +
str(linkdelay) + "," +
str(100 * round(master.packet_loss(), 3)) + "\n")
def complete_serial_ports(self, text):
'''return list of serial ports'''
ports = mavutil.auto_detect_serial(preferred_list=preferred_ports)
return [ p.device for p in ports ]
def complete_hl(self, text):
'''return list of hl options'''
return [ 'on', 'off' ]
def complete_dl(self, text):
'''return list of datarate_logging options'''
return [ 'on', 'off' ]
def complete_links(self, text):
'''return list of links'''
try:
            ret = []
            for m in self.mpstate.mav_master:
                ret.append(m.address)
if hasattr(m, 'label'):
ret.append(m.label)
return ret
except Exception as e:
print("Caught exception: %s" % str(e))
def cmd_link(self, args):
'''handle link commands'''
if len(args) < 1:
self.show_link()
elif args[0] == "list":
self.cmd_link_list()
elif args[0] == "hl":
self.cmd_hl(args[1:])
elif args[0] == "dataratelogging":
self.cmd_dl(args[1:])
elif args[0] == "add":
if len(args) != 2:
print("Usage: link add LINK")
print('Usage: e.g. link add 127.0.0.1:9876')
print('Usage: e.g. link add 127.0.0.1:9876:{"label":"rfd900"}')
return
self.cmd_link_add(args[1:])
elif args[0] == "attributes":
if len(args) != 3:
print("Usage: link attributes LINK ATTRIBUTES")
print('Usage: e.g. link attributes rfd900 {"label":"bob"}')
return
self.cmd_link_attributes(args[1:])
elif args[0] == "ports":
self.cmd_link_ports()
elif args[0] == "remove":
if len(args) != 2:
print("Usage: link remove LINK")
return
self.cmd_link_remove(args[1:])
elif args[0] == "resetstats":
self.reset_link_stats()
else:
print("usage: link <list|add|remove|attributes|hl|dataratelogging|resetstats>")
def cmd_dl(self, args):
'''Toggle datarate logging'''
if len(args) < 1:
print("Datarate logging is " + ("on" if self.datarate_logging else "off"))
return
elif args[0] == "on":
self.datarate_logging = os.path.join(self.logdir, "dataratelog.csv")
print("Datarate Logging ON, logfile: " + self.datarate_logging)
# Open a new file handle (don't append) for logging
with open(self.datarate_logging, 'w') as logfile:
logfile.write("time, linkname, linkid, packetsreceived, bytesreceived, delaysec, lostpercent\n")
elif args[0] == "off":
print("Datarate Logging OFF")
self.datarate_logging = None
else:
print("usage: dataratelogging <on|off>")
def cmd_hl(self, args):
'''Toggle high latency mode'''
if len(args) < 1:
print("High latency mode is " + str(self.high_latency))
return
elif args[0] == "on":
print("High latency mode ON")
self.high_latency = True
# Tell ArduPilot to start sending HIGH_LATENCY2 messages
self.master.mav.command_long_send(
self.target_system, # target_system
self.target_component,
mavutil.mavlink.MAV_CMD_SET_MESSAGE_INTERVAL, # command
0, # confirmation
mavutil.mavlink.MAVLINK_MSG_ID_HIGH_LATENCY2, # param1 (msg id)
1000000, # param2 (message interval, us)
0, # param3
0, # param4
0, # param5
0, # param6
0) # param7
# and stop sending any other messages
self.old_streamrate = self.settings.streamrate
self.old_streamrate2 = self.settings.streamrate2
self.settings.streamrate = -1
self.settings.streamrate2 = -1
for master in self.mpstate.mav_master:
master.mav.request_data_stream_send(self.mpstate.settings.target_system, self.mpstate.settings.target_component,
mavutil.mavlink.MAV_DATA_STREAM_ALL,
0, 1)
return
elif args[0] == "off":
print("High latency mode OFF")
self.high_latency = False
# Start sending the full message set again
self.settings.streamrate = self.old_streamrate
self.settings.streamrate2 = self.old_streamrate2
for master in self.mpstate.mav_master:
if master.linknum == 0:
rate = self.settings.streamrate
else:
rate = self.settings.streamrate2
if rate != -1 and self.mpstate.settings.streamrate != -1:
master.mav.request_data_stream_send(self.mpstate.settings.target_system, self.mpstate.settings.target_component,
mavutil.mavlink.MAV_DATA_STREAM_ALL,
rate, 1)
# Tell ArduPilot to stop sending HIGH_LATENCY2 messages
self.master.mav.command_long_send(
self.target_system, # target_system
self.target_component,
mavutil.mavlink.MAV_CMD_SET_MESSAGE_INTERVAL, # command
0, # confirmation
mavutil.mavlink.MAVLINK_MSG_ID_HIGH_LATENCY2, # param1 (msg id)
-1, # param2 (message interval)
0, # param3
0, # param4
0, # param5
0, # param6
0) # param7
return
else:
print("usage: hl <on|off>")
def show_link(self):
'''show link information'''
for master in self.mpstate.mav_master:
highest_msec_key = (self.target_system, self.target_component)
linkdelay = (self.status.highest_msec.get(highest_msec_key, 0) - master.highest_msec.get(highest_msec_key, 0))*1.0e-3
if master.linkerror:
status = "DOWN"
else:
status = "OK"
sign_string = ''
try:
if master.mav.signing.sig_count:
if master.mav.signing.secret_key is None:
# unsigned/reject counts are not updated if we
# don't have a signing secret
sign_string = ", (no-signing-secret)"
else:
sign_string = ", unsigned %u reject %u" % (master.mav.signing.unsigned_count, master.mav.signing.reject_count)
except AttributeError as e:
# some mav objects may not have a "signing" attribute
pass
print("link %s %s (%u packets, %u bytes, %.2fs delay, %u lost, %.1f%% loss, rate:%uB/s%s)" % (self.link_label(master),
status,
self.status.counters['MasterIn'][master.linknum],
self.status.bytecounters['MasterIn'][master.linknum].total(),
linkdelay,
master.mav_loss,
master.packet_loss(),
self.status.bytecounters['MasterIn'][master.linknum].rate(),
sign_string))
def reset_link_stats(self):
'''reset link statistics'''
for master in self.mpstate.mav_master:
self.status.counters['MasterIn'][master.linknum] = 0
self.status.bytecounters['MasterIn'][master.linknum].__init__()
master.mav_loss = 0
master.mav_count = 0
def cmd_alllinks(self, args):
'''send command on all links'''
saved_target = self.mpstate.settings.target_system
print("Sending to: ", self.vehicle_list)
for v in sorted(self.vehicle_list):
self.cmd_vehicle([str(v)])
self.mpstate.functions.process_stdin(' '.join(args), True)
self.cmd_vehicle([str(saved_target)])
def cmd_link_list(self):
'''list links'''
print("%u links" % len(self.mpstate.mav_master))
for i in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[i]
if hasattr(conn, 'label'):
print("%u (%s): %s" % (i, conn.label, conn.address))
else:
print("%u: %s" % (i, conn.address))
def parse_link_attributes(self, some_json):
'''return a dict based on some_json (empty if json invalid)'''
try:
return json.loads(some_json)
except ValueError:
print('Invalid JSON argument: {0}'.format(some_json))
return {}
<|fim▁hole|> optional_attributes = {}
link_components = descriptor.split(":{", 1)
device = link_components[0]
if (len(link_components) == 2 and link_components[1].endswith("}")):
# assume json
some_json = "{" + link_components[1]
optional_attributes = self.parse_link_attributes(some_json)
return (device, optional_attributes)
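    # e.g. parse_link_descriptor('udpin:127.0.0.1:9876:{"label":"rfd900"}')
    #   -> ('udpin:127.0.0.1:9876', {'label': 'rfd900'})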
def apply_link_attributes(self, conn, optional_attributes):
for attr in optional_attributes:
print("Applying attribute to link: %s = %s" % (attr, optional_attributes[attr]))
setattr(conn, attr, optional_attributes[attr])
def link_add(self, descriptor, force_connected=False):
'''add new link'''
try:
(device, optional_attributes) = self.parse_link_descriptor(descriptor)
# if there's only 1 colon for port:baud
# and if the first string is a valid serial port, it's a serial connection
if len(device.split(':')) == 2:
ports = mavutil.auto_detect_serial(preferred_list=preferred_ports)
for p in ports:
if p.device == device.split(':')[0]:
# it's a valid serial port, reformat arguments to fit
self.settings.baudrate = int(device.split(':')[1])
device = device.split(':')[0]
break
print("Connect %s source_system=%d" % (device, self.settings.source_system))
try:
conn = mavutil.mavlink_connection(device, autoreconnect=True,
source_system=self.settings.source_system,
baud=self.settings.baudrate,
force_connected=force_connected)
except Exception as e:
# try the same thing but without force-connected for
# backwards-compatability
conn = mavutil.mavlink_connection(device, autoreconnect=True,
source_system=self.settings.source_system,
baud=self.settings.baudrate)
conn.mav.srcComponent = self.settings.source_component
except Exception as msg:
print("Failed to connect to %s : %s" % (descriptor, msg))
return False
if self.settings.rtscts:
conn.set_rtscts(True)
conn.mav.set_callback(self.master_callback, conn)
if hasattr(conn.mav, 'set_send_callback'):
conn.mav.set_send_callback(self.master_send_callback, conn)
conn.linknum = len(self.mpstate.mav_master)
conn.linkerror = False
conn.link_delayed = False
conn.last_heartbeat = 0
conn.last_message = 0
conn.highest_msec = {}
conn.target_system = self.settings.target_system
self.apply_link_attributes(conn, optional_attributes)
self.mpstate.mav_master.append(conn)
self.status.counters['MasterIn'].append(0)
self.status.bytecounters['MasterIn'].append(self.status.ByteCounter())
self.mpstate.vehicle_link_map[conn.linknum] = set(())
try:
mp_util.child_fd_list_add(conn.port.fileno())
except Exception:
pass
return True
def cmd_link_add(self, args):
'''add new link'''
descriptor = args[0]
print("Adding link %s" % descriptor)
self.link_add(descriptor)
def link_attributes(self, link, attributes):
i = self.find_link(link)
if i is None:
print("Connection (%s) not found" % (link,))
return
conn = self.mpstate.mav_master[i]
atts = self.parse_link_attributes(attributes)
self.apply_link_attributes(conn, atts)
def cmd_link_attributes(self, args):
'''change optional link attributes'''
link = args[0]
attributes = args[1]
print("Setting link %s attributes (%s)" % (link, attributes))
self.link_attributes(link, attributes)
def cmd_link_ports(self):
'''show available ports'''
ports = mavutil.auto_detect_serial(preferred_list=preferred_ports)
for p in ports:
print("%s : %s : %s" % (p.device, p.description, p.hwid))
def find_link(self, device):
'''find a device based on number, name or label'''
for i in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[i]
if (str(i) == device or
conn.address == device or
getattr(conn, 'label', None) == device):
return i
return None
def cmd_link_remove(self, args):
'''remove an link'''
device = args[0]
if len(self.mpstate.mav_master) <= 1:
print("Not removing last link")
return
i = self.find_link(device)
if i is None:
return
conn = self.mpstate.mav_master[i]
print("Removing link %s" % conn.address)
try:
try:
mp_util.child_fd_list_remove(conn.port.fileno())
except Exception:
pass
self.mpstate.mav_master[i].close()
except Exception as msg:
print(msg)
pass
self.mpstate.mav_master.pop(i)
self.status.counters['MasterIn'].pop(i)
self.status.bytecounters['MasterIn'].pop(i)
del self.mpstate.vehicle_link_map[conn.linknum]
# renumber the links
vehicle_link_map_reordered = {}
for j in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[j]
map_old = self.mpstate.vehicle_link_map[conn.linknum]
conn.linknum = j
vehicle_link_map_reordered[j] = map_old
self.mpstate.vehicle_link_map = vehicle_link_map_reordered
def get_usec(self):
'''time since 1970 in microseconds'''
return int(time.time() * 1.0e6)
def master_send_callback(self, m, master):
'''called on sending a message'''
if self.status.watch is not None:
for msg_type in self.status.watch:
if fnmatch.fnmatch(m.get_type().upper(), msg_type.upper()):
self.mpstate.console.writeln('> '+ str(m))
break
mtype = m.get_type()
if mtype != 'BAD_DATA' and self.mpstate.logqueue:
usec = self.get_usec()
usec = (usec & ~3) | 3 # linknum 3
self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf()))
def handle_msec_timestamp(self, m, master):
'''special handling for MAVLink packets with a time_boot_ms field'''
if m.get_type() == 'GLOBAL_POSITION_INT':
# this is fix time, not boot time
return
msec = m.time_boot_ms
if msec == 0:
return
sysid = m.get_srcSystem()
compid = m.get_srcComponent()
highest_msec_key = (sysid,compid)
highest = master.highest_msec.get(highest_msec_key, 0)
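        # a timestamp more than 30 seconds behind the highest seen is treated
        # as a clock wrap (e.g. an autopilot reboot) rather than a delayed link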
if msec + 30000 < highest:
self.say('Time has wrapped')
print('Time has wrapped', msec, highest)
self.status.highest_msec[highest_msec_key] = msec
for mm in self.mpstate.mav_master:
mm.link_delayed = False
mm.highest_msec[highest_msec_key] = msec
return
# we want to detect when a link is delayed
master.highest_msec[highest_msec_key] = msec
if msec > self.status.highest_msec.get(highest_msec_key, 0):
self.status.highest_msec[highest_msec_key] = msec
if msec < self.status.highest_msec.get(highest_msec_key, 0) and len(self.mpstate.mav_master) > 1 and self.mpstate.settings.checkdelay:
master.link_delayed = True
else:
master.link_delayed = False
def colors_for_severity(self, severity):
severity_colors = {
# tuple is (fg, bg) (as in "white on red")
mavutil.mavlink.MAV_SEVERITY_EMERGENCY: ('white', 'red'),
mavutil.mavlink.MAV_SEVERITY_ALERT: ('white', 'red'),
mavutil.mavlink.MAV_SEVERITY_CRITICAL: ('white', 'red'),
mavutil.mavlink.MAV_SEVERITY_ERROR: ('black', 'orange'),
mavutil.mavlink.MAV_SEVERITY_WARNING: ('black', 'orange'),
mavutil.mavlink.MAV_SEVERITY_NOTICE: ('black', 'yellow'),
mavutil.mavlink.MAV_SEVERITY_INFO: ('white', 'green'),
mavutil.mavlink.MAV_SEVERITY_DEBUG: ('white', 'green'),
}
try:
return severity_colors[severity]
except Exception as e:
print("Exception: %s" % str(e))
return ('white', 'red')
def report_altitude(self, altitude):
'''possibly report a new altitude'''
master = self.master
if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
lat = master.field('GLOBAL_POSITION_INT', 'lat', 0)*1.0e-7
lon = master.field('GLOBAL_POSITION_INT', 'lon', 0)*1.0e-7
alt1 = self.console.ElevationMap.GetElevation(lat, lon)
if alt1 is not None:
alt2 = self.mpstate.settings.basealt
altitude += alt2 - alt1
self.status.altitude = altitude
altitude_converted = self.height_convert_units(altitude)
if (int(self.mpstate.settings.altreadout) > 0 and
math.fabs(altitude_converted - self.last_altitude_announce) >=
int(self.settings.altreadout)):
self.last_altitude_announce = altitude_converted
            rounded_alt = int(self.settings.altreadout) * ((self.settings.altreadout/2 + int(altitude_converted)) // int(self.settings.altreadout))
self.say("height %u" % rounded_alt, priority='notification')
def emit_accumulated_statustext(self, key, id, pending):
out = pending.accumulated_statustext()
if out != self.status.last_apm_msg or time.time() > self.status.last_apm_msg_time+2:
(fg, bg) = self.colors_for_severity(pending.severity)
out = pending.accumulated_statustext()
self.mpstate.console.writeln("AP: %s" % out, bg=bg, fg=fg)
self.status.last_apm_msg = out
self.status.last_apm_msg_time = time.time()
del self.status.statustexts_by_sysidcompid[key][id]
def master_msg_handling(self, m, master):
'''link message handling for an upstream link'''
if self.settings.target_system != 0 and m.get_srcSystem() != self.settings.target_system:
# don't process messages not from our target
if m.get_type() == "BAD_DATA":
if self.mpstate.settings.shownoise and mavutil.all_printable(m.data):
out = m.data
if type(m.data) == bytearray:
out = m.data.decode('ascii')
self.mpstate.console.write(out, bg='red')
return
if self.settings.target_system != 0 and master.target_system != self.settings.target_system:
# keep the pymavlink level target system aligned with the MAVProxy setting
master.target_system = self.settings.target_system
if self.settings.target_component != 0 and master.target_component != self.settings.target_component:
# keep the pymavlink level target component aligned with the MAVProxy setting
print("change target_component %u" % self.settings.target_component)
master.target_component = self.settings.target_component
mtype = m.get_type()
if (mtype == 'HEARTBEAT' or mtype == 'HIGH_LATENCY2') and m.type != mavutil.mavlink.MAV_TYPE_GCS:
if self.settings.target_system == 0 and self.settings.target_system != m.get_srcSystem():
self.settings.target_system = m.get_srcSystem()
self.say("online system %u" % self.settings.target_system,'message')
for mav in self.mpstate.mav_master:
mav.target_system = self.settings.target_system
if self.status.heartbeat_error:
self.status.heartbeat_error = False
self.say("heartbeat OK")
if master.linkerror:
master.linkerror = False
self.say("link %s OK" % (self.link_label(master)))
self.status.last_heartbeat = time.time()
master.last_heartbeat = self.status.last_heartbeat
armed = self.master.motors_armed()
if armed != self.status.armed:
self.status.armed = armed
if armed:
self.say("ARMED")
else:
self.say("DISARMED")
if master.flightmode != self.status.flightmode:
self.status.flightmode = master.flightmode
if self.mpstate.functions.input_handler is None:
self.set_prompt(self.status.flightmode + "> ")
if master.flightmode != self.status.last_mode_announced and time.time() > self.status.last_mode_announce + 2:
self.status.last_mode_announce = time.time()
self.status.last_mode_announced = master.flightmode
self.say("Mode " + self.status.flightmode)
if m.type == mavutil.mavlink.MAV_TYPE_FIXED_WING:
self.mpstate.vehicle_type = 'plane'
self.mpstate.vehicle_name = 'ArduPlane'
elif m.type in [mavutil.mavlink.MAV_TYPE_GROUND_ROVER,
mavutil.mavlink.MAV_TYPE_SURFACE_BOAT]:
self.mpstate.vehicle_type = 'rover'
self.mpstate.vehicle_name = 'APMrover2'
elif m.type in [mavutil.mavlink.MAV_TYPE_SUBMARINE]:
self.mpstate.vehicle_type = 'sub'
self.mpstate.vehicle_name = 'ArduSub'
elif m.type in [mavutil.mavlink.MAV_TYPE_QUADROTOR,
mavutil.mavlink.MAV_TYPE_COAXIAL,
mavutil.mavlink.MAV_TYPE_HEXAROTOR,
mavutil.mavlink.MAV_TYPE_OCTOROTOR,
mavutil.mavlink.MAV_TYPE_TRICOPTER,
mavutil.mavlink.MAV_TYPE_HELICOPTER,
mavutil.mavlink.MAV_TYPE_DODECAROTOR]:
self.mpstate.vehicle_type = 'copter'
self.mpstate.vehicle_name = 'ArduCopter'
elif m.type in [mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER]:
self.mpstate.vehicle_type = 'antenna'
self.mpstate.vehicle_name = 'AntennaTracker'
elif m.type in [mavutil.mavlink.MAV_TYPE_AIRSHIP]:
self.mpstate.vehicle_type = 'blimp'
self.mpstate.vehicle_name = 'Blimp'
elif mtype == 'STATUSTEXT':
class PendingText(object):
def __init__(self):
self.expected_count = None
self.severity = None
self.chunks = {}
self.start_time = time.time()
self.last_chunk_time = time.time()
def add_chunk(self, m): # m is a statustext message
self.severity = m.severity
self.last_chunk_time = time.time()
if hasattr(m, 'chunk_seq'):
# mavlink extensions are present.
chunk_seq = m.chunk_seq
mid = m.id
else:
# Note that m.id may still exist! It will
# contain the value 253, STATUSTEXT's mavlink
# message id. Thus our reliance on the
# presence of chunk_seq.
chunk_seq = 0
mid = 0
self.chunks[chunk_seq] = m.text
if len(m.text) != 50 or mid == 0:
                        self.expected_count = chunk_seq + 1
def complete(self):
return (self.expected_count is not None and
self.expected_count == len(self.chunks))
def accumulated_statustext(self):
next_expected_chunk = 0
out = ""
for chunk_seq in sorted(self.chunks.keys()):
if chunk_seq != next_expected_chunk:
out += " ... "
next_expected_chunk = chunk_seq
out += self.chunks[chunk_seq]
next_expected_chunk += 1
return out
key = "%s.%s" % (m.get_srcSystem(), m.get_srcComponent())
if key not in self.status.statustexts_by_sysidcompid:
self.status.statustexts_by_sysidcompid[key] = {}
if hasattr(m, 'chunk_seq'):
mid = m.id
else:
# m.id will have the value of 253, STATUSTEXT mavlink id
mid = 0
if mid not in self.status.statustexts_by_sysidcompid[key]:
self.status.statustexts_by_sysidcompid[key][mid] = PendingText()
pending = self.status.statustexts_by_sysidcompid[key][mid]
pending.add_chunk(m)
if pending.complete():
# we have all of the chunks!
self.emit_accumulated_statustext(key, mid, pending)
elif mtype == "VFR_HUD":
have_gps_lock = False
if 'GPS_RAW' in self.status.msgs and self.status.msgs['GPS_RAW'].fix_type == 2:
have_gps_lock = True
elif 'GPS_RAW_INT' in self.status.msgs and self.status.msgs['GPS_RAW_INT'].fix_type == 3:
have_gps_lock = True
if have_gps_lock and not self.status.have_gps_lock and m.alt != 0:
self.say("GPS lock at %u meters" % m.alt, priority='notification')
self.status.have_gps_lock = True
elif mtype == "GPS_RAW":
if self.status.have_gps_lock:
if m.fix_type != 2 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
self.say("GPS fix lost")
self.status.lost_gps_lock = True
if m.fix_type == 2 and self.status.lost_gps_lock:
self.say("GPS OK")
self.status.lost_gps_lock = False
if m.fix_type == 2:
self.status.last_gps_lock = time.time()
elif mtype == "GPS_RAW_INT":
if self.status.have_gps_lock:
if m.fix_type < 3 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
self.say("GPS fix lost")
self.status.lost_gps_lock = True
if m.fix_type >= 3 and self.status.lost_gps_lock:
self.say("GPS OK")
self.status.lost_gps_lock = False
if m.fix_type >= 3:
self.status.last_gps_lock = time.time()
elif mtype == "NAV_CONTROLLER_OUTPUT" and self.status.flightmode == "AUTO" and self.mpstate.settings.distreadout:
rounded_dist = int(m.wp_dist/self.mpstate.settings.distreadout)*self.mpstate.settings.distreadout
if math.fabs(rounded_dist - self.status.last_distance_announce) >= self.mpstate.settings.distreadout:
if rounded_dist != 0:
self.say("%u" % rounded_dist, priority="progress")
self.status.last_distance_announce = rounded_dist
elif mtype == "GLOBAL_POSITION_INT":
self.report_altitude(m.relative_alt*0.001)
elif mtype == "COMPASSMOT_STATUS":
print(m)
elif mtype == "SIMSTATE":
self.mpstate.is_sitl = True
elif mtype == "ATTITUDE":
att_time = m.time_boot_ms * 0.001
self.mpstate.attitude_time_s = max(self.mpstate.attitude_time_s, att_time)
if self.mpstate.attitude_time_s - att_time > 120:
# cope with wrap
self.mpstate.attitude_time_s = att_time
elif mtype == "COMMAND_ACK":
try:
cmd = mavutil.mavlink.enums["MAV_CMD"][m.command].name
cmd = cmd[8:]
res = mavutil.mavlink.enums["MAV_RESULT"][m.result].name
res = res[11:]
if m.target_component not in [mavutil.mavlink.MAV_COMP_ID_MAVCAN]:
self.mpstate.console.writeln("Got COMMAND_ACK: %s: %s" % (cmd, res))
except Exception:
self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
if m.command == mavutil.mavlink.MAV_CMD_PREFLIGHT_CALIBRATION:
if m.result == mavutil.mavlink.MAV_RESULT_ACCEPTED:
self.say("Calibrated")
elif m.result == mavutil.mavlink.MAV_RESULT_FAILED:
self.say("Calibration failed")
elif m.result == mavutil.mavlink.MAV_RESULT_UNSUPPORTED:
self.say("Calibration unsupported")
elif m.result == mavutil.mavlink.MAV_RESULT_TEMPORARILY_REJECTED:
self.say("Calibration temporarily rejected")
else:
self.say("Calibration response (%u)" % m.result)
elif mtype == "MISSION_ACK":
try:
t = mavutil.mavlink.enums["MAV_MISSION_TYPE"][m.mission_type].name
t = t[12:]
res = mavutil.mavlink.enums["MAV_MISSION_RESULT"][m.type].name
res = res[12:]
self.mpstate.console.writeln("Got MISSION_ACK: %s: %s" % (t, res))
except Exception as e:
self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
else:
#self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
pass
if self.status.watch is not None:
for msg_type in self.status.watch:
if fnmatch.fnmatch(mtype.upper(), msg_type.upper()):
self.mpstate.console.writeln('< '+ str(m))
break
def mavlink_packet(self, msg):
'''handle an incoming mavlink packet'''
pass
def master_callback(self, m, master):
'''process mavlink message m on master, sending any messages to recipients'''
sysid = m.get_srcSystem()
mtype = m.get_type()
if mtype in ['HEARTBEAT', 'HIGH_LATENCY2'] and m.type != mavutil.mavlink.MAV_TYPE_GCS:
compid = m.get_srcComponent()
if sysid not in self.vehicle_list:
self.vehicle_list.add(sysid)
if (sysid, compid) not in self.mpstate.vehicle_link_map[master.linknum]:
self.mpstate.vehicle_link_map[master.linknum].add((sysid, compid))
print("Detected vehicle {0}:{1} on link {2}".format(sysid, compid, master.linknum))
# see if it is handled by a specialised sysid connection
if sysid in self.mpstate.sysid_outputs:
self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf())
if mtype == "GLOBAL_POSITION_INT":
for modname in 'map', 'asterix', 'NMEA', 'NMEA2':
mod = self.module(modname)
if mod is not None:
mod.set_secondary_vehicle_position(m)
return
if getattr(m, '_timestamp', None) is None:
master.post_message(m)
self.status.counters['MasterIn'][master.linknum] += 1
if mtype == 'GLOBAL_POSITION_INT':
# send GLOBAL_POSITION_INT to 2nd GCS for 2nd vehicle display
for sysid in self.mpstate.sysid_outputs:
self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf())
if self.mpstate.settings.fwdpos:
for link in self.mpstate.mav_master:
if link != master:
link.write(m.get_msgbuf())
# and log them
if mtype not in dataPackets and self.mpstate.logqueue:
# put link number in bottom 2 bits, so we can analyse packet
# delay in saved logs
usec = self.get_usec()
usec = (usec & ~3) | master.linknum
self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf()))
# keep the last message of each type around
self.status.msgs[mtype] = m
instance_field = getattr(m, '_instance_field', None)
if mtype not in self.status.msg_count:
self.status.msg_count[mtype] = 0
self.status.msg_count[mtype] += 1
if instance_field is not None:
instance_value = getattr(m, instance_field, None)
if instance_value is not None:
mtype_instance = "%s[%s]" % (mtype, instance_value)
self.status.msgs[mtype_instance] = m
if mtype_instance not in self.status.msg_count:
self.status.msg_count[mtype_instance] = 0
self.status.msg_count[mtype_instance] += 1
if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and mtype == 'HEARTBEAT':
# silence gimbal heartbeat packets for now
return
if getattr(m, 'time_boot_ms', None) is not None and self.settings.target_system == m.get_srcSystem():
# update link_delayed attribute
self.handle_msec_timestamp(m, master)
if mtype in activityPackets:
if master.linkerror:
master.linkerror = False
self.say("link %s OK" % (self.link_label(master)))
self.status.last_message = time.time()
master.last_message = self.status.last_message
if master.link_delayed and self.mpstate.settings.checkdelay:
# don't process delayed packets that cause double reporting
if mtype in delayedPackets:
return
self.master_msg_handling(m, master)
# don't pass along bad data
if mtype != 'BAD_DATA':
# pass messages along to listeners, except for REQUEST_DATA_STREAM, which
            # would lead to a conflict in stream rate setting between mavproxy and the other
# GCS
if self.mpstate.settings.mavfwd_rate or mtype != 'REQUEST_DATA_STREAM':
if mtype not in self.no_fwd_types:
for r in self.mpstate.mav_outputs:
r.write(m.get_msgbuf())
sysid = m.get_srcSystem()
target_sysid = self.target_system
# pass to modules
for (mod,pm) in self.mpstate.modules:
if not hasattr(mod, 'mavlink_packet'):
continue
# sysid 51/'3' is used by SiK radio for the injected RADIO/RADIO_STATUS mavlink frames.
# In order to be able to pass these to e.g. the graph module, which is not multi-vehicle,
# special handling is needed, so that the module gets both RADIO_STATUS and (single) target
# vehicle information.
if not(sysid == 51 and mtype in radioStatusPackets):
if not mod.multi_vehicle and sysid != target_sysid:
# only pass packets not from our target to modules that
# have marked themselves as being multi-vehicle capable
continue
try:
mod.mavlink_packet(m)
except Exception as msg:
if self.mpstate.settings.moddebug == 1:
print(msg)
elif self.mpstate.settings.moddebug > 1:
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback,
limit=2, file=sys.stdout)
def cmd_vehicle(self, args):
'''handle vehicle commands'''
if len(args) < 1:
print("Usage: vehicle SYSID[:COMPID]")
return
a = args[0].split(':')
self.mpstate.settings.target_system = int(a[0])
if len(a) > 1:
self.mpstate.settings.target_component = int(a[1])
# change default link based on most recent HEARTBEAT
best_link = 0
best_timestamp = 0
for i in range(len(self.mpstate.mav_master)):
m = self.mpstate.mav_master[i]
m.target_system = self.mpstate.settings.target_system
m.target_component = self.mpstate.settings.target_component
if 'HEARTBEAT' in m.messages:
stamp = m.messages['HEARTBEAT']._timestamp
src_system = m.messages['HEARTBEAT'].get_srcSystem()
if stamp > best_timestamp:
best_link = i
best_timestamp = stamp
m.link_delayed = False
self.mpstate.settings.link = best_link + 1
print("Set vehicle %s (link %u)" % (args[0], best_link+1))
def init(mpstate):
'''initialise module'''
return LinkModule(mpstate)<|fim▁end|>
|
def parse_link_descriptor(self, descriptor):
'''parse e.g. 'udpin:127.0.0.1:9877:{"foo":"bar"}' into
python structure ("udpin:127.0.0.1:9877", {"foo":"bar"})'''
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|># - coding: utf-8 -
# Copyright (C) 2007 Patryk Zawadzki <patrys at pld-linux.org>
# Copyright (C) 2007-2009 Toms Baugis <[email protected]>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
import datetime as dt
from calendar import timegm
import dbus, dbus.mainloop.glib
from gi.repository import GObject as gobject
from hamster.lib import Fact
from hamster.lib import trophies
def from_dbus_fact(fact):
"""unpack the struct into a proper dict"""
return Fact(fact[4],
start_time = dt.datetime.utcfromtimestamp(fact[1]),
end_time = dt.datetime.utcfromtimestamp(fact[2]) if fact[2] else None,
description = fact[3],
activity_id = fact[5],
category = fact[6],
tags = fact[7],
date = dt.datetime.utcfromtimestamp(fact[8]).date(),
delta = dt.timedelta(days = fact[9] // (24 * 60 * 60),
seconds = fact[9] % (24 * 60 * 60)),
id = fact[0]
)
class Storage(gobject.GObject):
"""Hamster client class, communicating to hamster storage daemon via d-bus.
Subscribe to the `tags-changed`, `facts-changed` and `activities-changed`
signals to be notified when an appropriate factoid of interest has been
changed.
    In storage a distinction is made between the classifier of
    activities and the events in the tracking log.
When talking about the event we use term 'fact'. For the classificator
we use term 'activity'.
The relationship is - one activity can be used in several facts.
The rest is hopefully obvious. But if not, please file bug reports!
"""
__gsignals__ = {
"tags-changed": (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
"facts-changed": (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
"activities-changed": (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
"toggle-called": (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
}
def __init__(self):
gobject.GObject.__init__(self)
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
self.bus = dbus.SessionBus()
self._connection = None # will be initiated on demand
self.bus.add_signal_receiver(self._on_tags_changed, 'TagsChanged', 'org.gnome.Hamster')
self.bus.add_signal_receiver(self._on_facts_changed, 'FactsChanged', 'org.gnome.Hamster')
self.bus.add_signal_receiver(self._on_activities_changed, 'ActivitiesChanged', 'org.gnome.Hamster')
self.bus.add_signal_receiver(self._on_toggle_called, 'ToggleCalled', 'org.gnome.Hamster')
self.bus.add_signal_receiver(self._on_dbus_connection_change, 'NameOwnerChanged',
'org.freedesktop.DBus', arg0='org.gnome.Hamster')
@staticmethod
def _to_dict(columns, result_list):
return [dict(zip(columns, row)) for row in result_list]
@property
def conn(self):
if not self._connection:
self._connection = dbus.Interface(self.bus.get_object('org.gnome.Hamster',
'/org/gnome/Hamster'),
dbus_interface='org.gnome.Hamster')
return self._connection
def _on_dbus_connection_change(self, name, old, new):
self._connection = None
def _on_tags_changed(self):
self.emit("tags-changed")
def _on_facts_changed(self):
self.emit("facts-changed")
def _on_activities_changed(self):
self.emit("activities-changed")
def _on_toggle_called(self):
self.emit("toggle-called")
def toggle(self):
"""toggle visibility of the main application window if any"""
self.conn.Toggle()
def get_todays_facts(self):
"""returns facts of the current date, respecting hamster midnight
hamster midnight is stored in gconf, and presented in minutes
"""
return [from_dbus_fact(fact) for fact in self.conn.GetTodaysFacts()]
def get_facts(self, date, end_date = None, search_terms = ""):
"""Returns facts for the time span matching the optional filter criteria.
In search terms comma (",") translates to boolean OR and space (" ")
to boolean AND.
Filter is applied to tags, categories, activity names and description
"""
date = timegm(date.timetuple())
end_date = end_date or 0
if end_date:
end_date = timegm(end_date.timetuple())
return [from_dbus_fact(fact) for fact in self.conn.GetFacts(date,
end_date,
search_terms)]
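    # Sketch of the search-term semantics described above (terms are
    # illustrative): "work email, office email" matches facts containing
    # both "work" and "email", or both "office" and "email":
    #   storage.get_facts(dt.date.today(), search_terms="work email, office email")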
def get_activities(self, search = ""):
"""returns list of activities name matching search criteria.
results are sorted by most recent usage.
search is case insensitive
"""
return self._to_dict(('name', 'category'), self.conn.GetActivities(search))
def get_categories(self):
"""returns list of categories"""
return self._to_dict(('id', 'name'), self.conn.GetCategories())
def get_tags(self, only_autocomplete = False):
"""returns list of all tags. by default only those that have been set for autocomplete"""
return self._to_dict(('id', 'name', 'autocomplete'), self.conn.GetTags(only_autocomplete))
def get_tag_ids(self, tags):
"""find tag IDs by name. tags should be a list of labels
if a requested tag had been removed from the autocomplete list, it
        will be resurrected. if a tag with such a label does not exist, it will
be created.
on database changes the `tags-changed` signal is emitted.
"""
return self._to_dict(('id', 'name', 'autocomplete'), self.conn.GetTagIds(tags))
def update_autocomplete_tags(self, tags):
"""update list of tags that should autocomplete. this list replaces
anything that is currently set"""
self.conn.SetTagsAutocomplete(tags)
def get_fact(self, id):
"""returns fact by it's ID"""
return from_dbus_fact(self.conn.GetFact(id))
def add_fact(self, fact, temporary_activity = False):
"""Add fact. activity name can use the
`[-]start_time[-end_time] activity@category, description #tag1 #tag2`
syntax, or params can be stated explicitly.
Params will take precedence over the derived values.
start_time defaults to current moment.
"""
if not fact.activity:
return None
serialized = fact.serialized_name()
start_timestamp = timegm((fact.start_time or dt.datetime.now()).timetuple())
end_timestamp = fact.end_time or 0
if end_timestamp:
end_timestamp = timegm(end_timestamp.timetuple())
new_id = self.conn.AddFact(serialized,
start_timestamp,
end_timestamp,
temporary_activity)
# TODO - the parsing should happen just once and preferably here
# we should feed (serialized_activity, start_time, end_time) into AddFact and others
if new_id:
trophies.checker.check_fact_based(fact)
return new_id
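    # Minimal sketch of the serialized syntax accepted above, assuming Fact
    # accepts the serialized form described in the docstring (activity,
    # category, description and tag are made up):
    #   storage.add_fact(Fact("12:30-13:00 lunch@Break, with colleagues #food"))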
def stop_tracking(self, end_time = None):
"""Stop tracking current activity. end_time can be passed in if the
activity should have other end time than the current moment"""
end_time = timegm((end_time or dt.datetime.now()).timetuple())
return self.conn.StopTracking(end_time)
def remove_fact(self, fact_id):
"delete fact from database"
self.conn.RemoveFact(fact_id)
def update_fact(self, fact_id, fact, temporary_activity = False):
"""Update fact values. See add_fact for rules.
Update is performed via remove/insert, so the
        fact_id after update should not be used anymore. Instead use the new
        ID that is returned by this function"""
start_time = timegm((fact.start_time or dt.datetime.now()).timetuple())
end_time = fact.end_time or 0
if end_time:
end_time = timegm(end_time.timetuple())
new_id = self.conn.UpdateFact(fact_id,<|fim▁hole|> end_time,
temporary_activity)
trophies.checker.check_update_based(fact_id, new_id, fact)
return new_id
def get_category_activities(self, category_id = None):
"""Return activities for category. If category is not specified, will
return activities that have no category"""
category_id = category_id or -1
return self._to_dict(('id', 'name', 'category_id', 'category'), self.conn.GetCategoryActivities(category_id))
def get_category_id(self, category_name):
"""returns category id by name"""
return self.conn.GetCategoryId(category_name)
def get_activity_by_name(self, activity, category_id = None, resurrect = True):
"""returns activity dict by name and optionally filtering by category.
if activity is found but is marked as deleted, it will be resurrected
unless told otherwise in the resurrect param
"""
category_id = category_id or 0
return self.conn.GetActivityByName(activity, category_id, resurrect)
# category and activity manipulations (normally just via preferences)
def remove_activity(self, id):
self.conn.RemoveActivity(id)
def remove_category(self, id):
self.conn.RemoveCategory(id)
def change_category(self, id, category_id):
return self.conn.ChangeCategory(id, category_id)
def update_activity(self, id, name, category_id):
return self.conn.UpdateActivity(id, name, category_id)
def add_activity(self, name, category_id = -1):
return self.conn.AddActivity(name, category_id)
def update_category(self, id, name):
return self.conn.UpdateCategory(id, name)
def add_category(self, name):
return self.conn.AddCategory(name)<|fim▁end|>
|
fact.serialized_name(),
start_time,
|
<|file_name|>log.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// written by Pierre Chifflier <[email protected]>
use crate::jsonbuilder::{JsonBuilder, JsonError};
use crate::krb::krb5::{KRB5State,KRB5Transaction,test_weak_encryption};
fn krb5_log_response(jsb: &mut JsonBuilder, tx: &mut KRB5Transaction) -> Result<(), JsonError>
{
match tx.error_code {
Some(c) => {
jsb.set_string("msg_type", "KRB_ERROR")?;
jsb.set_string("failed_request", &format!("{:?}", tx.msg_type))?;
jsb.set_string("error_code", &format!("{:?}", c))?;
},
None => { jsb.set_string("msg_type", &format!("{:?}", tx.msg_type))?; },
}
let cname = match tx.cname {
Some(ref x) => format!("{}", x),
None => "<empty>".to_owned(),
};
let realm = match tx.realm {
Some(ref x) => x.0.to_string(),
None => "<empty>".to_owned(),
};
let sname = match tx.sname {
Some(ref x) => format!("{}", x),
None => "<empty>".to_owned(),
};
let encryption = match tx.etype {
Some(ref x) => format!("{:?}", x),
None => "<none>".to_owned(),
};
jsb.set_string("cname", &cname)?;
jsb.set_string("realm", &realm)?;
jsb.set_string("sname", &sname)?;
jsb.set_string("encryption", &encryption)?;
jsb.set_bool("weak_encryption", tx.etype.map_or(false,test_weak_encryption))?;
return Ok(());
}
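// Illustrative EVE output of the logger above for a successful exchange
// (all field values are assumed, not taken from a real capture):
//   {"msg_type":"KRB_AS_REP","cname":"user1","realm":"EXAMPLE.COM",
//    "sname":"krbtgt/EXAMPLE.COM","encryption":"AES256-CTS-HMAC-SHA1-96",
//    "weak_encryption":false}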
#[no_mangle]
pub extern "C" fn rs_krb5_log_json_response(jsb: &mut JsonBuilder, _state: &mut KRB5State, tx: &mut KRB5Transaction) -> bool
{
krb5_log_response(jsb, tx).is_ok()
}<|fim▁end|>
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
<|file_name|>_student_t_likelihood_function.py<|end_file_name|><|fim▁begin|>"""A likelihood function representing a Student-t distribution.
Author:
Ilias Bilionis
Date:
1/21/2013
"""
__all__ = ['StudentTLikelihoodFunction']
import numpy as np
import scipy
import math
from . import GaussianLikelihoodFunction
class StudentTLikelihoodFunction(GaussianLikelihoodFunction):
"""An object representing a Student-t likelihood function."""
# The degrees of freedom
_nu = None
@property
def nu(self):
"""Get the degrees of freedom."""
return self._nu
@nu.setter
def nu(self, value):
"""Set the degrees of freedom."""
if not isinstance(value, float):
raise TypeError('nu must be a float.')
self._nu = value
def __init__(self, nu, num_input=None, data=None, mean_function=None, cov=None,
name='Student-t Likelihood Function'):
"""Initialize the object.
Arguments:
nu --- The degrees of freedom of the distribution.
Keyword Arguments
num_input --- The number of inputs. Optional, if
mean_function is a proper Function.
data --- The observed data. A vector. Optional,
if mean_function is a proper Function.
It can be set later.
mean_function --- The mean function. See the super class
for the description.
cov --- The covariance matrix. It can either be
a positive definite matrix, or a number.
                              The data or a proper mean_function is
                              presumed.
name --- A name for the likelihood function.
"""
self.nu = nu
super(StudentTLikelihoodFunction, self).__init__(num_input=num_input,
data=data,
mean_function=mean_function,
cov=cov,
name=name)<|fim▁hole|>
def __call__(self, x):
"""Evaluate the function at x."""
mu = self.mean_function(x)
y = scipy.linalg.solve_triangular(self.L_cov, self.data - mu)
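        # y is the whitened residual: assuming L_cov is the (lower) Cholesky
        # factor of cov from the Gaussian superclass, y = L_cov^{-1} (data - mu).
        # The value returned below is then the multivariate Student-t
        # log-density up to an additive constant:
        #   -((nu + num_data) / 2) * log(1 + y'y / nu)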
return (
- 0.5 * (self.nu + self.num_data) * math.log(1. + np.dot(y, y) / self.nu))<|fim▁end|>
| |
<|file_name|>Field.js<|end_file_name|><|fim▁begin|>var Core = require('cw-core');
var Exception = Core.Exception;
var ArgumentNullException = Core.ArgumentNullException;
var ArgumentException = Core.ArgumentException;
var Arr = Core.Arr;
var Enumerable = require('linq');
var _ = require('underscore');
var Field = (function () {
function Field(form, name) {
this._rules = [];
if (form == null) {
throw new ArgumentNullException('form');
}
this._form = form;
this.name = name;
}
Object.defineProperty(Field.prototype, "name", {
get: function () {
return this._name;
},
set: function (name) {
this._name = name;
},
enumerable: true,
configurable: true
});
Field.prototype.addRule = function () {
var args = [];
for (var _i = 0; _i < (arguments.length - 0); _i++) {
args[_i] = arguments[_i + 0];
}
return this.insertRule.apply(this, [this._rules.length].concat(args));
};
Field.prototype.insertRule = function (idx, rule) {
var args = [];
for (var _i = 0; _i < (arguments.length - 2); _i++) {
args[_i] = arguments[_i + 2];
}
if (idx < 0) {
idx = this._rules.length + idx + 1;
}
if (_.isString(rule)) {
this._rules.splice(idx, 0, new RuleObject(rule, null, null, args));
} else if (!_.isUndefined(rule.validate)) {
var name = Arr.get(args, 0, null);
            // drop the rule name from the argument list; splice() returns the
            // removed elements, so its result must not be assigned back to args
            args.splice(0, 1);
this._rules.splice(idx, 0, new RuleObject(name, null, rule, args));
} else if (_.isFunction(rule)) {
this._rules.splice(idx, 0, new RuleObject(null, rule, null, args));
} else {
throw new ArgumentException('rule', 'rule parameter must be string or function.');
}
return this;
};
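    // Illustrative usage of the rule API above (rule names are assumed to be
    // registered on the owning form and resolvable via getValidator):
    //   field.addRule('required').addRule('maxLength', 32);
    //   field.insertRule(0, 'trim');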
Field.prototype.removeRule = function (rule) {
var idx;
while ((idx = this.findRule(rule)) >= 0) {
delete this._rules[idx];
}
return this;
};
Field.prototype.validate = function (v, result) {
var _this = this;
var rules = Enumerable.from(this._rules);
var error = null;
var validated = true;
var validatedValue = v;
rules.forEach(function (rule) {
if (validated) {
var args = {
value: validatedValue,
result: result,
parameters: rule.arguments,
form: _this._form
};
                // use a distinct name: hoisting of `var result` would shadow
                // the `result` parameter captured in `args` above
                var ruleResult = rule.validate(_this, args);
                if (_.isBoolean(ruleResult)) {
                    validated = validated && ruleResult;
                } else {
                    validatedValue = ruleResult;
                }
if (!validated) {
error = {
field: _this,
rule: rule.name,
value: validatedValue,
arguments: rule.arguments
};
}
}
});
return {
hasError: !validated,
error: error,
validatedValue: validatedValue
};
};
Field.prototype.findRule = function (rule) {
if (_.isString(rule)) {
return Enumerable.from(this._rules).indexOf(function (v) {
return v.name == rule;
});
} else if (_.isFunction(rule)) {
return Enumerable.from(this._rules).indexOf(function (v) {
return v.validator == rule;
});
}
return -1;
};
<|fim▁hole|> Object.defineProperty(Field.prototype, "form", {
get: function () {
return this._form;
},
enumerable: true,
configurable: true
});
return Field;
})();
var RuleObject = (function () {
    function RuleObject(name, func, validator, args) {
        this.name = name;
        this.function = func;
        this.validator = validator;
        // the parameter is named `args` to avoid shadowing the implicit
        // `arguments` object (using `arguments` as a parameter name is
        // also an error in strict mode)
        this.arguments = args;
    }
RuleObject.prototype.validate = function (field, args) {
if (this.function != null) {
return this.function.apply(field, [args.value].concat(this.arguments));
} else if (this.validator != null) {
return this.validator.validate(args);
} else {
var validator = field.form.getValidator(this.name);
if (!validator) {
throw new Exception('Rule:' + this.name + " is not found.");
}
return validator.validate(args);
}
};
return RuleObject;
})();
module.exports = Field;
//# sourceMappingURL=Field.js.map<|fim▁end|>
| |
<|file_name|>assignability-trait.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that type assignability is used to search for instances when
// making method calls, but only if there aren't any matches without
// it.
trait iterable<A> {
fn iterate(&self, blk: |x: &A| -> bool) -> bool;
}
impl<'a,A> iterable<A> for &'a [A] {
fn iterate(&self, f: |x: &A| -> bool) -> bool {
self.iter().advance(f)
}
}
impl<A> iterable<A> for Vec<A> {
fn iterate(&self, f: |x: &A| -> bool) -> bool {
self.iter().advance(f)
}
}
fn length<A, T: iterable<A>>(x: T) -> uint {<|fim▁hole|> true
});
return len;
}
pub fn main() {
let x: Vec<int> = vec!(0,1,2,3);
// Call a method
x.iterate(|y| { assert!(*x.get(*y as uint) == *y); true });
// Call a parameterized function
assert_eq!(length(x.clone()), x.len());
// Call a parameterized function, with type arguments that require
// a borrow
assert_eq!(length::<int, &[int]>(x.as_slice()), x.len());
// Now try it with a type that *needs* to be borrowed
let z = [0,1,2,3];
// Call a method
z.iterate(|y| { assert!(z[*y] == *y); true });
// Call a parameterized function
assert_eq!(length::<int, &[int]>(z), z.len());
}<|fim▁end|>
|
let mut len = 0;
x.iterate(|_y| {
len += 1;
|
<|file_name|>serialServer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import serial
gSerialName = '/dev/ttyS0'
gBaudrate = 9600
gTimeout = 0
gRequestByte = 1
if __name__ == "__main__":
ser = serial.Serial(
port = gSerialName,
baudrate = gBaudrate,
bytesize = serial.EIGHTBITS,
parity = serial.PARITY_NONE,
stopbits = serial.STOPBITS_ONE,
timeout = gTimeout,
xonxoff = False,
rtscts = False,
writeTimeout = None,
dsrdtr = False,
interCharTimeout = None)
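    # For testing without real hardware, a virtual serial pair can be created
    # with socat (assumed to be installed):
    #   socat -d -d pty,raw,echo=0 pty,raw,echo=0
    # then point gSerialName at one of the PTY paths socat reports.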
    print 'waiting for message... ',
print ser.portstr + ',',
print str(ser.timeout) + ',',
print ser.baudrate<|fim▁hole|> while True:
r = ser.read(gRequestByte)
if 0 != len(r):
print repr(r)
print
ser.close()<|fim▁end|>
| |
<|file_name|>cloudspeech_demo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A demo of the Google CloudSpeech recognizer."""
import argparse
import locale
import logging
from aiy.board import Board, Led
from aiy.cloudspeech import CloudSpeechClient
<|fim▁hole|> return ('turn on the light',
'turn off the light',
'blink the light',
'goodbye')
return None
def locale_language():
language, _ = locale.getdefaultlocale()
return language
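# Illustrative invocation (flag defined by the argparse setup below):
#   python3 cloudspeech_demo.py --language en_US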
def main():
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(description='Assistant service example.')
parser.add_argument('--language', default=locale_language())
args = parser.parse_args()
logging.info('Initializing for language %s...', args.language)
hints = get_hints(args.language)
client = CloudSpeechClient()
with Board() as board:
while True:
if hints:
logging.info('Say something, e.g. %s.' % ', '.join(hints))
else:
logging.info('Say something.')
text = client.recognize(language_code=args.language,
hint_phrases=hints)
if text is None:
logging.info('You said nothing.')
continue
logging.info('You said: "%s"' % text)
text = text.lower()
if 'turn on the light' in text:
board.led.state = Led.ON
elif 'turn off the light' in text:
board.led.state = Led.OFF
elif 'blink the light' in text:
board.led.state = Led.BLINK
elif 'goodbye' in text:
break
if __name__ == '__main__':
main()<|fim▁end|>
|
def get_hints(language_code):
if language_code.startswith('en_'):
|
<|file_name|>partial.js<|end_file_name|><|fim▁begin|>/* */
var convert = require('./convert');<|fim▁hole|><|fim▁end|>
|
module.exports = convert('partial', require('../partial'));
|
<|file_name|>test_recent_enrollments.py<|end_file_name|><|fim▁begin|>"""
Tests for the recently enrolled messaging within the Dashboard.
"""
import datetime
import unittest
import ddt
from django.conf import settings
from django.urls import reverse
from django.utils.timezone import now
from opaque_keys.edx import locator
from pytz import UTC
from six.moves import range, zip
from common.test.utils import XssTestMixin
from course_modes.tests.factories import CourseModeFactory
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration_context
from student.models import CourseEnrollment, DashboardConfiguration
from student.tests.factories import UserFactory
from student.views import get_course_enrollments
from student.views.dashboard import _get_recently_enrolled_courses
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@ddt.ddt
class TestRecentEnrollments(ModuleStoreTestCase, XssTestMixin):
"""
Unit tests for getting the list of courses for a logged in user
"""
PASSWORD = 'test'
def setUp(self):
"""
Add a student
"""
super(TestRecentEnrollments, self).setUp()
self.student = UserFactory()
self.student.set_password(self.PASSWORD)
self.student.save()
# Old Course
old_course_location = locator.CourseLocator('Org0', 'Course0', 'Run0')
__, enrollment = self._create_course_and_enrollment(old_course_location)
enrollment.created = datetime.datetime(1900, 12, 31, 0, 0, 0, 0, tzinfo=UTC)
enrollment.save()
# New Course
course_location = locator.CourseLocator('Org1', 'Course1', 'Run1')
self.course, self.enrollment = self._create_course_and_enrollment(course_location)
def _create_course_and_enrollment(self, course_location):
""" Creates a course and associated enrollment. """
course = CourseFactory.create(
org=course_location.org,
number=course_location.course,
run=course_location.run
)
enrollment = CourseEnrollment.enroll(self.student, course.id)
return course, enrollment
def _configure_message_timeout(self, timeout):
"""Configure the amount of time the enrollment message will be displayed. """
config = DashboardConfiguration(recent_enrollment_time_delta=timeout)
config.save()
def test_recently_enrolled_courses(self):
"""<|fim▁hole|> # get courses through iterating all courses
courses_list = list(get_course_enrollments(self.student, None, []))
self.assertEqual(len(courses_list), 2)
recent_course_list = _get_recently_enrolled_courses(courses_list)
self.assertEqual(len(recent_course_list), 1)
def test_zero_second_delta(self):
"""
Tests that the recent enrollment list is empty if configured to zero seconds.
"""
self._configure_message_timeout(0)
courses_list = list(get_course_enrollments(self.student, None, []))
self.assertEqual(len(courses_list), 2)
recent_course_list = _get_recently_enrolled_courses(courses_list)
self.assertEqual(len(recent_course_list), 0)
def test_enrollments_sorted_most_recent(self):
"""
Test that the list of newly created courses are properly sorted to show the most
recent enrollments first.
Also test recent enrollment message rendered appropriately for more than two courses.
"""
self._configure_message_timeout(600)
# Create a number of new enrollments and courses, and force their creation behind
# the first enrollment
courses = []
for idx, seconds_past in zip(list(range(2, 6)), [5, 10, 15, 20]):
course_location = locator.CourseLocator(
'Org{num}'.format(num=idx),
'Course{num}'.format(num=idx),
'Run{num}'.format(num=idx)
)
course, enrollment = self._create_course_and_enrollment(course_location)
enrollment.created = now() - datetime.timedelta(seconds=seconds_past)
enrollment.save()
courses.append(course)
courses_list = list(get_course_enrollments(self.student, None, []))
self.assertEqual(len(courses_list), 6)
recent_course_list = _get_recently_enrolled_courses(courses_list)
self.assertEqual(len(recent_course_list), 5)
self.assertEqual(recent_course_list[1].course.id, courses[0].id)
self.assertEqual(recent_course_list[2].course.id, courses[1].id)
self.assertEqual(recent_course_list[3].course.id, courses[2].id)
self.assertEqual(recent_course_list[4].course.id, courses[3].id)
self.client.login(username=self.student.username, password=self.PASSWORD)
response = self.client.get(reverse("dashboard"))
# verify recent enrollment message
self.assertContains(
response,
            'Thank you for enrolling in:'
)
self.assertContains(
response,
', '.join(enrollment.course.display_name for enrollment in recent_course_list)
)
def test_dashboard_rendering_with_single_course(self):
"""
Tests that the dashboard renders the recent enrollment message appropriately for single course.
"""
self._configure_message_timeout(600)
self.client.login(username=self.student.username, password=self.PASSWORD)
response = self.client.get(reverse("dashboard"))
self.assertContains(
response,
"Thank you for enrolling in {course_name}".format(course_name=self.course.display_name)
)
def test_dashboard_rendering_with_two_courses(self):
"""
Tests that the dashboard renders the recent enrollment message appropriately for two courses.
"""
self._configure_message_timeout(600)
course_location = locator.CourseLocator(
'Org2',
'Course2',
'Run2'
)
course, _ = self._create_course_and_enrollment(course_location)
self.client.login(username=self.student.username, password=self.PASSWORD)
response = self.client.get(reverse("dashboard"))
courses_enrollments = list(get_course_enrollments(self.student, None, []))
courses_enrollments.sort(key=lambda x: x.created, reverse=True)
self.assertEqual(len(courses_enrollments), 3)
recent_course_enrollments = _get_recently_enrolled_courses(courses_enrollments)
self.assertEqual(len(recent_course_enrollments), 2)
self.assertContains(
response,
"Thank you for enrolling in:".format(course_name=self.course.display_name)
)
self.assertContains(
response,
' and '.join(enrollment.course.display_name for enrollment in recent_course_enrollments)
)
def test_dashboard_escaped_rendering(self):
"""
Tests that the dashboard renders the escaped recent enrollment messages appropriately.
"""
self._configure_message_timeout(600)
self.client.login(username=self.student.username, password=self.PASSWORD)
# New Course
course_location = locator.CourseLocator('TestOrg', 'TestCourse', 'TestRun')
xss_content = "<script>alert('XSS')</script>"
course = CourseFactory.create(
org=course_location.org,
number=course_location.course,
run=course_location.run,
display_name=xss_content
)
CourseEnrollment.enroll(self.student, course.id)
response = self.client.get(reverse("dashboard"))
self.assertContains(response, "Thank you for enrolling in")
# Check if response is escaped
self.assert_no_xss(response, xss_content)<|fim▁end|>
|
Test if the function for filtering recent enrollments works appropriately.
"""
self._configure_message_timeout(60)
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
# Create your models here.
class Event(models.Model):
url = models.URLField(null=True)
img_url = models.URLField(null=True)<|fim▁hole|>
def __str__(self):
return self.title<|fim▁end|>
|
title = models.CharField(max_length=200)
description = models.TextField()
|
<|file_name|>renderer.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::error::Error;
use super::format::{Format, DefaultFormat};<|fim▁hole|>use super::super::ast::Block;
#[derive(Debug)]
pub enum RenderError {
ParseError(ParseError),
}
///
/// # Example
///
/// ```
/// use squid::html::Renderer;
/// use squid::ast::{Block, HeadingLevel};
///
/// let blocks = vec![
/// Ok(Block::Heading(HeadingLevel::Level1, "Hello World".into())),
/// ];
///
/// let mut renderer = Renderer::new(blocks.into_iter());
///
/// for node in renderer {
/// println!("{}", node.unwrap());
/// }
/// ```
///
/// ## Output
/// ```text
/// <h1>hello world</h1>
/// ```
///
#[derive(Debug)]
pub struct Renderer<F, I>
where
F: Format + 'static,
I: Iterator<Item = Result<Block, ParseError>>,
{
// Not using Cow because Cow would require F to be `Clone`able
format: F,
input: I,
}
impl<I> Renderer<DefaultFormat, I>
where
I: Iterator<Item = Result<Block, ParseError>>,
{
///
/// Creates a new renderer with the default implementation of `Format`.
///
pub fn new(input: I) -> Self {
Renderer {
input,
format: DefaultFormat,
}
}
}
impl fmt::Display for RenderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for RenderError {
fn description(&self) -> &str {
match *self {
RenderError::ParseError(ref err) => err.description(),
}
}
fn cause(&self) -> Option<&Error> {
match *self {
RenderError::ParseError(ref err) => Some(err),
}
}
}
impl From<ParseError> for RenderError {
fn from(err: ParseError) -> Self {
RenderError::ParseError(err)
}
}
impl<F, I> Renderer<F, I>
where
F: Format + 'static,
I: Iterator<Item = Result<Block, ParseError>>,
{
pub fn with_format(format: F, input: I) -> Self {
Renderer { format, input }
}
}
impl<F, I> Iterator for Renderer<F, I>
where
F: Format + 'static,
I: Iterator<Item = Result<Block, ParseError>>,
{
type Item = Result<Output, RenderError>;
fn next(&mut self) -> Option<Self::Item> {
let node = self.input.next()?.and_then(|block| {
let mut builder = Builder::new();
match block {
Block::Heading(level, content) => self.format.heading(&mut builder, level, content),
Block::Paragraph(text) => self.format.paragraph(&mut builder, text),
Block::Quote(text) => self.format.quote(&mut builder, text),
Block::List(list_type, items) => self.format.list(&mut builder, list_type, items),
_ => unimplemented!(),
}
Ok(builder.consume())
});
Some(node.map_err(Into::into))
}
}<|fim▁end|>
|
use super::builders::Builder;
use super::output::Output;
use super::super::error::ParseError;
|
<|file_name|>test-platform.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Platform} from '../../src/service/platform-impl';
describe('Platform', () => {
let isIos;
let isAndroid;
let isChrome;
let isFirefox;
let isSafari;
let isIe;
let isEdge;
let isWebKit;
let isStandalone;
let majorVersion;
let iosVersion;
let iosMajorVersion;
let userAgent;
beforeEach(() => {
isIos = false;
isAndroid = false;
isChrome = false;
isSafari = false;
isFirefox = false;
isIe = false;
isEdge = false;
isWebKit = false;
isStandalone = false;
majorVersion = 0;
iosVersion = '';
iosMajorVersion = null;
userAgent = '';
});
function testUserAgent(userAgentString) {
const platform = new Platform({navigator: {userAgent: userAgentString}});
expect(platform.isIos()).to.equal(isIos);
expect(platform.isAndroid()).to.equal(isAndroid);
expect(platform.isChrome()).to.equal(isChrome);
expect(platform.isSafari()).to.equal(isSafari);
expect(platform.isFirefox()).to.equal(isFirefox);
expect(platform.isIe()).to.equal(isIe);
expect(platform.isEdge()).to.equal(isEdge);
expect(platform.isWebKit()).to.equal(isWebKit);
expect(platform.getMajorVersion()).to.equal(majorVersion);
expect(platform.getIosVersionString()).to.equal(iosVersion);
expect(platform.getIosMajorVersion()).to.equal(iosMajorVersion);
}
function testStandalone(userAgentString, standAloneBoolean) {
const platform = new Platform({
navigator: {
standalone: standAloneBoolean,
userAgent: userAgentString,
},
});
expect(platform.isStandalone()).to.equal(isStandalone);
}
it('should tolerate empty or null', () => {
testUserAgent(null);
testUserAgent('');
testUserAgent(' ');
testStandalone(null, null);
testStandalone('', null);<|fim▁hole|> it('iPhone 6 Plus v8', () => {
isIos = true;
isSafari = true;
isWebKit = true;
majorVersion = 8;
iosVersion = '8.0';
iosMajorVersion = 8;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X)' +
' AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0' +
' Mobile/12A4345d Safari/600.1.4';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('iPhone 6 Plus v9', () => {
isIos = true;
isSafari = true;
isWebKit = true;
majorVersion = 9;
iosVersion = '9.3';
iosMajorVersion = 9;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3 like Mac OS X)' +
' AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0' +
' Mobile/13E230 Safari/601.1';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('iPhone 6 Plus no version', () => {
isIos = true;
isSafari = true;
isWebKit = true;
majorVersion = 9;
iosVersion = '9.3';
iosMajorVersion = 9;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3 like Mac OS X)' +
' AppleWebKit/601.1.46 (KHTML, like Gecko)' +
' Mobile/13E230 Safari/601.1';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('iPhone ios 10.2.1', () => {
isIos = true;
isSafari = true;
isWebKit = true;
majorVersion = 10;
iosVersion = '10.2.1';
iosMajorVersion = 10;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_2_1 like Mac OS X)' +
' AppleWebKit/602.4.6 (KHTML, like Gecko) Version/10.0' +
' Mobile/14D27 Safari/602.1';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('iPad 2', () => {
isIos = true;
isSafari = true;
isWebKit = true;
majorVersion = 7;
iosVersion = '7.0';
iosMajorVersion = 7;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPad; CPU OS 7_0 like Mac OS X)' +
' AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0' +
' Mobile/11A465 Safari/9537.53';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('iPhone ios 10.2, Chrome ios', () => {
isIos = true;
isChrome = true;
isWebKit = true;
majorVersion = 56;
iosVersion = '10.2';
iosMajorVersion = 10;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_2 like Mac OS X)' +
' AppleWebKit/602.1.50 (KHTML, like Gecko) CriOS/56.0.2924.73' +
' Mobile/16D32 Safari/602.1';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('Desktop Safari', () => {
isSafari = true;
isWebKit = true;
majorVersion = 7;
userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) ' +
'AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 ' +
'Safari/7046A194A';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('Nexus 6 Chrome', () => {
isAndroid = true;
isChrome = true;
isWebKit = true;
majorVersion = 44;
userAgent = 'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 6 Build/LYZ28E)' +
' AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.20' +
' Mobile Safari/537.36';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('Firefox', () => {
isFirefox = true;
majorVersion = 40;
userAgent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) ' +
'Gecko/20100101 Firefox/40.1';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('Firefox ios', () => {
isIos = true;
isFirefox = true;
isWebKit = true;
majorVersion = 7;
iosVersion = '10.3.1';
iosMajorVersion = 10;
isStandalone = true;
userAgent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X)' +
' AppleWebKit/603.1.30 (KHTML, like Gecko) FxiOS/7.5b3349' +
' Mobile/14E304 Safari/603.1.30';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('IE', () => {
isIe = true;
majorVersion = 10;
userAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0;' +
' InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('IEMobile', () => {
isIe = true;
majorVersion = 10;
userAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0;' +
' Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 520)';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
it('Edge', () => {
isEdge = true;
majorVersion = 12;
userAgent = 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36' +
' (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36' +
' Edge/12.10136';
testUserAgent(userAgent);
testStandalone(userAgent, isStandalone);
});
});<|fim▁end|>
|
testStandalone(' ', null);
});
|
<|file_name|>JobUtils.java<|end_file_name|><|fim▁begin|>/*
* The MIT License
*
* Copyright 2019 Intuit Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.<|fim▁hole|>
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.function.Predicate;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
/**
*
* @author pthomas3
*/
public class JobUtils {
public static void zip(File src, File dest) {
try {
src = src.getCanonicalFile();
FileOutputStream fos = new FileOutputStream(dest);
ZipOutputStream zipOut = new ZipOutputStream(fos);
zip(src, "", zipOut, 0);
zipOut.close();
fos.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void zip(File fileToZip, String fileName, ZipOutputStream zipOut, int level) throws IOException {
if (fileToZip.isHidden()) {
return;
}
if (fileToZip.isDirectory()) {
String entryName = fileName;
zipOut.putNextEntry(new ZipEntry(entryName + "/"));
zipOut.closeEntry();
File[] children = fileToZip.listFiles();
for (File childFile : children) {
String childFileName = childFile.getName();
// TODO improve ?
if (childFileName.equals("target") || childFileName.equals("build")) {
continue;
}
if (level != 0) {
childFileName = entryName + "/" + childFileName;
}
zip(childFile, childFileName, zipOut, level + 1);
}
return;
}
ZipEntry zipEntry = new ZipEntry(fileName);
zipOut.putNextEntry(zipEntry);
FileInputStream fis = new FileInputStream(fileToZip);
byte[] bytes = new byte[1024];
int length;
while ((length = fis.read(bytes)) >= 0) {
zipOut.write(bytes, 0, length);
}
fis.close();
}
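    // Illustrative round-trip of the public API above (paths are assumed):
    //   JobUtils.zip(new File("src/project"), new File("/tmp/project.zip"));
    //   JobUtils.unzip(new File("/tmp/project.zip"), new File("/tmp/extracted"));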
public static void unzip(File src, File dest) {
try {
byte[] buffer = new byte[1024];
ZipInputStream zis = new ZipInputStream(new FileInputStream(src));
ZipEntry zipEntry = zis.getNextEntry();
while (zipEntry != null) {
File newFile = createFile(dest, zipEntry);
if (zipEntry.isDirectory()) {
newFile.mkdirs();
} else {
File parentFile = newFile.getParentFile();
if (parentFile != null && !parentFile.exists()) {
parentFile.mkdirs();
}
FileOutputStream fos = new FileOutputStream(newFile);
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
fos.close();
}
zipEntry = zis.getNextEntry();
}
zis.closeEntry();
zis.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
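    // Guards against "zip slip": an entry whose canonical path would escape
    // the destination directory is rejected with an IOException.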
private static File createFile(File destinationDir, ZipEntry zipEntry) throws IOException {
File destFile = new File(destinationDir, zipEntry.getName());
String destDirPath = destinationDir.getCanonicalPath();
String destFilePath = destFile.getCanonicalPath();
if (!destFilePath.startsWith(destDirPath)) {
throw new IOException("entry outside target dir: " + zipEntry.getName());
}
return destFile;
}
public static File getFirstFileMatching(File parent, Predicate<String> predicate) {
File[] files = parent.listFiles((f, n) -> predicate.test(n));
return files == null || files.length == 0 ? null : files[0];
}
}<|fim▁end|>
|
*/
package com.intuit.karate.job;
|
<|file_name|>toy_build.py<|end_file_name|><|fim▁begin|># #
# Copyright 2013-2014 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Toy build unit test
@author: Kenneth Hoste (Ghent University)
"""
import glob
import grp
import os
import re
import shutil
import stat
import sys
import tempfile
from test.framework.utilities import EnhancedTestCase
from unittest import TestLoader
from unittest import main as unittestmain
from vsc.utils.fancylogger import setLogLevelDebug, logToScreen
import easybuild.tools.module_naming_scheme # required to dynamically load test module naming scheme(s)
from easybuild.framework.easyconfig.easyconfig import EasyConfig
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir, read_file, write_file
from easybuild.tools.modules import modules_tool
class ToyBuildTest(EnhancedTestCase):
"""Toy build unit test."""
def setUp(self):
"""Test setup."""
super(ToyBuildTest, self).setUp()
fd, self.dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
# adjust PYTHONPATH such that test easyblocks are found
import easybuild
eb_blocks_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'sandbox'))
if not eb_blocks_path in sys.path:
sys.path.append(eb_blocks_path)
easybuild = reload(easybuild)
import easybuild.easyblocks
reload(easybuild.easyblocks)
reload(easybuild.tools.module_naming_scheme)
# clear log
write_file(self.logfile, '')
def tearDown(self):
"""Cleanup."""
super(ToyBuildTest, self).tearDown()
# remove logs
if os.path.exists(self.dummylogfn):
os.remove(self.dummylogfn)
def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versionsuffix=''):
"""Check whether toy build succeeded."""
full_version = ''.join([versionprefix, version, versionsuffix])
# check for success
success = re.compile("COMPLETED: Installation ended successfully")
        self.assertTrue(success.search(outtxt), "COMPLETED message found in '%s'" % outtxt)
# if the module exists, it should be fine
toy_module = os.path.join(installpath, 'modules', 'all', 'toy', full_version)
msg = "module for toy build toy/%s found (path %s)" % (full_version, toy_module)
self.assertTrue(os.path.exists(toy_module), msg)
# module file is symlinked according to moduleclass
toy_module_symlink = os.path.join(installpath, 'modules', 'tools', 'toy', full_version)
self.assertTrue(os.path.islink(toy_module_symlink))
self.assertTrue(os.path.exists(toy_module_symlink))
# make sure installation log file and easyconfig file are copied to install dir
software_path = os.path.join(installpath, 'software', 'toy', full_version)
install_log_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*.log' % version)
self.assertTrue(len(glob.glob(install_log_path_pattern)) == 1, "Found 1 file at %s" % install_log_path_pattern)
# make sure test report is available
test_report_path_pattern = os.path.join(software_path, 'easybuild', 'easybuild-toy-%s*test_report.md' % version)
self.assertTrue(len(glob.glob(test_report_path_pattern)) == 1, "Found 1 file at %s" % test_report_path_pattern)
ec_file_path = os.path.join(software_path, 'easybuild', 'toy-%s.eb' % full_version)
self.assertTrue(os.path.exists(ec_file_path))
devel_module_path = os.path.join(software_path, 'easybuild', 'toy-%s-easybuild-devel' % full_version)
self.assertTrue(os.path.exists(devel_module_path))
def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True, fails=False, verbose=True,
raise_error=False, test_report=None, versionsuffix=''):
"""Perform a toy build."""
if extra_args is None:
extra_args = []
test_readme = False
if ec_file is None:
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
test_readme = True
full_ver = '0.0%s' % versionsuffix
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
]
if tmpdir is not None:
args.append('--tmpdir=%s' % tmpdir)
if test_report is not None:
args.append('--dump-test-report=%s' % test_report)
args.extend(extra_args)
myerr = None
try:
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=verbose,
raise_error=raise_error)
except Exception, err:
myerr = err
if raise_error:
raise myerr
if verify:
self.check_toy(self.test_installpath, outtxt, versionsuffix=versionsuffix)
if test_readme:
# make sure postinstallcmds were used
toy_install_path = os.path.join(self.test_installpath, 'software', 'toy', full_ver)
self.assertEqual(read_file(os.path.join(toy_install_path, 'README')), "TOY\n")
# make sure full test report was dumped, and contains sensible information
if test_report is not None:
self.assertTrue(os.path.exists(test_report))
if fails:
test_result = 'FAIL'
else:
test_result = 'SUCCESS'
regex_patterns = [
r"Test result[\S\s]*Build succeeded for %d out of 1" % (not fails),
r"Overview of tested easyconfig[\S\s]*%s[\S\s]*%s" % (test_result, os.path.basename(ec_file)),
r"Time info[\S\s]*start:[\S\s]*end:",
r"EasyBuild info[\S\s]*framework version:[\S\s]*easyblocks ver[\S\s]*command line[\S\s]*configuration",
r"System info[\S\s]*cpu model[\S\s]*os name[\S\s]*os version[\S\s]*python version",
r"List of loaded modules",
r"Environment",
]
test_report_txt = read_file(test_report)
for regex_pattern in regex_patterns:
regex = re.compile(regex_pattern, re.M)
msg = "Pattern %s found in full test report: %s" % (regex.pattern, test_report_txt)
self.assertTrue(regex.search(test_report_txt), msg)
return outtxt
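    # Illustrative call of the helper above from a test case (arguments are
    # assumed): self.test_toy_build(extra_args=['--set-gid-bit'], versionsuffix='-test')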
def test_toy_broken(self):
"""Test deliberately broken toy build."""
tmpdir = tempfile.mkdtemp()
broken_toy_ec = os.path.join(tmpdir, "toy-broken.eb")
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
broken_toy_ec_txt = read_file(toy_ec_file)
        broken_toy_ec_txt += "checksums = ['clearlywrongchecksum']"
write_file(broken_toy_ec, broken_toy_ec_txt)
error_regex = "Checksum verification .* failed"
self.assertErrorRegex(EasyBuildError, error_regex, self.test_toy_build, ec_file=broken_toy_ec, tmpdir=tmpdir,
verify=False, fails=True, verbose=False, raise_error=True)
# make sure log file is retained, also for failed build
log_path_pattern = os.path.join(tmpdir, 'easybuild-*', 'easybuild-toy-0.0*.log')
self.assertTrue(len(glob.glob(log_path_pattern)) == 1, "Log file found at %s" % log_path_pattern)
# make sure individual test report is retained, also for failed build
test_report_fp_pattern = os.path.join(tmpdir, 'easybuild-*', 'easybuild-toy-0.0*test_report.md')
self.assertTrue(len(glob.glob(test_report_fp_pattern)) == 1, "Test report %s found" % test_report_fp_pattern)
# test dumping full test report (doesn't raise an exception)
test_report_fp = os.path.join(self.test_buildpath, 'full_test_report.md')
self.test_toy_build(ec_file=broken_toy_ec, tmpdir=tmpdir, verify=False, fails=True, verbose=False,
raise_error=True, test_report=test_report_fp)
# cleanup
shutil.rmtree(tmpdir)
def test_toy_tweaked(self):
"""Test toy build with tweaked easyconfig, for testing extra easyconfig parameters."""
test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
ec_file = os.path.join(self.test_buildpath, 'toy-0.0-tweaked.eb')
shutil.copy2(os.path.join(test_ecs_dir, 'toy-0.0.eb'), ec_file)
# tweak easyconfig by appending to it
ec_extra = '\n'.join([
"versionsuffix = '-tweaked'",
"modextrapaths = {'SOMEPATH': ['foo/bar', 'baz']}",
"modextravars = {'FOO': 'bar'}",
"modloadmsg = 'THANKS FOR LOADING ME, I AM %(name)s v%(version)s'",
"modtclfooter = 'puts stderr \"oh hai!\"'",
])
write_file(ec_file, ec_extra, append=True)
args = [
ec_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--force',
]
outtxt = self.eb_main(args, do_build=True, verbose=True, raise_error=True)
self.check_toy(self.test_installpath, outtxt, versionsuffix='-tweaked')
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0-tweaked')
toy_module_txt = read_file(toy_module)
self.assertTrue(re.search('setenv\s*FOO\s*"bar"', toy_module_txt))
self.assertTrue(re.search('prepend-path\s*SOMEPATH\s*\$root/foo/bar', toy_module_txt))
self.assertTrue(re.search('prepend-path\s*SOMEPATH\s*\$root/baz', toy_module_txt))
self.assertTrue(re.search('module-info mode load.*\n\s*puts stderr\s*.*I AM toy v0.0', toy_module_txt))
self.assertTrue(re.search('puts stderr "oh hai!"', toy_module_txt))
def test_toy_buggy_easyblock(self):
"""Test build using a buggy/broken easyblock, make sure a traceback is reported."""
ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
kwargs = {
'ec_file': ec_file,
'extra_args': ['--easyblock=EB_toy_buggy'],
'raise_error': True,
'verify': False,
'verbose': False,
}
err_regex = r"crashed with an error.*Traceback[\S\s]*toy_buggy.py.*build_step[\S\s]*global name 'run_cmd'"
self.assertErrorRegex(EasyBuildError, err_regex, self.test_toy_build, **kwargs)
def test_toy_build_formatv2(self):
"""Perform a toy build (format v2)."""
# set $MODULEPATH such that modules for specified dependencies are found
modulepath = os.environ.get('MODULEPATH')
os.environ['MODULEPATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), 'modules'))
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=0.0',
'--toolchain=dummy,dummy',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(self.test_installpath, outtxt)
# restore
if modulepath is not None:
os.environ['MODULEPATH'] = modulepath
else:
del os.environ['MODULEPATH']
def test_toy_build_with_blocks(self):
"""Test a toy build with multiple blocks."""
orig_sys_path = sys.path[:]
# add directory in which easyconfig file can be found to Python search path, since we're not specifying it full path below
tmpdir = tempfile.mkdtemp()
# note get_paths_for expects easybuild/easyconfigs subdir
ecs_path = os.path.join(tmpdir, "easybuild", "easyconfigs")
os.makedirs(ecs_path)
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0-multiple.eb'), ecs_path)
sys.path.append(tmpdir)
args = [
'toy-0.0-multiple.eb',
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
for toy_prefix, toy_version, toy_suffix in [
('', '0.0', '-somesuffix'),
('someprefix-', '0.0', '-somesuffix')
]:
self.check_toy(self.test_installpath, outtxt, version=toy_version,
versionprefix=toy_prefix, versionsuffix=toy_suffix)
# cleanup
shutil.rmtree(tmpdir)
sys.path = orig_sys_path
def test_toy_build_formatv2_sections(self):
"""Perform a toy build (format v2, using sections)."""
versions = {
'0.0': {'versionprefix': '', 'versionsuffix': ''},
'1.0': {'versionprefix': '', 'versionsuffix': ''},
'1.1': {'versionprefix': 'stable-', 'versionsuffix': ''},
'1.5': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'1.6': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'2.0': {'versionprefix': 'stable-', 'versionsuffix': '-early'},
'3.0': {'versionprefix': 'stable-', 'versionsuffix': '-mature'},
}
for version, specs in versions.items():
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'v2.0', 'toy-with-sections.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.pathsep.join([self.test_buildpath, os.path.dirname(__file__)]),
'--software-version=%s' % version,
'--toolchain=dummy,dummy',
'--experimental',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
specs['version'] = version
self.check_toy(self.test_installpath, outtxt, **specs)
def test_toy_download_sources(self):
"""Test toy build with sources that still need to be 'downloaded'."""
tmpdir = tempfile.mkdtemp()
# copy toy easyconfig file, and append source_urls to it
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'), tmpdir)
source_url = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'sandbox', 'sources', 'toy')
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, '\nsource_urls = ["file://%s"]\n' % source_url, append=True)
# unset $EASYBUILD_XPATH env vars, to make sure --prefix is picked up
for cfg_opt in ['build', 'install', 'source']:
del os.environ['EASYBUILD_%sPATH' % cfg_opt.upper()]
sourcepath = os.path.join(tmpdir, 'mysources')
args = [
ec_file,
'--prefix=%s' % tmpdir,
'--sourcepath=%s' % ':'.join([sourcepath, '/bar']), # include senseless path which should be ignored
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True)
self.check_toy(tmpdir, outtxt)
self.assertTrue(os.path.exists(os.path.join(sourcepath, 't', 'toy', 'toy-0.0.tar.gz')))
shutil.rmtree(tmpdir)
def test_toy_permissions(self):
"""Test toy build with custom umask settings."""
toy_ec_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
args = [
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
]
# set umask hard to verify default reliably
orig_umask = os.umask(0022)
# test specifying a non-existing group
allargs = [toy_ec_file] + args + ['--group=thisgroupdoesnotexist']
outtxt, err = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, return_error=True)
err_regex = re.compile("Failed to get group ID .* group does not exist")
self.assertTrue(err_regex.search(outtxt), "Pattern '%s' found in '%s'" % (err_regex.pattern, outtxt))
# determine current group name (at least we can use that)
gid = os.getgid()
curr_grp = grp.getgrgid(gid).gr_name
for umask, cfg_group, ec_group, dir_perms, fil_perms, bin_perms in [
(None, None, None, 0755, 0644, 0755), # default: inherit session umask
(None, None, curr_grp, 0750, 0640, 0750), # default umask, but with specified group in ec
(None, curr_grp, None, 0750, 0640, 0750), # default umask, but with specified group in cfg
(None, 'notagrp', curr_grp, 0750, 0640, 0750), # default umask, but with specified group in both cfg and ec
('000', None, None, 0777, 0666, 0777), # stupid empty umask
('032', None, None, 0745, 0644, 0745), # no write/execute for group, no write for other
('030', None, curr_grp, 0740, 0640, 0740), # no write for group, with specified group
('077', None, None, 0700, 0600, 0700), # no access for other/group
]:
if cfg_group is None and ec_group is None:
allargs = [toy_ec_file]
elif ec_group is not None:
shutil.copy2(toy_ec_file, self.test_buildpath)
tmp_ec_file = os.path.join(self.test_buildpath, os.path.basename(toy_ec_file))
write_file(tmp_ec_file, "\ngroup = '%s'" % ec_group, append=True)
allargs = [tmp_ec_file]
allargs.extend(args)
if umask is not None:
allargs.append("--umask=%s" % umask)
if cfg_group is not None:
allargs.append("--group=%s" % cfg_group)
outtxt = self.eb_main(allargs, logfile=self.dummylogfn, do_build=True, verbose=True)
# verify that installation was correct
self.check_toy(self.test_installpath, outtxt)
# group specified in easyconfig overrules configured group
group = cfg_group
if ec_group is not None:
group = ec_group
# verify permissions
paths_perms = [
# no write permissions for group/other, regardless of umask
(('software', 'toy', '0.0'), dir_perms & ~ 0022),
(('software', 'toy', '0.0', 'bin'), dir_perms & ~ 0022),
(('software', 'toy', '0.0', 'bin', 'toy'), bin_perms & ~ 0022),
]
# only software subdirs are chmod'ed for 'protected' installs, so don't check those if a group is specified
if group is None:
paths_perms.extend([
(('software', ), dir_perms),
(('software', 'toy'), dir_perms),
(('software', 'toy', '0.0', 'easybuild', '*.log'), fil_perms),
(('modules', ), dir_perms),
(('modules', 'all'), dir_perms),
(('modules', 'all', 'toy'), dir_perms),
(('modules', 'all', 'toy', '0.0'), fil_perms),
])
for path, correct_perms in paths_perms:
fullpath = glob.glob(os.path.join(self.test_installpath, *path))[0]
perms = os.stat(fullpath).st_mode & 0777
msg = "Path %s has %s permissions: %s" % (fullpath, oct(correct_perms), oct(perms))
self.assertEqual(perms, correct_perms, msg)
if group is not None:
path_gid = os.stat(fullpath).st_gid
self.assertEqual(path_gid, grp.getgrnam(group).gr_gid)
# cleanup for next iteration
shutil.rmtree(self.test_installpath)
# restore original umask
os.umask(orig_umask)
def test_toy_gid_sticky_bits(self):
"""Test setting gid and sticky bits."""
subdirs = [
(('',), False),
(('software',), False),
(('software', 'toy'), False),
(('software', 'toy', '0.0'), True),
(('modules', 'all'), False),
(('modules', 'all', 'toy'), False),
]
# no gid/sticky bits by default
self.test_toy_build()
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
        # gid/sticky bits are set, but only on (re)created directories
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, bits_set in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
if bits_set:
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
else:
self.assertFalse(perms & stat.S_ISGID, "no gid bit on %s" % fullpath)
self.assertFalse(perms & stat.S_ISVTX, "no sticky bit on %s" % fullpath)
# start with a clean slate, now gid/sticky bits should be set on everything
shutil.rmtree(self.test_installpath)
self.test_toy_build(extra_args=['--set-gid-bit', '--sticky-bit'])
for subdir, _ in subdirs:
fullpath = os.path.join(self.test_installpath, *subdir)
perms = os.stat(fullpath).st_mode
self.assertTrue(perms & stat.S_ISGID, "gid bit set on %s" % fullpath)
self.assertTrue(perms & stat.S_ISVTX, "sticky bit set on %s" % fullpath)
def test_allow_system_deps(self):
"""Test allow_system_deps easyconfig parameter."""
tmpdir = tempfile.mkdtemp()
        # copy toy easyconfig file, and append allow_system_deps to it
shutil.copy2(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'), tmpdir)
ec_file = os.path.join(tmpdir, 'toy-0.0.eb')
write_file(ec_file, "\nallow_system_deps = [('Python', SYS_PYTHON_VERSION)]\n", append=True)
self.test_toy_build(ec_file=ec_file)
shutil.rmtree(tmpdir)
def test_toy_hierarchical(self):
"""Test toy build under example hierarchical module naming scheme."""
self.setup_hierarchical_modules()
mod_prefix = os.path.join(self.test_installpath, 'modules', 'all')
args = [
os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'),
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--debug',
'--unittest-file=%s' % self.logfile,
'--force',
'--robot=%s' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs'),
'--module-naming-scheme=HierarchicalMNS',
]
        # test module paths/contents with goolf build
extra_args = [
'--try-toolchain=goolf,1.4.10',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'OpenMPI', '1.6.4', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# check that toolchain load is expanded to loads for toolchain dependencies,
# except for the ones that extend $MODULEPATH to make the toy module available
modtxt = read_file(toy_module_path)
for dep in ['goolf', 'GCC', 'OpenMPI']:
load_regex = re.compile("load %s" % dep)
            self.assertFalse(load_regex.search(modtxt), "Pattern '%s' found in %s" % (load_regex.pattern, modtxt))
for dep in ['OpenBLAS', 'FFTW', 'ScaLAPACK']:
load_regex = re.compile("load %s" % dep)
            self.assertTrue(load_regex.search(modtxt), "Pattern '%s' not found in %s" % (load_regex.pattern, modtxt))
os.remove(toy_module_path)
# test module path with GCC/4.7.2 build
extra_args = [
'--try-toolchain=GCC,4.7.2',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with GCC/4.7.2 build, pretend to be an MPI lib by setting moduleclass
extra_args = [
'--try-toolchain=GCC,4.7.2',
'--try-amend=moduleclass=mpi',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Compiler', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# 'module use' statements to extend $MODULEPATH are present
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'toy', '0.0')
self.assertTrue(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# ... unless they shouldn't be
extra_args.append('--try-amend=include_modpath_extensions=') # pass empty string as equivalent to False
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'MPI', 'GCC', '4.7.2', 'toy', '0.0')
self.assertFalse(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# test module path with dummy/dummy build
extra_args = [
'--try-toolchain=dummy,dummy',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
self.assertFalse(re.search("module load", modtxt))
os.remove(toy_module_path)
# test module path with dummy/dummy build, pretend to be a compiler by setting moduleclass
extra_args = [
'--try-toolchain=dummy,dummy',
'--try-amend=moduleclass=compiler',
]
self.eb_main(args + extra_args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
# make sure module file is installed in correct path
toy_module_path = os.path.join(mod_prefix, 'Core', 'toy', '0.0')
self.assertTrue(os.path.exists(toy_module_path))
# no dependencies or toolchain => no module load statements in module file
modtxt = read_file(toy_module_path)
modpath_extension = os.path.join(mod_prefix, 'Compiler', 'toy', '0.0')
self.assertTrue(re.search("^module\s*use\s*%s" % modpath_extension, modtxt, re.M))
os.remove(toy_module_path)
# building a toolchain module should also work
args = ['gompi-1.4.10.eb'] + args[1:]
modules_tool().purge()
self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=True, raise_error=True)
def test_toy_advanced(self):
"""Test toy build with extensions and non-dummy toolchain."""
test_dir = os.path.abspath(os.path.dirname(__file__))
os.environ['MODULEPATH'] = os.path.join(test_dir, 'modules')
test_ec = os.path.join(test_dir, 'easyconfigs', 'toy-0.0-gompi-1.3.12.eb')
self.test_toy_build(ec_file=test_ec, versionsuffix='-gompi-1.3.12')
def test_toy_hidden(self):
"""Test installing a hidden module."""
ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'toy-0.0.eb')
self.test_toy_build(ec_file=ec_file, extra_args=['--hidden'], verify=False)
# module file is hidden
toy_module = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '.0.0')
        self.assertTrue(os.path.exists(toy_module), 'Hidden module %s not found' % toy_module)
# installed software is not hidden
toybin = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')
self.assertTrue(os.path.exists(toybin))
def test_module_filepath_tweaking(self):
"""Test using --suffix-modules-path."""
# install test module naming scheme dynamically
test_mns_parent_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
sys.path.append(test_mns_parent_dir)
reload(easybuild)
reload(easybuild.tools)
reload(easybuild.tools.module_naming_scheme)
mns_path = "easybuild.tools.module_naming_scheme.test_module_naming_scheme"
__import__(mns_path, globals(), locals(), [''])
eb_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb')
args = [
eb_file,
'--sourcepath=%s' % self.test_sourcepath,
'--buildpath=%s' % self.test_buildpath,
'--installpath=%s' % self.test_installpath,
'--force',
'--debug',
'--suffix-modules-path=foobarbaz',
'--module-naming-scheme=TestModuleNamingScheme',
]
self.eb_main(args, do_build=True, verbose=True)
mod_file_prefix = os.path.join(self.test_installpath, 'modules')
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'foobarbaz', 'toy', '0.0')))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0')))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 'TOOLS', 'toy', '0.0')))
self.assertTrue(os.path.exists(os.path.join(mod_file_prefix, 't', 'toy', '0.0')))
self.assertTrue(os.path.islink(os.path.join(mod_file_prefix, 't', 'toy', '0.0')))
def test_toy_archived_easyconfig(self):
"""Test archived easyconfig for a succesful build."""
repositorypath = os.path.join(self.test_installpath, 'easyconfigs_archive')
extra_args = [
'--repository=FileRepository',
'--repositorypath=%s' % repositorypath,
]
self.test_toy_build(raise_error=True, extra_args=extra_args)
archived_ec = os.path.join(repositorypath, 'toy', 'toy-0.0.eb')
self.assertTrue(os.path.exists(archived_ec))
ec = EasyConfig(archived_ec)
self.assertEqual(ec.name, 'toy')
self.assertEqual(ec.version, '0.0')
def suite():
""" return all the tests in this file """
return TestLoader().loadTestsFromTestCase(ToyBuildTest)
<|fim▁hole|>if __name__ == '__main__':
#logToScreen(enable=True)
#setLogLevelDebug()
unittestmain()<|fim▁end|>
| |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.test import TestCase<|fim▁hole|> pass<|fim▁end|>
|
class CollectionTests(TestCase):
|
<|file_name|>content_test.py<|end_file_name|><|fim▁begin|>import unittest
from mock import Mock
from biicode.common.model.content import Content
from biicode.common.model.content import ContentDeserializer
from biicode.common.model.content import content_diff
from biicode.common.exception import BiiSerializationException
from biicode.common.model.id import ID
class ContentTest(unittest.TestCase):
def test_deserialize_exception(self):
self.assertRaises(BiiSerializationException,
ContentDeserializer(ID((0, 0, 0))).deserialize,
"wrong object")
self.assertIsNone(ContentDeserializer(ID).deserialize(None))
def test_content_diff(self):
content_load1 = Mock()
content_load2 = Mock()
content_load1.is_binary = Mock(return_value=True)
self.assertEquals(content_diff(content_load1, content_load2),
"Unable to diff binary contents of base")<|fim▁hole|>
def test_content_similarity(self):
content = Content(ID((0, 0, 0)), load=None)
self.assertEquals(content.similarity(content), 1)<|fim▁end|>
|
content_load1.is_binary = Mock(return_value=False)
content_load2.is_binary = Mock(return_value=True)
self.assertEquals(content_diff(content_load1, content_load2),
"Unable to diff binary contents of base")
|
<|file_name|>signal_blocker.hpp<|end_file_name|><|fim▁begin|>//
// detail/signal_blocker.hpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2014 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef ASIO_DETAIL_SIGNAL_BLOCKER_HPP
#define ASIO_DETAIL_SIGNAL_BLOCKER_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include "asio/detail/config.hpp"
#if !defined(ASIO_HAS_THREADS) || defined(ASIO_WINDOWS) \
|| defined(ASIO_WINDOWS_RUNTIME) \
|| defined(__CYGWIN__) || defined(__SYMBIAN32__)
# include "asio/detail/null_signal_blocker.hpp"
#elif defined(ASIO_HAS_PTHREADS)
# include "asio/detail/posix_signal_blocker.hpp"
#else
# error Only Windows and POSIX are supported!
#endif
namespace asio {
namespace detail {
#if !defined(ASIO_HAS_THREADS) || defined(ASIO_WINDOWS) \
|| defined(ASIO_WINDOWS_RUNTIME) \
|| defined(__CYGWIN__) || defined(__SYMBIAN32__)
typedef null_signal_blocker signal_blocker;
#elif defined(ASIO_HAS_PTHREADS)
typedef posix_signal_blocker signal_blocker;
#endif
<|fim▁hole|><|fim▁end|>
|
} // namespace detail
} // namespace asio
#endif // ASIO_DETAIL_SIGNAL_BLOCKER_HPP
|
<|file_name|>list1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic list exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in list2.py.
# A. match_ends
# Given a list of strings, return the count of the number of
# strings where the string length is 2 or more and the first
# and last chars of the string are the same.
# Note: python does not have a ++ operator, but += works.
def match_ends(words):
# +++your code here+++
count = 0
for word in words:
if len(word) >= 2 and word[0] == word[-1]:
count += 1
return count
# B. front_x
# Given a list of strings, return a list with the strings
# in sorted order, except group all the strings that begin with 'x' first.
# e.g. ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields
# ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
# Hint: this can be done by making 2 lists and sorting each of them
# before combining them.
def front_x(words):
  # +++your code here+++
  # Group the words that start with 'x' separately, sort both groups, then combine.
  x_words = sorted(w for w in words if w.startswith('x'))
  other_words = sorted(w for w in words if not w.startswith('x'))
  return x_words + other_words
<|fim▁hole|># Given a list of non-empty tuples, return a list sorted in increasing
# order by the last element in each tuple.
# e.g. [(1, 7), (1, 3), (3, 4, 5), (2, 2)] yields
# [(2, 2), (1, 3), (3, 4, 5), (1, 7)]
# Hint: use a custom key= function to extract the last element from each tuple.
def sort_last(tuples):
  # +++your code here+++
  # Sort by the last element of each tuple.
  return sorted(tuples, key=lambda t: t[-1])
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Calls the above functions with interesting inputs.
def main():
print 'match_ends'
test(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print
print 'front_x'
test(front_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']),
['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test(front_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']),
['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test(front_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']),
['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
print
print 'sort_last'
test(sort_last([(1, 3), (3, 2), (2, 1)]),
[(2, 1), (3, 2), (1, 3)])
test(sort_last([(2, 3), (1, 2), (3, 1)]),
[(3, 1), (1, 2), (2, 3)])
test(sort_last([(1, 7), (1, 3), (3, 4, 5), (2, 2)]),
[(2, 2), (1, 3), (3, 4, 5), (1, 7)])
if __name__ == '__main__':
main()<|fim▁end|>
|
# C. sort_last
|
<|file_name|>origin.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use servo_rand;
use std::cell::RefCell;
use std::rc::Rc;
use url::{Host, Origin};
use url_serde;
use uuid::Uuid;
/// The origin of a URL
#[derive(Clone, Debug, Deserialize, Eq, MallocSizeOf, PartialEq, Serialize)]
pub enum ImmutableOrigin {
/// A globally unique identifier
Opaque(OpaqueOrigin),
/// Consists of the URL's scheme, host and port
Tuple(
String,
#[serde(deserialize_with = "url_serde::deserialize", serialize_with = "url_serde::serialize")]
Host,
u16,
)
}
impl ImmutableOrigin {
pub fn new(origin: Origin) -> ImmutableOrigin {
match origin {
Origin::Opaque(_) => ImmutableOrigin::new_opaque(),
Origin::Tuple(scheme, host, port) => ImmutableOrigin::Tuple(scheme, host, port),
}
}
<|fim▁hole|> pub fn same_origin_domain(&self, other: &MutableOrigin) -> bool {
!other.has_domain() && self == other.immutable()
}
/// Creates a new opaque origin that is only equal to itself.
pub fn new_opaque() -> ImmutableOrigin {
ImmutableOrigin::Opaque(OpaqueOrigin(servo_rand::random_uuid()))
}
pub fn scheme(&self) -> Option<&str> {
match *self {
ImmutableOrigin::Opaque(_) => None,
ImmutableOrigin::Tuple(ref scheme, _, _) => Some(&**scheme),
}
}
pub fn host(&self) -> Option<&Host> {
match *self {
ImmutableOrigin::Opaque(_) => None,
ImmutableOrigin::Tuple(_, ref host, _) => Some(host),
}
}
pub fn port(&self) -> Option<u16> {
match *self {
ImmutableOrigin::Opaque(_) => None,
ImmutableOrigin::Tuple(_, _, port) => Some(port),
}
}
pub fn into_url_origin(self) -> Origin {
match self {
ImmutableOrigin::Opaque(_) => Origin::new_opaque(),
ImmutableOrigin::Tuple(scheme, host, port) => Origin::Tuple(scheme, host, port),
}
}
/// Return whether this origin is a (scheme, host, port) tuple
/// (as opposed to an opaque origin).
pub fn is_tuple(&self) -> bool {
match *self {
ImmutableOrigin::Opaque(..) => false,
ImmutableOrigin::Tuple(..) => true,
}
}
/// <https://html.spec.whatwg.org/multipage/#ascii-serialisation-of-an-origin>
pub fn ascii_serialization(&self) -> String {
self.clone().into_url_origin().ascii_serialization()
}
/// <https://html.spec.whatwg.org/multipage/#unicode-serialisation-of-an-origin>
pub fn unicode_serialization(&self) -> String {
self.clone().into_url_origin().unicode_serialization()
}
}
/// Opaque identifier for URLs that have file or other schemes
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct OpaqueOrigin(Uuid);
malloc_size_of_is_0!(OpaqueOrigin);
/// A representation of an [origin](https://html.spec.whatwg.org/multipage/#origin-2).
#[derive(Clone, Debug)]
pub struct MutableOrigin(Rc<(ImmutableOrigin, RefCell<Option<Host>>)>);
malloc_size_of_is_0!(MutableOrigin);
impl MutableOrigin {
pub fn new(origin: ImmutableOrigin) -> MutableOrigin {
MutableOrigin(Rc::new((origin, RefCell::new(None))))
}
pub fn immutable(&self) -> &ImmutableOrigin {
&(self.0).0
}
pub fn is_tuple(&self) -> bool {
self.immutable().is_tuple()
}
pub fn scheme(&self) -> Option<&str> {
self.immutable().scheme()
}
pub fn host(&self) -> Option<&Host> {
self.immutable().host()
}
pub fn port(&self) -> Option<u16> {
self.immutable().port()
}
pub fn same_origin(&self, other: &MutableOrigin) -> bool {
self.immutable() == other.immutable()
}
pub fn same_origin_domain(&self, other: &MutableOrigin) -> bool {
if let Some(ref self_domain) = *(self.0).1.borrow() {
if let Some(ref other_domain) = *(other.0).1.borrow() {
self_domain == other_domain &&
self.immutable().scheme() == other.immutable().scheme()
} else {
false
}
} else {
self.immutable().same_origin_domain(other)
}
}
pub fn domain(&self) -> Option<Host> {
(self.0).1.borrow().clone()
}
pub fn set_domain(&self, domain: Host) {
*(self.0).1.borrow_mut() = Some(domain);
}
pub fn has_domain(&self) -> bool {
(self.0).1.borrow().is_some()
}
pub fn effective_domain(&self) -> Option<Host> {
self.immutable().host()
.map(|host| self.domain().unwrap_or_else(|| host.clone()))
}
}<|fim▁end|>
|
pub fn same_origin(&self, other: &MutableOrigin) -> bool {
self == other.immutable()
}
|
<|file_name|>x5_7.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
print " __ "
print " |__|____ ___ __ "
print " | \__ \\\\ \/ / "
print " | |/ __ \\\\ / "
print " /\__| (____ /\_/ "
print " \______| \/ "
print " "
print 'Module 5'
print 'Exploitation Techniques'
print
"""
Write a pyCommand script to find if the DEP, ASLR, SafeSEH modules are enabled
"""
import immlib
import struct
DESC = "DEP, ASLR and SafeSEH Detection in all Modules"
# More information
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms680339(v=vs.85).aspx
# How to detect presence of security mechanisms
IMAGE_DLLCHARACTERISTICS_NX_COMPAT = 0x0100 # DEP compatible
IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE = 0x0040 # ASLR
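# Illustration: a DllCharacteristics value of 0x0140 would report both protections,
# since 0x0140 & 0x0100 != 0 (DEP) and 0x0140 & 0x0040 != 0 (ASLR).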
def main(args) :
imm = immlib.Debugger()
# code borrowed from safeseh pycommand
allmodules=imm.getAllModules()
for key in allmodules.keys():
dep = aslr = "NO"<|fim▁hole|> pe_offset = struct.unpack('<L',imm.readMemory(module_baseAddress + 0x3c,4))[0]
pebase = module_baseAddress + pe_offset
flags = struct.unpack('<H',imm.readMemory(pebase + 0x5e,2))[0]
if (flags & IMAGE_DLLCHARACTERISTICS_NX_COMPAT != 0) :
dep = "YES"
if (flags & IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE != 0) :
aslr = "YES"
imm.log("---- %s ----" %key)
imm.log("DEP: %s ASLR: %s" %(dep, aslr))
imm.log("--------------")
return "[+] Executed Successfully"<|fim▁end|>
|
module = imm.getModule(key)
module_baseAddress = module.getBaseAddress()
|
<|file_name|>document.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
* Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
* Created By:
* Maintained By:
*/
//= require can.jquery-all
//= require models/cacheable
(function(ns, can) {
can.Model.Cacheable("CMS.Models.Document", {
root_object : "document"
, root_collection : "documents"
, findAll : "GET /api/documents"
, create : function(params) {
var _params = {
document : {
title : params.document.title
, description : params.document.description
, link : params.document.link
}
};
return $.ajax({
type : "POST"
, "url" : "/api/documents"
, dataType : "json"
, data : _params
});
}
, search : function(request, response) {
return $.ajax({
type : "get"
, url : "/api/documents"
, dataType : "json"
, data : {s : request.term}
, success : function(data) {
response($.map( data, function( item ) {
return can.extend({}, item.document, {
label: item.document.title
? item.document.title
+ (item.document.link_url
? " (" + item.document.link_url + ")"
: "")
: item.document.link_url
, value: item.document.id
});
}));
}
});
}
}, {
init : function () {
this._super && this._super();
// this.bind("change", function(ev, attr, how, newVal, oldVal) {
// var obj;
// if(obj = CMS.Models.ObjectDocument.findInCacheById(this.id) && attr !== "id") {
// obj.attr(attr, newVal);
// }
// });
var that = this;
this.each(function(value, name) {
if (value === null)
that.attr(name, undefined);
});
}
});
can.Model.Cacheable("CMS.Models.ObjectDocument", {
root_object : "object_document"
, root_collection : "object_documents"
, findAll: "GET /api/object_documents"
, create: "POST /api/object_documents"
, destroy : "DELETE /api/object_documents/{id}"
}, {
init : function() {
var _super = this._super;
function reinit() {
var that = this;
typeof _super === "function" && _super.call(this);
this.attr("document", CMS.Models.get_instance(
"Document", this.document_id || (this.document && this.document.id)));
this.attr("documentable", CMS.Models.get_instance(<|fim▁hole|> /*this.attr(
"document"
, CMS.Models.Document.findInCacheById(this.document_id)
|| new CMS.Models.Document(this.document && this.document.serialize ? this.document.serialize() : this.document));
*/
this.each(function(value, name) {
if (value === null)
that.removeAttr(name);
});
}
this.bind("created", can.proxy(reinit, this));
reinit.call(this);
}
});
})(this, can);<|fim▁end|>
|
this.documentable_type || (this.documentable && this.documentable.type),
this.documentable_id || (this.documentable && this.documentable.id)));
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
import codecs
try:
from setuptools import (setup, find_packages)
except ImportError:
from distutils.core import (setup, find_packages)
VERSION = (0, 2, 0)
__version__ = '.'.join(map(str, VERSION[:3])) + "".join(VERSION[3:])
__package_name__ = 'pelican-readtime'
__description__ = 'Plugin for Pelican that computes average read time.'
__contact_names__ = 'David Jenkins, Deepak Bhalla, Jonathan Dektiar'
__contact_emails__ = '[email protected], [email protected], [email protected]'
__homepage__ = 'https://github.com/JenkinsDev/pelican-readtime'
__repository_url__ = 'https://github.com/JenkinsDev/pelican-readtime'
__download_url__ = 'https://github.com/JenkinsDev/pelican-readtime'
__docformat__ = 'markdown'
__license__ = 'MIT'<|fim▁hole|>here = os.path.abspath(os.path.dirname(__file__))
if os.path.exists('README.rst'):
# codec is used for consistent encoding
long_description = codecs.open(
os.path.join(here, 'README.rst'), 'r', 'utf-8').read()
else:
long_description = 'See ' + __homepage__
setup(
name=__package_name__,
version=__version__,
description=__description__,
long_description=long_description,
url=__repository_url__,
download_url=__download_url__,
license='MIT',
author=__contact_names__,
author_email=__contact_emails__,
maintainer=__contact_names__,
maintainer_email=__contact_emails__,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries :: Python Modules'
],
keywords=__keywords__,
packages=[''],
install_requires=['pelican>=3.6'],
zip_safe=True,
include_package_data=True
)<|fim▁end|>
|
__keywords__ = 'pelican blogging blog static webdevelopment plugin pelican-plugin readtime python python3 python2'
|
<|file_name|>Form.js<|end_file_name|><|fim▁begin|>import React, {Component} from 'react'
import {findDOMNode} from 'react-dom'
import styles from '../sass/Form'
import {Button, Form, Input, DatePicker, Select} from 'antd'
class FormComponent extends Component {
constructor() {
super()
this.handleSubmit = this.handleSubmit.bind(this)
}
handleSubmit(e) {
e.preventDefault()
const {form, actions} = this.props
form.validateFields((err, values) => {
if (!err) {
actions.getItems(values)
}
})
}
render() {
const {form, schools, cats} = this.props
return (
<Form onSubmit={this.handleSubmit}
className={styles.form}
inline>
<Form.Item
label="学校">
{form.getFieldDecorator('school',{initialValue: schools[0]?schools[0].id:''})(
<Select className={styles.select}>
{schools.map(school =>
<Select.Option key={school.id} value={school.id}>{school.schoolName}</Select.Option>
)}
</Select>
)}
</Form.Item>
<Form.Item
label="分类">
{form.getFieldDecorator('cat',{initialValue: ''})(
<Select className={styles.select}>
<Select.Option value="">全部</Select.Option>
{cats.map(cat =>
<Select.Option key={cat.id} value={cat.id}>{cat.catName}</Select.Option>
)}
</Select>
)}
</Form.Item>
<Form.Item>
<Button htmlType="submit" type="primary">查询</Button>
</Form.Item>
</Form>
)
}<|fim▁hole|>
export default Form.create()(FormComponent)<|fim▁end|>
|
}
|
<|file_name|>CardNumberProviderTest.java<|end_file_name|><|fim▁begin|>package com.omnicrola.panoptes.ui.autocomplete;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import com.omnicrola.panoptes.control.DataController;
import com.omnicrola.panoptes.control.IControlObserver;
import com.omnicrola.panoptes.control.TimeblockSet;
import com.omnicrola.panoptes.data.IReadTimeblock;
import com.omnicrola.panoptes.data.TimeData;
import com.omnicrola.testing.util.EnhancedTestCase;
public class CardNumberProviderTest extends EnhancedTestCase {
private DataController mockController;
private String expectedNumber1;
private String expectedNumber2;
private String expectedNumber3;
@Test
public void testImplementsInterfaces() throws Exception {
assertImplementsInterface(IOptionProvider.class, CardNumberProvider.class);
assertImplementsInterface(IControlObserver.class, CardNumberProvider.class);
}
@Test
public void testConstructorParams() throws Exception {
DataController mockController = useMock(DataController.class);
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(mockController);
assertConstructionParamSame("dataController", mockController, cardNumberProvider);
}
@Test
public void testDataChanged_UpdatesOptionList() throws Exception {
setupMockTimeblockSet();
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(this.mockController);
assertEquals(0, cardNumberProvider.getOptionsList().size());
cardNumberProvider.dataChanged();
List<Object> optionsList = cardNumberProvider.getOptionsList();
assertEquals(3, optionsList.size());
assertTrue(optionsList.contains(this.expectedNumber1));
assertTrue(optionsList.contains(this.expectedNumber2));
assertTrue(optionsList.contains(this.expectedNumber3));
}
@Test
public void testTimeblockSetChanged_UpdatesOptionList() throws Exception {
TimeblockSet mockTimeblockSet = useMock(TimeblockSet.class);
setupMockTimeblockSet();
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(this.mockController);
assertEquals(0, cardNumberProvider.getOptionsList().size());
cardNumberProvider.timeblockSetChanged(mockTimeblockSet);
List<Object> optionsList = cardNumberProvider.getOptionsList();
assertEquals(3, optionsList.size());<|fim▁hole|>
public void setupMockTimeblockSet() {
TimeblockSet mockTimeblockSet = useMock(TimeblockSet.class);
this.expectedNumber1 = "cardnumber";
this.expectedNumber2 = "a different number";
this.expectedNumber3 = "duplicate";
IReadTimeblock mockTimeblock1 = createMockBlockWithCardNumber(this.expectedNumber1);
IReadTimeblock mockTimeblock2 = createMockBlockWithCardNumber(this.expectedNumber2);
IReadTimeblock mockTimeblock3 = createMockBlockWithCardNumber(this.expectedNumber3);
IReadTimeblock mockTimeblock4 = createMockBlockWithCardNumber(this.expectedNumber3);
        List<IReadTimeblock> timeblocks = Arrays.asList(mockTimeblock1, mockTimeblock2, mockTimeblock3, mockTimeblock4);
        expect(mockTimeblockSet.getBlockSet()).andReturn(timeblocks);
this.mockController = useMock(DataController.class);
expect(this.mockController.getAllTimeblocks()).andReturn(mockTimeblockSet);
}
private IReadTimeblock createMockBlockWithCardNumber(String expectedNumber) {
IReadTimeblock mockTimeblock = useMock(IReadTimeblock.class);
TimeData mockData = useMock(TimeData.class);
expect(mockTimeblock.getTimeData()).andReturn(mockData);
expect(mockData.getCard()).andReturn(expectedNumber);
return mockTimeblock;
}
}<|fim▁end|>
|
assertTrue(optionsList.contains(this.expectedNumber1));
assertTrue(optionsList.contains(this.expectedNumber2));
assertTrue(optionsList.contains(this.expectedNumber3));
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/bin/env/python
# coding: utf-8
import logging
import os
import time
import uuid
from logging import Formatter
from logging.handlers import RotatingFileHandler
from multiprocessing import Queue
from time import strftime
import dill
from .commands import *
from .processing import MultiprocessingLogger
class TaskProgress(object):
"""
Holds both data and graphics-related information for a task's progress bar.
The logger will iterate over TaskProgress objects to draw progress bars on screen.
"""
def __init__(self,
total,
prefix='',
suffix='',
decimals=0,
bar_length=60,
keep_alive=False,
display_time=False):
"""
Creates a new progress bar using the given information.
:param total: The total number of iteration for this progress bar.
:param prefix: [Optional] The text that should be displayed at the left side of the
progress bar. Note that progress bars will always stay left-aligned at the
                            shortest possible width.
:param suffix: [Optional] The text that should be displayed at the very right side of the
progress bar.
:param decimals: [Optional] The number of decimals to display for the percentage.
:param bar_length: [Optional] The graphical bar size displayed on screen. Unit is character.
:param keep_alive: [Optional] Specify whether the progress bar should stay displayed forever
once completed or if it should vanish.
:param display_time: [Optional] Specify whether the duration since the progress has begun should
                             be displayed. Running time will be displayed between parentheses, whereas it
will be displayed between brackets when the progress has completed.
"""
super(TaskProgress, self).__init__()
self.progress = 0
# Minimum number of seconds at maximum completion before a progress bar is removed from display
# The progress bar may vanish at a further time as the redraw rate depends upon chrono AND method calls
self.timeout_chrono = None
self.begin_time = None
self.end_time = None
self.elapsed_time_at_end = None
# Graphics related information
self.keep_alive = keep_alive
self.display_time = display_time
self.total = total
self.prefix = prefix
self.suffix = suffix
self.decimals = decimals
self.bar_length = bar_length
def set_progress(self, progress):
"""
Defines the current progress for this progress bar in iteration units (not percent).
:param progress: Current progress in iteration units regarding its total (not percent).
:return: True if the progress has changed. If the given progress is higher than the total or lower
than 0 then it will be ignored.
"""
_progress = progress
if _progress > self.total:
_progress = self.total
elif _progress < 0:
_progress = 0
# Stop task chrono if needed
if _progress == self.total and self.display_time:
self.end_time = time.time() * 1000
# If the task has completed instantly then define its begin_time too
if not self.begin_time:
self.begin_time = self.end_time
has_changed = self.progress != _progress
if has_changed:
self.progress = _progress
return has_changed
class FancyLogger(object):
"""
Defines a multiprocess logger object. Logger uses a redraw rate because of console flickering. That means it will
not draw new messages or progress at the very time they are being logged but their timestamp will be captured at the
right time. Logger will redraw at a given time period AND when new messages or progress are logged.
If you still want to force redraw immediately (may produce flickering) then call 'flush' method.
Logger uses one file handler and then uses standard output (stdout) to draw on screen.
"""
queue = None
"Handles all messages and progress to be sent to the logger process."
default_message_number = 20
"Default value for the logger configuration."
default_exception_number = 5
"Default value for the logger configuration."
default_permanent_progressbar_slots = 0
"Default value for the logger configuration."
default_redraw_frequency_millis = 500
"Default value for the logger configuration."
default_level = logging.INFO
"Default value for the logger configuration."
default_task_millis_to_removal = 500
"Default value for the logger configuration."
default_console_format_strftime = '%d %B %Y %H:%M:%S'
"Default value for the logger configuration."
default_console_format = '{T} [{L}]'
"Default value for the logger configuration."
default_file_handlers = []
"Default value for the logger configuration. Filled in constructor."
def __init__(self,
message_number=default_message_number,
exception_number=default_exception_number,
permanent_progressbar_slots=default_permanent_progressbar_slots,
redraw_frequency_millis=default_redraw_frequency_millis,
console_level=default_level,
task_millis_to_removal=default_task_millis_to_removal,
console_format_strftime=default_console_format_strftime,
console_format=default_console_format,
file_handlers=None,
application_name=None):
"""
Initializes a new logger and starts its process immediately using given configuration.
:param message_number: [Optional] Number of simultaneously displayed messages below progress bars.
:param exception_number: [Optional] Number of simultaneously displayed exceptions below messages.
:param permanent_progressbar_slots: [Optional] The amount of vertical space (bar slots) to keep at all times,
so the message logger will not move anymore if the bar number is equal or
lower than this parameter.
:param redraw_frequency_millis: [Optional] Minimum time lapse in milliseconds between two redraws. It may be
more because the redraw rate depends upon time AND method calls.
:param console_level: [Optional] The logging level (from standard logging module).
:param task_millis_to_removal: [Optional] Minimum time lapse in milliseconds at maximum completion before
a progress bar is removed from display. The progress bar may vanish at a
further time as the redraw rate depends upon time AND method calls.
:param console_format_strftime: [Optional] Specify the time format for console log lines using python
strftime format. Defaults to format: '29 november 2016 21:52:12'.
:param console_format: [Optional] Specify the format of the console log lines. There are two
variables available: {T} for timestamp, {L} for level. Will then add some
tabulations in order to align text beginning for all levels.
Defaults to format: '{T} [{L}]'
Which will produce: '29 november 2016 21:52:12 [INFO] my log text'
'29 november 2016 21:52:13 [WARNING] my log text'
'29 november 2016 21:52:14 [DEBUG] my log text'
:param file_handlers: [Optional] Specify the file handlers to use. Each file handler will use its
own regular formatter and level. Console logging is distinct from file
logging. Console logging uses custom stdout formatting, while file logging
uses regular python logging rules. All handlers are permitted except
StreamHandler if used with stdout or stderr which are reserved by this
library for custom console output.
:param application_name: [Optional] Used only if 'file_handlers' parameter is ignored. Specifies the
application name to use to format the default file logger using format:
application_%Y-%m-%d_%H-%M-%S.log
"""
super(FancyLogger, self).__init__()
# Define default file handlers
if not file_handlers:
if not application_name:
app_name = 'application'
else:
app_name = application_name
handler = RotatingFileHandler(filename=os.path.join(os.getcwd(), '{}_{}.log'
.format(app_name, strftime('%Y-%m-%d_%H-%M-%S'))),
encoding='utf8',
maxBytes=5242880, # 5 MB
backupCount=10,
delay=True)
handler.setLevel(logging.INFO)
handler.setFormatter(fmt=Formatter(fmt='%(asctime)s [%(levelname)s]\t%(message)s',
datefmt=self.default_console_format_strftime))
self.default_file_handlers.append(handler)
file_handlers = self.default_file_handlers
if not self.queue:
self.queue = Queue()
self.process = MultiprocessingLogger(queue=self.queue,
console_level=console_level,
message_number=message_number,
exception_number=exception_number,
permanent_progressbar_slots=permanent_progressbar_slots,
redraw_frequency_millis=redraw_frequency_millis,
task_millis_to_removal=task_millis_to_removal,
console_format_strftime=console_format_strftime,
console_format=console_format,
file_handlers=file_handlers)
self.process.start()
def flush(self):
"""
Flushes the remaining messages and progress bars state by forcing redraw. Can be useful if you want to be sure
that a message or progress has been updated in display at a given moment in code, like when you are exiting an
application or doing some kind of synchronized operations.
"""
self.queue.put(dill.dumps(FlushCommand()))
def terminate(self):
"""
Tells the logger process to exit immediately. If you do not call 'flush' method before, you may lose some
messages of progresses that have not been displayed yet. This method blocks until logger process has stopped.
"""
self.queue.put(dill.dumps(ExitCommand()))
if self.process:
self.process.join()
def set_configuration(self,
message_number=default_message_number,
exception_number=default_exception_number,
permanent_progressbar_slots=default_permanent_progressbar_slots,
redraw_frequency_millis=default_redraw_frequency_millis,
console_level=default_level,
task_millis_to_removal=default_task_millis_to_removal,
console_format_strftime=default_console_format_strftime,
console_format=default_console_format,
file_handlers=default_file_handlers):
"""
Defines the current configuration of the logger. Can be used at any moment during runtime to modify the logger
behavior.
:param message_number: [Optional] Number of simultaneously displayed messages below progress bars.
:param exception_number: [Optional] Number of simultaneously displayed exceptions below messages.
:param permanent_progressbar_slots: [Optional] The amount of vertical space (bar slots) to keep at all times,
so the message logger will not move anymore if the bar number is equal or
lower than this parameter.
:param redraw_frequency_millis: [Optional] Minimum time lapse in milliseconds between two redraws. It may be
more because the redraw rate depends upon time AND method calls.
:param console_level: [Optional] The logging level (from standard logging module).
:param task_millis_to_removal: [Optional] Minimum time lapse in milliseconds at maximum completion before
a progress bar is removed from display. The progress bar may vanish at a
further time as the redraw rate depends upon time AND method calls.
:param console_format_strftime: [Optional] Specify the time format for console log lines using python
strftime format. Defaults to format: '29 november 2016 21:52:12'.
:param console_format: [Optional] Specify the format of the console log lines. There are two
variables available: {T} for timestamp, {L} for level. Will then add some
tabulations in order to align text beginning for all levels.
Defaults to format: '{T} [{L}]'
Which will produce: '29 november 2016 21:52:12 [INFO] my log text'
'29 november 2016 21:52:13 [WARNING] my log text'
'29 november 2016 21:52:14 [DEBUG] my log text'
:param file_handlers: [Optional] Specify the file handlers to use. Each file handler will use its
own regular formatter and level. Console logging is distinct from file
logging. Console logging uses custom stdout formatting, while file logging
uses regular python logging rules. All handlers are permitted except
StreamHandler if used with stdout or stderr which are reserved by this
library for custom console output.
"""
self.queue.put(dill.dumps(SetConfigurationCommand(task_millis_to_removal=task_millis_to_removal,
console_level=console_level,
permanent_progressbar_slots=permanent_progressbar_slots,
message_number=message_number,
exception_number=exception_number,
redraw_frequency_millis=redraw_frequency_millis,
console_format_strftime=console_format_strftime,
console_format=console_format,
file_handlers=file_handlers)))
def set_level(self,
level,
console_only=False):
"""
Defines the logging level (from standard logging module) for log messages.
:param level: Level of logging for the file logger.
:param console_only: [Optional] If True then the file logger will not be affected.
"""
self.queue.put(dill.dumps(SetLevelCommand(level=level,
console_only=console_only)))
def set_task_object(self,
task_id,
task_progress_object):
"""
Defines a new progress bar with the given information using a TaskProgress object.
:param task_id: Unique identifier for this progress bar. Will erase if already existing.
:param task_progress_object: TaskProgress object holding the progress bar information.
"""
self.set_task(task_id=task_id,
total=task_progress_object.total,
prefix=task_progress_object.prefix,
suffix=task_progress_object.suffix,
decimals=task_progress_object.decimals,
bar_length=task_progress_object.bar_length,
keep_alive=task_progress_object.keep_alive,
display_time=task_progress_object.display_time)
def set_task(self,
task_id,
total,
prefix,
suffix='',
decimals=0,
bar_length=60,
keep_alive=False,
display_time=False):
"""
Defines a new progress bar with the given information.
:param task_id: Unique identifier for this progress bar. Will erase if already existing.
:param total: The total number of iteration for this progress bar.
:param prefix: The text that should be displayed at the left side of the progress bar. Note that
                                progress bars will always stay left-aligned at the shortest possible width.
:param suffix: [Optional] The text that should be displayed at the very right side of the progress bar.
:param decimals: [Optional] The number of decimals to display for the percentage.
:param bar_length: [Optional] The graphical bar size displayed on screen. Unit is character.
:param keep_alive: [Optional] Specify whether the progress bar should stay displayed forever once completed
or if it should vanish.
:param display_time: [Optional] Specify whether the duration since the progress has begun should be
                                displayed. Running time will be displayed between parentheses, whereas it will be
displayed between brackets when the progress has completed.
"""
self.queue.put(dill.dumps(NewTaskCommand(task_id=task_id,
task=TaskProgress(total,
prefix,
suffix,
decimals,
bar_length,
keep_alive,
display_time))))
def update(self,
task_id,
progress):
"""
Defines the current progress for this progress bar id in iteration units (not percent).
If the given id does not exist or the given progress is identical to the current, then does nothing.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param task_id: Unique identifier for this progress bar. Will erase if already existing.
:param progress: Current progress in iteration units regarding its total (not percent).
"""
self.queue.put(dill.dumps(UpdateProgressCommand(task_id=task_id,
progress=progress)))
def debug(self, text):
"""
Posts a debug message adding a timestamp and logging level to it for both file and console handlers.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param text: The text to log into file and console.
"""
self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.DEBUG)))
def info(self, text):
"""
Posts an info message adding a timestamp and logging level to it for both file and console handlers.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param text: The text to log into file and console.
"""
self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.INFO)))
def warning(self, text):
"""
Posts a warning message adding a timestamp and logging level to it for both file and console handlers.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param text: The text to log into file and console.
"""
self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.WARNING)))
def error(self, text):
"""
Posts an error message adding a timestamp and logging level to it for both file and console handlers.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param text: The text to log into file and console.
"""
self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.ERROR)))
def critical(self, text):
"""
Posts a critical message adding a timestamp and logging level to it for both file and console handlers.
Logger uses a redraw rate because of console flickering. That means it will not draw new messages or progress
at the very time they are being logged but their timestamp will be captured at the right time. Logger will
redraw at a given time period AND when new messages or progress are logged. If you still want to force redraw
immediately (may produce flickering) then call 'flush' method.
:param text: The text to log into file and console.
"""
self.queue.put(dill.dumps(LogMessageCommand(text=text, level=logging.CRITICAL)))
def throw(self, stacktrace, process_title=None):
"""
Sends an exception to the logger so it can display it as a special message. Prevents console refresh cycles from
hiding exceptions that could be thrown by processes.
:param stacktrace: Stacktrace string as returned by 'traceback.format_exc()' in an 'except' block.
:param process_title: [Optional] Define the current process title to display into the logger for this
exception.
"""
self.queue.put(dill.dumps(StacktraceCommand(pid=os.getpid(),
stacktrace=stacktrace,
process_title=process_title)))
# --------------------------------------------------------------------
# Iterator implementation
def progress(self,
enumerable,
task_progress_object=None):
"""
Enables the object to be used as an iterator. Each iteration will produce a progress update in the logger.
:param enumerable: Collection to iterate over.
:param task_progress_object: [Optional] TaskProgress object holding the progress bar information.
:return: The logger instance.
"""
self.list = enumerable
self.list_length = len(enumerable)
self.task_id = uuid.uuid4()
self.index = 0
if task_progress_object:
# Force total attribute
task_progress_object.total = self.list_length
else:
task_progress_object = TaskProgress(total=self.list_length,
display_time=True,
prefix='Progress')
# Create a task progress
self.set_task_object(task_id=self.task_id,
task_progress_object=task_progress_object)
return self
def __iter__(self):
"""
Enables the object to be used as an iterator. Each iteration will produce a progress update in the logger.
:return: The logger instance.
"""
return self<|fim▁hole|> :return: The current object of the iterator.
"""
if self.index >= self.list_length:
raise StopIteration
else:
self.index += 1
self.update(task_id=self.task_id,
progress=self.index)
return self.list[self.index - 1]
# ---------------------------------------------------------------------<|fim▁end|>
|
def __next__(self):
"""
Enables the object to be used as an iterator. Each iteration will produce a progress update in the logger.
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import sublime
HOST_PLATFORM = sublime.platform()
<|fim▁hole|>LINUX = 'linux'
OSX = 'osx'<|fim▁end|>
|
WINDOWS = 'windows'
|
<|file_name|>DP20120709A.py<|end_file_name|><|fim▁begin|>"""
The Fibonacci numbers, which we are all familiar with, start like this:
0,1,1,2,3,5,8,13,21,34,...
Where each new number in the sequence is the sum of the previous two.
It turns out that by summing different Fibonacci numbers with each other, you can create every single positive integer.
In fact, a much stronger statement holds:
Every single positive integer can be represented in one and only one way as a sum of non-consecutive Fibonacci numbers.
This is called the number's "Zeckendorf representation" [http://en.wikipedia.org/wiki/Zeckendorf%27s_theorem].
For instance, the Zeckendorf representation of the number 100 is 89 + 8 + 3, and the Zeckendorf representation of 1234
is 987 + 233 + 13 + 1. Note that all these numbers are Fibonacci numbers, and that they are non-consecutive (i.e. no
two numbers in a Zeckendorf representation can be next to each other in the Fibonacci sequence).
There are other ways of summing Fibonacci numbers to get these numbers. For instance, 100 is also equal to 89 + 5 + 3 +
2 + 1, but 1, 2, 3, 5 are all consecutive Fibonacci numbers. If no consecutive Fibonacci numbers are allowed, the
representation is unique.
Finding the Zeckendorf representation is actually not very hard. Lets use the number 100 as an example of how it's done:
First, you find the largest fibonacci number less than or equal to 100. In this case that is 89. This number will always
be of the representation, so we remember that number and proceed recursively, and figure out the representation of
100 - 89 = 11.
The largest Fibonacci number less than or equal to 11 is 8. We remember that number and proceed recursively with
11 - 8 = 3.
3 is a Fibonacci number itself, so now we're done. The answer is 89 + 8 + 3.
Write a program that finds the Zeckendorf representation of different numbers.
What is the Zeckendorf representation of 315?
Thanks to SwimmingPastaDevil for suggesting this problem in /r/dailyprogrammer_ideas! Do you have a problem you
think would be good for us? Why not head over there and post it?
"""
def zeckendorf(target, fib_list):
res = []
for f in fib_list[::-1]:
if f <= target:
res.append(f)<|fim▁hole|> target -= f
return res
def get_fibonacci_list(target):
""" returns fibonacci numbers upto less than the target and not including zero"""
fib = [1, 1]
while fib[-1] < target:
fib.append(fib[-1] + fib[-2])
return fib[:-1]
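# Worked check (matches the walkthrough in the module docstring):
#   zeckendorf(100, get_fibonacci_list(100)) == [89, 8, 3]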
def main():
target = 3**15
fib_list = get_fibonacci_list(target)
zeck = zeckendorf(target, fib_list)
print(zeck)
print(' 3**15 = {} \nsum of zeckendorf = {}'.format(3**15, sum(zeck)))
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>dev.py<|end_file_name|><|fim▁begin|>from .base import *
<|fim▁hole|># See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!i%7s@1+v&293zcy*kljuke=_l176nqpj2-3dtms()pw^et!we'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}<|fim▁end|>
|
# Quick-start development settings - unsuitable for production
|
<|file_name|>term.js<|end_file_name|><|fim▁begin|>document.write('<div id="terminal" class="terminal-content"></div>');
var session = {};
// return a parameter value from the current URL
function getParam(sname) {
var params = location.search.substr(location.search.indexOf("?") + 1);
var sval = "";
params = params.split("&");
// split param and value into individual pieces
for (var i = 0; i < params.length; i++) {
var temp = params[i].split("=");
if (temp[0] == sname) {
sval = temp[1];
}
}
return sval;
}
function getBaseURL() {
return location.protocol + "//" + location.hostname +
(location.port && ":" + location.port) + location.pathname;
}
function greetings(term) {
term.echo(session.welcomeMessage);
term.echo(' ');
}
function createNewSession(expression, snap) {
var newSession = {};
newSession.expression = expression;
newSession.snap = snap;
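// The request is synchronous (async: false) so the session id is available before the terminal initializes; note that this blocks the UI thread.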
$.ajax({
type: 'POST',
async: false,
url: '/create',
data: (expression ? "expression=" + expression : "") + "&" + (snap ? "snap=" + snap : "")
}
).done(function (data) {
newSession.clientId = data.id;
newSession.welcomeMessage = data.welcomeMessage;
});
newSession.requesting = false;
session = newSession;
}
function closeSession() {
$.ajax({type: 'POST', async: false, url: '/remove', data: 'id=' + session.clientId})
.fail(function (xhr, textStatus, errorThrown) {/* ignore failure when closing */ });
}
function restartSession(term) {
term.echo("[[;#CC7832;black]Session terminated. Starting new session...]");
closeSession();
createNewSession(session.expression, session.snap)
}
function readExpressionLine(line, term) {
var expression = null;
$.ajax({type: 'POST', async: false, url: '/readExpression', data: {id: session.clientId, line: line}})
.done(function (data) {
expression = data.expression;
})
.fail(function (xhr, textStatus, errorThrown) {
restartSession(term)
});
return expression;
}
function makeSnap(term) {
var snapUrl = null;
$.ajax({type: 'POST', async: false, url: '/snap', data: 'id=' + session.clientId})
.done(function (data) {
snapUrl = getBaseURL() + '?snap=' + data.snap;
}).fail(function (xhr, textStatus, errorThrown) {
restartSession(term)
});
return snapUrl;
}
function messageStyle(style) {
return {
finalize: function (div) {
div.addClass(style);
}
}
}
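// Lays out completion candidates in fixed-width columns, sized to the longest candidate plus two spaces of padding.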
function layoutCompletions(candidates, widthInChars) {
var max = 0;
for (var i = 0; i < candidates.length; i++) {
max = Math.max(max, candidates[i].length);
}
max += 2;
var n = Math.floor(widthInChars / max);
var buffer = "";
var col = 0;
for (i = 0; i < candidates.length; i++) {
var completion = candidates[i];
buffer += candidates[i];
for (var j = completion.length; j < max; j++) {
buffer += " ";
}
if (++col >= n) {
buffer += "\n";
col = 0;
}
}
return buffer;
}
function echoCompletionCandidates(term, candidates) {
term.echo(term.get_prompt() + term.get_command());
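// term.width() is in pixels; dividing by 8 approximates the width in characters for the monospace font.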
term.echo(layoutCompletions(candidates, term.width() / 8));
}
function handleTerminalCommand(log, term) {
if (log.type == "CONTROL") {
switch (log.message) {
case "CLEAR_SCREEN":
term.clear();
term.echo(session.welcomeMessage);
term.echo(' ');
break;
}
return true;
}
return false;
}
function handleTerminalMessage(log, term) {
if (log.type != "CONTROL") {
var style = log.type == "ERROR" ? "terminal-message-error" : "terminal-message-success";
term.echo(log.message, messageStyle(style))
return log.type == "ERROR";
}
return false;
}
$(document).ready(function () {
jQuery(function ($, undefined) {
createNewSession(getParam("expression"), getParam("snap"));
$('#terminal').terminal(function (command, term) {
if (command == ":snap") {
var snapUri = makeSnap(term);
term.echo("Created terminal snapshot [[!;;]" + snapUri + "]", messageStyle("terminal-message-success"));
return;
}
var expression = readExpressionLine(command, term);
if (expression) {
$.ajax({
type: 'POST',
async: false,
url: '/execute',
data: {id: session.clientId, expression: expression}
}).done(function (data) {
var hadError = false;
for (var i = 0; i < data.logs.length; i++) {
var log = data.logs[i];
if (!handleTerminalCommand(log, term)) {
hadError = handleTerminalMessage(log, term) || hadError;
}
}
if (!hadError) {
_gaq.push(["_trackEvent", "console", "evaluation", "success"]);
} else {
_gaq.push(["_trackEvent", "console", "evaluation", "error"]);
}
session.requesting = false;
}).fail(function (xhr, textStatus, errorThrown) {
restartSession(term)
});
} else {
term.echo(" ");
session.requesting = false;
}
}, {
greetings: null,
name: 'js_demo',
prompt: '[[;white;black]java> ]',
onInit: function (term) {
greetings(term);
},
keydown: function (event, term) {
if (event.keyCode == 9) //Tab
{
var completionResult = [];
$.ajax({
type: 'GET',
async: false,
cache: false,
url: '/completions',
data: {id: session.clientId, expression: term.get_command()}
})
.done(function (data) {
completionResult = data;
});
var candidates = _.map(completionResult.candidates, function (cand) {
return cand.value;<|fim▁hole|> return cand.forms;
});
var promptText = term.get_command();
if (candidates.length == 0) {
term.set_command(promptText);
return false;
}
if (candidates.length == 1) {
var uniqueForms = _.filter(_.unique(candidatesForms[0]), function (form) {
return form != candidates[0]
});
var text = term.get_command().substr(0, parseInt(completionResult.position)) + candidates[0];
term.set_command(text);
if (uniqueForms.length > 0) {
echoCompletionCandidates(term, candidatesForms[0]);
}
return false;
}
echoCompletionCandidates(term, candidates);
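// More than one candidate: extend the command by the longest prefix shared by all of them,
// scanning prefix lengths from longest to shortest.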
for (var i = candidates[0].length; i > 0; --i) {
var prefixedCandidatesCount = _.filter(candidates, function (cand) {
return i > cand.length ? false : cand.substr(0, i) == candidates[0].substr(0, i);
}).length;
if (prefixedCandidatesCount == candidates.length) {
term.set_command(promptText.substr(0, parseInt(completionResult.position)) + candidates[0].substr(0, i));
return false;
}
}
term.set_command(promptText);
return false;
}
}
});
});
});<|fim▁end|>
|
});
var candidatesForms = _.map(completionResult.candidates, function (cand) {
|
<|file_name|>message_based_server.py<|end_file_name|><|fim▁begin|>###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
<|fim▁hole|>
import hashlib
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class MessageBasedHashServerProtocol(WebSocketServerProtocol):
"""
Message-based WebSockets server that computes a SHA-256 for every
message it receives and sends back the computed digest.
"""
def onMessage(self, payload, isBinary):
sha256 = hashlib.sha256()
sha256.update(payload)
digest = sha256.hexdigest()
self.sendMessage(digest.encode('utf8'))
print("Sent digest for message: {}".format(digest))
if __name__ == '__main__':
factory = WebSocketServerFactory(u"ws://127.0.0.1:9000")
factory.protocol = MessageBasedHashServerProtocol
listenWS(factory)
reactor.run()<|fim▁end|>
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
|
<|file_name|>consumer.py<|end_file_name|><|fim▁begin|>from channels.auth import channel_session_user_from_http
from .models import Stream, Notification
import redis
import ast
from .task import sendNotifications, send_notifications
from channels import Group
import json
redis_con = redis.Redis('demo.scorebeyond.com', 8007)
subs = redis_con.pubsub()
subs.subscribe('test')
@channel_session_user_from_http
def ws_connect(message):
'''Capture redis stream and save it into database'''
Group('stream').add(message.reply_channel)
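# NOTE: subs.listen() below blocks forever, so this handler never returns;
# the loop variable also rebinds the 'message' channel argument above.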
for message in subs.listen():
if message['type'] == "message":
data1 = ast.literal_eval(message['data'])<|fim▁hole|> send_notifications(data1)
else:
sendNotifications(data1, capture=notif.delay)
if not Stream.objects.filter(name=data1['name']):
type_list = []
if not data1['info']:
Stream.objects.create(name=data1['name'], info="")
else:
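# Build a "key:type" blueprint string (e.g. "user_id:int,name:str") describing the event payload.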
for k, v in data1['info'].iteritems():
type_list.append(k+":"+type(v).__name__)
Stream.objects.create(name=data1['name'], info=','.join(type_list))
Group('stream').send({
'text': json.dumps({
'event_name': data1['name'],
'blueprint': ','.join(type_list),
})
})
else:
print message<|fim▁end|>
|
print data1['name']
if Notification.objects.filter(event_name=data1['name']):
notif = Notification.objects.get(event_name=data1['name'])
if notif.no_delay:
|
<|file_name|>angular-locale_ss-za.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
|
oid sha256:b7405262706997cffc865837cffd6bd9eb92a8f12c3da71795815fb2da9be9f6
size 2483
|
<|file_name|>test_readers.py<|end_file_name|><|fim▁begin|>import unittest
from exporters.readers.base_reader import BaseReader
from exporters.readers.random_reader import RandomReader
from .utils import meta
class BaseReaderTest(unittest.TestCase):
def setUp(self):
self.reader = BaseReader({}, meta())
def test_get_next_batch_not_implemented(self):
with self.assertRaises(NotImplementedError):
self.reader.get_next_batch()
def test_set_last_position(self):
self.reader.set_last_position(dict(position=5))
self.assertEqual(self.reader.last_position, dict(position=5))
class RandomReaderTest(unittest.TestCase):
def setUp(self):
self.options = {
'exporter_options': {
'log_level': 'DEBUG',
'logger_name': 'export-pipeline'
},
'reader': {
'name': 'exporters.readers.random_reader.RandomReader',
'options': {
'number_of_items': 1000,
'batch_size': 100
}
},
}
self.reader = RandomReader(self.options, meta())
self.reader.set_last_position(None)
def test_get_next_batch(self):
batch = list(self.reader.get_next_batch())
self.assertEqual(len(batch), self.options['reader']['options']['batch_size'])
def test_get_second_batch(self):
self.reader.get_next_batch()
batch = list(self.reader.get_next_batch())
self.assertEqual(len(batch), self.options['reader']['options']['batch_size'])
self.assertEqual(self.reader.get_metadata('read_items'),
self.options['reader']['options']['batch_size'])
def test_get_all(self):
total_items = 0
while not self.reader.finished:
batch = list(self.reader.get_next_batch())
total_items += len(batch)
self.assertEqual(total_items, self.options['reader']['options']['number_of_items'])<|fim▁hole|> self.reader.set_last_position({'last_read': 123})
self.assertEqual({'last_read': 123}, self.reader.last_position)<|fim▁end|>
|
def test_set_last_position_none(self):
|
<|file_name|>tick.rs<|end_file_name|><|fim▁begin|>//use std::time::Duration;
use mio::{EventLoop, Evented, EventLoopConfig, TryAccept};
use handler::LoopHandler;
use internal::Message;
use transport::Transport;
use ::ProtocolFactory;
pub struct Tick<T: TryAccept + Evented, F: ProtocolFactory<T::Output>> where <T as TryAccept>::Output: Transport {
handler: LoopHandler<F, T>,
event_loop: EventLoop<LoopHandler<F, T>>
}
pub struct TickConfig {
transports_capacity: usize,
notify_capacity: usize,
}
impl TickConfig {
pub fn new() -> TickConfig {
TickConfig {
transports_capacity: 8_192,
notify_capacity: 8_192,
}
}
}
impl<T: TryAccept + Evented, F: ProtocolFactory<T::Output>> Tick<T, F> where <T as TryAccept>::Output: Transport {
pub fn new(protocol_factory: F) -> Tick<T, F> {
Tick::configured(protocol_factory, TickConfig::new())
}
pub fn configured(factory: F, config: TickConfig) -> Tick<T, F> {
let mut loop_config = EventLoopConfig::new();
loop_config.notify_capacity(config.notify_capacity);
Tick {
handler: LoopHandler::new(factory, config.transports_capacity),
event_loop: EventLoop::configured(loop_config).unwrap()
}
}
pub fn notify(&self) -> Notify {
Notify { sender: self.event_loop.channel() }
}
pub fn accept(&mut self, listener: T) -> ::Result<::Id> {
self.handler.listener(&mut self.event_loop, listener).map(::Id)
}
pub fn stream(&mut self, transport: T::Output) -> ::Result<::Id> {
self.handler.stream(&mut self.event_loop, transport).map(::Id)
}
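/// Drives the event loop one tick at a time until the transport with this id has been removed (e.g. its connection closed).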
pub fn run_until_complete(&mut self, id: ::Id) -> ::Result<()> {
while self.handler.transports.contains(id.0) {
try!(self.event_loop.run_once(&mut self.handler, None));
}
Ok(())
}
pub fn run(&mut self) -> ::Result<()> {
self.event_loop.run(&mut self.handler).map_err(From::from)
}
}
#[derive(Clone)]
pub struct Notify {
sender: ::mio::Sender<Message>
}
impl Notify {
/*
pub fn timeout<F: FnOnce() + Send + 'static>(&self, f: F, when: Duration) {
let mut env = Some(f);
let ms = when.as_secs() * 1_000 + (when.subsec_nanos() as u64) / 1_000_000;
self.sender.send(Message::Timeout(Box::new(move || {
env.take().map(|f| f());
}), ms));
}
*/
pub fn shutdown(&self) {
self.sender.send(Message::Shutdown).unwrap();
}<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>python-camera.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import cv2
# device number "0"
cap = cv2.VideoCapture(0)
while True:
# Capture a frame
ret, frame = cap.read()
# show on display
cv2.imshow('frame',frame)
# waiting for keyboard input<|fim▁hole|> break
# Save if "s" pressed
if key == ord('s'):
path = "photo.jpg"
cv2.imwrite(path,frame)
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()<|fim▁end|>
|
key = cv2.waitKey(1) & 0xFF
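# (masking with 0xFF keeps the low byte so key codes compare reliably across platforms)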
# Exit if "q" pressed
if key == ord('q'):
|