<|file_name|>deadbeef_np.py<|end_file_name|>
import commands
import weechat

def weechat_np(data, buffer, args):
    read_track = commands.getoutput('deadbeef --nowplaying "%a - (%b) - %t [%@:BPS@bit / %@:BITRATE@kbps / %@:SAMPLERATE@Hz]"').split('\n')
    weechat.command(buffer, '/me is currently listening to: ' + read_track[1])
    return weechat.WEECHAT_RC_OK

weechat.register("deadbeef_np", "mwgg", "0.9", "MIT", "Show name of the song currently played by DeaDBeeF", "", "")
weechat.hook_command("np", "Get/send now playing info.", "", "", "", "weechat_np", "")
<|file_name|>test_jsonmerge.py<|end_file_name|>
# vim:ts=4 sw=4 expandtab softtabstop=4
import unittest
import warnings
from collections import OrderedDict
import jsonmerge
import jsonmerge.strategies
from jsonmerge.exceptions import (
HeadInstanceError,
BaseInstanceError,
SchemaError
)
from jsonmerge.jsonvalue import JSONValue
import jsonschema
try:
Draft6Validator = jsonschema.validators.Draft6Validator
except AttributeError:
Draft6Validator = None
warnings.simplefilter("always")
class TestMerge(unittest.TestCase):
def test_default(self):
schema = {}
base = None
base = jsonmerge.merge(base, "a", schema)
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, "b")
def test_overwrite(self):
schema = {'mergeStrategy': 'overwrite'}
base = None
base = jsonmerge.merge(base, "a", schema)
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, "b")
def test_version(self):
schema = {'mergeStrategy': 'version'}
base = None
base = jsonmerge.merge(base, "a", schema)
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, [{'value': "a"}, {'value': "b"}])
def test_version_does_not_duplicate(self):
# Don't record change if it didn't change
schema = {'mergeStrategy': 'version'}
base = None
base = jsonmerge.merge(base, "a", schema)
base = jsonmerge.merge(base, "b", schema)
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, [{'value': "a"}, {'value': "b"}])
def test_version_meta(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, "a", merge_options={
'version': {'metadata': {'uri': 'http://example.com/a'}}})
base = merger.merge(base, "b", merge_options={
'version': {'metadata': {'uri': 'http://example.com/b'}}})
self.assertEqual(base, [
{'value': "a",
'uri': 'http://example.com/a'},
{'value': "b",
'uri': 'http://example.com/b'}])
def test_version_meta_not_obj(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
with self.assertRaises(SchemaError) as cm:
merger.merge(None, "a", merge_options={'version': {'metadata': 'foo'}})
def test_version_meta_deprecated(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
with warnings.catch_warnings(record=True) as w:
base = merger.merge(None, 'a', meta={'foo': 'bar'})
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
def test_version_ignoredups_false(self):
schema = {'mergeStrategy': 'version',
'mergeOptions': {'ignoreDups': False}}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, "a")
base = merger.merge(base, "a")
self.assertEqual(base, [{'value': "a"}, {'value': "a"}])
def test_version_unique_false(self):
schema = {'mergeStrategy': 'version',
'mergeOptions': {'unique': False}}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, "a")
base = merger.merge(base, "a")
self.assertEqual(base, [{'value': "a"}, {'value': "a"}])
def test_version_ignoredups_true(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, "a")
base = merger.merge(base, "a")
self.assertEqual(base, [{'value': "a"}])
def test_version_last(self):
schema = {'mergeStrategy': 'version',
'mergeOptions': {'limit': 1}}
base = None
base = jsonmerge.merge(base, "a", schema)
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, [{'value': "b"}])
def test_version_base_not_a_list(self):
schema = {'mergeStrategy': 'version'}
base = "a"
with self.assertRaises(BaseInstanceError) as cm:
jsonmerge.merge(base, "b", schema)
def test_version_base_not_a_list_of_objects(self):
schema = {'mergeStrategy': 'version'}
base = ["a"]
with self.assertRaises(BaseInstanceError) as cm:
jsonmerge.merge(base, "b", schema)
def test_version_base_no_value_in_object(self):
schema = {'mergeStrategy': 'version'}
base = [{}]
with self.assertRaises(BaseInstanceError) as cm:
jsonmerge.merge(base, "b", schema)
def test_version_base_empty_list(self):
schema = {'mergeStrategy': 'version'}
base = []
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, [{'value': 'b'}])
def test_append(self):
schema = {'mergeStrategy': 'append'}
base = None
base = jsonmerge.merge(base, ["a"], schema)
base = jsonmerge.merge(base, ["b"], schema)
self.assertEqual(base, ["a", "b"])
def test_append_type_error(self):
schema = {'mergeStrategy': 'append'}
base = None
with self.assertRaises(HeadInstanceError) as cm:
jsonmerge.merge(base, "a", schema)
self.assertEqual(cm.exception.value.ref, "#")
def test_append_type_error_base(self):
schema = {'mergeStrategy': 'append'}
base = "ab"
with self.assertRaises(BaseInstanceError) as cm:
jsonmerge.merge(base, ["a"], schema)
self.assertEqual(cm.exception.value.ref, "#")
def test_merge_default(self):
schema = {}
base = None
base = jsonmerge.merge(base, {'a': "a"}, schema)
base = jsonmerge.merge(base, {'b': "b"}, schema)
self.assertEqual(base, {'a': "a", 'b': "b"})
def test_merge_empty_schema(self):
schema = {}
base = None
base = jsonmerge.merge(base, {'a': {'b': 'c'}}, schema)
self.assertEqual(base, {'a': {'b': 'c'}})
def test_merge_trivial(self):
schema = {'mergeStrategy': 'objectMerge'}
base = None
base = jsonmerge.merge(base, {'a': "a"}, schema)
base = jsonmerge.merge(base, {'b': "b"}, schema)
self.assertTrue(isinstance(base, dict))
self.assertEqual(base, {'a': "a", 'b': "b"})
def test_merge_null(self):
schema = {'mergeStrategy': 'objectMerge'}
base = {'a': 'a'}
head = {'a': None}
r = jsonmerge.merge(base, head, schema)
self.assertEqual(head, r)
def test_merge_type_error(self):
schema = {'mergeStrategy': 'objectMerge'}
base = None
with self.assertRaises(HeadInstanceError) as cm:
jsonmerge.merge(base, "a", schema)
self.assertEqual(cm.exception.value.ref, "#")
def test_merge_type_error_base(self):
schema = {'mergeStrategy': 'objectMerge'}
base = "ab"
with self.assertRaises(BaseInstanceError) as cm:
jsonmerge.merge(base, {'foo': 1}, schema)
self.assertEqual(cm.exception.value.ref, "#")
def test_merge_overwrite(self):
schema = {'mergeStrategy': 'objectMerge'}
base = None
base = jsonmerge.merge(base, {'a': "a"}, schema)
base = jsonmerge.merge(base, {'a': "b"}, schema)
self.assertEqual(base, {'a': "b"})
def test_merge_objclass(self):
schema = {'mergeStrategy': 'objectMerge', 'mergeOptions': { 'objClass': 'OrderedDict'}}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, OrderedDict([('c', "a"), ('a', "a")]))
self.assertIsInstance(base, OrderedDict)
self.assertEqual([k for k in base], ['c', 'a'])
base = merger.merge(base, {'a': "b"})
self.assertIsInstance(base, OrderedDict)
self.assertEqual([k for k in base], ['c', 'a'])
self.assertEqual(base, {'a': "b", 'c': "a"})
def test_merge_objclass2(self):
schema = {'mergeStrategy': 'objectMerge',
'properties': {
'a': {'mergeStrategy': 'objectMerge',
'mergeOptions': { 'objClass': 'OrderedDict'}}}}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, {'a': {'b': 'c'}, 'd': {'e': 'f'}})
self.assertIsInstance(base, dict)
self.assertIsInstance(base['a'], OrderedDict)
self.assertIsInstance(base['d'], dict)
def test_merge_objclass_bad_cls(self):
schema = {'mergeStrategy': 'objectMerge', 'mergeOptions': { 'objClass': 'foo'}}
merger = jsonmerge.Merger(schema)
base = None
with self.assertRaises(SchemaError) as cm:
merger.merge(base, OrderedDict([('c', "a"), ('a', "a")]))
self.assertEqual(cm.exception.value.ref, '#')
def test_merge_objclass_menu(self):
schema = {'mergeStrategy': 'objectMerge', 'mergeOptions': { 'objClass': 'foo'}}
class MyDict(dict):
pass
objclass_menu = {'foo': MyDict}
merger = jsonmerge.Merger(schema, objclass_menu=objclass_menu)
base = None
base = merger.merge(base, {'c': "a", 'a': "a"})
self.assertTrue(isinstance(base, MyDict))
def test_merge_objclass_def(self):
schema = {'mergeStrategy': 'objectMerge'}
merger = jsonmerge.Merger(schema, objclass_def='OrderedDict')
base = None
base = merger.merge(base, OrderedDict([('c', "a"), ('a', "a")]))
self.assertIsInstance(base, OrderedDict)
self.assertEqual([k for k in base], ['c', 'a'])
base = merger.merge(base, {'a': "b"})
self.assertIsInstance(base, OrderedDict)
self.assertEqual([k for k in base], ['c', 'a'])
self.assertEqual(base, {'a': "b", 'c': "a"})
def test_merge_append(self):
schema = {'mergeStrategy': 'objectMerge',
'properties': {
'a': {'mergeStrategy': 'append'}
}}
base = None
base = jsonmerge.merge(base, {'a': ["a"]}, schema)
base = jsonmerge.merge(base, {'a': ["b"], 'b': 'c'}, schema)
self.assertEqual(base, {'a': ["a", "b"], 'b': 'c'})
def test_merge_append_pattern(self):
schema = {'mergeStrategy': 'objectMerge',
'patternProperties': {
'a': {'mergeStrategy': 'append'}
}}
base = None
base = jsonmerge.merge(base, {'a': ["a"]}, schema)
base = jsonmerge.merge(base, {'a': ["b"], 'b': 'c'}, schema)
self.assertEqual(base, {'a': ["a", "b"], 'b': 'c'})
def test_merge_append_additional(self):
schema = {'mergeStrategy': 'objectMerge',
'properties': {
'b': {'mergeStrategy': 'overwrite'}
},
'additionalProperties': {
'mergeStrategy': 'append'
}}
base = None
base = jsonmerge.merge(base, {'a': ["a"]}, schema)
base = jsonmerge.merge(base, {'a': ["b"], 'b': 'c'}, schema)
self.assertEqual(base, {'a': ["a", "b"], 'b': 'c'})
def test_merge_additional_bool(self):
schema = {'additionalProperties': True}
base = {}
head = {'a': 'a'}
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, {'a': 'a'})
def test_example(self):
head1 = {
'buyer': {
'id': {
'name': "Test old",
},
'uri': 'Test uri old',
}
}
head2 = {
'buyer': {
'id': {
'name': "Test new"
},
'uri': 'Test uri new',
},
'award': "Award"
}
base_expect = {
'buyer': {
'id': {
'name': [
{'value': "Test old"},
{'value': "Test new"},
]
},
'uri': 'Test uri new',
},
'award': "Award"
}
schema = {
'mergeStrategy': 'objectMerge',
'properties': {
'buyer': {
'properties': {
'id': {
'properties': {
'name': {
'mergeStrategy': 'version',
}
}
},
'uri': {
'mergeStrategy': 'overwrite',
}
},
},
'award': {
'mergeStrategy': 'overwrite',
}
},
}
base = None
base = jsonmerge.merge(base, head1, schema)
base = jsonmerge.merge(base, head2, schema)
self.assertEqual(base, base_expect)
def test_internal_refs(self):
schema = {
'id': 'http://example.com/schema_1.json',
'properties': {
'a': {'$ref': "#/definitions/a"},
},
'definitions': {
"a": {
"properties": {
"b": {'mergeStrategy': 'version'},
}
},
}
}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, {"a": {"b": "c"}})
base = merger.merge(base, {"a": {"b": "d"}})
self.assertEqual(base, {"a": {"b": [{"value": "c"}, {"value": "d"}]}})
def test_external_refs(self):
schema_1 = {
'id': 'http://example.com/schema_1.json',
'properties': {
'a': {'$ref': "schema_2.json#/definitions/a"},
},
}
schema_2 = {
'id': 'http://example.com/schema_2.json',
'definitions': {
"a": {
"properties": {
"b": {'mergeStrategy': 'version'},
}
},
}
}
merger = jsonmerge.Merger(schema_1)
# merge() would otherwise make an HTTP request
merger.cache_schema(schema_2)
base = None
base = merger.merge(base, {"a": {"b": "c"}})
base = merger.merge(base, {"a": {"b": "d"}})
self.assertEqual(base, {"a": {"b": [{"value": "c"}, {"value": "d"}]}})
@unittest.skipIf(Draft6Validator is None, 'jsonschema too old')
def test_external_refs_draft6(self):
schema_1 = {
'$id': 'http://example.com/schema_1.json',
'properties': {
'a': {'$ref': "schema_2.json#/definitions/a"},
},
}
schema_2 = {
'$id': 'http://example.com/schema_2.json',
'definitions': {
"a": {
"properties": {
"b": {'mergeStrategy': 'version'},
}
},
}
}
merger = jsonmerge.Merger(schema_1, validatorclass=Draft6Validator)
# merge() would otherwise make an HTTP request
merger.cache_schema(schema_2)
base = None
base = merger.merge(base, {"a": {"b": "c"}})
base = merger.merge(base, {"a": {"b": "d"}})
self.assertEqual(base, {"a": {"b": [{"value": "c"}, {"value": "d"}]}})
def test_oneof(self):
schema = {
'oneOf': [
{
'type': 'array',
'mergeStrategy': 'append'
},
{
'type': 'object'
}
]
}
merger = jsonmerge.Merger(schema)
base = [1]
base = merger.merge(base, [2])
self.assertEqual(base, [1,2])
base = {'a': 1}
base = merger.merge(base, {'b': 2})
self.assertEqual(base, {'a': 1, 'b': 2})
base = [1]
with self.assertRaises(HeadInstanceError) as cm:
merger.merge(base, {'b': 2})
self.assertEqual(cm.exception.value.ref, '#')
def test_oneof_recursive(self):
# Schema to merge all arrays with "append" strategy and all objects
# with the default "objectMerge" strategy.
schema = {
"oneOf": [
{
"type": "array",
"mergeStrategy": "append"
},
{
"type": "object",
"additionalProperties": {
"$ref": "#"
}
},
{
"type": "string"
},
]
}
base = {"a": ["1"], "b": "3", "c": {"d": ["4"], "e": "f"}}
head = {"a": ["2"], "b": "4", "g": "7", "c": {"d": ["3"]}}
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, {"a": ["1", "2"], "b": "4", "g": "7", "c": {"d": ["4", "3"], "e": "f"}})
def test_oneof_overwrite_toplevel(self):
schema = {
'mergeStrategy': 'overwrite',
'oneOf': [
{
'type': 'array'
},
{
'type': 'string'
},
]
}
merger = jsonmerge.Merger(schema)
self.assertEqual(merger.merge([2, 3, 4], 'a'), 'a')
self.assertEqual(merger.merge('a', [2, 3, 4]), [2, 3, 4])
def test_oneof_multiple_validate(self):
schema = {
'oneOf': [
{
'type': 'array',
'maxItems': 3,
'mergeStrategy': 'append'
},
{
'type': 'array',
'minItems': 2,
'mergeStrategy': 'overwrite'
}
]
}
merger = jsonmerge.Merger(schema)
base = [1]
base = merger.merge(base, [2])
self.assertEqual(base, [1, 2])
base = [1, 2]
with self.assertRaises(HeadInstanceError) as cm:
base = merger.merge(base, [3, 4])
def test_anyof(self):
schema = {
'anyOf': [
{
'type': 'array'
},
{
'type': 'string'
},
]
}
merger = jsonmerge.Merger(schema)
with self.assertRaises(SchemaError) as cm:
merger.merge([2, 3, 4], 'a')
self.assertEqual(cm.exception.value.ref, '#')
def test_anyof_overwrite_toplevel(self):
schema = {
'mergeStrategy': 'overwrite',
'anyOf': [
{
'type': 'array'
},
{
'type': 'string'
},
]
}
merger = jsonmerge.Merger(schema)
self.assertEqual(merger.merge([2, 3, 4], 'a'), 'a')
self.assertEqual(merger.merge('a', [2, 3, 4]), [2, 3, 4])
def test_custom_strategy(self):
schema = {'mergeStrategy': 'myStrategy'}
class MyStrategy(jsonmerge.strategies.Strategy):
def merge(self, walk, base, head, schema, meta, **kwargs):
if base is None:
ref = ""
else:
ref = base.ref
return JSONValue("foo", ref)
merger = jsonmerge.Merger(schema=schema,
strategies={'myStrategy': MyStrategy()})
base = None
base = merger.merge(base, {'a': 1})
self.assertEqual(base, "foo")
def test_merge_by_id(self):
schema = {
"properties": {
"awards": {
"type": "array",
"mergeStrategy": "arrayMergeById",
"items": {
"properties": {
"id": {"type": "string"},
"field": {"type": "number"},
}
}
}
}
}
a = {
"awards": [
{"id": "A", "field": 1},
{"id": "B", "field": 2}
]
}
b = {
"awards": [
{"id": "B", "field": 3},
{"id": "C", "field": 4}
]
}
expected = {
"awards": [
{"id": "A", "field": 1},
{"id": "B", "field": 3},
{"id": "C", "field": 4}
]
}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_when_key_is_empty_should_do_nothing(self):
schema = {
"properties": {
"awards": {
"type": "array",
"mergeStrategy": "arrayMergeById",
"mergeOptions": {"ignoreId": ""},
"items": {
"properties": {
"id": {"type": "string"},
"field": {"type": "number"},
}
}
}
}
}
a = {
"awards": [
{"id": "A", "field": 1},
{"id": "", "field": ""}
]
}
b = {
"awards": [
{"id": "B", "field": 3},
{"id": "C", "field": 4}
]
}
expected = {
"awards": [
{"id": "A", "field": 1},
{"id": "B", "field": 3},
{"id": "C", "field": 4}
]
}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_no_items(self):
schema = {
"mergeStrategy": "arrayMergeById",
"mergeOptions": {"idRef": "id"},
}
a = [
{"id": "A", "field": 1},
]
b = [
{"id": "A", "field": 2},
]
# by default, it should fall back to "replace" strategy for integers.
expected = [
{"id": "A", "field": 2},
]
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_simple_ref(self):
schema = {
"mergeStrategy": "arrayMergeById",
"mergeOptions": {"idRef": "key"}
}
a = [
{"key": "A", "field": 1},
]
b = [
{"key": "A", "field": 2},
]
expected = [
{"key": "A", "field": 2},
]
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_no_key(self):
schema = {
"mergeStrategy": "arrayMergeById",
}
a = [
{"id": "A", "field": 1},
]
b = [
{'field': 2}
]
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
# it should ignore array elements that do not have the id
self.assertEqual(base, a)
def test_merge_by_id_compex_ref(self):
schema = {
"mergeStrategy": "arrayMergeById",
"mergeOptions": {"idRef": "/foo/bar"},
}
a = [
{'foo': {'bar': 1}, 'baz': 1}
]
b = [
{'foo': {'bar': 2}}
]
c = [
{'foo': {'bar': 1}, 'baz': 2}
]
# by default, it should fall back to "replace" strategy for integers.
expected = [
{'foo': {'bar': 1}, 'baz': 2},
{'foo': {'bar': 2}}
]
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
base = merger.merge(base, c)
self.assertEqual(base, expected)
def test_merge_by_id_complex_id(self):
schema = {
"mergeStrategy": "arrayMergeById",
}
a = [
{"id": ["A", {"B": "C"} ], "field": 1},
{"id": ["A", {"B": "D"} ], "field": 2},
{"id": ["A", {"B": "E"} ], "field": 3},
]
b = [
{"id": ["A", {"B": "D"} ], "field": 4},
{"id": ["E", {"B": "C"} ], "field": 5},
]
merger = jsonmerge.Merger(schema)
c = merger.merge(a, b)
expected = [
{"id": ["A", {"B": "C"} ], "field": 1},
{"id": ["A", {"B": "D"} ], "field": 4},
{"id": ["A", {"B": "E"} ], "field": 3},
{"id": ["E", {"B": "C"} ], "field": 5},
]
self.assertEqual(expected, c)
def test_merge_by_id_with_complex_array(self):
schema = {
"properties": {
"awards": {
"type": "array",
"mergeStrategy": "arrayMergeById",
"items": {
"properties": {
"id": {"type": "string"},
"field": {
"type": "array",
"items": {
"properties": {
"xx": {
"type": "string"
}
}
}
}
}
}
}
}
}
a = {
"awards": [
{"id": "A", "field": [{"xx": "testA1"}, {"xx": "testA2"}]},
{"id": "B", "field": [{"xx": "testA3"}, {"xx": "testA4"}]}
]
}
b = {
"awards": [
{"id": "B", "field": [{"xx": "testA3"}, {"xx": "testA6"}]},
{"id": "C", "field": [{"xx": "testA7"}, {"xx": "testA8"}]}
]
}
expected = {
"awards": [
{"id": "A", "field": [{"xx": "testA1"}, {"xx": "testA2"}]},
{"id": "B", "field": [{"xx": "testA3"}, {"xx": "testA6"}]},
{"id": "C", "field": [{"xx": "testA7"}, {"xx": "testA8"}]}
]
}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_with_subschema(self):
schema = {
"properties": {
"awards": {
"type": "array",
"mergeStrategy": "arrayMergeById",
"items": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"field": {
"type": "number",
"mergeStrategy": "version"
}
}
}
}
}
}
a = {
"awards": [
{"id": "A", "field": 1},
{"id": "B", "field": 2}
]
}
b = {
"awards": [
{"id": "B", "field": 3},
{"id": "C", "field": 4}
]
}
expected = {
"awards": [
{"id": "A", "field": [{"value": 1}]},
{"id": "B", "field": [{"value": 2}, {"value": 3}]},
{"id": "C", "field": [{"value": 4}]}
]
}
merger = jsonmerge.Merger(schema)
base = None
base = merger.merge(base, a)
base = merger.merge(base, b)
self.assertEqual(base, expected)
def test_merge_by_id_items_array(self):
schema = {
"mergeStrategy": "arrayMergeById",
"items": [
{},
{},
]
}
head = [
{'id': 'A'},
{'id': 'B'}
]
merger = jsonmerge.Merger(schema)
base = None
with self.assertRaises(SchemaError) as cm:
merger.merge(base, head)
self.assertEqual(cm.exception.value.ref, '#/items')
def test_merge_by_id_only_integers(self):
# arrayMergeById strategy can be used to treat simple arrays of
# integers as Python sets by setting idRef to root (i.e. pointing to
# the array element itself)
#
# https://github.com/avian2/jsonmerge/issues/24
schema = {
"mergeStrategy": "arrayMergeById",
"mergeOptions": {"idRef": "/"},
}
base = [ 1, 2 ]
head = [ 2, 3 ]
expected = [ 1, 2, 3]
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_merge_by_id_bad_head_type(self):
schema = {
'mergeStrategy': 'arrayMergeById'
}
head = {'foo': 'bar'}
base = []
merger = jsonmerge.Merger(schema)
with self.assertRaises(HeadInstanceError) as cm:
merger.merge(base, head)
self.assertEqual(cm.exception.value.ref, '#')
def test_merge_by_id_bad_base_type(self):
schema = {
'mergeStrategy': 'arrayMergeById'
}
head = []
base = {'foo': 'bar'}
merger = jsonmerge.Merger(schema)
with self.assertRaises(BaseInstanceError) as cm:
merger.merge(base, head)
self.assertEqual(cm.exception.value.ref, '#')
def test_merge_by_id_no_base_id(self):
schema = {
'mergeStrategy': 'arrayMergeById'
}
head = [ {'id': 'a'} ]
base = [ {} ]
merger = jsonmerge.Merger(schema)
r = merger.merge(base, head)
self.assertEqual(r, [ {}, {'id': 'a'} ])
def test_merge_by_id_non_unique_base(self):
schema = {
"mergeStrategy": "arrayMergeById",
}
base = [
{'id': 'a'},
{'id': 'a'}
]
head = [
{'id': 'a',
'foo': 1}
]
merger = jsonmerge.Merger(schema)
with self.assertRaises(BaseInstanceError) as cm:
merger.merge(base, head)
self.assertEqual(cm.exception.value.ref, '#/1')
def test_merge_by_id_non_unique_head(self):
schema = {
"mergeStrategy": "arrayMergeById",
}
base = [
{'id': 'a',
'foo': 1},
]
head = [
{'id': 'a',
'foo': 2},
{'id': 'a',
'foo': 3}
]
merger = jsonmerge.Merger(schema)
with self.assertRaises(HeadInstanceError) as cm:
merger.merge(base, head)
self.assertEqual(cm.exception.value.ref, '#/1')
def test_merge_by_id_order_issue_31_1(self):
# There was an issue with arrayMergeById where head value would be
# merged with the last item in the base list, not the matching item.
# The result was then assigned to the matching item.
#
# If the last item in the base list was just created in the same
# arrayMergeById (i.e. by another item in the head list), then merge
# would fail with "Unresolvable JSON pointer".
#
# https://github.com/avian2/jsonmerge/pull/31
schema = {
"mergeStrategy": "arrayMergeById",
}
base = [
{'id': 'a', 'val': {'a': 1}},
{'id': 'b', 'val': {'b': 2}},
]
head = [
{'id': 'a', 'val': {'c': 3}}
]
expected = [
# bug would produce {'b': 2, 'c': 3} here
{'id': 'a', 'val': {'a': 1, 'c': 3}},
{'id': 'b', 'val': {'b': 2}},
]
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_merge_by_id_order_issue_31_2(self):
schema = {
"mergeStrategy": "arrayMergeById",
}
base = [
{'id': 'a', 'val': {'a': 1}},
{'id': 'b', 'val': {'b': 2}},
]
head = [
# this caused "Unresolvable JSON pointer"
{'id': 'c', 'val': {'c': 3}},
{'id': 'a', 'val': {'c': 3}}
]
expected = [
{'id': 'a', 'val': {'a': 1, 'c': 3}},
{'id': 'b', 'val': {'b': 2}},
{'id': 'c', 'val': {'c': 3}}
]
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_merge_by_id_subclass_get_key(self):
class MyArrayMergeById(jsonmerge.strategies.ArrayMergeById):
def get_key(self, walk, item, idRef):
return item.val[-1]
schema = {'mergeStrategy': 'myArrayMergeById'}
merger = jsonmerge.Merger(schema=schema,
strategies={'myArrayMergeById': MyArrayMergeById()})
base = [
[ 'a', 'b', 'id1' ],
[ 'c', 'id2' ],
]
head = [
[ 'e', 'f', 'g', 'id3' ],
[ 'd', 'id1' ],
]
expected = [
[ 'd', 'id1' ],
[ 'c', 'id2' ],
[ 'e', 'f', 'g', 'id3' ],
]
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_merge_by_id_multiple_ids(self):
schema = {
'mergeStrategy': 'arrayMergeById',
'mergeOptions': { 'idRef': ['/a', '/b'] }
}
base = [
{
'a': 1,
'b': 2
},
{
'a': 1,
'b': 1,
}
]
head = [
{
'a': 1,
'b': 1,
'c': 2,
},
{
# incomplete key, ignored
'b': 1,
},
{
'a': 2,
'b': 2,
'c': 3,
}
]
expected = [
{
'a': 1,
'b': 2
},
{
'a': 1,
'b': 1,
'c': 2,
},
{
'a': 2,
'b': 2,
'c': 3,
}
]
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_merge_by_id_multiple_ids_ignore(self):
schema = {
'mergeStrategy': 'arrayMergeById',
'mergeOptions': {
'idRef': ['/a', '/b'],
'ignoreId': [1, 2],
}
}
base = [
{
'a': 1,
'b': 1,
}
]
head = [
{
# ignoreId matches
'a': 1,
'b': 2,
'c': 2,
},
{
'a': 2,
'b': 2,
'c': 3,
}
]
expected = [
{
'a': 1,
'b': 1
},
{
'a': 2,
'b': 2,
'c': 3,
}
]
merger = jsonmerge.Merger(schema)
base = merger.merge(base, head)
self.assertEqual(base, expected)
def test_append_with_maxitems(self):
schema = {
"mergeStrategy": "append",
"maxItems": 2,
}
merger = jsonmerge.Merger(schema)
head = ["a"]
base = None
base = merger.merge(base, head)
base = merger.merge(base, head)
base = merger.merge(base, head)
schema2 = merger.get_schema()
jsonschema.validate(head, schema2)
jsonschema.validate(base, schema2)
def test_append_with_unique(self):
schema = {
"mergeStrategy": "append",
"uniqueItems": True,
}
merger = jsonmerge.Merger(schema)
head = ["a"]
base = None
base = merger.merge(base, head)
base = merger.merge(base, head)
schema2 = merger.get_schema()
jsonschema.validate(head, schema2)
jsonschema.validate(base, schema2)
def test_slash_in_property_name(self):
base = {'a': 0}
head = {'b': {'c/d': 1}}
base = jsonmerge.merge(base, head)
self.assertEqual(base, {'a': 0, 'b': {'c/d': 1}})
def test_tilde_in_property_name(self):
base = {'a': 0}
head = {'~1': 1}
base = jsonmerge.merge(base, head)
self.assertEqual(base, {'a': 0, '~1': 1})
def test_discard(self):
schema = {'mergeStrategy': 'discard'}
base = "a"
base = jsonmerge.merge(base, "b", schema)
self.assertEqual(base, "a")
def test_discard_objectmerge_null(self):
schema = {
'properties': {
'a': {
'mergeStrategy': 'discard'
}
} }
base = {}
head = {'a': 1}
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, {})
def test_discard_arraymergebyid(self):
schema = {
'mergeStrategy': 'arrayMergeById',
'items': {
'mergeStrategy': 'discard'
} }
base = [ {'id': 1, 'val': 1} ]
head = [ {'id': 1, 'val': 2} ]
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, [{'id': 1, 'val': 1}])
def test_discard_arraymergebyid_null(self):
schema = {
'mergeStrategy': 'arrayMergeById',
'items': {
'mergeStrategy': 'discard'
} }
base = [ ]
head = [ {'id': 1, 'val': 1} ]
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, [])
def test_discard_null_keep(self):
schema = {
'properties': {
'a': {
'mergeStrategy': 'discard',
'mergeOptions': {
'keepIfUndef': True
}
}
} }
base = {}
head = {'a': 1}
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, {'a': 1})
head = {'a': 2}
base = jsonmerge.merge(base, head, schema)
self.assertEqual(base, {'a': 1})
def test_bad_strategy(self):
schema = {
'properties': {
'a': {
'mergeStrategy': 'invalidStrategy'
} } }
base = {'a': 1 }
head = {'a': 2 }
with self.assertRaises(SchemaError) as cm:
jsonmerge.merge(base, head, schema)
self.assertEqual(cm.exception.value.ref, '#/properties/a')
def test_nan(self):
# float('nan') == float('nan') evaluates to false.
#
# https://github.com/avian2/jsonmerge/issues/39
base = {
"foo": 1,
"bar": float('nan')
}
head = {
"foo": 1,
"bar": float('nan')
}
base = jsonmerge.merge(base, head)
def test_merge_by_index(self):
schema = {
'mergeStrategy': 'arrayMergeByIndex'
}
base = [ {'a': 0 }, {'b': 1} ]
head = [ {'c': 2 }, {'d': 3} ]
result = jsonmerge.merge(base, head, schema)
self.assertEqual(result, [ {'a': 0, 'c': 2}, {'b': 1, 'd': 3} ])
def test_merge_by_index_empty(self):
schema = {
'mergeStrategy': 'arrayMergeByIndex'
}
base = [ ]
head = [ {'c': 2 }, {'d': 3} ]
result = jsonmerge.merge(base, head, schema)
self.assertEqual(result, [ {'c': 2}, {'d': 3} ])
class TestGetSchema(unittest.TestCase):
def test_default_overwrite(self):
schema = {'description': 'test'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, {'description': 'test'})
def test_default_object_merge_trivial(self):
schema = {'type': 'object'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, {'type': 'object'})
def test_default_object_merge(self):
schema = {
'properties': {
'foo': {
'mergeStrategy': 'version',
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2,
{
'properties': {
'foo': {
'type': 'array',
'items': {
'properties': {
'value': {},
}
}
}
}
})
def test_overwrite(self):
schema = {'mergeStrategy': 'overwrite'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, {})
def test_append(self):
schema = {'type': 'array',
'mergeStrategy': 'append'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, {'type': 'array'})
def test_version(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2,
{
'type': 'array',
'items': {
'properties': {
'value': {}
}
}
})
def test_version_ref_twice(self):
schema = {
'properties': {
'a': {
'$ref': '#/definitions/item'
},
'b': {
'$ref': '#/definitions/item'
},
},
'definitions': {
'item': {
'type': 'object',
'mergeStrategy': 'version'
}
}
}
expected = {
'properties': {
'a': {
'$ref': '#/definitions/item'
},
'b': {
'$ref': '#/definitions/item'
},
},
'definitions': {
'item': {
'type': 'array',
'items': {
'properties': {
'value': {
'type': 'object',
}
}
}
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(expected, schema2)
def test_version_meta(self):
schema = {'type': 'object',
'mergeStrategy': 'version'}
meta = {
'properties': {
'date': {},
'version': {}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema(merge_options={
'version': {'metadataSchema': meta}})
self.assertEqual(schema2,
{
'type': 'array',
'items': {
'properties': {
'value': {'type': 'object'},
'date': {},
'version': {}
}
}
})
def test_version_meta_deprecated(self):
schema = {'mergeStrategy': 'version'}
merger = jsonmerge.Merger(schema)
with warnings.catch_warnings(record=True) as w:
merger.get_schema(meta={'foo': 'bar'})
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[0].category, DeprecationWarning))
def test_version_meta_in_schema(self):
schema = {
'type': 'object',
'mergeStrategy': 'version',
'mergeOptions': {
'metadataSchema': {
'properties': {
'date': {},
'version': {},
},
},
},
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2,
{
'type': 'array',
'items': {
'properties': {
'value': {'type': 'object'},
'date': {},
'version': {}
}
}
})
def test_version_limit(self):
schema = {'mergeStrategy': 'version',
'mergeOptions': {'limit': 5}}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2,
{
'type': 'array',
'items': {
'properties': {
'value': {}
}
},
'maxItems': 5
})
def test_object_merge_simple(self):
schema = {'mergeStrategy': 'objectMerge'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, {})
def test_object_merge_nested(self):
schema = {'mergeStrategy': 'objectMerge',
'properties': {
'foo': {'mergeStrategy': 'version'}
}}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2,
{
'properties': {
'foo': {
'type': 'array',
'items': {
'properties': {
'value': {}
}
}
}
}
})
def test_anyof_descend(self):
# We don't support descending through 'anyOf', since each branch could
# have its own rules for merging. How could we then decide which rule
# to follow?
schema = {
'anyOf': [
{'properties': {'a': {}}},
{'properties': {'b': {}}}
]
}
merger = jsonmerge.Merger(schema)
with self.assertRaises(SchemaError) as cm:
merger.get_schema()
self.assertEqual(cm.exception.value.ref, '#')
def test_anyof_dont_descend(self):
# However, 'anyOf' should be fine if we don't descend through it (e.g.
# if it comes after an 'overwrite' strategy, for instance).
schema = {
'properties': {
'a': {
'mergeStrategy': 'overwrite',
'properties': {
'b': {
'anyOf': [
{'properties': {'c': {}}},
{'properties': {'d': {}}},
]
}
}
}
}
}
expected = {
'properties': {
'a': {
'properties': {
'b': {
'anyOf': [
{'properties': {'c': {}}},
{'properties': {'d': {}}},
]
}
}
}
}
}
merger = jsonmerge.Merger(schema)
mschema = merger.get_schema()
self.assertEqual(expected, mschema)
def test_external_refs(self):
schema_1 = {
'id': 'http://example.com/schema_1.json',
'$ref': 'schema_2.json#/definitions/foo'
}
# get_schema() shouldn't do external HTTP requests for schemas.
merger = jsonmerge.Merger(schema_1)
mschema = merger.get_schema()
d = {
'id': 'http://example.com/schema_1.json',
'$ref': 'schema_2.json#/definitions/foo'
}
self.assertEqual(d, mschema)
def test_internal_refs(self):
schema = {
'id': 'http://example.com/schema_1.json',
'mergeStrategy': 'overwrite',
'properties': {
'foo': {
'$ref': '#/definitions/bar'
}
},
'definitions': {
'bar': {
'properties': {
'baz': {}
}
}
}
}
expected = {
'id': 'http://example.com/schema_1.json',
'properties': {
'foo': {
'$ref': '#/definitions/bar'
}
},
'definitions': {
'bar': {
'properties': {
'baz': {}
}
}
}
}
merger = jsonmerge.Merger(schema)
mschema = merger.get_schema()
self.assertEqual(expected, mschema)
def test_ref_to_non_object_is_an_error(self):
schema = {
'properties': {
'foo': {
'$ref': '#/definitions/bar'
}
},
'definitions': {
'bar': []
}
}
merger = jsonmerge.Merger(schema)
with self.assertRaises(SchemaError) as cm:
merger.get_schema()
self.assertEqual(cm.exception.value.ref, '#/properties/foo')
def test_reference_in_meta(self):
schema = {'mergeStrategy': 'version'}
meta_schema = {
'id': 'http://example.com/schema_1.json',
'$ref': 'schema_2.json#/definitions/meta'
}
schema_2 = {
'id': 'http://example.com/schema_2.json',
'definitions': {
'meta': {
'properties': {
'foo': {
'type': 'string'
},
'bar': {
'enum': [ 'a', 'b' ]
}
}
}
}
}
merger = jsonmerge.Merger(schema)
merger.cache_schema(schema_2)
mschema = merger.get_schema(merge_options={
'version': {'metadataSchema': meta_schema}})
self.assertEqual(mschema,
{
'type': 'array',
'items': {
'properties': {
'value': {},
'foo': {'type': 'string'},
'bar': {'enum': ['a', 'b'] },
}
}
})
def test_local_reference_in_meta(self):
schema = {
'properties': {
'content': {
'mergeStrategy': 'version',
'mergeOptions': {
'metadataSchema': {
'$ref': '#/definitions/metadata',
},
},
},
},
'definitions': {
'metadata': {
'properties': {
'revision': {
'type': 'number',
},
},
},
},
}
merger = jsonmerge.Merger(schema)
mschema = merger.get_schema()
self.assertEqual(mschema, {
'properties': {
'content': {
'type': 'array',
'items': {
'properties': {
'value': {},
'revision': {
'type': 'number',
},
},
},
},
},
'definitions': {
'metadata': {
'properties': {
'revision': {
'type': 'number',
},
},
},
},
})
def test_array_in_schema(self):
schema = {
'mergeStrategy': 'overwrite',
'enum': [
"foo",
"bar",
]
}
expected = {
'enum': [
"foo",
"bar",
]
}
merger = jsonmerge.Merger(schema)
mschema = merger.get_schema()
self.assertEqual(expected, mschema)
def test_version_adds_array_type(self):
schema = {
"type": "object",
"properties": {
"buyer": {
"properties": {
"id": {
"type": "object",
"properties": {
"name": {
"type": "string",
"mergeStrategy": "version"
}
}
}
}
}
}
}
expected = {
"type": "object",
"properties": {
"buyer": {
"properties": {
"id": {
"type": "object",
"properties": {
"name": {
"type": "array",
"items": {
"properties": {
"value": {
"type": "string"
}
}
}
}
}
}
}
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_by_id(self):
schema = {
"mergeStrategy": "arrayMergeById",
"items": {
'type': 'object'
}
}
expected = {
"items": {
'type': 'object'
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_by_id_with_depth(self):
schema = {
"properties": {
"test": {
"mergeStrategy": "arrayMergeById",
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
}
},
"definitions": {
"refitem": {
"type": "object",
"properties": {
"field1": {
"type": "string",
"mergeStrategy": "version"
}
}
}
}
}
expected = {
"properties": {
"test": {
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
}
},
"definitions": {
"refitem": {
"type": "object",
"properties": {
"field1": {
"type": "array",
"items": {
"properties": {
"value": {
"type": "string"
}
}
}
}
}
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_by_id_with_depth_twice(self):
# Here we have a $ref that get_schema() should descend into twice.
schema = {
"properties": {
"test": {
"mergeStrategy": "arrayMergeById",
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
},
"test2": {
"mergeStrategy": "arrayMergeById",
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
}
},
"definitions": {
"refitem": {
"type": "object",
"properties": {
"field1": {
"type": "string",
"mergeStrategy": "version"
}
}
}
}
}
expected = {
"properties": {
"test": {
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
},
"test2": {
"type": "array",
"items": {
"$ref": "#/definitions/refitem"
}
}
},
"definitions": {
"refitem": {
"type": "object",
"properties": {
"field1": {
"type": "array",
"items": {
"properties": {
"value": {
"type": "string"
}
}
}
}
}
}
}
}
self.maxDiff = None
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_by_id_with_depth_no_ref(self):
schema = {
"properties": {
"test": {
"mergeStrategy": "arrayMergeById",
"type": "array",
"items": {
"type": "object",
"properties": {
"field1": {
"type": "string",
"mergeStrategy": "version"
}
}
}
}
}
}
expected = {
"properties": {
"test": {
"type": "array",
"items": {
"type": "object",
"properties": {
"field1": {
"type": "array",
"items": {
"properties": {
"value": {
"type": "string"
}
}
}
}
}
}
}
},
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_append_additional(self):
schema = {'mergeStrategy': 'objectMerge',
'properties': {
'b': {'mergeStrategy': 'overwrite'}
},
'additionalProperties': {
'mergeStrategy': 'append'
}}
expected = {'properties': {
'b': {},
},
'additionalProperties': {}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_merge_additional_bool(self):
schema = {'additionalProperties': True}
base = {}
head = {'a': 'a'}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, schema)
def test_oneof(self):
schema = {
'oneOf': [
{
'type': 'array',
'mergeStrategy': 'append'
},
{
'type': 'object'
}
]
}
expected = {
'oneOf': [
{
'type': 'array',
},
{
'type': 'object'
}
]
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_oneof_recursive(self):
# Schema to merge all arrays with "append" strategy and all objects
# with the default "objectMerge" strategy.
schema = {
"oneOf": [
{
"type": "array",
"mergeStrategy": "append"
},
{
"type": "object",
"additionalProperties": {
"$ref": "#"
}
},
{
"type": "string"
},
]
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, schema)
def test_oneof_toplevel(self):
schema = {
"mergeStrategy": "version",
"oneOf": [
{"type": "string", "pattern": "^!?(?:[0-9]{1,3}\\.){3}[0-9]{1,3}(?:\\/[0-9]{1,2})?$"},
{"type": "string", "format": "hostname"}
]
}
expected = {
"type": "array",
"items": {
"properties": {
"value": {
"oneOf": [
{"type": "string", "pattern": "^!?(?:[0-9]{1,3}\\.){3}[0-9]{1,3}(?:\\/[0-9]{1,2})?$"},
{"type": "string", "format": "hostname"}
]
}
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_anyof_toplevel(self):
schema = {
"mergeStrategy": "version",
"anyOf": [
{"type": "string", "pattern": "^!?(?:[0-9]{1,3}\\.){3}[0-9]{1,3}(?:\\/[0-9]{1,2})?$"},
{"type": "string", "format": "hostname"}
]
}
expected = {
"type": "array",
"items": {
"properties": {
"value": {
"anyOf": [
{"type": "string", "pattern": "^!?(?:[0-9]{1,3}\\.){3}[0-9]{1,3}(?:\\/[0-9]{1,2})?$"},
{"type": "string", "format": "hostname"}
]
}
}
}
}
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
self.assertEqual(schema2, expected)
def test_discard(self):
schema = { 'type': 'string',
'mergeStrategy': 'discard' }
merger = jsonmerge.Merger(schema)
schema2 = merger.get_schema()
expected = { 'type': 'string' }
self.assertEqual(schema2, expected)
def test_bad_strategy(self):
schema = {
'properties': {
'a': {
'mergeStrategy': 'invalidStrategy'
} } }
merger = jsonmerge.Merger(schema)
with self.assertRaises(SchemaError) as cm:
merger.get_schema()
self.assertEqual(cm.exception.value.ref, '#/properties/a')
def test_merge_by_index(self):
schema = {
'type': 'array',
'mergeStrategy': 'arrayMergeByIndex'
}
merger = jsonmerge.Merger(schema)
result = merger.get_schema()
self.assertEqual(result, {'type': 'array'})
def test_merge_by_index_name_in_exception(self):
schema = {
'properties': {
'a': {
'mergeStrategy': 'arrayMergeByIndex'
}
}
}
head = {'a': {}}
base = {'a': []}
merger = jsonmerge.Merger(schema)
with self.assertRaises(HeadInstanceError) as cm:
merger.merge(base, head)
self.assertIn('arrayMergeByIndex', str(cm.exception))
class TestExceptions(unittest.TestCase):
def test_str_with_ref(self):
e = SchemaError("Test error", JSONValue({}, '#'))
self.assertEqual(str(e), 'Test error: #')
def test_str(self):
e = SchemaError("Test error")
self.assertEqual(str(e), 'Test error')
def test_str_with_name(self):
e = SchemaError("Test error", JSONValue({}, '#'), 'test')
self.assertEqual(str(e), "'test' merge strategy: Test error: #")
if __name__ == '__main__':
unittest.main()
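# A minimal usage sketch distilled from the tests above (illustrative only; the
# schema and values here are made up): merge two documents under a per-property
# strategy, then ask the Merger for the schema describing merged documents.
import jsonmerge

example_schema = {'properties': {'log': {'mergeStrategy': 'append'}}}
example_merger = jsonmerge.Merger(example_schema)
merged = example_merger.merge({'log': ['a']}, {'log': ['b'], 'name': 'x'})
# merged == {'log': ['a', 'b'], 'name': 'x'}
merged_schema = example_merger.get_schema()
# merged_schema describes documents produced by merge(), as exercised in TestGetSchema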
<|file_name|>setup.py<|end_file_name|>
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("capture", ["capture.pyx"])]
)
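# An alternative sketch (an assumption, not from the original project): newer
# Cython releases recommend Cython.Build.cythonize over the build_ext cmdclass
# used above. The same extension could then be declared as:
from setuptools import setup, Extension
from Cython.Build import cythonize

setup(
    ext_modules=cythonize([Extension("capture", ["capture.pyx"])])
)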
<|file_name|>Display.java<|end_file_name|>
/*
* SonarQube
* Copyright (C) 2009-2016 SonarSource SA
* mailto:contact AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.api.server.authentication;
import javax.annotation.concurrent.Immutable;
import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.commons.lang.StringUtils.isNotBlank;
/**
* Display information provided by the Identity Provider to be displayed in the login form.
*
* @since 5.4
*/
@Immutable
public final class Display {
private final String iconPath;
private final String backgroundColor;
private Display(Builder builder) {
this.iconPath = builder.iconPath;
this.backgroundColor = builder.backgroundColor;
}
/**
* URL path to the provider icon, as deployed at runtime, for example "/static/authgithub/github.svg" (in this
* case "authgithub" is the plugin key. Source file is "src/main/resources/static/github.svg").
* It can also be an external URL, for example "http://www.mydomain/myicon.png".
*
* Must not be blank.
* <br>
* The recommended format is SVG with a size of 24x24 pixels.
* The other supported format is PNG, with a size of 40x40 pixels.
*/
public String getIconPath() {
return iconPath;
}
/**
* Background color for the provider button displayed in the login form.
* It's a Hexadecimal value, for instance #205081.
* <br>
* If not provided, the default value is #236a97
*/
public String getBackgroundColor() {
return backgroundColor;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String iconPath;
private String backgroundColor = "#236a97";
private Builder() {
}
/**
* @see Display#getIconPath()
*/
public Builder setIconPath(String iconPath) {
this.iconPath = iconPath;
return this;
}
/**
* @see Display#getBackgroundColor()
*/
public Builder setBackgroundColor(String backgroundColor) {
this.backgroundColor = backgroundColor;
return this;
}
public Display build() {
checkArgument(isNotBlank(iconPath), "Icon path must not be blank");
validateBackgroundColor();
return new Display(this);
}
private void validateBackgroundColor() {
checkArgument(isNotBlank(backgroundColor), "Background color must not be blank");
checkArgument(backgroundColor.length() == 7 && backgroundColor.indexOf('#') == 0,
"Background color must begin with a sharp followed by 6 characters");
}
}
}
<|file_name|>encounter.go<|end_file_name|>
// Copyright (c) 2011-2015, HL7, Inc & The MITRE Corporation
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of HL7 nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package models
import (
"encoding/json"
"errors"
"fmt"
)
type Encounter struct {
DomainResource `bson:",inline"`
Identifier []Identifier `bson:"identifier,omitempty" json:"identifier,omitempty"`
Status string `bson:"status,omitempty" json:"status,omitempty"`
StatusHistory []EncounterStatusHistoryComponent `bson:"statusHistory,omitempty" json:"statusHistory,omitempty"`
Class string `bson:"class,omitempty" json:"class,omitempty"`
Type []CodeableConcept `bson:"type,omitempty" json:"type,omitempty"`
Priority *CodeableConcept `bson:"priority,omitempty" json:"priority,omitempty"`
Patient *Reference `bson:"patient,omitempty" json:"patient,omitempty"`
EpisodeOfCare []Reference `bson:"episodeOfCare,omitempty" json:"episodeOfCare,omitempty"`
IncomingReferral []Reference `bson:"incomingReferral,omitempty" json:"incomingReferral,omitempty"`
Participant []EncounterParticipantComponent `bson:"participant,omitempty" json:"participant,omitempty"`
Appointment *Reference `bson:"appointment,omitempty" json:"appointment,omitempty"`
Period *Period `bson:"period,omitempty" json:"period,omitempty"`
Length *Quantity `bson:"length,omitempty" json:"length,omitempty"`
Reason []CodeableConcept `bson:"reason,omitempty" json:"reason,omitempty"`
Indication []Reference `bson:"indication,omitempty" json:"indication,omitempty"`
Hospitalization *EncounterHospitalizationComponent `bson:"hospitalization,omitempty" json:"hospitalization,omitempty"`
Location []EncounterLocationComponent `bson:"location,omitempty" json:"location,omitempty"`
ServiceProvider *Reference `bson:"serviceProvider,omitempty" json:"serviceProvider,omitempty"`
PartOf *Reference `bson:"partOf,omitempty" json:"partOf,omitempty"`
}
// Custom marshaller to add the resourceType property, as required by the specification
func (resource *Encounter) MarshalJSON() ([]byte, error) {
resource.ResourceType = "Encounter"
// Dereferencing the pointer to avoid infinite recursion.
// Passing in plain old x (a pointer to Encounter), would cause this same
// MarshallJSON function to be called again
return json.Marshal(*resource)
}
func (x *Encounter) GetBSON() (interface{}, error) {
x.ResourceType = "Encounter"
// See comment in MarshallJSON to see why we dereference
return *x, nil
}
// The "encounter" sub-type is needed to avoid infinite recursion in UnmarshalJSON
type encounter Encounter
// Custom unmarshaller to properly unmarshal embedded resources (represented as interface{})
func (x *Encounter) UnmarshalJSON(data []byte) (err error) {
x2 := encounter{}
if err = json.Unmarshal(data, &x2); err == nil {
if x2.Contained != nil {
for i := range x2.Contained {
x2.Contained[i] = MapToResource(x2.Contained[i], true)
}
}
*x = Encounter(x2)
return x.checkResourceType()
}
return
}
func (x *Encounter) checkResourceType() error {
if x.ResourceType == "" {
x.ResourceType = "Encounter"
} else if x.ResourceType != "Encounter" {
return errors.New(fmt.Sprintf("Expected resourceType to be Encounter, instead received %s", x.ResourceType))
}
return nil
}
type EncounterStatusHistoryComponent struct {
BackboneElement `bson:",inline"`
Status string `bson:"status,omitempty" json:"status,omitempty"`
Period *Period `bson:"period,omitempty" json:"period,omitempty"`
}
type EncounterParticipantComponent struct {
BackboneElement `bson:",inline"`
Type []CodeableConcept `bson:"type,omitempty" json:"type,omitempty"`
Period *Period `bson:"period,omitempty" json:"period,omitempty"`
Individual *Reference `bson:"individual,omitempty" json:"individual,omitempty"`
}
type EncounterHospitalizationComponent struct {
BackboneElement `bson:",inline"`
PreAdmissionIdentifier *Identifier `bson:"preAdmissionIdentifier,omitempty" json:"preAdmissionIdentifier,omitempty"`
Origin *Reference `bson:"origin,omitempty" json:"origin,omitempty"`
AdmitSource *CodeableConcept `bson:"admitSource,omitempty" json:"admitSource,omitempty"`
AdmittingDiagnosis []Reference `bson:"admittingDiagnosis,omitempty" json:"admittingDiagnosis,omitempty"`
ReAdmission *CodeableConcept `bson:"reAdmission,omitempty" json:"reAdmission,omitempty"`
DietPreference []CodeableConcept `bson:"dietPreference,omitempty" json:"dietPreference,omitempty"`
SpecialCourtesy []CodeableConcept `bson:"specialCourtesy,omitempty" json:"specialCourtesy,omitempty"`
SpecialArrangement []CodeableConcept `bson:"specialArrangement,omitempty" json:"specialArrangement,omitempty"`
Destination *Reference `bson:"destination,omitempty" json:"destination,omitempty"`
DischargeDisposition *CodeableConcept `bson:"dischargeDisposition,omitempty" json:"dischargeDisposition,omitempty"`
DischargeDiagnosis []Reference `bson:"dischargeDiagnosis,omitempty" json:"dischargeDiagnosis,omitempty"`
}
type EncounterLocationComponent struct {
BackboneElement `bson:",inline"`
Location *Reference `bson:"location,omitempty" json:"location,omitempty"`
Status string `bson:"status,omitempty" json:"status,omitempty"`
Period *Period `bson:"period,omitempty" json:"period,omitempty"`
}
type EncounterPlus struct {
Encounter `bson:",inline"`
EncounterPlusRelatedResources `bson:",inline"`
}
type EncounterPlusRelatedResources struct {
IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare *[]EpisodeOfCare `bson:"_includedEpisodeOfCareResourcesReferencedByEpisodeofcare,omitempty"`
IncludedReferralRequestResourcesReferencedByIncomingreferral *[]ReferralRequest `bson:"_includedReferralRequestResourcesReferencedByIncomingreferral,omitempty"`
IncludedPractitionerResourcesReferencedByPractitioner *[]Practitioner `bson:"_includedPractitionerResourcesReferencedByPractitioner,omitempty"`
IncludedAppointmentResourcesReferencedByAppointment *[]Appointment `bson:"_includedAppointmentResourcesReferencedByAppointment,omitempty"`
IncludedEncounterResourcesReferencedByPartof *[]Encounter `bson:"_includedEncounterResourcesReferencedByPartof,omitempty"`
IncludedProcedureResourcesReferencedByProcedure *[]Procedure `bson:"_includedProcedureResourcesReferencedByProcedure,omitempty"`
IncludedPractitionerResourcesReferencedByParticipant *[]Practitioner `bson:"_includedPractitionerResourcesReferencedByParticipant,omitempty"`
IncludedRelatedPersonResourcesReferencedByParticipant *[]RelatedPerson `bson:"_includedRelatedPersonResourcesReferencedByParticipant,omitempty"`
IncludedConditionResourcesReferencedByCondition *[]Condition `bson:"_includedConditionResourcesReferencedByCondition,omitempty"`
IncludedPatientResourcesReferencedByPatient *[]Patient `bson:"_includedPatientResourcesReferencedByPatient,omitempty"`
IncludedLocationResourcesReferencedByLocation *[]Location `bson:"_includedLocationResourcesReferencedByLocation,omitempty"`
IncludedConditionResourcesReferencedByIndication *[]Condition `bson:"_includedConditionResourcesReferencedByIndication,omitempty"`
IncludedProcedureResourcesReferencedByIndication *[]Procedure `bson:"_includedProcedureResourcesReferencedByIndication,omitempty"`
RevIncludedProvenanceResourcesReferencingTarget *[]Provenance `bson:"_revIncludedProvenanceResourcesReferencingTarget,omitempty"`
RevIncludedDocumentManifestResourcesReferencingContentref *[]DocumentManifest `bson:"_revIncludedDocumentManifestResourcesReferencingContentref,omitempty"`
RevIncludedDocumentManifestResourcesReferencingRelatedref *[]DocumentManifest `bson:"_revIncludedDocumentManifestResourcesReferencingRelatedref,omitempty"`
RevIncludedProcedureResourcesReferencingEncounter *[]Procedure `bson:"_revIncludedProcedureResourcesReferencingEncounter,omitempty"`
RevIncludedListResourcesReferencingItem *[]List `bson:"_revIncludedListResourcesReferencingItem,omitempty"`
RevIncludedListResourcesReferencingEncounter *[]List `bson:"_revIncludedListResourcesReferencingEncounter,omitempty"`
RevIncludedDocumentReferenceResourcesReferencingEncounter *[]DocumentReference `bson:"_revIncludedDocumentReferenceResourcesReferencingEncounter,omitempty"`
RevIncludedDocumentReferenceResourcesReferencingRelatedref *[]DocumentReference `bson:"_revIncludedDocumentReferenceResourcesReferencingRelatedref,omitempty"`
RevIncludedOrderResourcesReferencingDetail *[]Order `bson:"_revIncludedOrderResourcesReferencingDetail,omitempty"`
RevIncludedVisionPrescriptionResourcesReferencingEncounter *[]VisionPrescription `bson:"_revIncludedVisionPrescriptionResourcesReferencingEncounter,omitempty"`
RevIncludedProcedureRequestResourcesReferencingEncounter *[]ProcedureRequest `bson:"_revIncludedProcedureRequestResourcesReferencingEncounter,omitempty"`
RevIncludedFlagResourcesReferencingEncounter *[]Flag `bson:"_revIncludedFlagResourcesReferencingEncounter,omitempty"`
RevIncludedObservationResourcesReferencingEncounter *[]Observation `bson:"_revIncludedObservationResourcesReferencingEncounter,omitempty"`
RevIncludedMedicationAdministrationResourcesReferencingEncounter *[]MedicationAdministration `bson:"_revIncludedMedicationAdministrationResourcesReferencingEncounter,omitempty"`
RevIncludedCommunicationRequestResourcesReferencingEncounter *[]CommunicationRequest `bson:"_revIncludedCommunicationRequestResourcesReferencingEncounter,omitempty"`
RevIncludedRiskAssessmentResourcesReferencingEncounter *[]RiskAssessment `bson:"_revIncludedRiskAssessmentResourcesReferencingEncounter,omitempty"`
RevIncludedBasicResourcesReferencingSubject *[]Basic `bson:"_revIncludedBasicResourcesReferencingSubject,omitempty"`
RevIncludedDiagnosticReportResourcesReferencingEncounter *[]DiagnosticReport `bson:"_revIncludedDiagnosticReportResourcesReferencingEncounter,omitempty"`
RevIncludedNutritionOrderResourcesReferencingEncounter *[]NutritionOrder `bson:"_revIncludedNutritionOrderResourcesReferencingEncounter,omitempty"`
RevIncludedEncounterResourcesReferencingPartof *[]Encounter `bson:"_revIncludedEncounterResourcesReferencingPartof,omitempty"`
RevIncludedAuditEventResourcesReferencingReference *[]AuditEvent `bson:"_revIncludedAuditEventResourcesReferencingReference,omitempty"`
RevIncludedMedicationOrderResourcesReferencingEncounter *[]MedicationOrder `bson:"_revIncludedMedicationOrderResourcesReferencingEncounter,omitempty"`
RevIncludedCommunicationResourcesReferencingEncounter *[]Communication `bson:"_revIncludedCommunicationResourcesReferencingEncounter,omitempty"`
RevIncludedConditionResourcesReferencingEncounter *[]Condition `bson:"_revIncludedConditionResourcesReferencingEncounter,omitempty"`
RevIncludedCompositionResourcesReferencingSubject *[]Composition `bson:"_revIncludedCompositionResourcesReferencingSubject,omitempty"`
RevIncludedCompositionResourcesReferencingEncounter *[]Composition `bson:"_revIncludedCompositionResourcesReferencingEncounter,omitempty"`
RevIncludedCompositionResourcesReferencingEntry *[]Composition `bson:"_revIncludedCompositionResourcesReferencingEntry,omitempty"`
RevIncludedDetectedIssueResourcesReferencingImplicated *[]DetectedIssue `bson:"_revIncludedDetectedIssueResourcesReferencingImplicated,omitempty"`
RevIncludedDiagnosticOrderResourcesReferencingEncounter *[]DiagnosticOrder `bson:"_revIncludedDiagnosticOrderResourcesReferencingEncounter,omitempty"`
RevIncludedOrderResponseResourcesReferencingFulfillment *[]OrderResponse `bson:"_revIncludedOrderResponseResourcesReferencingFulfillment,omitempty"`
RevIncludedQuestionnaireResponseResourcesReferencingSubject *[]QuestionnaireResponse `bson:"_revIncludedQuestionnaireResponseResourcesReferencingSubject,omitempty"`
RevIncludedQuestionnaireResponseResourcesReferencingEncounter *[]QuestionnaireResponse `bson:"_revIncludedQuestionnaireResponseResourcesReferencingEncounter,omitempty"`
RevIncludedProcessResponseResourcesReferencingRequest *[]ProcessResponse `bson:"_revIncludedProcessResponseResourcesReferencingRequest,omitempty"`
RevIncludedClinicalImpressionResourcesReferencingTrigger *[]ClinicalImpression `bson:"_revIncludedClinicalImpressionResourcesReferencingTrigger,omitempty"`
RevIncludedMessageHeaderResourcesReferencingData *[]MessageHeader `bson:"_revIncludedMessageHeaderResourcesReferencingData,omitempty"`
}
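// The Get* accessors below expose the resources gathered for an Encounter
// search. Each returns an error when the corresponding _include or
// _revinclude parameter was not part of the request.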
func (e *EncounterPlusRelatedResources) GetIncludedEpisodeOfCareResourcesReferencedByEpisodeofcare() (episodeOfCares []EpisodeOfCare, err error) {
if e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare == nil {
err = errors.New("Included episodeOfCares not requested")
} else {
episodeOfCares = *e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedReferralRequestResourcesReferencedByIncomingreferral() (referralRequests []ReferralRequest, err error) {
if e.IncludedReferralRequestResourcesReferencedByIncomingreferral == nil {
err = errors.New("Included referralRequests not requested")
} else {
referralRequests = *e.IncludedReferralRequestResourcesReferencedByIncomingreferral
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedPractitionerResourceReferencedByPractitioner() (practitioner *Practitioner, err error) {
if e.IncludedPractitionerResourcesReferencedByPractitioner == nil {
err = errors.New("Included practitioners not requested")
} else if len(*e.IncludedPractitionerResourcesReferencedByPractitioner) > 1 {
err = fmt.Errorf("Expected 0 or 1 practitioner, but found %d", len(*e.IncludedPractitionerResourcesReferencedByPractitioner))
} else if len(*e.IncludedPractitionerResourcesReferencedByPractitioner) == 1 {
practitioner = &(*e.IncludedPractitionerResourcesReferencedByPractitioner)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedAppointmentResourceReferencedByAppointment() (appointment *Appointment, err error) {
if e.IncludedAppointmentResourcesReferencedByAppointment == nil {
err = errors.New("Included appointments not requested")
} else if len(*e.IncludedAppointmentResourcesReferencedByAppointment) > 1 {
err = fmt.Errorf("Expected 0 or 1 appointment, but found %d", len(*e.IncludedAppointmentResourcesReferencedByAppointment))
} else if len(*e.IncludedAppointmentResourcesReferencedByAppointment) == 1 {
appointment = &(*e.IncludedAppointmentResourcesReferencedByAppointment)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedEncounterResourceReferencedByPartof() (encounter *Encounter, err error) {
if e.IncludedEncounterResourcesReferencedByPartof == nil {
err = errors.New("Included encounters not requested")
} else if len(*e.IncludedEncounterResourcesReferencedByPartof) > 1 {
err = fmt.Errorf("Expected 0 or 1 encounter, but found %d", len(*e.IncludedEncounterResourcesReferencedByPartof))
} else if len(*e.IncludedEncounterResourcesReferencedByPartof) == 1 {
encounter = &(*e.IncludedEncounterResourcesReferencedByPartof)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedProcedureResourcesReferencedByProcedure() (procedures []Procedure, err error) {
if e.IncludedProcedureResourcesReferencedByProcedure == nil {
err = errors.New("Included procedures not requested")
} else {
procedures = *e.IncludedProcedureResourcesReferencedByProcedure
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedPractitionerResourceReferencedByParticipant() (practitioner *Practitioner, err error) {
if e.IncludedPractitionerResourcesReferencedByParticipant == nil {
err = errors.New("Included practitioners not requested")
} else if len(*e.IncludedPractitionerResourcesReferencedByParticipant) > 1 {
err = fmt.Errorf("Expected 0 or 1 practitioner, but found %d", len(*e.IncludedPractitionerResourcesReferencedByParticipant))
} else if len(*e.IncludedPractitionerResourcesReferencedByParticipant) == 1 {
practitioner = &(*e.IncludedPractitionerResourcesReferencedByParticipant)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedRelatedPersonResourceReferencedByParticipant() (relatedPerson *RelatedPerson, err error) {
if e.IncludedRelatedPersonResourcesReferencedByParticipant == nil {
err = errors.New("Included relatedpeople not requested")
} else if len(*e.IncludedRelatedPersonResourcesReferencedByParticipant) > 1 {
err = fmt.Errorf("Expected 0 or 1 relatedPerson, but found %d", len(*e.IncludedRelatedPersonResourcesReferencedByParticipant))
} else if len(*e.IncludedRelatedPersonResourcesReferencedByParticipant) == 1 {
relatedPerson = &(*e.IncludedRelatedPersonResourcesReferencedByParticipant)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedConditionResourcesReferencedByCondition() (conditions []Condition, err error) {
if e.IncludedConditionResourcesReferencedByCondition == nil {
err = errors.New("Included conditions not requested")
} else {
conditions = *e.IncludedConditionResourcesReferencedByCondition
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedPatientResourceReferencedByPatient() (patient *Patient, err error) {
if e.IncludedPatientResourcesReferencedByPatient == nil {
err = errors.New("Included patients not requested")
} else if len(*e.IncludedPatientResourcesReferencedByPatient) > 1 {
err = fmt.Errorf("Expected 0 or 1 patient, but found %d", len(*e.IncludedPatientResourcesReferencedByPatient))
} else if len(*e.IncludedPatientResourcesReferencedByPatient) == 1 {
patient = &(*e.IncludedPatientResourcesReferencedByPatient)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedLocationResourceReferencedByLocation() (location *Location, err error) {
if e.IncludedLocationResourcesReferencedByLocation == nil {
err = errors.New("Included locations not requested")
} else if len(*e.IncludedLocationResourcesReferencedByLocation) > 1 {
err = fmt.Errorf("Expected 0 or 1 location, but found %d", len(*e.IncludedLocationResourcesReferencedByLocation))
} else if len(*e.IncludedLocationResourcesReferencedByLocation) == 1 {
location = &(*e.IncludedLocationResourcesReferencedByLocation)[0]
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedConditionResourcesReferencedByIndication() (conditions []Condition, err error) {
if e.IncludedConditionResourcesReferencedByIndication == nil {
err = errors.New("Included conditions not requested")
} else {
conditions = *e.IncludedConditionResourcesReferencedByIndication
}
return
}
func (e *EncounterPlusRelatedResources) GetIncludedProcedureResourcesReferencedByIndication() (procedures []Procedure, err error) {
if e.IncludedProcedureResourcesReferencedByIndication == nil {
err = errors.New("Included procedures not requested")
} else {
procedures = *e.IncludedProcedureResourcesReferencedByIndication
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedProvenanceResourcesReferencingTarget() (provenances []Provenance, err error) {
if e.RevIncludedProvenanceResourcesReferencingTarget == nil {
err = errors.New("RevIncluded provenances not requested")
} else {
provenances = *e.RevIncludedProvenanceResourcesReferencingTarget
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDocumentManifestResourcesReferencingContentref() (documentManifests []DocumentManifest, err error) {
if e.RevIncludedDocumentManifestResourcesReferencingContentref == nil {
err = errors.New("RevIncluded documentManifests not requested")
} else {
documentManifests = *e.RevIncludedDocumentManifestResourcesReferencingContentref
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDocumentManifestResourcesReferencingRelatedref() (documentManifests []DocumentManifest, err error) {
if e.RevIncludedDocumentManifestResourcesReferencingRelatedref == nil {
err = errors.New("RevIncluded documentManifests not requested")
} else {
documentManifests = *e.RevIncludedDocumentManifestResourcesReferencingRelatedref
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedProcedureResourcesReferencingEncounter() (procedures []Procedure, err error) {
if e.RevIncludedProcedureResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded procedures not requested")
} else {
procedures = *e.RevIncludedProcedureResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedListResourcesReferencingItem() (lists []List, err error) {
if e.RevIncludedListResourcesReferencingItem == nil {
err = errors.New("RevIncluded lists not requested")
} else {
lists = *e.RevIncludedListResourcesReferencingItem
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedListResourcesReferencingEncounter() (lists []List, err error) {
if e.RevIncludedListResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded lists not requested")
} else {
lists = *e.RevIncludedListResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDocumentReferenceResourcesReferencingEncounter() (documentReferences []DocumentReference, err error) {
if e.RevIncludedDocumentReferenceResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded documentReferences not requested")
} else {
documentReferences = *e.RevIncludedDocumentReferenceResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDocumentReferenceResourcesReferencingRelatedref() (documentReferences []DocumentReference, err error) {
if e.RevIncludedDocumentReferenceResourcesReferencingRelatedref == nil {
err = errors.New("RevIncluded documentReferences not requested")
} else {
documentReferences = *e.RevIncludedDocumentReferenceResourcesReferencingRelatedref
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedOrderResourcesReferencingDetail() (orders []Order, err error) {
if e.RevIncludedOrderResourcesReferencingDetail == nil {
err = errors.New("RevIncluded orders not requested")
} else {
orders = *e.RevIncludedOrderResourcesReferencingDetail
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedVisionPrescriptionResourcesReferencingEncounter() (visionPrescriptions []VisionPrescription, err error) {
if e.RevIncludedVisionPrescriptionResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded visionPrescriptions not requested")
} else {
visionPrescriptions = *e.RevIncludedVisionPrescriptionResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedProcedureRequestResourcesReferencingEncounter() (procedureRequests []ProcedureRequest, err error) {
if e.RevIncludedProcedureRequestResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded procedureRequests not requested")
} else {
procedureRequests = *e.RevIncludedProcedureRequestResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedFlagResourcesReferencingEncounter() (flags []Flag, err error) {
if e.RevIncludedFlagResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded flags not requested")
} else {
flags = *e.RevIncludedFlagResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedObservationResourcesReferencingEncounter() (observations []Observation, err error) {
if e.RevIncludedObservationResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded observations not requested")
} else {
observations = *e.RevIncludedObservationResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedMedicationAdministrationResourcesReferencingEncounter() (medicationAdministrations []MedicationAdministration, err error) {
if e.RevIncludedMedicationAdministrationResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded medicationAdministrations not requested")
} else {
medicationAdministrations = *e.RevIncludedMedicationAdministrationResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedCommunicationRequestResourcesReferencingEncounter() (communicationRequests []CommunicationRequest, err error) {
if e.RevIncludedCommunicationRequestResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded communicationRequests not requested")
} else {
communicationRequests = *e.RevIncludedCommunicationRequestResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedRiskAssessmentResourcesReferencingEncounter() (riskAssessments []RiskAssessment, err error) {
if e.RevIncludedRiskAssessmentResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded riskAssessments not requested")
} else {
riskAssessments = *e.RevIncludedRiskAssessmentResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedBasicResourcesReferencingSubject() (basics []Basic, err error) {
if e.RevIncludedBasicResourcesReferencingSubject == nil {
err = errors.New("RevIncluded basics not requested")
} else {
basics = *e.RevIncludedBasicResourcesReferencingSubject
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDiagnosticReportResourcesReferencingEncounter() (diagnosticReports []DiagnosticReport, err error) {
if e.RevIncludedDiagnosticReportResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded diagnosticReports not requested")
} else {
diagnosticReports = *e.RevIncludedDiagnosticReportResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedNutritionOrderResourcesReferencingEncounter() (nutritionOrders []NutritionOrder, err error) {
if e.RevIncludedNutritionOrderResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded nutritionOrders not requested")
} else {
nutritionOrders = *e.RevIncludedNutritionOrderResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedEncounterResourcesReferencingPartof() (encounters []Encounter, err error) {
if e.RevIncludedEncounterResourcesReferencingPartof == nil {
err = errors.New("RevIncluded encounters not requested")
} else {
encounters = *e.RevIncludedEncounterResourcesReferencingPartof
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedAuditEventResourcesReferencingReference() (auditEvents []AuditEvent, err error) {
if e.RevIncludedAuditEventResourcesReferencingReference == nil {
err = errors.New("RevIncluded auditEvents not requested")
} else {
auditEvents = *e.RevIncludedAuditEventResourcesReferencingReference
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedMedicationOrderResourcesReferencingEncounter() (medicationOrders []MedicationOrder, err error) {
if e.RevIncludedMedicationOrderResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded medicationOrders not requested")
} else {
medicationOrders = *e.RevIncludedMedicationOrderResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedCommunicationResourcesReferencingEncounter() (communications []Communication, err error) {
if e.RevIncludedCommunicationResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded communications not requested")
} else {
communications = *e.RevIncludedCommunicationResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedConditionResourcesReferencingEncounter() (conditions []Condition, err error) {
if e.RevIncludedConditionResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded conditions not requested")
} else {
conditions = *e.RevIncludedConditionResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedCompositionResourcesReferencingSubject() (compositions []Composition, err error) {
if e.RevIncludedCompositionResourcesReferencingSubject == nil {
err = errors.New("RevIncluded compositions not requested")
} else {
compositions = *e.RevIncludedCompositionResourcesReferencingSubject
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedCompositionResourcesReferencingEncounter() (compositions []Composition, err error) {
if e.RevIncludedCompositionResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded compositions not requested")
} else {
compositions = *e.RevIncludedCompositionResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedCompositionResourcesReferencingEntry() (compositions []Composition, err error) {
if e.RevIncludedCompositionResourcesReferencingEntry == nil {
err = errors.New("RevIncluded compositions not requested")
} else {
compositions = *e.RevIncludedCompositionResourcesReferencingEntry
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDetectedIssueResourcesReferencingImplicated() (detectedIssues []DetectedIssue, err error) {
if e.RevIncludedDetectedIssueResourcesReferencingImplicated == nil {
err = errors.New("RevIncluded detectedIssues not requested")
} else {
detectedIssues = *e.RevIncludedDetectedIssueResourcesReferencingImplicated
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedDiagnosticOrderResourcesReferencingEncounter() (diagnosticOrders []DiagnosticOrder, err error) {
if e.RevIncludedDiagnosticOrderResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded diagnosticOrders not requested")
} else {
diagnosticOrders = *e.RevIncludedDiagnosticOrderResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedOrderResponseResourcesReferencingFulfillment() (orderResponses []OrderResponse, err error) {
if e.RevIncludedOrderResponseResourcesReferencingFulfillment == nil {
err = errors.New("RevIncluded orderResponses not requested")
} else {
orderResponses = *e.RevIncludedOrderResponseResourcesReferencingFulfillment
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedQuestionnaireResponseResourcesReferencingSubject() (questionnaireResponses []QuestionnaireResponse, err error) {
if e.RevIncludedQuestionnaireResponseResourcesReferencingSubject == nil {
err = errors.New("RevIncluded questionnaireResponses not requested")
} else {
questionnaireResponses = *e.RevIncludedQuestionnaireResponseResourcesReferencingSubject
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedQuestionnaireResponseResourcesReferencingEncounter() (questionnaireResponses []QuestionnaireResponse, err error) {
if e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter == nil {
err = errors.New("RevIncluded questionnaireResponses not requested")
} else {
questionnaireResponses = *e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedProcessResponseResourcesReferencingRequest() (processResponses []ProcessResponse, err error) {
if e.RevIncludedProcessResponseResourcesReferencingRequest == nil {
err = errors.New("RevIncluded processResponses not requested")
} else {
processResponses = *e.RevIncludedProcessResponseResourcesReferencingRequest
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedClinicalImpressionResourcesReferencingTrigger() (clinicalImpressions []ClinicalImpression, err error) {
if e.RevIncludedClinicalImpressionResourcesReferencingTrigger == nil {
err = errors.New("RevIncluded clinicalImpressions not requested")
} else {
clinicalImpressions = *e.RevIncludedClinicalImpressionResourcesReferencingTrigger
}
return
}
func (e *EncounterPlusRelatedResources) GetRevIncludedMessageHeaderResourcesReferencingData() (messageHeaders []MessageHeader, err error) {
if e.RevIncludedMessageHeaderResourcesReferencingData == nil {
err = errors.New("RevIncluded messageHeaders not requested")
} else {
messageHeaders = *e.RevIncludedMessageHeaderResourcesReferencingData
}
return
}
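// GetIncludedResources collects every included resource into a single map
// keyed by resource Id. A copy of each element is taken inside the loop so
// that the stored pointer refers to that copy rather than to the slice element.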
func (e *EncounterPlusRelatedResources) GetIncludedResources() map[string]interface{} {
resourceMap := make(map[string]interface{})
if e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare != nil {
for idx := range *e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare {
rsc := (*e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedReferralRequestResourcesReferencedByIncomingreferral != nil {
for idx := range *e.IncludedReferralRequestResourcesReferencedByIncomingreferral {
rsc := (*e.IncludedReferralRequestResourcesReferencedByIncomingreferral)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPractitionerResourcesReferencedByPractitioner != nil {
for idx := range *e.IncludedPractitionerResourcesReferencedByPractitioner {
rsc := (*e.IncludedPractitionerResourcesReferencedByPractitioner)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedAppointmentResourcesReferencedByAppointment != nil {
for idx := range *e.IncludedAppointmentResourcesReferencedByAppointment {
rsc := (*e.IncludedAppointmentResourcesReferencedByAppointment)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedEncounterResourcesReferencedByPartof != nil {
for idx := range *e.IncludedEncounterResourcesReferencedByPartof {
rsc := (*e.IncludedEncounterResourcesReferencedByPartof)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedProcedureResourcesReferencedByProcedure != nil {
for idx := range *e.IncludedProcedureResourcesReferencedByProcedure {
rsc := (*e.IncludedProcedureResourcesReferencedByProcedure)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPractitionerResourcesReferencedByParticipant != nil {
for idx := range *e.IncludedPractitionerResourcesReferencedByParticipant {
rsc := (*e.IncludedPractitionerResourcesReferencedByParticipant)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedRelatedPersonResourcesReferencedByParticipant != nil {
for idx := range *e.IncludedRelatedPersonResourcesReferencedByParticipant {
rsc := (*e.IncludedRelatedPersonResourcesReferencedByParticipant)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedConditionResourcesReferencedByCondition != nil {
for idx := range *e.IncludedConditionResourcesReferencedByCondition {
rsc := (*e.IncludedConditionResourcesReferencedByCondition)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPatientResourcesReferencedByPatient != nil {
for idx := range *e.IncludedPatientResourcesReferencedByPatient {
rsc := (*e.IncludedPatientResourcesReferencedByPatient)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedLocationResourcesReferencedByLocation != nil {
for idx := range *e.IncludedLocationResourcesReferencedByLocation {
rsc := (*e.IncludedLocationResourcesReferencedByLocation)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedConditionResourcesReferencedByIndication != nil {
for idx := range *e.IncludedConditionResourcesReferencedByIndication {
rsc := (*e.IncludedConditionResourcesReferencedByIndication)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedProcedureResourcesReferencedByIndication != nil {
for idx := range *e.IncludedProcedureResourcesReferencedByIndication {
rsc := (*e.IncludedProcedureResourcesReferencedByIndication)[idx]
resourceMap[rsc.Id] = &rsc
}
}
return resourceMap
}
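// GetRevIncludedResources collects every reverse-included resource into a
// single map keyed by resource Id.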
func (e *EncounterPlusRelatedResources) GetRevIncludedResources() map[string]interface{} {
resourceMap := make(map[string]interface{})
if e.RevIncludedProvenanceResourcesReferencingTarget != nil {
for idx := range *e.RevIncludedProvenanceResourcesReferencingTarget {
rsc := (*e.RevIncludedProvenanceResourcesReferencingTarget)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentManifestResourcesReferencingContentref != nil {
for idx := range *e.RevIncludedDocumentManifestResourcesReferencingContentref {
rsc := (*e.RevIncludedDocumentManifestResourcesReferencingContentref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentManifestResourcesReferencingRelatedref != nil {
for idx := range *e.RevIncludedDocumentManifestResourcesReferencingRelatedref {
rsc := (*e.RevIncludedDocumentManifestResourcesReferencingRelatedref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcedureResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedProcedureResourcesReferencingEncounter {
rsc := (*e.RevIncludedProcedureResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedListResourcesReferencingItem != nil {
for idx := range *e.RevIncludedListResourcesReferencingItem {
rsc := (*e.RevIncludedListResourcesReferencingItem)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedListResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedListResourcesReferencingEncounter {
rsc := (*e.RevIncludedListResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}<|fim▁hole|> }
if e.RevIncludedDocumentReferenceResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDocumentReferenceResourcesReferencingEncounter {
rsc := (*e.RevIncludedDocumentReferenceResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentReferenceResourcesReferencingRelatedref != nil {
for idx := range *e.RevIncludedDocumentReferenceResourcesReferencingRelatedref {
rsc := (*e.RevIncludedDocumentReferenceResourcesReferencingRelatedref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedOrderResourcesReferencingDetail != nil {
for idx := range *e.RevIncludedOrderResourcesReferencingDetail {
rsc := (*e.RevIncludedOrderResourcesReferencingDetail)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedVisionPrescriptionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedVisionPrescriptionResourcesReferencingEncounter {
rsc := (*e.RevIncludedVisionPrescriptionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcedureRequestResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedProcedureRequestResourcesReferencingEncounter {
rsc := (*e.RevIncludedProcedureRequestResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedFlagResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedFlagResourcesReferencingEncounter {
rsc := (*e.RevIncludedFlagResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedObservationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedObservationResourcesReferencingEncounter {
rsc := (*e.RevIncludedObservationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMedicationAdministrationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedMedicationAdministrationResourcesReferencingEncounter {
rsc := (*e.RevIncludedMedicationAdministrationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCommunicationRequestResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCommunicationRequestResourcesReferencingEncounter {
rsc := (*e.RevIncludedCommunicationRequestResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedRiskAssessmentResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedRiskAssessmentResourcesReferencingEncounter {
rsc := (*e.RevIncludedRiskAssessmentResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedBasicResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedBasicResourcesReferencingSubject {
rsc := (*e.RevIncludedBasicResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDiagnosticReportResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDiagnosticReportResourcesReferencingEncounter {
rsc := (*e.RevIncludedDiagnosticReportResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedNutritionOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedNutritionOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedNutritionOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedEncounterResourcesReferencingPartof != nil {
for idx := range *e.RevIncludedEncounterResourcesReferencingPartof {
rsc := (*e.RevIncludedEncounterResourcesReferencingPartof)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedAuditEventResourcesReferencingReference != nil {
for idx := range *e.RevIncludedAuditEventResourcesReferencingReference {
rsc := (*e.RevIncludedAuditEventResourcesReferencingReference)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMedicationOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedMedicationOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedMedicationOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCommunicationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCommunicationResourcesReferencingEncounter {
rsc := (*e.RevIncludedCommunicationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedConditionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedConditionResourcesReferencingEncounter {
rsc := (*e.RevIncludedConditionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingSubject {
rsc := (*e.RevIncludedCompositionResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingEncounter {
rsc := (*e.RevIncludedCompositionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingEntry != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingEntry {
rsc := (*e.RevIncludedCompositionResourcesReferencingEntry)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDetectedIssueResourcesReferencingImplicated != nil {
for idx := range *e.RevIncludedDetectedIssueResourcesReferencingImplicated {
rsc := (*e.RevIncludedDetectedIssueResourcesReferencingImplicated)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDiagnosticOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDiagnosticOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedDiagnosticOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedOrderResponseResourcesReferencingFulfillment != nil {
for idx := range *e.RevIncludedOrderResponseResourcesReferencingFulfillment {
rsc := (*e.RevIncludedOrderResponseResourcesReferencingFulfillment)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedQuestionnaireResponseResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedQuestionnaireResponseResourcesReferencingSubject {
rsc := (*e.RevIncludedQuestionnaireResponseResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter {
rsc := (*e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcessResponseResourcesReferencingRequest != nil {
for idx := range *e.RevIncludedProcessResponseResourcesReferencingRequest {
rsc := (*e.RevIncludedProcessResponseResourcesReferencingRequest)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedClinicalImpressionResourcesReferencingTrigger != nil {
for idx := range *e.RevIncludedClinicalImpressionResourcesReferencingTrigger {
rsc := (*e.RevIncludedClinicalImpressionResourcesReferencingTrigger)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMessageHeaderResourcesReferencingData != nil {
for idx := range *e.RevIncludedMessageHeaderResourcesReferencingData {
rsc := (*e.RevIncludedMessageHeaderResourcesReferencingData)[idx]
resourceMap[rsc.Id] = &rsc
}
}
return resourceMap
}
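// GetIncludedAndRevIncludedResources merges the included and reverse-included
// resources into one map keyed by resource Id.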
func (e *EncounterPlusRelatedResources) GetIncludedAndRevIncludedResources() map[string]interface{} {
resourceMap := make(map[string]interface{})
if e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare != nil {
for idx := range *e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare {
rsc := (*e.IncludedEpisodeOfCareResourcesReferencedByEpisodeofcare)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedReferralRequestResourcesReferencedByIncomingreferral != nil {
for idx := range *e.IncludedReferralRequestResourcesReferencedByIncomingreferral {
rsc := (*e.IncludedReferralRequestResourcesReferencedByIncomingreferral)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPractitionerResourcesReferencedByPractitioner != nil {
for idx := range *e.IncludedPractitionerResourcesReferencedByPractitioner {
rsc := (*e.IncludedPractitionerResourcesReferencedByPractitioner)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedAppointmentResourcesReferencedByAppointment != nil {
for idx := range *e.IncludedAppointmentResourcesReferencedByAppointment {
rsc := (*e.IncludedAppointmentResourcesReferencedByAppointment)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedEncounterResourcesReferencedByPartof != nil {
for idx := range *e.IncludedEncounterResourcesReferencedByPartof {
rsc := (*e.IncludedEncounterResourcesReferencedByPartof)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedProcedureResourcesReferencedByProcedure != nil {
for idx := range *e.IncludedProcedureResourcesReferencedByProcedure {
rsc := (*e.IncludedProcedureResourcesReferencedByProcedure)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPractitionerResourcesReferencedByParticipant != nil {
for idx := range *e.IncludedPractitionerResourcesReferencedByParticipant {
rsc := (*e.IncludedPractitionerResourcesReferencedByParticipant)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedRelatedPersonResourcesReferencedByParticipant != nil {
for idx := range *e.IncludedRelatedPersonResourcesReferencedByParticipant {
rsc := (*e.IncludedRelatedPersonResourcesReferencedByParticipant)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedConditionResourcesReferencedByCondition != nil {
for idx := range *e.IncludedConditionResourcesReferencedByCondition {
rsc := (*e.IncludedConditionResourcesReferencedByCondition)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedPatientResourcesReferencedByPatient != nil {
for idx := range *e.IncludedPatientResourcesReferencedByPatient {
rsc := (*e.IncludedPatientResourcesReferencedByPatient)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedLocationResourcesReferencedByLocation != nil {
for idx := range *e.IncludedLocationResourcesReferencedByLocation {
rsc := (*e.IncludedLocationResourcesReferencedByLocation)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedConditionResourcesReferencedByIndication != nil {
for idx := range *e.IncludedConditionResourcesReferencedByIndication {
rsc := (*e.IncludedConditionResourcesReferencedByIndication)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.IncludedProcedureResourcesReferencedByIndication != nil {
for idx := range *e.IncludedProcedureResourcesReferencedByIndication {
rsc := (*e.IncludedProcedureResourcesReferencedByIndication)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProvenanceResourcesReferencingTarget != nil {
for idx := range *e.RevIncludedProvenanceResourcesReferencingTarget {
rsc := (*e.RevIncludedProvenanceResourcesReferencingTarget)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentManifestResourcesReferencingContentref != nil {
for idx := range *e.RevIncludedDocumentManifestResourcesReferencingContentref {
rsc := (*e.RevIncludedDocumentManifestResourcesReferencingContentref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentManifestResourcesReferencingRelatedref != nil {
for idx := range *e.RevIncludedDocumentManifestResourcesReferencingRelatedref {
rsc := (*e.RevIncludedDocumentManifestResourcesReferencingRelatedref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcedureResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedProcedureResourcesReferencingEncounter {
rsc := (*e.RevIncludedProcedureResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedListResourcesReferencingItem != nil {
for idx := range *e.RevIncludedListResourcesReferencingItem {
rsc := (*e.RevIncludedListResourcesReferencingItem)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedListResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedListResourcesReferencingEncounter {
rsc := (*e.RevIncludedListResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentReferenceResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDocumentReferenceResourcesReferencingEncounter {
rsc := (*e.RevIncludedDocumentReferenceResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDocumentReferenceResourcesReferencingRelatedref != nil {
for idx := range *e.RevIncludedDocumentReferenceResourcesReferencingRelatedref {
rsc := (*e.RevIncludedDocumentReferenceResourcesReferencingRelatedref)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedOrderResourcesReferencingDetail != nil {
for idx := range *e.RevIncludedOrderResourcesReferencingDetail {
rsc := (*e.RevIncludedOrderResourcesReferencingDetail)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedVisionPrescriptionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedVisionPrescriptionResourcesReferencingEncounter {
rsc := (*e.RevIncludedVisionPrescriptionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcedureRequestResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedProcedureRequestResourcesReferencingEncounter {
rsc := (*e.RevIncludedProcedureRequestResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedFlagResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedFlagResourcesReferencingEncounter {
rsc := (*e.RevIncludedFlagResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedObservationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedObservationResourcesReferencingEncounter {
rsc := (*e.RevIncludedObservationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMedicationAdministrationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedMedicationAdministrationResourcesReferencingEncounter {
rsc := (*e.RevIncludedMedicationAdministrationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCommunicationRequestResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCommunicationRequestResourcesReferencingEncounter {
rsc := (*e.RevIncludedCommunicationRequestResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedRiskAssessmentResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedRiskAssessmentResourcesReferencingEncounter {
rsc := (*e.RevIncludedRiskAssessmentResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedBasicResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedBasicResourcesReferencingSubject {
rsc := (*e.RevIncludedBasicResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDiagnosticReportResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDiagnosticReportResourcesReferencingEncounter {
rsc := (*e.RevIncludedDiagnosticReportResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedNutritionOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedNutritionOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedNutritionOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedEncounterResourcesReferencingPartof != nil {
for idx := range *e.RevIncludedEncounterResourcesReferencingPartof {
rsc := (*e.RevIncludedEncounterResourcesReferencingPartof)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedAuditEventResourcesReferencingReference != nil {
for idx := range *e.RevIncludedAuditEventResourcesReferencingReference {
rsc := (*e.RevIncludedAuditEventResourcesReferencingReference)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMedicationOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedMedicationOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedMedicationOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCommunicationResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCommunicationResourcesReferencingEncounter {
rsc := (*e.RevIncludedCommunicationResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedConditionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedConditionResourcesReferencingEncounter {
rsc := (*e.RevIncludedConditionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingSubject {
rsc := (*e.RevIncludedCompositionResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingEncounter {
rsc := (*e.RevIncludedCompositionResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedCompositionResourcesReferencingEntry != nil {
for idx := range *e.RevIncludedCompositionResourcesReferencingEntry {
rsc := (*e.RevIncludedCompositionResourcesReferencingEntry)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDetectedIssueResourcesReferencingImplicated != nil {
for idx := range *e.RevIncludedDetectedIssueResourcesReferencingImplicated {
rsc := (*e.RevIncludedDetectedIssueResourcesReferencingImplicated)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedDiagnosticOrderResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedDiagnosticOrderResourcesReferencingEncounter {
rsc := (*e.RevIncludedDiagnosticOrderResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedOrderResponseResourcesReferencingFulfillment != nil {
for idx := range *e.RevIncludedOrderResponseResourcesReferencingFulfillment {
rsc := (*e.RevIncludedOrderResponseResourcesReferencingFulfillment)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedQuestionnaireResponseResourcesReferencingSubject != nil {
for idx := range *e.RevIncludedQuestionnaireResponseResourcesReferencingSubject {
rsc := (*e.RevIncludedQuestionnaireResponseResourcesReferencingSubject)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter != nil {
for idx := range *e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter {
rsc := (*e.RevIncludedQuestionnaireResponseResourcesReferencingEncounter)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedProcessResponseResourcesReferencingRequest != nil {
for idx := range *e.RevIncludedProcessResponseResourcesReferencingRequest {
rsc := (*e.RevIncludedProcessResponseResourcesReferencingRequest)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedClinicalImpressionResourcesReferencingTrigger != nil {
for idx := range *e.RevIncludedClinicalImpressionResourcesReferencingTrigger {
rsc := (*e.RevIncludedClinicalImpressionResourcesReferencingTrigger)[idx]
resourceMap[rsc.Id] = &rsc
}
}
if e.RevIncludedMessageHeaderResourcesReferencingData != nil {
for idx := range *e.RevIncludedMessageHeaderResourcesReferencingData {
rsc := (*e.RevIncludedMessageHeaderResourcesReferencingData)[idx]
resourceMap[rsc.Id] = &rsc
}
}
return resourceMap
}<|fim▁end|> | |
<|file_name|>Music.java<|end_file_name|><|fim▁begin|>package adts;
/**
* Interface that represents any music item. MusicSymbol and MusicPart extend
* this. Thus, a Music could be a MusicPiece, a Voice, a Chord, a Lyric, a
 * Pitch, or a Rest. The objects are immutable. The equals, toString, and
 * hashCode methods work recursively and are implemented individually by each
 * class extending Music. Read their documentation for full specs.
*
**/
/*
* Representation Music = MusicPiece(signature: Signature, voices: List<Voice>)
* + Measure(notes: List<MusicSymbol>, lyrics: Lyric) + Voice(name: String,
 * measures: List<Measure>) + Chord(notes: List<Pitch>) + Lyric(syllables:
* List<String>) + Pitch(value: string, octave: int, length: int) + Rest(length:
* int)
*/
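/*
 * Illustrative composition (a sketch only; the concrete constructors live in
 * the implementing classes and their exact signatures may differ):
 *   Music note  = new Pitch("C", 0, 2);   // a single note
 *   Music rest  = new Rest(2);            // silence of the same length
 *   Music chord = new Chord(Arrays.asList(new Pitch("C", 0, 2),
 *                                         new Pitch("E", 0, 2)));
 */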
public interface Music {
/**
* Calculates the required number of ticks per beat, so that each note can
* be represented as an integer number of ticks.
*
* @return integer representing number of ticks per beat.
*/
public int calculateTicksPerBeat();
/**
 * Tests the equality of one music to another, such that two expressions
* with equal attributes (observationally indistinguishable) are considered
<|fim▁hole|> *
* @param _that
* music to compare to
* @return whether or not the two musics are equal
*/
@Override
public boolean equals(Object _that);
/**
* Returns the string representation of the music
*
 * @return the music as a string
*/
@Override
public String toString();
/**
 * Calculates the hashcode for this music. Hashcodes for two equal musics
 * will be identical.
*
* @return the hashcode for the music
*/
@Override
public int hashCode();
}<|fim▁end|> | * equal
|
<|file_name|>network_interface_attachment.go<|end_file_name|><|fim▁begin|>package ec2
import (
"github.com/jagregory/cfval/constraints"
. "github.com/jagregory/cfval/schema"
)
// see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-network-interface-attachment.html<|fim▁hole|> AwsType: "AWS::EC2::NetworkInterfaceAttachment",
// Name
ReturnValue: Schema{
Type: ValueString,
},
Properties: Properties{
"DeleteOnTermination": Schema{
Type: ValueBool,
Default: true,
},
"DeviceIndex": Schema{
Type: ValueString,
Required: constraints.Always,
},
"InstanceId": Schema{
Type: InstanceID,
Required: constraints.Always,
},
"NetworkInterfaceId": Schema{
Type: NetworkInterfaceID,
Required: constraints.Always,
},
},
}<|fim▁end|> | var NetworkInterfaceAttachment = Resource{ |
<|file_name|>streamdf.py<|end_file_name|><|fim▁begin|>#The DF of a tidal stream
import copy
import numpy
import multiprocessing
import scipy
from scipy import special, interpolate, integrate
# logsumexp has moved between scipy versions; try the newest location first
try:
    from scipy.special import logsumexp
except ImportError: #pragma: no cover
    try:
        from scipy.misc import logsumexp
    except ImportError:
        from scipy.maxentropy import logsumexp
from galpy.orbit import Orbit
from galpy.util import bovy_coords, fast_cholesky_invert, \
bovy_conversion, multi, bovy_plot, stable_cho_factor, bovy_ars
import warnings
from galpy.util import galpyWarning
_INTERPDURINGSETUP= True
_USEINTERP= True
_USESIMPLE= True
_labelDict= {'x': r'$X$',
'y': r'$Y$',
'z': r'$Z$',
'r': r'$R$',
'phi': r'$\phi$',
'vx':r'$V_X$',
'vy':r'$V_Y$',
'vz':r'$V_Z$',
'vr':r'$V_R$',
'vt':r'$V_T$',
'll':r'$\mathrm{Galactic\ longitude\, (deg)}$',
'bb':r'$\mathrm{Galactic\ latitude\, (deg)}$',
'dist':r'$\mathrm{distance\, (kpc)}$',
'pmll':r'$\mu_l\,(\mathrm{mas\,yr}^{-1})$',
'pmbb':r'$\mu_b\,(\mathrm{mas\,yr}^{-1})$',
'vlos':r'$V_{\mathrm{los}}\,(\mathrm{km\,s}^{-1})$'}
class streamdf(object):
"""The DF of a tidal stream"""
def __init__(self,sigv,progenitor=None,pot=None,aA=None,
tdisrupt=None,sigMeanOffset=6.,leading=True,
sigangle=None,
deltaAngleTrack=None,nTrackChunks=None,nTrackIterations=None,
progIsTrack=False,
Vnorm=220.,Rnorm=8.,
R0=8.,Zsun=0.025,vsun=[-11.1,8.*30.24,7.25],
multi=None,interpTrack=_INTERPDURINGSETUP,
useInterp=_USEINTERP,nosetup=False):
"""
NAME:
__init__
PURPOSE:
Initialize the DF of a tidal stream
INPUT:
sigv - radial velocity dispersion of the progenitor
tdisrupt= (5 Gyr) time since start of disruption (natural units)
leading= (True) if True, model the leading part of the stream
if False, model the trailing part
progenitor= progenitor orbit as Orbit instance (will be re-integrated, so don't bother integrating the orbit before)
progIsTrack= (False) if True, then the progenitor (x,v) is actually the (x,v) of the stream track at zero angle separation; useful when initializing with an orbit fit; the progenitor's position will be calculated
pot= Potential instance or list thereof
aA= actionAngle instance used to convert (x,v) to actions
sigMeanOffset= (6.) offset between the mean of the frequencies
and the progenitor, in units of the largest
eigenvalue of the frequency covariance matrix
(along the largest eigenvector), should be positive;
to model the trailing part, set leading=False
sigangle= (sigv/122/[1km/s]=1.8sigv in natural coordinates)
estimate of the angle spread of the debris initially
deltaAngleTrack= (None) angle to estimate the stream track over (rad)
nTrackChunks= (floor(deltaAngleTrack/0.15)+1) number of chunks to divide the progenitor track in
nTrackIterations= Number of iterations to perform when establishing the track; each iteration starts from a previous approximation to the track in (x,v) and calculates a new track based on the deviation between the previous track and the desired track in action-angle coordinates; if not set, an appropriate value is determined based on the magnitude of the misalignment between stream and orbit, with larger numbers of iterations for larger misalignments
interpTrack= (might change), interpolate the stream track while
setting up the instance (can be done by hand by
calling self._interpolate_stream_track() and
self._interpolate_stream_track_aA())
useInterp= (might change), use interpolation by default when
calculating approximated frequencies and angles
nosetup= (False) if True, don't setup the stream track and anything
else that is expensive
multi= (None) if set, use multi-processing
Coordinate transformation inputs:
Vnorm= (220) circular velocity to normalize velocities with
Rnorm= (8) Galactocentric radius to normalize positions with
R0= (8) Galactocentric radius of the Sun (kpc)
Zsun= (0.025) Sun's height above the plane (kpc)
vsun= ([-11.1,241.92,7.25]) Sun's motion in cylindrical coordinates (vR positive away from center)
OUTPUT:
object
HISTORY:
2013-09-16 - Started - Bovy (IAS)
2013-11-25 - Started over - Bovy (IAS)
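EXAMPLE:
A minimal sketch (the potential, action-angle machinery, and the
progenitor phase-space point below are illustrative choices, not
values prescribed by this module):
>>> from galpy.potential import LogarithmicHaloPotential
>>> from galpy.actionAngle import actionAngleIsochroneApprox
>>> from galpy.orbit import Orbit
>>> from galpy.util import bovy_conversion
>>> lp= LogarithmicHaloPotential(normalize=1.,q=0.9)
>>> aAI= actionAngleIsochroneApprox(pot=lp,b=0.8)
>>> prog= Orbit([1.56,0.35,-1.15,0.88,-0.48,0.12])
>>> sdf= streamdf(0.365/220.,progenitor=prog,pot=lp,aA=aAI,
...               leading=True,nTrackChunks=11,
...               tdisrupt=4.5/bovy_conversion.time_in_Gyr(220.,8.))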
"""
self._sigv= sigv
if tdisrupt is None:
self._tdisrupt= 5./bovy_conversion.time_in_Gyr(Vnorm,Rnorm)
else:
self._tdisrupt= tdisrupt
self._sigMeanOffset= sigMeanOffset
if pot is None: #pragma: no cover
raise IOError("pot= must be set")
self._pot= pot
self._aA= aA
if not self._aA._pot == self._pot:
raise IOError("Potential in aA does not appear to be the same as given potential pot")
if (multi is True): #if set to True, use as many processes as there are CPUs
self._multi= multiprocessing.cpu_count()
else:
self._multi= multi
self._progenitor_setup(progenitor,leading)
self._offset_setup(sigangle,leading,deltaAngleTrack)
# if progIsTrack, calculate the progenitor that gives a track that is approximately the given orbit
if progIsTrack:
self._setup_progIsTrack()
self._setup_coord_transform(Rnorm,Vnorm,R0,Zsun,vsun,progenitor)
#Determine the stream track
if not nosetup:
self._determine_nTrackIterations(nTrackIterations)
self._determine_stream_track(nTrackChunks)
self._useInterp= useInterp
if interpTrack or self._useInterp:
self._interpolate_stream_track()
self._interpolate_stream_track_aA()
self.calc_stream_lb()
self._determine_stream_spread()
return None
def _progenitor_setup(self,progenitor,leading):
"""The part of the setup relating to the progenitor's orbit"""
#Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor
self._progenitor= progenitor() #call to get new Orbit
# Make sure we do not use physical coordinates
self._progenitor.turn_physical_off()
acfs= self._aA.actionsFreqsAngles(self._progenitor,maxn=3,
_firstFlip=(not leading))
self._progenitor_jr= acfs[0][0]
self._progenitor_lz= acfs[1][0]
self._progenitor_jz= acfs[2][0]
self._progenitor_Omegar= acfs[3]
self._progenitor_Omegaphi= acfs[4]
self._progenitor_Omegaz= acfs[5]
self._progenitor_Omega= numpy.array([acfs[3],acfs[4],acfs[5]]).reshape(3)
self._progenitor_angler= acfs[6]
self._progenitor_anglephi= acfs[7]
self._progenitor_anglez= acfs[8]
self._progenitor_angle= numpy.array([acfs[6],acfs[7],acfs[8]]).reshape(3)
#Calculate dO/dJ Jacobian at the progenitor
self._dOdJp= calcaAJac(self._progenitor._orb.vxvv,
self._aA,dxv=None,dOdJ=True,
_initacfs=acfs)
self._dOdJpEig= numpy.linalg.eig(self._dOdJp)
return None
def _offset_setup(self,sigangle,leading,deltaAngleTrack):
"""The part of the setup related to calculating the stream/progenitor offset"""
#From the progenitor orbit, determine the sigmas in J and angle
self._sigjr= (self._progenitor.rap()-self._progenitor.rperi())/numpy.pi*self._sigv
self._siglz= self._progenitor.rperi()*self._sigv
self._sigjz= 2.*self._progenitor.zmax()/numpy.pi*self._sigv
#Estimate the frequency covariance matrix from a diagonal J matrix x dOdJ
self._sigjmatrix= numpy.diag([self._sigjr**2.,
self._siglz**2.,
self._sigjz**2.])
self._sigomatrix= numpy.dot(self._dOdJp,
numpy.dot(self._sigjmatrix,self._dOdJp.T))
#Estimate angle spread as the ratio of the largest to the middle eigenvalue
self._sigomatrixEig= numpy.linalg.eig(self._sigomatrix)
self._sigomatrixEigsortIndx= numpy.argsort(self._sigomatrixEig[0])
self._sortedSigOEig= sorted(self._sigomatrixEig[0])
if sigangle is None:
self._sigangle= self._sigv*1.8
else:
self._sigangle= sigangle
self._sigangle2= self._sigangle**2.
self._lnsigangle= numpy.log(self._sigangle)
#Estimate the frequency mean as lying along the direction of the largest eigenvalue
self._dsigomeanProgDirection= self._sigomatrixEig[1][:,numpy.argmax(self._sigomatrixEig[0])]
self._progenitor_Omega_along_dOmega= \
numpy.dot(self._progenitor_Omega,self._dsigomeanProgDirection)
#Make sure we are modeling the correct part of the stream
self._leading= leading
self._sigMeanSign= 1.
if self._leading and self._progenitor_Omega_along_dOmega < 0.:
self._sigMeanSign= -1.
elif not self._leading and self._progenitor_Omega_along_dOmega > 0.:
self._sigMeanSign= -1.
self._progenitor_Omega_along_dOmega*= self._sigMeanSign
self._sigomean= self._progenitor_Omega\
+self._sigMeanOffset*self._sigMeanSign\
*numpy.sqrt(numpy.amax(self._sigomatrixEig[0]))\
*self._dsigomeanProgDirection
#numpy.dot(self._dOdJp,
# numpy.array([self._sigjr,self._siglz,self._sigjz]))
self._dsigomeanProg= self._sigomean-self._progenitor_Omega
self._meandO= self._sigMeanOffset\
*numpy.sqrt(numpy.amax(self._sigomatrixEig[0]))
#Store cholesky of sigomatrix for fast evaluation
self._sigomatrixNorm=\
numpy.sqrt(numpy.sum(self._sigomatrix**2.))
self._sigomatrixinv, self._sigomatrixLogdet= \
fast_cholesky_invert(self._sigomatrix/self._sigomatrixNorm,
tiny=10.**-15.,logdet=True)
self._sigomatrixinv/= self._sigomatrixNorm
deltaAngleTrackLim = (self._sigMeanOffset+4.) * numpy.sqrt(
self._sortedSigOEig[2]) * self._tdisrupt
if (deltaAngleTrack is None):
deltaAngleTrack = deltaAngleTrackLim
else:
if (deltaAngleTrack > deltaAngleTrackLim):
warnings.warn("WARNING: angle range large compared to plausible value.", galpyWarning)
self._deltaAngleTrack= deltaAngleTrack
return None
def _setup_coord_transform(self,Rnorm,Vnorm,R0,Zsun,vsun,progenitor):
#Set the coordinate-transformation parameters; check that these do not conflict with those in the progenitor orbit object; need to use the original, since this objects _progenitor has physical turned off
if progenitor._roSet \
           and (numpy.fabs(Rnorm-progenitor._orb._ro) > 10.**-8. \
or numpy.fabs(R0-progenitor._orb._ro) > 10.**-8.):
warnings.warn("Warning: progenitor's ro does not agree with streamdf's Rnorm and R0; this may have unexpected consequences when projecting into observables", galpyWarning)
if progenitor._voSet \
and numpy.fabs(Vnorm-progenitor._orb._vo) > 10.**-8.:
warnings.warn("Warning: progenitor's vo does not agree with streamdf's Vnorm; this may have unexpected consequences when projecting into observables", galpyWarning)
if (progenitor._roSet or progenitor._voSet) \
and numpy.fabs(Zsun-progenitor._orb._zo) > 10.**-8.:
warnings.warn("Warning: progenitor's zo does not agree with streamdf's Zsun; this may have unexpected consequences when projecting into observables", galpyWarning)
if (progenitor._roSet or progenitor._voSet) \
and numpy.any(numpy.fabs(vsun-numpy.array([0.,Vnorm,0.])\
-progenitor._orb._solarmotion) > 10.**-8.):
warnings.warn("Warning: progenitor's solarmotion does not agree with streamdf's vsun (after accounting for Vnorm); this may have unexpected consequences when projecting into observables", galpyWarning)
self._Vnorm= Vnorm
self._Rnorm= Rnorm
self._R0= R0
self._Zsun= Zsun
self._vsun= vsun
return None
def _setup_progIsTrack(self):
"""If progIsTrack, the progenitor orbit that was passed to the
streamdf initialization is the track at zero angle separation;
this routine computes an actual progenitor position that gives
the desired track given the parameters of the streamdf"""
# We need to flip the sign of the offset, to go to the progenitor
self._sigMeanSign*= -1.
# Use _determine_stream_track_single to calculate the track-progenitor
# offset at zero angle separation
prog_stream_offset=\
_determine_stream_track_single(self._aA,
self._progenitor,
0., #time = 0
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
0.) #angle = 0
# Setup the new progenitor orbit
progenitor= Orbit(prog_stream_offset[3])
# Flip the offset sign again
self._sigMeanSign*= -1.
# Now re-do the previous setup
self._progenitor_setup(progenitor,self._leading)
self._offset_setup(self._sigangle,self._leading,
self._deltaAngleTrack)
return None
def misalignment(self,isotropic=False):
"""
NAME:
misalignment
PURPOSE:
calculate the misalignment between the progenitor's frequency
and the direction along which the stream disrupts
INPUT:
isotropic= (False), if True, return the misalignment assuming an isotropic action distribution
OUTPUT:
misalignment in degree
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
if isotropic:
dODir= self._dOdJpEig[1][:,numpy.argmax(numpy.fabs(self._dOdJpEig[0]))]
else:
dODir= self._dsigomeanProgDirection
out= numpy.arccos(numpy.sum(self._progenitor_Omega*dODir)/numpy.sqrt(numpy.sum(self._progenitor_Omega**2.)))/numpy.pi*180.
if out > 90.: return out-180.
else: return out
def freqEigvalRatio(self,isotropic=False):
"""
NAME:
freqEigvalRatio
PURPOSE:
calculate the ratio between the largest and 2nd-to-largest (in abs)
eigenvalue of sqrt(dO/dJ^T V_J dO/dJ)
(if this is big, a 1D stream will form)
INPUT:
isotropic= (False), if True, return the ratio assuming an isotropic action distribution (i.e., just of dO/dJ)
OUTPUT:
ratio between eigenvalues of |dO / dJ|
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
if isotropic:
sortedEig= sorted(numpy.fabs(self._dOdJpEig[0]))
return sortedEig[2]/sortedEig[1]
else:
return numpy.sqrt(self._sortedSigOEig)[2]\
/numpy.sqrt(self._sortedSigOEig)[1]
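    # Usage sketch for misalignment and freqEigvalRatio above (assuming `sdf` is
    # an already set-up instance of this class; the variable name is
    # hypothetical): these two diagnostics quantify whether a thin, nearly
    # one-dimensional stream is expected to form.
    #
    #   mis= sdf.misalignment()       # angle (deg) between the progenitor frequency
    #                                 # and the disruption direction
    #   ratio= sdf.freqEigvalRatio()  # >> 1 implies a thin stream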
def estimateTdisrupt(self,deltaAngle):
"""
NAME:
estimateTdisrupt
PURPOSE:
estimate the time of disruption
INPUT:
deltaAngle- spread in angle since disruption
OUTPUT:
time in natural units
HISTORY:
2013-11-27 - Written - Bovy (IAS)
"""
return deltaAngle\
/numpy.sqrt(numpy.sum(self._dsigomeanProg**2.))
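    # Usage sketch for estimateTdisrupt above (hypothetical `sdf` instance):
    # given the observed angular length of a stream, this inverts
    # dangle ~ |dOmega_mean| x t to give a rough disruption time in the same
    # natural units as the frequencies.
    #
    #   tdis= sdf.estimateTdisrupt(deltaAngle=0.3)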
############################STREAM TRACK FUNCTIONS#############################
def plotTrack(self,d1='x',d2='z',interp=True,spread=0,simple=_USESIMPLE,
*args,**kwargs):
"""
NAME:
plotTrack
PURPOSE:
plot the stream track
INPUT:
d1= plot this on the X axis ('x','y','z','R','phi','vx','vy','vz','vR','vt','ll','bb','dist','pmll','pmbb','vlos')
d2= plot this on the Y axis (same list as for d1)
interp= (True) if True, use the interpolated stream track
spread= (0) if int > 0, also plot the spread around the track as spread x sigma
scaleToPhysical= (False), if True, plot positions in kpc and velocities in km/s
           simple= (_USESIMPLE), if True, use a simple estimate for the spread in perpendicular angle
bovy_plot.bovy_plot args and kwargs
OUTPUT:
plot to output device
HISTORY:
2013-12-09 - Written - Bovy (IAS)
"""
if not hasattr(self,'_ObsTrackLB') and \
(d1.lower() == 'll' or d1.lower() == 'bb'
or d1.lower() == 'dist' or d1.lower() == 'pmll'
or d1.lower() == 'pmbb' or d1.lower() == 'vlos'
or d2.lower() == 'll' or d2.lower() == 'bb'
or d2.lower() == 'dist' or d2.lower() == 'pmll'
or d2.lower() == 'pmbb' or d2.lower() == 'vlos'):
self.calc_stream_lb()
phys= kwargs.pop('scaleToPhysical',False)
tx= self._parse_track_dim(d1,interp=interp,phys=phys)
ty= self._parse_track_dim(d2,interp=interp,phys=phys)
bovy_plot.bovy_plot(tx,ty,*args,
xlabel=_labelDict[d1.lower()],
ylabel=_labelDict[d2.lower()],
**kwargs)
if spread:
addx, addy= self._parse_track_spread(d1,d2,interp=interp,phys=phys,
simple=simple)
if ('ls' in kwargs and kwargs['ls'] == 'none') \
or ('linestyle' in kwargs \
and kwargs['linestyle'] == 'none'):
kwargs.pop('ls',None)
kwargs.pop('linestyle',None)
spreadls= 'none'
else:
spreadls= '-.'
spreadmarker= kwargs.pop('marker',None)
spreadcolor= kwargs.pop('color',None)
spreadlw= kwargs.pop('lw',1.)
bovy_plot.bovy_plot(tx+spread*addx,ty+spread*addy,ls=spreadls,
marker=spreadmarker,color=spreadcolor,
lw=spreadlw,
overplot=True)
bovy_plot.bovy_plot(tx-spread*addx,ty-spread*addy,ls=spreadls,
marker=spreadmarker,color=spreadcolor,
lw=spreadlw,
overplot=True)
return None
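    # Usage sketch for plotTrack above (hypothetical `sdf` instance): plot the
    # track on the sky with a 2-sigma spread band; Galactic coordinates require
    # the LB track, which plotTrack computes on demand through calc_stream_lb.
    #
    #   sdf.plotTrack(d1='ll',d2='bb',interp=True,spread=2,color='k')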
def plotProgenitor(self,d1='x',d2='z',*args,**kwargs):
"""
NAME:
plotProgenitor
PURPOSE:
plot the progenitor orbit
INPUT:
d1= plot this on the X axis ('x','y','z','R','phi','vx','vy','vz','vR','vt','ll','bb','dist','pmll','pmbb','vlos')
d2= plot this on the Y axis (same list as for d1)
scaleToPhysical= (False), if True, plot positions in kpc and velocities in km/s
bovy_plot.bovy_plot args and kwargs
OUTPUT:
plot to output device
HISTORY:
2013-12-09 - Written - Bovy (IAS)
"""
tts= self._progenitor._orb.t[self._progenitor._orb.t \
< self._trackts[self._nTrackChunks-1]]
obs= [self._R0,0.,self._Zsun]
obs.extend(self._vsun)
phys= kwargs.pop('scaleToPhysical',False)
tx= self._parse_progenitor_dim(d1,tts,ro=self._Rnorm,vo=self._Vnorm,
obs=obs,phys=phys)
ty= self._parse_progenitor_dim(d2,tts,ro=self._Rnorm,vo=self._Vnorm,
obs=obs,phys=phys)
bovy_plot.bovy_plot(tx,ty,*args,
xlabel=_labelDict[d1.lower()],
ylabel=_labelDict[d2.lower()],
**kwargs)
return None
def _parse_track_dim(self,d1,interp=True,phys=False):
"""Parse the dimension to plot the stream track for"""
if interp: interpStr= 'interpolated'
else: interpStr= ''
if d1.lower() == 'x':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,0]
elif d1.lower() == 'y':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,1]
elif d1.lower() == 'z':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,2]
elif d1.lower() == 'r':
tx= self.__dict__['_%sObsTrack' % interpStr][:,0]
elif d1.lower() == 'phi':
tx= self.__dict__['_%sObsTrack' % interpStr][:,5]
elif d1.lower() == 'vx':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,3]
elif d1.lower() == 'vy':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,4]
elif d1.lower() == 'vz':
tx= self.__dict__['_%sObsTrackXY' % interpStr][:,5]
elif d1.lower() == 'vr':
tx= self.__dict__['_%sObsTrack' % interpStr][:,1]
elif d1.lower() == 'vt':
tx= self.__dict__['_%sObsTrack' % interpStr][:,2]
elif d1.lower() == 'll':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,0]
elif d1.lower() == 'bb':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,1]
elif d1.lower() == 'dist':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,2]
elif d1.lower() == 'pmll':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,4]
elif d1.lower() == 'pmbb':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,5]
elif d1.lower() == 'vlos':
tx= self.__dict__['_%sObsTrackLB' % interpStr][:,3]
if phys and (d1.lower() == 'x' or d1.lower() == 'y' \
or d1.lower() == 'z' or d1.lower() == 'r'):
tx= copy.copy(tx)
tx*= self._Rnorm
if phys and (d1.lower() == 'vx' or d1.lower() == 'vy' \
or d1.lower() == 'vz' or d1.lower() == 'vr' \
or d1.lower() == 'vt'):
tx= copy.copy(tx)
tx*= self._Vnorm
return tx
def _parse_progenitor_dim(self,d1,ts,ro=None,vo=None,obs=None,
phys=False):
"""Parse the dimension to plot the progenitor orbit for"""
if d1.lower() == 'x':
tx= self._progenitor.x(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'y':
tx= self._progenitor.y(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'z':
tx= self._progenitor.z(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'r':
tx= self._progenitor.R(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'phi':
tx= self._progenitor.phi(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'vx':
tx= self._progenitor.vx(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'vy':
tx= self._progenitor.vy(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'vz':
tx= self._progenitor.vz(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'vr':
tx= self._progenitor.vR(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'vt':
tx= self._progenitor.vT(ts,ro=ro,vo=vo,obs=obs,use_physical=False)
elif d1.lower() == 'll':
tx= self._progenitor.ll(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'bb':
tx= self._progenitor.bb(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'dist':
tx= self._progenitor.dist(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'pmll':
tx= self._progenitor.pmll(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'pmbb':
tx= self._progenitor.pmbb(ts,ro=ro,vo=vo,obs=obs)
elif d1.lower() == 'vlos':
tx= self._progenitor.vlos(ts,ro=ro,vo=vo,obs=obs)
if phys and (d1.lower() == 'x' or d1.lower() == 'y' \
or d1.lower() == 'z' or d1.lower() == 'r'):
tx= copy.copy(tx)
tx*= self._Rnorm
if phys and (d1.lower() == 'vx' or d1.lower() == 'vy' \
or d1.lower() == 'vz' or d1.lower() == 'vr' \
or d1.lower() == 'vt'):
tx= copy.copy(tx)
tx*= self._Vnorm
return tx
def _parse_track_spread(self,d1,d2,interp=True,phys=False,
simple=_USESIMPLE):
"""Determine the spread around the track"""
if not hasattr(self,'_allErrCovs'):
self._determine_stream_spread(simple=simple)
okaySpreadR= ['r','vr','vt','z','vz','phi']
okaySpreadXY= ['x','y','z','vx','vy','vz']
okaySpreadLB= ['ll','bb','dist','vlos','pmll','pmbb']
#Determine which coordinate system we're in
coord= [False,False,False] #R, XY, LB
if d1.lower() in okaySpreadR and d2.lower() in okaySpreadR:
coord[0]= True
elif d1.lower() in okaySpreadXY and d2.lower() in okaySpreadXY:
coord[1]= True
elif d1.lower() in okaySpreadLB and d2.lower() in okaySpreadLB:
coord[2]= True
else:
raise NotImplementedError("plotting the spread for coordinates from different systems not implemented yet ...")
#Get the right 2D Jacobian
indxDict= {}
indxDict['r']= 0
indxDict['vr']= 1
indxDict['vt']= 2
indxDict['z']= 3
indxDict['vz']= 4
indxDict['phi']= 5
indxDictXY= {}
indxDictXY['x']= 0
indxDictXY['y']= 1
indxDictXY['z']= 2
indxDictXY['vx']= 3
indxDictXY['vy']= 4
indxDictXY['vz']= 5
indxDictLB= {}
indxDictLB['ll']= 0
indxDictLB['bb']= 1
indxDictLB['dist']= 2
indxDictLB['vlos']= 3
indxDictLB['pmll']= 4
indxDictLB['pmbb']= 5
if coord[0]:
relevantCov= self._allErrCovs
relevantDict= indxDict
if phys:#apply scale factors
tcov= copy.copy(relevantCov)
scaleFac= numpy.array([self._Rnorm,self._Vnorm,self._Vnorm,
self._Rnorm,self._Vnorm,1.])
tcov*= numpy.tile(scaleFac,(6,1))
tcov*= numpy.tile(scaleFac,(6,1)).T
relevantCov= tcov
elif coord[1]:
relevantCov= self._allErrCovsXY
relevantDict= indxDictXY
if phys:#apply scale factors
tcov= copy.copy(relevantCov)
scaleFac= numpy.array([self._Rnorm,self._Rnorm,self._Rnorm,
self._Vnorm,self._Vnorm,self._Vnorm])
tcov*= numpy.tile(scaleFac,(6,1))
tcov*= numpy.tile(scaleFac,(6,1)).T
relevantCov= tcov
elif coord[2]:
relevantCov= self._allErrCovsLBUnscaled
relevantDict= indxDictLB
indx0= numpy.array([[relevantDict[d1.lower()],relevantDict[d1.lower()]],
[relevantDict[d2.lower()],relevantDict[d2.lower()]]])
indx1= numpy.array([[relevantDict[d1.lower()],relevantDict[d2.lower()]],
[relevantDict[d1.lower()],relevantDict[d2.lower()]]])
cov= relevantCov[:,indx0,indx1] #cov contains all nTrackChunks covs
if not interp:
out= numpy.empty((self._nTrackChunks,2))
eigDir= numpy.array([1.,0.])
for ii in range(self._nTrackChunks):
covEig= numpy.linalg.eig(cov[ii])
minIndx= numpy.argmin(covEig[0])
minEigvec= covEig[1][:,minIndx] #this is the direction of the transverse spread
if numpy.sum(minEigvec*eigDir) < 0.: minEigvec*= -1. #Keep them pointing in the same direction
out[ii]= minEigvec*numpy.sqrt(covEig[0][minIndx])
eigDir= minEigvec
else:
#We slerp the minor eigenvector and interpolate the eigenvalue
#First store all of the eigenvectors on the track
allEigval= numpy.empty(self._nTrackChunks)
allEigvec= numpy.empty((self._nTrackChunks,2))
eigDir= numpy.array([1.,0.])
for ii in range(self._nTrackChunks):
covEig= numpy.linalg.eig(cov[ii])
minIndx= numpy.argmin(covEig[0])
minEigvec= covEig[1][:,minIndx] #this is the direction of the transverse spread
if numpy.sum(minEigvec*eigDir) < 0.: minEigvec*= -1. #Keep them pointing in the same direction
allEigval[ii]= numpy.sqrt(covEig[0][minIndx])
allEigvec[ii]= minEigvec
eigDir= minEigvec
#Now interpolate where needed
interpEigval=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
allEigval,k=3)
interpolatedEigval= interpEigval(self._interpolatedThetasTrack)
#Interpolate in chunks
interpolatedEigvec= numpy.empty((len(self._interpolatedThetasTrack),
2))
for ii in range(self._nTrackChunks-1):
slerpOmega= numpy.arccos(numpy.sum(allEigvec[ii]*allEigvec[ii+1]))
slerpts= (self._interpolatedThetasTrack-self._thetasTrack[ii])/\
(self._thetasTrack[ii+1]-self._thetasTrack[ii])
slerpIndx= (slerpts >= 0.)*(slerpts <= 1.)
for jj in range(2):
interpolatedEigvec[slerpIndx,jj]=\
(numpy.sin((1-slerpts[slerpIndx])*slerpOmega)*allEigvec[ii,jj]
+numpy.sin(slerpts[slerpIndx]*slerpOmega)*allEigvec[ii+1,jj])/numpy.sin(slerpOmega)
out= numpy.tile(interpolatedEigval.T,(2,1)).T*interpolatedEigvec
if coord[2]: #if LB, undo rescalings that were applied before
out[:,0]*= self._ErrCovsLBScale[relevantDict[d1.lower()]]
out[:,1]*= self._ErrCovsLBScale[relevantDict[d2.lower()]]
return (out[:,0],out[:,1])
def plotCompareTrackAAModel(self,**kwargs):
"""
NAME:
plotCompareTrackAAModel
PURPOSE:
plot the comparison between the underlying model's dOmega_perp vs. dangle_r (line) and the track in (x,v)'s dOmega_perp vs. dangle_r (dots; explicitly calculating the track's action-angle coordinates)
INPUT:
bovy_plot.bovy_plot kwargs
OUTPUT:
plot
HISTORY:
2014-08-27 - Written - Bovy (IAS)
"""
#First calculate the model
model_adiff= (self._ObsTrackAA[:,3:]-self._progenitor_angle)[:,0]\
*self._sigMeanSign
model_operp= numpy.dot(self._ObsTrackAA[:,:3]-self._progenitor_Omega,
self._dsigomeanProgDirection)\
*self._sigMeanSign
#Then calculate the track's frequency-angle coordinates
if self._multi is None:
aatrack= numpy.empty((self._nTrackChunks,6))
for ii in range(self._nTrackChunks):
aatrack[ii]= self._aA.actionsFreqsAngles(Orbit(self._ObsTrack[ii,:]),
maxn=3)[3:]
else:
aatrack= numpy.reshape(\
multi.parallel_map(
(lambda x: self._aA.actionsFreqsAngles(Orbit(self._ObsTrack[x,:]), maxn=3)[3:]),
range(self._nTrackChunks),
numcores=numpy.amin([self._nTrackChunks,
multiprocessing.cpu_count(),
self._multi])),(self._nTrackChunks,6))
track_adiff= (aatrack[:,3:]-self._progenitor_angle)[:,0]\
*self._sigMeanSign
track_operp= numpy.dot(aatrack[:,:3]-self._progenitor_Omega,
self._dsigomeanProgDirection)\
*self._sigMeanSign
overplot= kwargs.pop('overplot',False)
yrange= kwargs.pop('yrange',
[0.,numpy.amax(numpy.hstack((model_operp,track_operp)))*1.1])
xlabel= kwargs.pop('xlabel',r'$\Delta \theta_R$')
ylabel= kwargs.pop('ylabel',r'$\Delta \Omega_\parallel$')
bovy_plot.bovy_plot(model_adiff,model_operp,'k-',overplot=overplot,
xlabel=xlabel,ylabel=ylabel,yrange=yrange,**kwargs)
bovy_plot.bovy_plot(track_adiff,track_operp,'ko',overplot=True,
**kwargs)
return None
def _determine_nTrackIterations(self,nTrackIterations):
"""Determine a good value for nTrackIterations based on the misalignment between stream and orbit; just based on some rough experience for now"""
if not nTrackIterations is None:
self.nTrackIterations= nTrackIterations
return None
if numpy.fabs(self.misalignment()) < 1.:
self.nTrackIterations= 0
elif numpy.fabs(self.misalignment()) >= 1. \
and numpy.fabs(self.misalignment()) < 3.:
self.nTrackIterations= 1
elif numpy.fabs(self.misalignment()) >= 3.:
self.nTrackIterations= 2
return None
def _determine_stream_track(self,nTrackChunks):
"""Determine the track of the stream in real space"""
#Determine how much orbital time is necessary for the progenitor's orbit to cover the stream
if nTrackChunks is None:
#default is floor(self._deltaAngleTrack/0.15)+1
self._nTrackChunks= int(numpy.floor(self._deltaAngleTrack/0.15))+1
else:
self._nTrackChunks= nTrackChunks
dt= self._deltaAngleTrack\
/self._progenitor_Omega_along_dOmega
self._trackts= numpy.linspace(0.,2*dt,2*self._nTrackChunks-1) #to be sure that we cover it
#Instantiate an auxiliaryTrack, which is an Orbit instance at the mean frequency of the stream, and zero angle separation wrt the progenitor; prog_stream_offset is the offset between this track and the progenitor at zero angle
prog_stream_offset=\
_determine_stream_track_single(self._aA,
self._progenitor,
0., #time = 0
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
0.) #angle = 0
auxiliaryTrack= Orbit(prog_stream_offset[3])
if dt < 0.:
            self._trackts= numpy.linspace(0.,-2.*dt,2*self._nTrackChunks-1)
#Flip velocities before integrating
auxiliaryTrack= auxiliaryTrack.flip()
auxiliaryTrack.integrate(self._trackts,self._pot)
if dt < 0.:
#Flip velocities again
auxiliaryTrack._orb.orbit[:,1]= -auxiliaryTrack._orb.orbit[:,1]
auxiliaryTrack._orb.orbit[:,2]= -auxiliaryTrack._orb.orbit[:,2]
auxiliaryTrack._orb.orbit[:,4]= -auxiliaryTrack._orb.orbit[:,4]
#Calculate the actions, frequencies, and angle for this auxiliary orbit
acfs= self._aA.actionsFreqs(auxiliaryTrack(0.),maxn=3)
        auxiliary_Omega= numpy.array([acfs[3],acfs[4],acfs[5]]).reshape(3)
auxiliary_Omega_along_dOmega= \
numpy.dot(auxiliary_Omega,self._dsigomeanProgDirection)
#Now calculate the actions, frequencies, and angles + Jacobian for each chunk
allAcfsTrack= numpy.empty((self._nTrackChunks,9))
alljacsTrack= numpy.empty((self._nTrackChunks,6,6))
allinvjacsTrack= numpy.empty((self._nTrackChunks,6,6))
thetasTrack= numpy.linspace(0.,self._deltaAngleTrack,
self._nTrackChunks)
ObsTrack= numpy.empty((self._nTrackChunks,6))
ObsTrackAA= numpy.empty((self._nTrackChunks,6))
detdOdJps= numpy.empty((self._nTrackChunks))
if self._multi is None:
for ii in range(self._nTrackChunks):
multiOut= _determine_stream_track_single(self._aA,
auxiliaryTrack,
self._trackts[ii]*numpy.fabs(self._progenitor_Omega_along_dOmega/auxiliary_Omega_along_dOmega), #this factor accounts for the difference in frequency between the progenitor and the auxiliary track
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
thetasTrack[ii])
allAcfsTrack[ii,:]= multiOut[0]
alljacsTrack[ii,:,:]= multiOut[1]
allinvjacsTrack[ii,:,:]= multiOut[2]
ObsTrack[ii,:]= multiOut[3]
ObsTrackAA[ii,:]= multiOut[4]
detdOdJps[ii]= multiOut[5]
else:
multiOut= multi.parallel_map(\
(lambda x: _determine_stream_track_single(self._aA,auxiliaryTrack,
self._trackts[x]*numpy.fabs(self._progenitor_Omega_along_dOmega/auxiliary_Omega_along_dOmega),
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
thetasTrack[x])),
range(self._nTrackChunks),
numcores=numpy.amin([self._nTrackChunks,
multiprocessing.cpu_count(),
self._multi]))
for ii in range(self._nTrackChunks):
allAcfsTrack[ii,:]= multiOut[ii][0]
alljacsTrack[ii,:,:]= multiOut[ii][1]
allinvjacsTrack[ii,:,:]= multiOut[ii][2]
ObsTrack[ii,:]= multiOut[ii][3]
ObsTrackAA[ii,:]= multiOut[ii][4]
detdOdJps[ii]= multiOut[ii][5]
#Repeat the track calculation using the previous track, to get closer to it
for nn in range(self.nTrackIterations):
if self._multi is None:
for ii in range(self._nTrackChunks):
multiOut= _determine_stream_track_single(self._aA,
Orbit(ObsTrack[ii,:]),
0.,
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
thetasTrack[ii])
allAcfsTrack[ii,:]= multiOut[0]
alljacsTrack[ii,:,:]= multiOut[1]
allinvjacsTrack[ii,:,:]= multiOut[2]
ObsTrack[ii,:]= multiOut[3]
ObsTrackAA[ii,:]= multiOut[4]
detdOdJps[ii]= multiOut[5]
else:
multiOut= multi.parallel_map(\
(lambda x: _determine_stream_track_single(self._aA,Orbit(ObsTrack[x,:]),0.,
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
self.meanOmega,
thetasTrack[x])),
range(self._nTrackChunks),
numcores=numpy.amin([self._nTrackChunks,
multiprocessing.cpu_count(),
self._multi]))
for ii in range(self._nTrackChunks):
allAcfsTrack[ii,:]= multiOut[ii][0]
alljacsTrack[ii,:,:]= multiOut[ii][1]
allinvjacsTrack[ii,:,:]= multiOut[ii][2]
ObsTrack[ii,:]= multiOut[ii][3]
ObsTrackAA[ii,:]= multiOut[ii][4]
detdOdJps[ii]= multiOut[ii][5]
#Store the track
self._thetasTrack= thetasTrack
self._ObsTrack= ObsTrack
self._ObsTrackAA= ObsTrackAA
self._allAcfsTrack= allAcfsTrack
self._alljacsTrack= alljacsTrack
self._allinvjacsTrack= allinvjacsTrack
self._detdOdJps= detdOdJps
self._meandetdOdJp= numpy.mean(self._detdOdJps)
self._logmeandetdOdJp= numpy.log(self._meandetdOdJp)
#Also calculate _ObsTrackXY in XYZ,vXYZ coordinates
self._ObsTrackXY= numpy.empty_like(self._ObsTrack)
TrackX= self._ObsTrack[:,0]*numpy.cos(self._ObsTrack[:,5])
TrackY= self._ObsTrack[:,0]*numpy.sin(self._ObsTrack[:,5])
TrackZ= self._ObsTrack[:,3]
TrackvX, TrackvY, TrackvZ=\
bovy_coords.cyl_to_rect_vec(self._ObsTrack[:,1],
self._ObsTrack[:,2],
self._ObsTrack[:,4],
self._ObsTrack[:,5])
self._ObsTrackXY[:,0]= TrackX
self._ObsTrackXY[:,1]= TrackY
self._ObsTrackXY[:,2]= TrackZ
self._ObsTrackXY[:,3]= TrackvX
self._ObsTrackXY[:,4]= TrackvY
self._ObsTrackXY[:,5]= TrackvZ
return None
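    # Note (illustrative, names as defined above): after this routine the track
    # is available both in cylindrical coordinates (self._ObsTrack, columns
    # [R,vR,vT,z,vz,phi]) and rectangular ones (self._ObsTrackXY, columns
    # [X,Y,Z,vX,vY,vZ]), sampled at the self._nTrackChunks angle offsets stored
    # in self._thetasTrack. For example, the Galactocentric radius along the
    # track is simply
    #
    #   R_track= self._ObsTrack[:,0]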
def _determine_stream_spread(self,simple=_USESIMPLE):
"""Determine the spread around the stream track, just sets matrices that describe the covariances"""
allErrCovs= numpy.empty((self._nTrackChunks,6,6))
if self._multi is None:
for ii in range(self._nTrackChunks):
allErrCovs[ii]= _determine_stream_spread_single(self._sigomatrixEig,
self._thetasTrack[ii],
self.sigOmega,
lambda y: self.sigangledAngle(y,simple=simple),
self._allinvjacsTrack[ii])
else:
multiOut= multi.parallel_map(\
(lambda x: _determine_stream_spread_single(self._sigomatrixEig,
self._thetasTrack[x],
self.sigOmega,
lambda y: self.sigangledAngle(y,simple=simple),
self._allinvjacsTrack[x])),
range(self._nTrackChunks),
numcores=numpy.amin([self._nTrackChunks,
multiprocessing.cpu_count(),
self._multi]))
for ii in range(self._nTrackChunks):
allErrCovs[ii]= multiOut[ii]
self._allErrCovs= allErrCovs
#Also propagate to XYZ coordinates
allErrCovsXY= numpy.empty_like(self._allErrCovs)
allErrCovsEigvalXY= numpy.empty((len(self._thetasTrack),6))
allErrCovsEigvecXY= numpy.empty_like(self._allErrCovs)
eigDir= numpy.array([numpy.array([1.,0.,0.,0.,0.,0.]) for ii in range(6)])
for ii in range(self._nTrackChunks):
tjac= bovy_coords.cyl_to_rect_jac(*self._ObsTrack[ii])
allErrCovsXY[ii]=\
numpy.dot(tjac,numpy.dot(self._allErrCovs[ii],tjac.T))
#Eigen decomposition for interpolation
teig= numpy.linalg.eig(allErrCovsXY[ii])
#Sort them to match them up later
sortIndx= numpy.argsort(teig[0])
allErrCovsEigvalXY[ii]= teig[0][sortIndx]
#Make sure the eigenvectors point in the same direction
for jj in range(6):
if numpy.sum(eigDir[jj]*teig[1][:,sortIndx[jj]]) < 0.:
teig[1][:,sortIndx[jj]]*= -1.
eigDir[jj]= teig[1][:,sortIndx[jj]]
allErrCovsEigvecXY[ii]= teig[1][:,sortIndx]
self._allErrCovsXY= allErrCovsXY
#Interpolate the allErrCovsXY covariance matrices along the interpolated track
#Interpolate the eigenvalues
interpAllErrCovsEigvalXY=\
[interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
allErrCovsEigvalXY[:,ii],
k=3) for ii in range(6)]
#Now build the interpolated allErrCovsXY using slerp
interpolatedAllErrCovsXY= numpy.empty((len(self._interpolatedThetasTrack),
6,6))
interpolatedEigval=\
numpy.array([interpAllErrCovsEigvalXY[ii](self._interpolatedThetasTrack) for ii in range(6)]) #6,ninterp
#Interpolate in chunks
interpolatedEigvec= numpy.empty((len(self._interpolatedThetasTrack),
6,6))
for ii in range(self._nTrackChunks-1):
slerpOmegas=\
[numpy.arccos(numpy.sum(allErrCovsEigvecXY[ii,:,jj]*allErrCovsEigvecXY[ii+1,:,jj])) for jj in range(6)]
slerpts= (self._interpolatedThetasTrack-self._thetasTrack[ii])/\
(self._thetasTrack[ii+1]-self._thetasTrack[ii])
slerpIndx= (slerpts >= 0.)*(slerpts <= 1.)
for jj in range(6):
for kk in range(6):
interpolatedEigvec[slerpIndx,kk,jj]=\
(numpy.sin((1-slerpts[slerpIndx])*slerpOmegas[jj])*allErrCovsEigvecXY[ii,kk,jj]
+numpy.sin(slerpts[slerpIndx]*slerpOmegas[jj])*allErrCovsEigvecXY[ii+1,kk,jj])/numpy.sin(slerpOmegas[jj])
for ii in range(len(self._interpolatedThetasTrack)):
interpolatedAllErrCovsXY[ii]=\
numpy.dot(interpolatedEigvec[ii],
numpy.dot(numpy.diag(interpolatedEigval[:,ii]),
interpolatedEigvec[ii].T))
self._interpolatedAllErrCovsXY= interpolatedAllErrCovsXY
#Also interpolate in l and b coordinates
self._determine_stream_spreadLB(simple=simple)
return None
def _determine_stream_spreadLB(self,simple=_USESIMPLE,
Rnorm=None,Vnorm=None,
R0=None,Zsun=None,vsun=None):
"""Determine the spread in the stream in observable coordinates"""
if not hasattr(self,'_allErrCovs'):
self._determine_stream_spread(simple=simple)
if Rnorm is None:
Rnorm= self._Rnorm
if Vnorm is None:
Vnorm= self._Vnorm
if R0 is None:
R0= self._R0
if Zsun is None:
Zsun= self._Zsun
if vsun is None:
vsun= self._vsun
allErrCovsLB= numpy.empty_like(self._allErrCovs)
obs= [R0,0.,Zsun]
obs.extend(vsun)
obskwargs= {}
obskwargs['ro']= Rnorm
obskwargs['vo']= Vnorm
obskwargs['obs']= obs
self._ErrCovsLBScale= [180.,90.,
self._progenitor.dist(**obskwargs),
numpy.fabs(self._progenitor.vlos(**obskwargs)),
numpy.sqrt(self._progenitor.pmll(**obskwargs)**2.
+self._progenitor.pmbb(**obskwargs)**2.),
numpy.sqrt(self._progenitor.pmll(**obskwargs)**2.
+self._progenitor.pmbb(**obskwargs)**2.)]
allErrCovsEigvalLB= numpy.empty((len(self._thetasTrack),6))
allErrCovsEigvecLB= numpy.empty_like(self._allErrCovs)
eigDir= numpy.array([numpy.array([1.,0.,0.,0.,0.,0.]) for ii in range(6)])
for ii in range(self._nTrackChunks):
tjacXY= bovy_coords.galcenrect_to_XYZ_jac(*self._ObsTrackXY[ii])
tjacLB= bovy_coords.lbd_to_XYZ_jac(*self._ObsTrackLB[ii],
degree=True)
tjacLB[:3,:]/= Rnorm
tjacLB[3:,:]/= Vnorm
for jj in range(6):
tjacLB[:,jj]*= self._ErrCovsLBScale[jj]
tjac= numpy.dot(numpy.linalg.inv(tjacLB),tjacXY)
allErrCovsLB[ii]=\
numpy.dot(tjac,numpy.dot(self._allErrCovsXY[ii],tjac.T))
#Eigen decomposition for interpolation
teig= numpy.linalg.eig(allErrCovsLB[ii])
#Sort them to match them up later
sortIndx= numpy.argsort(teig[0])
allErrCovsEigvalLB[ii]= teig[0][sortIndx]
#Make sure the eigenvectors point in the same direction
for jj in range(6):
if numpy.sum(eigDir[jj]*teig[1][:,sortIndx[jj]]) < 0.:
teig[1][:,sortIndx[jj]]*= -1.
eigDir[jj]= teig[1][:,sortIndx[jj]]
allErrCovsEigvecLB[ii]= teig[1][:,sortIndx]
self._allErrCovsLBUnscaled= allErrCovsLB
#Interpolate the allErrCovsLB covariance matrices along the interpolated track
#Interpolate the eigenvalues
interpAllErrCovsEigvalLB=\
[interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
allErrCovsEigvalLB[:,ii],
k=3) for ii in range(6)]
#Now build the interpolated allErrCovsXY using slerp
interpolatedAllErrCovsLB= numpy.empty((len(self._interpolatedThetasTrack),
6,6))
interpolatedEigval=\
numpy.array([interpAllErrCovsEigvalLB[ii](self._interpolatedThetasTrack) for ii in range(6)]) #6,ninterp
#Interpolate in chunks
interpolatedEigvec= numpy.empty((len(self._interpolatedThetasTrack),
6,6))
for ii in range(self._nTrackChunks-1):
slerpOmegas=\
[numpy.arccos(numpy.sum(allErrCovsEigvecLB[ii,:,jj]*allErrCovsEigvecLB[ii+1,:,jj])) for jj in range(6)]
slerpts= (self._interpolatedThetasTrack-self._thetasTrack[ii])/\
(self._thetasTrack[ii+1]-self._thetasTrack[ii])
slerpIndx= (slerpts >= 0.)*(slerpts <= 1.)
for jj in range(6):
for kk in range(6):
interpolatedEigvec[slerpIndx,kk,jj]=\
(numpy.sin((1-slerpts[slerpIndx])*slerpOmegas[jj])*allErrCovsEigvecLB[ii,kk,jj]
+numpy.sin(slerpts[slerpIndx]*slerpOmegas[jj])*allErrCovsEigvecLB[ii+1,kk,jj])/numpy.sin(slerpOmegas[jj])
for ii in range(len(self._interpolatedThetasTrack)):
interpolatedAllErrCovsLB[ii]=\
numpy.dot(interpolatedEigvec[ii],
numpy.dot(numpy.diag(interpolatedEigval[:,ii]),
interpolatedEigvec[ii].T))
self._interpolatedAllErrCovsLBUnscaled= interpolatedAllErrCovsLB
#Also calculate the (l,b,..) -> (X,Y,..) Jacobian at all of the interpolated and not interpolated points
trackLogDetJacLB= numpy.empty_like(self._thetasTrack)
interpolatedTrackLogDetJacLB=\
numpy.empty_like(self._interpolatedThetasTrack)
for ii in range(self._nTrackChunks):
tjacLB= bovy_coords.lbd_to_XYZ_jac(*self._ObsTrackLB[ii],
degree=True)
trackLogDetJacLB[ii]= numpy.log(numpy.linalg.det(tjacLB))
self._trackLogDetJacLB= trackLogDetJacLB
for ii in range(len(self._interpolatedThetasTrack)):
tjacLB=\
bovy_coords.lbd_to_XYZ_jac(*self._interpolatedObsTrackLB[ii],
degree=True)
interpolatedTrackLogDetJacLB[ii]=\
numpy.log(numpy.linalg.det(tjacLB))
self._interpolatedTrackLogDetJacLB= interpolatedTrackLogDetJacLB
return None
def _interpolate_stream_track(self):
"""Build interpolations of the stream track"""
if hasattr(self,'_interpolatedThetasTrack'):
return None #Already did this
TrackX= self._ObsTrack[:,0]*numpy.cos(self._ObsTrack[:,5])
TrackY= self._ObsTrack[:,0]*numpy.sin(self._ObsTrack[:,5])
TrackZ= self._ObsTrack[:,3]
TrackvX, TrackvY, TrackvZ=\
bovy_coords.cyl_to_rect_vec(self._ObsTrack[:,1],
self._ObsTrack[:,2],
self._ObsTrack[:,4],
self._ObsTrack[:,5])
#Interpolate
self._interpTrackX=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackX,k=3)
self._interpTrackY=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackY,k=3)
self._interpTrackZ=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackZ,k=3)
self._interpTrackvX=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackvX,k=3)
self._interpTrackvY=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackvY,k=3)
self._interpTrackvZ=\
interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
TrackvZ,k=3)
#Now store an interpolated version of the stream track
self._interpolatedThetasTrack=\
numpy.linspace(0.,self._deltaAngleTrack,1001)
self._interpolatedObsTrackXY= numpy.empty((len(self._interpolatedThetasTrack),6))
self._interpolatedObsTrackXY[:,0]=\
self._interpTrackX(self._interpolatedThetasTrack)
self._interpolatedObsTrackXY[:,1]=\
self._interpTrackY(self._interpolatedThetasTrack)
self._interpolatedObsTrackXY[:,2]=\
self._interpTrackZ(self._interpolatedThetasTrack)
self._interpolatedObsTrackXY[:,3]=\
self._interpTrackvX(self._interpolatedThetasTrack)
self._interpolatedObsTrackXY[:,4]=\
self._interpTrackvY(self._interpolatedThetasTrack)
self._interpolatedObsTrackXY[:,5]=\
self._interpTrackvZ(self._interpolatedThetasTrack)
#Also in cylindrical coordinates
self._interpolatedObsTrack= \
numpy.empty((len(self._interpolatedThetasTrack),6))
tR,tphi,tZ= bovy_coords.rect_to_cyl(self._interpolatedObsTrackXY[:,0],
self._interpolatedObsTrackXY[:,1],
self._interpolatedObsTrackXY[:,2])
tvR,tvT,tvZ=\
bovy_coords.rect_to_cyl_vec(self._interpolatedObsTrackXY[:,3],
self._interpolatedObsTrackXY[:,4],
self._interpolatedObsTrackXY[:,5],
tR,tphi,tZ,cyl=True)
self._interpolatedObsTrack[:,0]= tR
self._interpolatedObsTrack[:,1]= tvR
self._interpolatedObsTrack[:,2]= tvT
self._interpolatedObsTrack[:,3]= tZ
self._interpolatedObsTrack[:,4]= tvZ
self._interpolatedObsTrack[:,5]= tphi
return None
def _interpolate_stream_track_aA(self):
"""Build interpolations of the stream track in action-angle coordinates"""
if hasattr(self,'_interpolatedObsTrackAA'):
return None #Already did this
#Calculate 1D meanOmega on a fine grid in angle and interpolate
if not hasattr(self,'_interpolatedThetasTrack'):
self._interpolate_stream_track()
dmOs= numpy.array([self.meanOmega(da,oned=True)
for da in self._interpolatedThetasTrack])
self._interpTrackAAdmeanOmegaOneD=\
interpolate.InterpolatedUnivariateSpline(\
self._interpolatedThetasTrack,dmOs,k=3)
#Build the interpolated AA
self._interpolatedObsTrackAA=\
numpy.empty((len(self._interpolatedThetasTrack),6))
for ii in range(len(self._interpolatedThetasTrack)):
self._interpolatedObsTrackAA[ii,:3]=\
self._progenitor_Omega+dmOs[ii]*self._dsigomeanProgDirection\
*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
self._progenitor_angle+self._interpolatedThetasTrack[ii]\
*self._dsigomeanProgDirection*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
numpy.mod(self._interpolatedObsTrackAA[ii,3:],2.*numpy.pi)
return None
def calc_stream_lb(self,
Vnorm=None,Rnorm=None,
R0=None,Zsun=None,vsun=None):
"""
NAME:
calc_stream_lb
PURPOSE:
convert the stream track to observational coordinates and store
INPUT:
Coordinate transformation inputs (all default to the instance-wide
values):
Vnorm= circular velocity to normalize velocities with
Rnorm= Galactocentric radius to normalize positions with
R0= Galactocentric radius of the Sun (kpc)
Zsun= Sun's height above the plane (kpc)
vsun= Sun's motion in cylindrical coordinates (vR positive away from center)
OUTPUT:
(none)
HISTORY:
2013-12-02 - Written - Bovy (IAS)
"""
if Vnorm is None:
Vnorm= self._Vnorm
if Rnorm is None:
Rnorm= self._Rnorm
if R0 is None:
R0= self._R0
if Zsun is None:
Zsun= self._Zsun
if vsun is None:
vsun= self._vsun
self._ObsTrackLB= numpy.empty_like(self._ObsTrack)
XYZ= bovy_coords.galcencyl_to_XYZ(self._ObsTrack[:,0]*Rnorm,
self._ObsTrack[:,5],
self._ObsTrack[:,3]*Rnorm,
Xsun=R0,Zsun=Zsun)
vXYZ= bovy_coords.galcencyl_to_vxvyvz(self._ObsTrack[:,1]*Vnorm,
self._ObsTrack[:,2]*Vnorm,
self._ObsTrack[:,4]*Vnorm,
self._ObsTrack[:,5],
vsun=vsun)
slbd=bovy_coords.XYZ_to_lbd(XYZ[0],XYZ[1],XYZ[2],
degree=True)
svlbd= bovy_coords.vxvyvz_to_vrpmllpmbb(vXYZ[0],vXYZ[1],vXYZ[2],
slbd[:,0],slbd[:,1],slbd[:,2],
degree=True)
self._ObsTrackLB[:,0]= slbd[:,0]
self._ObsTrackLB[:,1]= slbd[:,1]
self._ObsTrackLB[:,2]= slbd[:,2]
self._ObsTrackLB[:,3]= svlbd[:,0]
self._ObsTrackLB[:,4]= svlbd[:,1]
self._ObsTrackLB[:,5]= svlbd[:,2]
if hasattr(self,'_interpolatedObsTrackXY'):
#Do the same for the interpolated track
self._interpolatedObsTrackLB=\
numpy.empty_like(self._interpolatedObsTrackXY)
XYZ=\
bovy_coords.galcenrect_to_XYZ(\
self._interpolatedObsTrackXY[:,0]*Rnorm,
self._interpolatedObsTrackXY[:,1]*Rnorm,
self._interpolatedObsTrackXY[:,2]*Rnorm,
Xsun=R0,Zsun=Zsun)
vXYZ=\
bovy_coords.galcenrect_to_vxvyvz(\
self._interpolatedObsTrackXY[:,3]*Vnorm,
self._interpolatedObsTrackXY[:,4]*Vnorm,
self._interpolatedObsTrackXY[:,5]*Vnorm,
vsun=vsun)
slbd=bovy_coords.XYZ_to_lbd(XYZ[0],XYZ[1],XYZ[2],
degree=True)
svlbd= bovy_coords.vxvyvz_to_vrpmllpmbb(vXYZ[0],vXYZ[1],vXYZ[2],
slbd[:,0],slbd[:,1],
slbd[:,2],
degree=True)
self._interpolatedObsTrackLB[:,0]= slbd[:,0]
self._interpolatedObsTrackLB[:,1]= slbd[:,1]
self._interpolatedObsTrackLB[:,2]= slbd[:,2]
self._interpolatedObsTrackLB[:,3]= svlbd[:,0]
self._interpolatedObsTrackLB[:,4]= svlbd[:,1]
self._interpolatedObsTrackLB[:,5]= svlbd[:,2]
if hasattr(self,'_allErrCovsLBUnscaled'):
#Re-calculate this
self._determine_stream_spreadLB(simple=_USESIMPLE,
Vnorm=Vnorm,Rnorm=Rnorm,
R0=R0,Zsun=Zsun,vsun=vsun)
return None
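    # Usage sketch for calc_stream_lb above (hypothetical `sdf` instance):
    # populate the observational track and read off sky positions and
    # line-of-sight velocities; the columns of _ObsTrackLB are
    # [l (deg), b (deg), D (kpc), vlos (km/s), pmll (mas/yr), pmbb (mas/yr)].
    #
    #   sdf.calc_stream_lb()
    #   ll, bb, vlos= sdf._ObsTrackLB[:,0], sdf._ObsTrackLB[:,1], sdf._ObsTrackLB[:,3]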
def _find_closest_trackpoint(self,R,vR,vT,z,vz,phi,interp=True,xy=False,
usev=False):
"""For backward compatibility"""
return self.find_closest_trackpoint(R,vR,vT,z,vz,phi,
interp=interp,xy=xy,
usev=usev)
def find_closest_trackpoint(self,R,vR,vT,z,vz,phi,interp=True,xy=False,
usev=False):
"""
NAME:
find_closest_trackpoint
PURPOSE:
find the closest point on the stream track to a given point
INPUT:
R,vR,vT,z,vz,phi - phase-space coordinates of the given point
interp= (True), if True, return the index of the interpolated track
xy= (False) if True, input is X,Y,Z,vX,vY,vZ in Galactocentric rectangular coordinates; if xy, some coordinates may be missing (given as None) and they will not be used
usev= (False) if True, also use velocities to find the closest point
OUTPUT:
index into the track of the closest track point
HISTORY:
2013-12-04 - Written - Bovy (IAS)
"""
if xy:
X= R
Y= vR
Z= vT
else:
X= R*numpy.cos(phi)
Y= R*numpy.sin(phi)
Z= z
if xy and usev:
vX= z
vY= vz
vZ= phi
elif usev:
vX= vR*numpy.cos(phi)-vT*numpy.sin(phi)
vY= vR*numpy.sin(phi)+vT*numpy.cos(phi)
vZ= vz
present= [not X is None,not Y is None,not Z is None]
if usev: present.extend([not vX is None,not vY is None,not vZ is None])
present= numpy.array(present,dtype='float')
if X is None: X= 0.
if Y is None: Y= 0.
if Z is None: Z= 0.
if usev and vX is None: vX= 0.
if usev and vY is None: vY= 0.
if usev and vZ is None: vZ= 0.
if interp:
dist2= present[0]*(X-self._interpolatedObsTrackXY[:,0])**2.\
+present[1]*(Y-self._interpolatedObsTrackXY[:,1])**2.\
+present[2]*(Z-self._interpolatedObsTrackXY[:,2])**2.
if usev:
dist2+= present[3]*(vX-self._interpolatedObsTrackXY[:,3])**2.\
+present[4]*(vY-self._interpolatedObsTrackXY[:,4])**2.\
+present[5]*(vZ-self._interpolatedObsTrackXY[:,5])**2.
else:
dist2= present[0]*(X-self._ObsTrackXY[:,0])**2.\
+present[1]*(Y-self._ObsTrackXY[:,1])**2.\
+present[2]*(Z-self._ObsTrackXY[:,2])**2.
if usev:
dist2+= present[3]*(vX-self._ObsTrackXY[:,3])**2.\
+present[4]*(vY-self._ObsTrackXY[:,4])**2.\
+present[5]*(vZ-self._ObsTrackXY[:,5])**2.
return numpy.argmin(dist2)
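    # Usage sketch for find_closest_trackpoint above (hypothetical `sdf` instance
    # and phase-space point R,vR,vT,z,vz,phi): locate the nearest interpolated
    # track point to an observed star in configuration space only (velocities
    # are ignored unless usev=True).
    #
    #   indx= sdf.find_closest_trackpoint(R,vR,vT,z,vz,phi,interp=True)
    #   nearest_xyz= sdf._interpolatedObsTrackXY[indx,:3]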
def _find_closest_trackpointLB(self,l,b,D,vlos,pmll,pmbb,interp=True,
usev=False):
return self.find_closest_trackpointLB(l,b,D,vlos,pmll,pmbb,
interp=interp,
usev=usev)
def find_closest_trackpointLB(self,l,b,D,vlos,pmll,pmbb,interp=True,
usev=False):
"""
NAME:
find_closest_trackpointLB
PURPOSE:
find the closest point on the stream track to a given point in (l,b,...) coordinates
INPUT:
l,b,D,vlos,pmll,pmbb- coordinates in (deg,deg,kpc,km/s,mas/yr,mas/yr)
interp= (True) if True, return the closest index on the interpolated track
usev= (False) if True, also use the velocity components (default is to only use the positions)
OUTPUT:
index of closest track point on the interpolated or not-interpolated track
HISTORY:
           2013-12-17 - Written - Bovy (IAS)
"""
if interp:
nTrackPoints= len(self._interpolatedThetasTrack)
else:
nTrackPoints= len(self._thetasTrack)
if l is None:
l= 0.
trackL= numpy.zeros(nTrackPoints)
elif interp:
trackL= self._interpolatedObsTrackLB[:,0]
else:
trackL= self._ObsTrackLB[:,0]
if b is None:
b= 0.
trackB= numpy.zeros(nTrackPoints)
elif interp:
trackB= self._interpolatedObsTrackLB[:,1]
else:
trackB= self._ObsTrackLB[:,1]
if D is None:
D= 1.
trackD= numpy.ones(nTrackPoints)
elif interp:
trackD= self._interpolatedObsTrackLB[:,2]
else:
trackD= self._ObsTrackLB[:,2]
if usev:
if vlos is None:
vlos= 0.
trackVlos= numpy.zeros(nTrackPoints)
elif interp:
trackVlos= self._interpolatedObsTrackLB[:,3]
else:
trackVlos= self._ObsTrackLB[:,3]
if pmll is None:
pmll= 0.
trackPmll= numpy.zeros(nTrackPoints)
elif interp:
trackPmll= self._interpolatedObsTrackLB[:,4]
else:
trackPmll= self._ObsTrackLB[:,4]
if pmbb is None:
pmbb= 0.
trackPmbb= numpy.zeros(nTrackPoints)
elif interp:
trackPmbb= self._interpolatedObsTrackLB[:,5]
else:
trackPmbb= self._ObsTrackLB[:,5]
#Calculate rectangular coordinates
XYZ= bovy_coords.lbd_to_XYZ(l,b,D,degree=True)
trackXYZ= bovy_coords.lbd_to_XYZ(trackL,trackB,trackD,degree=True)
if usev:
vxvyvz= bovy_coords.vrpmllpmbb_to_vxvyvz(vlos,pmll,pmbb,
XYZ[0],XYZ[1],XYZ[2],
XYZ=True)
trackvxvyvz= bovy_coords.vrpmllpmbb_to_vxvyvz(trackVlos,trackPmll,
trackPmbb,
trackXYZ[:,0],
trackXYZ[:,1],
trackXYZ[:,2],
XYZ=True)
#Calculate distance
dist2= (XYZ[0]-trackXYZ[:,0])**2.\
+(XYZ[1]-trackXYZ[:,1])**2.\
+(XYZ[2]-trackXYZ[:,2])**2.
if usev:
dist2+= (vxvyvz[0]-trackvxvyvz[:,0])**2.\
+(vxvyvz[1]-trackvxvyvz[:,1])**2.\
+(vxvyvz[2]-trackvxvyvz[:,2])**2.
return numpy.argmin(dist2)
def _find_closest_trackpointaA(self,Or,Op,Oz,ar,ap,az,interp=True):
"""
NAME:
_find_closest_trackpointaA
PURPOSE:
find the closest point on the stream track to a given point in
frequency-angle coordinates
INPUT:
Or,Op,Oz,ar,ap,az - phase-space coordinates of the given point
interp= (True), if True, return the index of the interpolated track
OUTPUT:
index into the track of the closest track point
HISTORY:
2013-12-22 - Written - Bovy (IAS)
"""
#Calculate angle offset along the stream parallel to the stream track
angle= numpy.hstack((ar,ap,az))
da= angle-self._progenitor_angle
dapar= self._sigMeanSign*numpy.sum(da*self._dsigomeanProgDirection)
if interp:
dist= numpy.fabs(dapar-self._interpolatedThetasTrack)
else:
dist= numpy.fabs(dapar-self._thetasTrack)
return numpy.argmin(dist)
#########DISTRIBUTION AS A FUNCTION OF ANGLE ALONG THE STREAM##################
def meanOmega(self,dangle,oned=False):
"""
NAME:
meanOmega
PURPOSE:
calculate the mean frequency as a function of angle, assuming a uniform time distribution up to a maximum time
INPUT:
dangle - angle offset
oned= (False) if True, return the 1D offset from the progenitor (along the direction of disruption)
OUTPUT:
mean Omega
HISTORY:
2013-12-01 - Written - Bovy (IAS)
"""
dOmin= dangle/self._tdisrupt
meandO= self._meandO
dO1D= ((numpy.sqrt(2./numpy.pi)*numpy.sqrt(self._sortedSigOEig[2])\
*numpy.exp(-0.5*(meandO-dOmin)**2.\
/self._sortedSigOEig[2])/
(1.+special.erf((meandO-dOmin)\
/numpy.sqrt(2.*self._sortedSigOEig[2]))))\
+meandO)
if oned: return dO1D
else:
return self._progenitor_Omega+dO1D*self._dsigomeanProgDirection\
*self._sigMeanSign
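    # Usage sketch for meanOmega above (hypothetical `sdf` instance): the mean
    # frequency offset grows away from the progenitor, because only fast-moving
    # debris reaches large angle offsets within tdisrupt.
    #
    #   dO_par= sdf.meanOmega(0.2,oned=True)   # 1D offset along the stream
    #   Omega_mean= sdf.meanOmega(0.2)         # full 3D mean frequency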
def sigOmega(self,dangle):
"""
NAME:
           sigOmega
PURPOSE:
calculate the 1D sigma in frequency as a function of angle, assuming a uniform time distribution up to a maximum time
INPUT:
dangle - angle offset
OUTPUT:
sigma Omega
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
dOmin= dangle/self._tdisrupt
meandO= self._meandO
sO1D2= ((numpy.sqrt(2./numpy.pi)*numpy.sqrt(self._sortedSigOEig[2])\
*(meandO+dOmin)\
*numpy.exp(-0.5*(meandO-dOmin)**2.\
/self._sortedSigOEig[2])/
(1.+special.erf((meandO-dOmin)\
/numpy.sqrt(2.*self._sortedSigOEig[2]))))\
+meandO**2.+self._sortedSigOEig[2])
mO= self.meanOmega(dangle,oned=True)
return numpy.sqrt(sO1D2-mO**2.)
def ptdAngle(self,t,dangle):
"""
NAME:
           ptdAngle
PURPOSE:
return the probability of a given stripping time at a given angle along the stream
INPUT:
t - stripping time
dangle - angle offset along the stream
OUTPUT:
p(td|dangle)
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
        if isinstance(t,(int,float,numpy.float32,numpy.float64)):
            t= numpy.array([t])
        out= numpy.zeros(len(t))
        #Only stripping times 0 < t < tdisrupt contribute
        tindx= (t > 0.)*(t < self._tdisrupt)
        if not numpy.any(tindx):
            return out
        dO= dangle/t[tindx]
        #p(t|a) = \int dO p(O,t|a) = \int dO p(t|O,a) p(O|a) = \int dO delta (t-a/O)p(O|a) = O^2/a p(O|a); p(O|a) = \int dt p(a|O,t) p(O)p(t) = 1/O p(O)
        out[tindx]=\
            dO**2./dangle*numpy.exp(-0.5*(dO-self._meandO)**2.\
                                         /self._sortedSigOEig[2])/\
            numpy.sqrt(self._sortedSigOEig[2])
        return out
def meantdAngle(self,dangle):
"""<|fim▁hole|> PURPOSE:
calculate the mean stripping time at a given angle
INPUT:
dangle - angle offset along the stream
OUTPUT:
mean stripping time at this dangle
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
Tlow= dangle/(self._meandO+3.*numpy.sqrt(self._sortedSigOEig[2]))
Thigh= dangle/(self._meandO-3.*numpy.sqrt(self._sortedSigOEig[2]))
num= integrate.quad(lambda x: x*self.ptdAngle(x,dangle),
Tlow,Thigh)[0]
denom= integrate.quad(self.ptdAngle,Tlow,Thigh,(dangle,))[0]
if denom == 0.: return self._tdisrupt
elif numpy.isnan(denom): return 0.
else: return num/denom
def sigtdAngle(self,dangle):
"""
NAME:
sigtdAngle
PURPOSE:
calculate the dispersion in the stripping times at a given angle
INPUT:
dangle - angle offset along the stream
OUTPUT:
dispersion in the stripping times at this angle
HISTORY:
2013-12-05 - Written - Bovy (IAS)
"""
Tlow= dangle/(self._meandO+3.*numpy.sqrt(self._sortedSigOEig[2]))
Thigh= dangle/(self._meandO-3.*numpy.sqrt(self._sortedSigOEig[2]))
numsig2= integrate.quad(lambda x: x**2.*self.ptdAngle(x,dangle),
Tlow,Thigh)[0]
nummean= integrate.quad(lambda x: x*self.ptdAngle(x,dangle),
Tlow,Thigh)[0]
denom= integrate.quad(self.ptdAngle,Tlow,Thigh,(dangle,))[0]
if denom == 0.: return numpy.nan
else: return numpy.sqrt(numsig2/denom-(nummean/denom)**2.)
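    # Usage sketch for ptdAngle, meantdAngle, and sigtdAngle above (hypothetical
    # `sdf` instance): the stripping-time distribution at a given angle follows
    # p(td|dangle) ~ Omega^2/dangle x p(Omega) evaluated at Omega = dangle/td,
    # and its first two moments are
    #
    #   t_mean= sdf.meantdAngle(0.3)
    #   t_sig= sdf.sigtdAngle(0.3)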
def pangledAngle(self,angleperp,dangle,smallest=False):
"""
NAME:
pangledAngle
PURPOSE:
return the probability of a given perpendicular angle at a given angle along the stream
INPUT:
angleperp - perpendicular angle
dangle - angle offset along the stream
smallest= (False) calculate for smallest eigenvalue direction rather than for middle
OUTPUT:
p(angle_perp|dangle)
HISTORY:
2013-12-06 - Written - Bovy (IAS)
"""
if isinstance(angleperp,(int,float,numpy.float32,numpy.float64)):
angleperp= numpy.array([angleperp])
out= numpy.zeros(len(angleperp))
out= numpy.array([\
integrate.quad(self._pangledAnglet,0.,self._tdisrupt,
(ap,dangle,smallest))[0] for ap in angleperp])
return out
def meanangledAngle(self,dangle,smallest=False):
"""
NAME:
meanangledAngle
PURPOSE:
calculate the mean perpendicular angle at a given angle
INPUT:
dangle - angle offset along the stream
smallest= (False) calculate for smallest eigenvalue direction rather than for middle
OUTPUT:
mean perpendicular angle
HISTORY:
2013-12-06 - Written - Bovy (IAS)
"""
if smallest: eigIndx= 0
else: eigIndx= 1
aplow= numpy.amax([numpy.sqrt(self._sortedSigOEig[eigIndx])\
*self._tdisrupt*5.,
self._sigangle])
num= integrate.quad(lambda x: x*self.pangledAngle(x,dangle,smallest),
aplow,-aplow)[0]
denom= integrate.quad(self.pangledAngle,aplow,-aplow,
(dangle,smallest))[0]
if denom == 0.: return numpy.nan
else: return num/denom
def sigangledAngle(self,dangle,assumeZeroMean=True,smallest=False,
simple=False):
"""
NAME:
sigangledAngle
PURPOSE:
calculate the dispersion in the perpendicular angle at a given angle
INPUT:
dangle - angle offset along the stream
assumeZeroMean= (True) if True, assume that the mean is zero (should be)
smallest= (False) calculate for smallest eigenvalue direction rather than for middle
simple= (False), if True, return an even simpler estimate
OUTPUT:
dispersion in the perpendicular angle at this angle
HISTORY:
2013-12-06 - Written - Bovy (IAS)
"""
if smallest: eigIndx= 0
else: eigIndx= 1
if simple:
dt= self.meantdAngle(dangle)
return numpy.sqrt(self._sigangle2
+self._sortedSigOEig[eigIndx]*dt**2.)
aplow= numpy.amax([numpy.sqrt(self._sortedSigOEig[eigIndx])*self._tdisrupt*5.,
self._sigangle])
numsig2= integrate.quad(lambda x: x**2.*self.pangledAngle(x,dangle),
aplow,-aplow)[0]
if not assumeZeroMean:
nummean= integrate.quad(lambda x: x*self.pangledAngle(x,dangle),
aplow,-aplow)[0]
else:
nummean= 0.
denom= integrate.quad(self.pangledAngle,aplow,-aplow,(dangle,))[0]
if denom == 0.: return numpy.nan
else: return numpy.sqrt(numsig2/denom-(nummean/denom)**2.)
def _pangledAnglet(self,t,angleperp,dangle,smallest):
"""p(angle_perp|angle_par,time)"""
if smallest: eigIndx= 0
else: eigIndx= 1
if isinstance(angleperp,(int,float,numpy.float32,numpy.float64)):
angleperp= numpy.array([angleperp])
t= numpy.array([t])
out= numpy.zeros_like(angleperp)
tindx= t < self._tdisrupt
out[tindx]=\
numpy.exp(-0.5*angleperp[tindx]**2.\
/(t[tindx]**2.*self._sortedSigOEig[eigIndx]+self._sigangle2))/\
numpy.sqrt(t[tindx]**2.*self._sortedSigOEig[eigIndx]+self._sigangle2)\
*self.ptdAngle(t[t < self._tdisrupt],dangle)
return out
################APPROXIMATE FREQUENCY-ANGLE TRANSFORMATION#####################
def _approxaA(self,R,vR,vT,z,vz,phi,interp=True):
"""
NAME:
_approxaA
PURPOSE:
return action-angle coordinates for a point based on the linear
approximation around the stream track
INPUT:
R,vR,vT,z,vz,phi - phase-space coordinates of the given point
interp= (True), if True, use the interpolated track
OUTPUT:
(Or,Op,Oz,ar,ap,az)
HISTORY:
2013-12-03 - Written - Bovy (IAS)
"""
if isinstance(R,(int,float,numpy.float32,numpy.float64)): #Scalar input
R= numpy.array([R])
vR= numpy.array([vR])
vT= numpy.array([vT])
z= numpy.array([z])
vz= numpy.array([vz])
phi= numpy.array([phi])
closestIndx= [self._find_closest_trackpoint(R[ii],vR[ii],vT[ii],
z[ii],vz[ii],phi[ii],
interp=interp,
xy=False)
for ii in range(len(R))]
out= numpy.empty((6,len(R)))
for ii in range(len(R)):
dxv= numpy.empty(6)
if interp:
dxv[0]= R[ii]-self._interpolatedObsTrack[closestIndx[ii],0]
dxv[1]= vR[ii]-self._interpolatedObsTrack[closestIndx[ii],1]
dxv[2]= vT[ii]-self._interpolatedObsTrack[closestIndx[ii],2]
dxv[3]= z[ii]-self._interpolatedObsTrack[closestIndx[ii],3]
dxv[4]= vz[ii]-self._interpolatedObsTrack[closestIndx[ii],4]
dxv[5]= phi[ii]-self._interpolatedObsTrack[closestIndx[ii],5]
jacIndx= self._find_closest_trackpoint(R[ii],vR[ii],vT[ii],
z[ii],vz[ii],phi[ii],
interp=False,
xy=False)
else:
dxv[0]= R[ii]-self._ObsTrack[closestIndx[ii],0]
dxv[1]= vR[ii]-self._ObsTrack[closestIndx[ii],1]
dxv[2]= vT[ii]-self._ObsTrack[closestIndx[ii],2]
dxv[3]= z[ii]-self._ObsTrack[closestIndx[ii],3]
dxv[4]= vz[ii]-self._ObsTrack[closestIndx[ii],4]
dxv[5]= phi[ii]-self._ObsTrack[closestIndx[ii],5]
jacIndx= closestIndx[ii]
#Make sure phi hasn't wrapped around
if dxv[5] > numpy.pi:
dxv[5]-= 2.*numpy.pi
elif dxv[5] < -numpy.pi:
dxv[5]+= 2.*numpy.pi
#Apply closest jacobian
out[:,ii]= numpy.dot(self._alljacsTrack[jacIndx,:,:],
dxv)
if interp:
out[:,ii]+= self._interpolatedObsTrackAA[closestIndx[ii]]
else:
out[:,ii]+= self._ObsTrackAA[closestIndx[ii]]
return out
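    # Note (illustrative): _approxaA linearizes the (x,v) -> (Omega,angle) map
    # around the nearest track point,
    #   (Omega,angle) ~ (Omega,angle)_track + J . [(x,v) - (x,v)_track],
    # with J the pre-computed Jacobian self._alljacsTrack; _approxaAInv below
    # applies the corresponding inverse Jacobian to go back to (R,vR,...).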
def _approxaAInv(self,Or,Op,Oz,ar,ap,az,interp=True):
"""
NAME:
_approxaAInv
PURPOSE:
return R,vR,... coordinates for a point based on the linear
approximation around the stream track
INPUT:
Or,Op,Oz,ar,ap,az - phase space coordinates in frequency-angle
space
interp= (True), if True, use the interpolated track
OUTPUT:
(R,vR,vT,z,vz,phi)
HISTORY:
2013-12-22 - Written - Bovy (IAS)
"""
if isinstance(Or,(int,float,numpy.float32,numpy.float64)): #Scalar input
Or= numpy.array([Or])
Op= numpy.array([Op])
Oz= numpy.array([Oz])
ar= numpy.array([ar])
ap= numpy.array([ap])
az= numpy.array([az])
#Calculate apar, angle offset along the stream
closestIndx= [self._find_closest_trackpointaA(Or[ii],Op[ii],Oz[ii],
ar[ii],ap[ii],az[ii],
interp=interp)\
for ii in range(len(Or))]
out= numpy.empty((6,len(Or)))
for ii in range(len(Or)):
dOa= numpy.empty(6)
if interp:
dOa[0]= Or[ii]-self._interpolatedObsTrackAA[closestIndx[ii],0]
dOa[1]= Op[ii]-self._interpolatedObsTrackAA[closestIndx[ii],1]
dOa[2]= Oz[ii]-self._interpolatedObsTrackAA[closestIndx[ii],2]
dOa[3]= ar[ii]-self._interpolatedObsTrackAA[closestIndx[ii],3]
dOa[4]= ap[ii]-self._interpolatedObsTrackAA[closestIndx[ii],4]
dOa[5]= az[ii]-self._interpolatedObsTrackAA[closestIndx[ii],5]
jacIndx= self._find_closest_trackpointaA(Or[ii],Op[ii],Oz[ii],
ar[ii],ap[ii],az[ii],
interp=False)
else:
dOa[0]= Or[ii]-self._ObsTrackAA[closestIndx[ii],0]
dOa[1]= Op[ii]-self._ObsTrackAA[closestIndx[ii],1]
dOa[2]= Oz[ii]-self._ObsTrackAA[closestIndx[ii],2]
dOa[3]= ar[ii]-self._ObsTrackAA[closestIndx[ii],3]
dOa[4]= ap[ii]-self._ObsTrackAA[closestIndx[ii],4]
dOa[5]= az[ii]-self._ObsTrackAA[closestIndx[ii],5]
jacIndx= closestIndx[ii]
#Make sure the angles haven't wrapped around
if dOa[3] > numpy.pi:
dOa[3]-= 2.*numpy.pi
elif dOa[3] < -numpy.pi:
dOa[3]+= 2.*numpy.pi
if dOa[4] > numpy.pi:
dOa[4]-= 2.*numpy.pi
elif dOa[4] < -numpy.pi:
dOa[4]+= 2.*numpy.pi
if dOa[5] > numpy.pi:
dOa[5]-= 2.*numpy.pi
elif dOa[5] < -numpy.pi:
dOa[5]+= 2.*numpy.pi
#Apply closest jacobian
out[:,ii]= numpy.dot(self._allinvjacsTrack[jacIndx,:,:],
dOa)
if interp:
out[:,ii]+= self._interpolatedObsTrack[closestIndx[ii]]
else:
out[:,ii]+= self._ObsTrack[closestIndx[ii]]
return out
################################EVALUATE THE DF################################
def __call__(self,*args,**kwargs):
"""
NAME:
__call__
PURPOSE:
evaluate the DF
INPUT:
Either:
a) R,vR,vT,z,vz,phi ndarray [nobjects]
b) (Omegar,Omegaphi,Omegaz,angler,anglephi,anglez) tuple if aAInput
where:
Omegar - radial frequency
Omegaphi - azimuthal frequency
Omegaz - vertical frequency
angler - radial angle
anglephi - azimuthal angle
anglez - vertical angle
c) Orbit instance or list thereof
log= if True, return the natural log
           aAInput= (False) if True, option b above
OUTPUT:
value of DF
HISTORY:
2013-12-03 - Written - Bovy (IAS)
"""
#First parse log
log= kwargs.pop('log',True)
dOmega, dangle= self.prepData4Call(*args,**kwargs)
#Omega part
dOmega4dfOmega= dOmega\
-numpy.tile(self._dsigomeanProg.T,(dOmega.shape[1],1)).T
logdfOmega= -0.5*numpy.sum(dOmega4dfOmega*
numpy.dot(self._sigomatrixinv,
dOmega4dfOmega),
axis=0)-0.5*self._sigomatrixLogdet\
+numpy.log(numpy.fabs(numpy.dot(self._dsigomeanProgDirection,dOmega)))
#Angle part
dangle2= numpy.sum(dangle**2.,axis=0)
dOmega2= numpy.sum(dOmega**2.,axis=0)
dOmegaAngle= numpy.sum(dOmega*dangle,axis=0)
logdfA= -0.5/self._sigangle2*(dangle2-dOmegaAngle**2./dOmega2)\
-2.*self._lnsigangle-0.5*numpy.log(dOmega2)
#Finite stripping part
a0= dOmegaAngle/numpy.sqrt(2.)/self._sigangle/numpy.sqrt(dOmega2)
ad= numpy.sqrt(dOmega2)/numpy.sqrt(2.)/self._sigangle\
*(self._tdisrupt-dOmegaAngle/dOmega2)
        loga= numpy.log((special.erf(a0)+special.erf(ad))/2.) #divided by 2 such that this term is 0 for points well within the stream
out= logdfA+logdfOmega+loga+self._logmeandetdOdJp
if log:
return out
else:
return numpy.exp(out)
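    # Usage sketch for __call__ above (hypothetical `sdf` instance): evaluate the
    # log of the DF (log=True is the default) for arrays of phase-space points or
    # for an Orbit instance.
    #
    #   logp= sdf(R,vR,vT,z,vz,phi)                    # ndarrays of shape [nobjects]
    #   logp_orbit= sdf(Orbit([1.,0.1,1.1,0.1,0.1,0.]))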
def prepData4Call(self,*args,**kwargs):
"""
NAME:
prepData4Call
PURPOSE:
prepare stream data for the __call__ method
INPUT:
__call__ inputs
OUTPUT:
(dOmega,dangle); wrt the progenitor; each [3,nobj]
HISTORY:
2013-12-04 - Written - Bovy (IAS)
"""
#First calculate the actionAngle coordinates if they're not given
#as such
freqsAngles= self._parse_call_args(*args,**kwargs)
dOmega= freqsAngles[:3,:]\
-numpy.tile(self._progenitor_Omega.T,(freqsAngles.shape[1],1)).T
dangle= freqsAngles[3:,:]\
-numpy.tile(self._progenitor_angle.T,(freqsAngles.shape[1],1)).T
#Assuming single wrap, resolve large angle differences (wraps should be marginalized over)
dangle[(dangle < -4.)]+= 2.*numpy.pi
dangle[(dangle > 4.)]-= 2.*numpy.pi
return (dOmega,dangle)
def _parse_call_args(self,*args,**kwargs):
"""Helper function to parse the arguments to the __call__ and related functions,
return [6,nobj] array of frequencies (:3) and angles (3:)"""
interp= kwargs.get('interp',self._useInterp)
if len(args) == 5:
raise IOError("Must specify phi for streamdf")
elif len(args) == 6:
if kwargs.get('aAInput',False):
if isinstance(args[0],(int,float,numpy.float32,numpy.float64)):
out= numpy.empty((6,1))
else:
out= numpy.empty((6,len(args[0])))
for ii in range(6):
out[ii,:]= args[ii]
return out
else:
return self._approxaA(*args,interp=interp)
elif isinstance(args[0],Orbit):
o= args[0]
return self._approxaA(o.R(),o.vR(),o.vT(),o.z(),o.vz(),o.phi(),
interp=interp)
elif isinstance(args[0],list) and isinstance(args[0][0],Orbit):
R, vR, vT, z, vz, phi= [], [], [], [], [], []
for o in args[0]:
R.append(o.R())
vR.append(o.vR())
vT.append(o.vT())
z.append(o.z())
vz.append(o.vz())
phi.append(o.phi())
return self._approxaA(numpy.array(R),numpy.array(vR),
numpy.array(vT),numpy.array(z),
numpy.array(vz),numpy.array(phi),
interp=interp)
def callMarg(self,xy,**kwargs):
"""
NAME:
callMarg
PURPOSE:
           evaluate the DF, marginalizing over some directions, in Galactocentric rectangular coordinates (or in observed l,b,D,vlos,pmll,pmbb coordinates)
INPUT:
xy - phase-space point [X,Y,Z,vX,vY,vZ]; the distribution of the dimensions set to None is returned
interp= (object-wide interp default) if True, use the interpolated stream track
cindx= index of the closest point on the (interpolated) stream track if not given, determined from the dimensions given
nsigma= (3) number of sigma to marginalize the DF over (approximate sigma)
ngl= (5) order of Gauss-Legendre integration
lb= (False) if True, xy contains [l,b,D,vlos,pmll,pmbb] in [deg,deg,kpc,km/s,mas/yr,mas/yr] and the marginalized PDF in these coordinates is returned
Vnorm= (220) circular velocity to normalize with when lb=True
Rnorm= (8) Galactocentric radius to normalize with when lb=True
R0= (8) Galactocentric radius of the Sun (kpc)
Zsun= (0.025) Sun's height above the plane (kpc)
vsun= ([-11.1,241.92,7.25]) Sun's motion in cylindrical coordinates (vR positive away from center)
OUTPUT:
p(xy) marginalized over missing directions in xy
HISTORY:
2013-12-16 - Written - Bovy (IAS)
"""
coordGiven= numpy.array([not x is None for x in xy],dtype='bool')
if numpy.sum(coordGiven) == 6:
raise NotImplementedError("When specifying all coordinates, please use __call__ instead of callMarg")
#First construct the Gaussian approximation at this xy
gaussmean, gaussvar= self.gaussApprox(xy,**kwargs)
cholvar, chollower= stable_cho_factor(gaussvar)
#Now Gauss-legendre integrate over missing directions
ngl= kwargs.get('ngl',5)
nsigma= kwargs.get('nsigma',3)
glx, glw= numpy.polynomial.legendre.leggauss(ngl)
coordEval= []
weightEval= []
jj= 0
baseX= (glx+1)/2.
baseX= list(baseX)
baseX.extend(-(glx+1)/2.)
baseX= numpy.array(baseX)
baseW= glw
baseW= list(baseW)
baseW.extend(glw)
baseW= numpy.array(baseW)
for ii in range(6):
if not coordGiven[ii]:
coordEval.append(nsigma*baseX)
weightEval.append(baseW)
jj+= 1
else:
coordEval.append(xy[ii]*numpy.ones(1))
weightEval.append(numpy.ones(1))
mgrid= numpy.meshgrid(*coordEval,indexing='ij')
mgridNotGiven= numpy.array([mgrid[ii].flatten() for ii in range(6)
if not coordGiven[ii]])
mgridNotGiven= numpy.dot(cholvar,mgridNotGiven)
jj= 0
if coordGiven[0]: iX= mgrid[0]
else:
iX= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
if coordGiven[1]: iY= mgrid[1]
else:
iY= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
if coordGiven[2]: iZ= mgrid[2]
else:
iZ= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
if coordGiven[3]: ivX= mgrid[3]
else:
ivX= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
if coordGiven[4]: ivY= mgrid[4]
else:
ivY= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
if coordGiven[5]: ivZ= mgrid[5]
else:
ivZ= mgridNotGiven[jj]+gaussmean[jj]
jj+= 1
iXw, iYw, iZw, ivXw, ivYw, ivZw=\
numpy.meshgrid(*weightEval,indexing='ij')
if kwargs.get('lb',False): #Convert to Galactocentric cylindrical coordinates
#Setup coordinate transformation kwargs
Vnorm= kwargs.get('Vnorm',self._Vnorm)
Rnorm= kwargs.get('Rnorm',self._Rnorm)
R0= kwargs.get('R0',self._R0)
Zsun= kwargs.get('Zsun',self._Zsun)
vsun= kwargs.get('vsun',self._vsun)
tXYZ= bovy_coords.lbd_to_XYZ(iX.flatten(),iY.flatten(),
iZ.flatten(),
degree=True)
iR,iphi,iZ= bovy_coords.XYZ_to_galcencyl(tXYZ[:,0],tXYZ[:,1],
tXYZ[:,2],
Xsun=R0,Ysun=0.,Zsun=Zsun)
tvxvyvz= bovy_coords.vrpmllpmbb_to_vxvyvz(ivX.flatten(),
ivY.flatten(),
ivZ.flatten(),
tXYZ[:,0],tXYZ[:,1],
tXYZ[:,2],XYZ=True)
ivR,ivT,ivZ= bovy_coords.vxvyvz_to_galcencyl(tvxvyvz[:,0],
tvxvyvz[:,1],
tvxvyvz[:,2],
iR,iphi,iZ,
galcen=True,
vsun=vsun)
iR/= Rnorm
iZ/= Rnorm
ivR/= Vnorm
ivT/= Vnorm
ivZ/= Vnorm
else:
#Convert to cylindrical coordinates
iR,iphi,iZ=\
bovy_coords.rect_to_cyl(iX.flatten(),iY.flatten(),iZ.flatten())
ivR,ivT,ivZ=\
bovy_coords.rect_to_cyl_vec(ivX.flatten(),ivY.flatten(),
ivZ.flatten(),
iR,iphi,iZ,cyl=True)
#Add the additional Jacobian dXdY/dldb... if necessary
if kwargs.get('lb',False):
#Find the nearest track point
interp= kwargs.get('interp',self._useInterp)
if not 'cindx' in kwargs:
cindx= self._find_closest_trackpointLB(*xy,interp=interp,
usev=True)
else:
cindx= kwargs['cindx']
#Only l,b,d,... to Galactic X,Y,Z,... is necessary because going
#from Galactic to Galactocentric has Jacobian determinant 1
if interp:
addLogDet= self._interpolatedTrackLogDetJacLB[cindx]
else:
addLogDet= self._trackLogDetJacLB[cindx]
else:
addLogDet= 0.
logdf= self(iR,ivR,ivT,iZ,ivZ,iphi,log=True)
return logsumexp(logdf
+numpy.log(iXw.flatten())
+numpy.log(iYw.flatten())
+numpy.log(iZw.flatten())
+numpy.log(ivXw.flatten())
+numpy.log(ivYw.flatten())
+numpy.log(ivZw.flatten()))\
+0.5*numpy.log(numpy.linalg.det(gaussvar))\
+addLogDet
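    # Illustrative usage sketch (not part of the original source; `sdf` and the
    # numerical values are hypothetical): callMarg marginalizes over the
    # dimensions that are set to None, e.g.
    #
    #   p= sdf.callMarg([None,None,None,0.1,0.5,0.])               # density of the given velocities
    #   p_lb= sdf.callMarg([30.,5.,None,None,None,None],lb=True)   # given (l,b) only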
def gaussApprox(self,xy,**kwargs):
"""
NAME:
gaussApprox
PURPOSE:
return the mean and variance of a Gaussian approximation to the stream DF at a given phase-space point in Galactocentric rectangular coordinates (distribution is over missing directions)
INPUT:
xy - phase-space point [X,Y,Z,vX,vY,vZ]; the distribution of the dimensions set to None is returned
interp= (object-wide interp default) if True, use the interpolated stream track
cindx= index of the closest point on the (interpolated) stream track if not given, determined from the dimensions given
lb= (False) if True, xy contains [l,b,D,vlos,pmll,pmbb] in [deg,deg,kpc,km/s,mas/yr,mas/yr] and the Gaussian approximation in these coordinates is returned
OUTPUT:
(mean,variance) of the approximate Gaussian DF for the missing directions in xy
HISTORY:
2013-12-12 - Written - Bovy (IAS)
"""
interp= kwargs.get('interp',self._useInterp)
lb= kwargs.get('lb',False)
#What are we looking for
coordGiven= numpy.array([not x is None for x in xy],dtype='bool')
nGiven= numpy.sum(coordGiven)
#First find the nearest track point
if not 'cindx' in kwargs and lb:
cindx= self._find_closest_trackpointLB(*xy,interp=interp,
usev=True)
elif not 'cindx' in kwargs and not lb:
cindx= self._find_closest_trackpoint(*xy,xy=True,interp=interp,
usev=True)
else:
cindx= kwargs['cindx']
#Get the covariance matrix
if interp and lb:
tcov= self._interpolatedAllErrCovsLBUnscaled[cindx]
tmean= self._interpolatedObsTrackLB[cindx]
elif interp and not lb:
tcov= self._interpolatedAllErrCovsXY[cindx]
tmean= self._interpolatedObsTrackXY[cindx]
elif not interp and lb:
tcov= self._allErrCovsLBUnscaled[cindx]
tmean= self._ObsTrackLB[cindx]
elif not interp and not lb:
tcov= self._allErrCovsXY[cindx]
tmean= self._ObsTrackXY[cindx]
if lb:#Apply scale factors
tcov= copy.copy(tcov)
tcov*= numpy.tile(self._ErrCovsLBScale,(6,1))
tcov*= numpy.tile(self._ErrCovsLBScale,(6,1)).T
#Fancy indexing to recover V22, V11, and V12; V22, V11, V12 as in Appendix B of 0905.2979v1
V11indx0= numpy.array([[ii for jj in range(6-nGiven)] for ii in range(6) if not coordGiven[ii]])
V11indx1= numpy.array([[ii for ii in range(6) if not coordGiven[ii]] for jj in range(6-nGiven)])
V11= tcov[V11indx0,V11indx1]
V22indx0= numpy.array([[ii for jj in range(nGiven)] for ii in range(6) if coordGiven[ii]])
V22indx1= numpy.array([[ii for ii in range(6) if coordGiven[ii]] for jj in range(nGiven)])
V22= tcov[V22indx0,V22indx1]
V12indx0= numpy.array([[ii for jj in range(nGiven)] for ii in range(6) if not coordGiven[ii]])
V12indx1= numpy.array([[ii for ii in range(6) if coordGiven[ii]] for jj in range(6-nGiven)])
V12= tcov[V12indx0,V12indx1]
#Also get m1 and m2, again following Appendix B of 0905.2979v1
        m1= tmean[~coordGiven]
m2= tmean[coordGiven]
#conditional mean and variance
V22inv= numpy.linalg.inv(V22)
v2= numpy.array([xy[ii] for ii in range(6) if coordGiven[ii]])
condMean= m1+numpy.dot(V12,numpy.dot(V22inv,v2-m2))
condVar= V11-numpy.dot(V12,numpy.dot(V22inv,V12.T))
return (condMean,condVar)
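    # Illustrative sketch (hypothetical values, not in the original source):
    # gaussApprox returns the conditional mean and covariance of the dimensions
    # set to None, given the specified ones, e.g.
    #
    #   mean, var= sdf.gaussApprox([None,None,None,None,None,2.],lb=True)
    #
    # approximates the joint distribution of (l,b,D,vlos,pmll) at pmbb= 2 mas/yr.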
################################SAMPLE THE DF##################################
def sample(self,n,returnaAdt=False,returndt=False,interp=None,
xy=False,lb=False,
Vnorm=None,Rnorm=None,
R0=None,Zsun=None,vsun=None):
"""
NAME:
sample
PURPOSE:
sample from the DF
INPUT:
n - number of points to return
returnaAdt= (False) if True, return (Omega,angle,dt)
           returndt= (False) if True, also return the time since the star was stripped
interp= (object-wide default) use interpolation of the stream track
xy= (False) if True, return Galactocentric rectangular coordinates
lb= (False) if True, return Galactic l,b,d,vlos,pmll,pmbb coordinates
+Coordinate transformation inputs (all default to the instance-wide
values):
Vnorm= circular velocity to normalize velocities with
Rnorm= Galactocentric radius to normalize positions with
R0= Galactocentric radius of the Sun (kpc)
Zsun= Sun's height above the plane (kpc)
vsun= Sun's motion in cylindrical coordinates (vR positive away from center)
OUTPUT:
(R,vR,vT,z,vz,phi) of points on the stream in 6,N array
HISTORY:
2013-12-22 - Written - Bovy (IAS)
"""
if interp is None:
interp= self._useInterp
#First sample frequencies
#Sample frequency along largest eigenvalue using ARS
dO1s=\
bovy_ars.bovy_ars([0.,0.],[True,False],
[self._meandO-numpy.sqrt(self._sortedSigOEig[2]),
self._meandO+numpy.sqrt(self._sortedSigOEig[2])],
_h_ars,_hp_ars,nsamples=n,
hxparams=(self._meandO,self._sortedSigOEig[2]),
maxn=100)
dO1s= numpy.array(dO1s)*self._sigMeanSign
dO2s= numpy.random.normal(size=n)*numpy.sqrt(self._sortedSigOEig[1])
dO3s= numpy.random.normal(size=n)*numpy.sqrt(self._sortedSigOEig[0])
#Rotate into dOs in R,phi,z coordinates
dO= numpy.vstack((dO3s,dO2s,dO1s))
dO= numpy.dot(self._sigomatrixEig[1][:,self._sigomatrixEigsortIndx],
dO)
Om= dO+numpy.tile(self._progenitor_Omega.T,(n,1)).T
#Also generate angles
da= numpy.random.normal(size=(3,n))*self._sigangle
#And a random time
dt= numpy.random.uniform(size=n)*self._tdisrupt
#Integrate the orbits relative to the progenitor
da+= dO*numpy.tile(dt,(3,1))
angle= da+numpy.tile(self._progenitor_angle.T,(n,1)).T
if returnaAdt:
return (Om,angle,dt)
#Propagate to R,vR,etc.
RvR= self._approxaAInv(Om[0,:],Om[1,:],Om[2,:],
angle[0,:],angle[1,:],angle[2,:],
interp=interp)
if returndt and not xy and not lb:
return (RvR,dt)
elif not xy and not lb:
return RvR
if xy:
sX= RvR[0]*numpy.cos(RvR[5])
sY= RvR[0]*numpy.sin(RvR[5])
sZ= RvR[3]
svX, svY, svZ=\
bovy_coords.cyl_to_rect_vec(RvR[1],
RvR[2],
RvR[4],
RvR[5])
out= numpy.empty((6,n))
out[0]= sX
out[1]= sY
out[2]= sZ
out[3]= svX
out[4]= svY
out[5]= svZ
if returndt:
return (out,dt)
else:
return out
if lb:
if Vnorm is None:
Vnorm= self._Vnorm
if Rnorm is None:
Rnorm= self._Rnorm
if R0 is None:
R0= self._R0
if Zsun is None:
Zsun= self._Zsun
if vsun is None:
vsun= self._vsun
XYZ= bovy_coords.galcencyl_to_XYZ(RvR[0]*Rnorm,
RvR[5],
RvR[3]*Rnorm,
Xsun=R0,Zsun=Zsun)
vXYZ= bovy_coords.galcencyl_to_vxvyvz(RvR[1]*Vnorm,
RvR[2]*Vnorm,
RvR[4]*Vnorm,
RvR[5],
vsun=vsun)
slbd=bovy_coords.XYZ_to_lbd(XYZ[0],XYZ[1],XYZ[2],
degree=True)
svlbd= bovy_coords.vxvyvz_to_vrpmllpmbb(vXYZ[0],vXYZ[1],vXYZ[2],
slbd[:,0],slbd[:,1],
slbd[:,2],
degree=True)
out= numpy.empty((6,n))
out[0]= slbd[:,0]
out[1]= slbd[:,1]
out[2]= slbd[:,2]
out[3]= svlbd[:,0]
out[4]= svlbd[:,1]
out[5]= svlbd[:,2]
if returndt:
return (out,dt)
else:
return out
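# Illustrative usage sketch (not part of the original source; `sdf` and the
# numbers are hypothetical): sample stream members either in configuration
# space or in observed coordinates,
#
#   RvR= sdf.sample(1000)                              # (R,vR,vT,z,vz,phi), shape [6,1000]
#   lbd, dt= sdf.sample(1000,lb=True,returndt=True)    # observed coordinates + stripping times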
def _h_ars(x,params):
"""ln p(Omega) for ARS"""
mO, sO2= params
return -0.5*(x-mO)**2./sO2+numpy.log(x)
def _hp_ars(x,params):
"""d ln p(Omega) / d Omega for ARS"""
mO, sO2= params
return -(x-mO)/sO2+1./x
def _determine_stream_track_single(aA,progenitorTrack,trackt,
progenitor_angle,sigMeanSign,
dsigomeanProgDirection,meanOmega,
thetasTrack):
#Setup output
allAcfsTrack= numpy.empty((9))
alljacsTrack= numpy.empty((6,6))
allinvjacsTrack= numpy.empty((6,6))
ObsTrack= numpy.empty((6))
ObsTrackAA= numpy.empty((6))
detdOdJ= numpy.empty(6)
#Calculate
tacfs= aA.actionsFreqsAngles(progenitorTrack(trackt),
maxn=3)
allAcfsTrack[0]= tacfs[0][0]
allAcfsTrack[1]= tacfs[1][0]
allAcfsTrack[2]= tacfs[2][0]
for jj in range(3,9):
allAcfsTrack[jj]= tacfs[jj]
tjac= calcaAJac(progenitorTrack(trackt)._orb.vxvv,
aA,
dxv=None,actionsFreqsAngles=True,
lb=False,
_initacfs=tacfs)
alljacsTrack[:,:]= tjac[3:,:]
tinvjac= numpy.linalg.inv(tjac[3:,:])
allinvjacsTrack[:,:]= tinvjac
#Also store detdOdJ
jindx= numpy.array([True,True,True,False,False,False,True,True,True],
dtype='bool')
dOdJ= numpy.dot(tjac[3:,:],numpy.linalg.inv(tjac[jindx,:]))[0:3,0:3]
detdOdJ= numpy.linalg.det(dOdJ)
theseAngles= numpy.mod(progenitor_angle\
+thetasTrack\
*sigMeanSign\
*dsigomeanProgDirection,
2.*numpy.pi)
ObsTrackAA[3:]= theseAngles
diffAngles= theseAngles-allAcfsTrack[6:]
diffAngles[(diffAngles > numpy.pi)]= diffAngles[(diffAngles > numpy.pi)]-2.*numpy.pi
diffAngles[(diffAngles < -numpy.pi)]= diffAngles[(diffAngles < -numpy.pi)]+2.*numpy.pi
thisFreq= meanOmega(thetasTrack)
ObsTrackAA[:3]= thisFreq
diffFreqs= thisFreq-allAcfsTrack[3:6]
ObsTrack[:]= numpy.dot(tinvjac,
numpy.hstack((diffFreqs,diffAngles)))
ObsTrack[0]+= \
progenitorTrack(trackt).R()
ObsTrack[1]+= \
progenitorTrack(trackt).vR()
ObsTrack[2]+= \
progenitorTrack(trackt).vT()
ObsTrack[3]+= \
progenitorTrack(trackt).z()
ObsTrack[4]+= \
progenitorTrack(trackt).vz()
ObsTrack[5]+= \
progenitorTrack(trackt).phi()
return [allAcfsTrack,alljacsTrack,allinvjacsTrack,ObsTrack,ObsTrackAA,
detdOdJ]
def _determine_stream_spread_single(sigomatrixEig,
thetasTrack,
sigOmega,
sigAngle,
allinvjacsTrack):
"""sigAngle input may either be a function that returns the dispersion in
perpendicular angle as a function of parallel angle, or a value"""
#Estimate the spread in all frequencies and angles
sigObig2= sigOmega(thetasTrack)**2.
tsigOdiag= copy.copy(sigomatrixEig[0])
tsigOdiag[numpy.argmax(tsigOdiag)]= sigObig2
tsigO= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigOdiag),
numpy.linalg.inv(sigomatrixEig[1])))
#angles
if hasattr(sigAngle,'__call__'):
sigangle2= sigAngle(thetasTrack)**2.
else:
sigangle2= sigAngle**2.
tsigadiag= numpy.ones(3)*sigangle2
tsigadiag[numpy.argmax(tsigOdiag)]= 1.
tsiga= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigadiag),
numpy.linalg.inv(sigomatrixEig[1])))
#correlations, assume half correlated for now (can be calculated)
correlations= numpy.diag(0.5*numpy.ones(3))*numpy.sqrt(tsigOdiag*tsigadiag)
correlations[numpy.argmax(tsigOdiag),numpy.argmax(tsigOdiag)]= 0.
correlations= numpy.dot(sigomatrixEig[1],
numpy.dot(correlations,
numpy.linalg.inv(sigomatrixEig[1])))
#Now convert
fullMatrix= numpy.empty((6,6))
fullMatrix[:3,:3]= tsigO
fullMatrix[3:,3:]= tsiga
fullMatrix[3:,:3]= correlations
fullMatrix[:3,3:]= correlations.T
return numpy.dot(allinvjacsTrack,numpy.dot(fullMatrix,allinvjacsTrack.T))
def calcaAJac(xv,aA,dxv=None,freqs=False,dOdJ=False,actionsFreqsAngles=False,
lb=False,coordFunc=None,
Vnorm=220.,Rnorm=8.,R0=8.,Zsun=0.025,vsun=[-11.1,8.*30.24,7.25],
_initacfs=None):
"""
NAME:
calcaAJac
PURPOSE:
calculate the Jacobian d(J,theta)/d(x,v)
INPUT:
xv - phase-space point: Either
1) [R,vR,vT,z,vz,phi]
2) [l,b,D,vlos,pmll,pmbb] (if lb=True, see below)
3) list/array of 6 numbers that can be transformed into (normalized) R,vR,vT,z,vz,phi using coordFunc
aA - actionAngle instance
       dxv - infinitesimal to use (rescaled for lb, so think fractionally)
freqs= (False) if True, go to frequencies rather than actions
dOdJ= (False), actually calculate d Frequency / d action
actionsFreqsAngles= (False) if True, calculate d(action,freq.,angle)/d (xv)
lb= (False) if True, start with (l,b,D,vlos,pmll,pmbb) in (deg,deg,kpc,km/s,mas/yr,mas/yr)
Vnorm= (220) circular velocity to normalize with when lb=True
Rnorm= (8) Galactocentric radius to normalize with when lb=True
R0= (8) Galactocentric radius of the Sun (kpc)
Zsun= (0.025) Sun's height above the plane (kpc)
vsun= ([-11.1,241.92,7.25]) Sun's motion in cylindrical coordinates (vR positive away from center)
coordFunc= (None) if set, this is a function that takes xv and returns R,vR,vT,z,vz,phi in normalized units (units where vc=1 at r=1 if the potential is normalized that way, for example)
OUTPUT:
Jacobian matrix
HISTORY:
2013-11-25 - Written - Bovy (IAS)
"""
if lb:
coordFunc= lambda x: lbCoordFunc(xv,Vnorm,Rnorm,R0,Zsun,vsun)
if not coordFunc is None:
R, vR, vT, z, vz, phi= coordFunc(xv)
else:
R, vR, vT, z, vz, phi= xv[0],xv[1],xv[2],xv[3],xv[4],xv[5]
if dxv is None:
dxv= 10.**-8.*numpy.ones(6)
if lb:
#Re-scale some of the differences, to be more natural
dxv[0]*= 180./numpy.pi
dxv[1]*= 180./numpy.pi
dxv[2]*= Rnorm
dxv[3]*= Vnorm
dxv[4]*= Vnorm/4.74047/xv[2]
dxv[5]*= Vnorm/4.74047/xv[2]
if actionsFreqsAngles:
jac= numpy.zeros((9,6))
else:
jac= numpy.zeros((6,6))
if dOdJ:
jac2= numpy.zeros((6,6))
if _initacfs is None:
jr,lz,jz,Or,Ophi,Oz,ar,aphi,az\
= aA.actionsFreqsAngles(R,vR,vT,z,vz,phi,maxn=3)
else:
jr,lz,jz,Or,Ophi,Oz,ar,aphi,az\
= _initacfs
for ii in range(6):
temp= xv[ii]+dxv[ii] #Trick to make sure dxv is representable
dxv[ii]= temp-xv[ii]
xv[ii]+= dxv[ii]
if not coordFunc is None:
tR, tvR, tvT, tz, tvz, tphi= coordFunc(xv)
else:
tR, tvR, tvT, tz, tvz, tphi= xv[0],xv[1],xv[2],xv[3],xv[4],xv[5]
tjr,tlz,tjz,tOr,tOphi,tOz,tar,taphi,taz\
= aA.actionsFreqsAngles(tR,tvR,tvT,tz,tvz,tphi,maxn=3)
xv[ii]-= dxv[ii]
angleIndx= 3
if actionsFreqsAngles:
jac[0,ii]= (tjr-jr)/dxv[ii]
jac[1,ii]= (tlz-lz)/dxv[ii]
jac[2,ii]= (tjz-jz)/dxv[ii]
jac[3,ii]= (tOr-Or)/dxv[ii]
jac[4,ii]= (tOphi-Ophi)/dxv[ii]
jac[5,ii]= (tOz-Oz)/dxv[ii]
angleIndx= 6
elif freqs:
jac[0,ii]= (tOr-Or)/dxv[ii]
jac[1,ii]= (tOphi-Ophi)/dxv[ii]
jac[2,ii]= (tOz-Oz)/dxv[ii]
else:
jac[0,ii]= (tjr-jr)/dxv[ii]
jac[1,ii]= (tlz-lz)/dxv[ii]
jac[2,ii]= (tjz-jz)/dxv[ii]
if dOdJ:
jac2[0,ii]= (tOr-Or)/dxv[ii]
jac2[1,ii]= (tOphi-Ophi)/dxv[ii]
jac2[2,ii]= (tOz-Oz)/dxv[ii]
#For the angles, make sure we do not hit a turning point
if tar-ar > numpy.pi:
jac[angleIndx,ii]= (tar-ar-2.*numpy.pi)/dxv[ii]
elif tar-ar < -numpy.pi:
jac[angleIndx,ii]= (tar-ar+2.*numpy.pi)/dxv[ii]
else:
jac[angleIndx,ii]= (tar-ar)/dxv[ii]
if taphi-aphi > numpy.pi:
jac[angleIndx+1,ii]= (taphi-aphi-2.*numpy.pi)/dxv[ii]
elif taphi-aphi < -numpy.pi:
jac[angleIndx+1,ii]= (taphi-aphi+2.*numpy.pi)/dxv[ii]
else:
jac[angleIndx+1,ii]= (taphi-aphi)/dxv[ii]
        if taz-az > numpy.pi:
            jac[angleIndx+2,ii]= (taz-az-2.*numpy.pi)/dxv[ii]
        elif taz-az < -numpy.pi:
            jac[angleIndx+2,ii]= (taz-az+2.*numpy.pi)/dxv[ii]
        else:
            jac[angleIndx+2,ii]= (taz-az)/dxv[ii]
if dOdJ:
jac2[3,:]= jac[3,:]
jac2[4,:]= jac[4,:]
jac2[5,:]= jac[5,:]
jac= numpy.dot(jac2,numpy.linalg.inv(jac))[0:3,0:3]
return jac
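# Illustrative sketch (hypothetical inputs, not in the original source): the
# numerical Jacobian d(J,theta)/d(x,v) at a phase-space point could be obtained
# as
#
#   jac= calcaAJac([1.,0.1,1.1,0.,0.1,0.],aA,actionsFreqsAngles=True)
#
# with aA an actionAngle instance for the potential in use.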
def lbCoordFunc(xv,Vnorm,Rnorm,R0,Zsun,vsun):
#Input is (l,b,D,vlos,pmll,pmbb) in (deg,deg,kpc,km/s,mas/yr,mas/yr)
X,Y,Z= bovy_coords.lbd_to_XYZ(xv[0],xv[1],xv[2],degree=True)
R,phi,Z= bovy_coords.XYZ_to_galcencyl(X,Y,Z,
Xsun=R0,Ysun=0.,Zsun=Zsun)
vx,vy,vz= bovy_coords.vrpmllpmbb_to_vxvyvz(xv[3],xv[4],xv[5],
X,Y,Z,XYZ=True)
vR,vT,vZ= bovy_coords.vxvyvz_to_galcencyl(vx,vy,vz,R,phi,Z,galcen=True,
vsun=vsun)
R/= Rnorm
Z/= Rnorm
vR/= Vnorm
vT/= Vnorm
vZ/= Vnorm
return (R,vR,vT,Z,vZ,phi)<|fim▁end|> | NAME:
meantdAngle
|
<|file_name|>initiatives.js<|end_file_name|><|fim▁begin|>'use strict';
describe('Service: Initiatives', function () {
// instantiate service
var Initiatives,
Timeout,
cfg,
$httpBackend,
$rootScope,
tPromise;
// load the service's module
beforeEach(module('sumaAnalysis'));
beforeEach(inject(function (_$rootScope_, _$httpBackend_, _initiatives_, $q, $timeout) {
$rootScope = _$rootScope_;
$httpBackend = _$httpBackend_;
Initiatives = _initiatives_;
tPromise = $q.defer();
Timeout = $timeout;
cfg = {<|fim▁hole|> timeoutPromise: tPromise,
timeout: 180000
};
}));
it('should make an AJAX call', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond([{}, {}]);
Initiatives.get(cfg).then(function (result) {
expect(result.length).to.equal(2);
done();
});
$httpBackend.flush();
});
it('should respond with error message on failure', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond(500, {message: 'Error'});
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Error',
code: 500
});
done();
});
$httpBackend.flush();
});
it('should return error with promiseTimeout true on aborted http request', function (done) {
// simulate aborted request
$httpBackend.whenGET('lib/php/initiatives.php')
.respond(0, {message: 'Error'});
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Initiatives.get Timeout',
code: 0,
promiseTimeout: true
});
done();
});
$httpBackend.flush();
});
it('should return error without promiseTimeout on http timeout', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond([{}, {}]);
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Initiatives.get Timeout',
code: 0
});
done();
});
Timeout.flush();
});
});<|fim▁end|> | |
<|file_name|>ws_BinaryClass_10_DecisionTreeClassifier_db2_code_gen.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class_gen.test_model("DecisionTreeClassifier" , "BinaryClass_10" , "db2")<|fim▁end|> | from sklearn2sql_heroku.tests.classification import generic as class_gen |
<|file_name|>codegen_types.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use schema_capnp::{brand, node, type_};
use capnp::Error;
use codegen;
use codegen::{GeneratorContext};
use std::collections::hash_map::HashMap;
#[derive(Copy,Clone,PartialEq)]
pub enum Leaf {
Reader(&'static str),
Builder(&'static str),
Owned,
Client,
Server,
ServerDispatch,
Pipeline
}
impl ::std::fmt::Display for Leaf {
fn fmt(&self, fmt:&mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
let display_string = match self {
&Leaf::Reader(lt) => format!("Reader<{}>", lt),
&Leaf::Builder(lt) => format!("Builder<{}>", lt),
&Leaf::Owned => "Owned".to_string(),
&Leaf::Client => "Client".to_string(),
&Leaf::Server => "Server".to_string(),
&Leaf::ServerDispatch => "ServerDispatch".to_string(),
&Leaf::Pipeline => "Pipeline".to_string(),
};
::std::fmt::Display::fmt(&display_string, fmt)
}
}
impl Leaf {
fn bare_name(&self) -> &'static str {
match self {
&Leaf::Reader(_) => "Reader",
&Leaf::Builder(_) => "Builder",
&Leaf::Owned => "Owned",
&Leaf::Client => "Client",
&Leaf::Server => "Server",
&Leaf::ServerDispatch => "ServerDispatch",
&Leaf::Pipeline => "Pipeline",
}
}
fn _have_lifetime(&self) -> bool {
match self {
&Leaf::Reader(_) | &Leaf::Builder(_) => true,
&Leaf::Owned | &Leaf::Client | &Leaf::Server | &Leaf::ServerDispatch | &Leaf::Pipeline => false,
}
}
}
pub struct TypeParameterTexts {
pub expanded_list: Vec<String>,
pub params: String,
pub where_clause: String,
pub where_clause_with_send: String,
pub pipeline_where_clause: String,
pub phantom_data: String
}
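// Illustrative sketch (not from the original source): for a hypothetical
// generic node `struct Map(Key, Value)`, `parameters_texts` below would yield
// roughly
//
//   expanded_list: vec!["Key".to_string(), "Value".to_string()]
//   params:        "Key,Value"
//   where_clause:  "where Key: for<'c> ::capnp::traits::Owned<'c>, Value: for<'c> ::capnp::traits::Owned<'c> "
//
// The exact strings depend on the schema being compiled.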
// this is a collection of helpers acting on a "Node" (most of them are Type definitions)
pub trait RustNodeInfo {
fn parameters_texts(&self, gen: &::codegen::GeneratorContext,
parent_node_id: Option<u64>) -> TypeParameterTexts;
}
// this is a collection of helpers acting on a "Type" (someplace where a Type is used, not defined)
pub trait RustTypeInfo {
fn is_prim(&self) -> Result<bool, Error>;
fn is_parameter(&self) -> Result<bool, Error>;
fn is_branded(&self) -> Result<bool, Error>;
fn type_string(&self, gen:&codegen::GeneratorContext, module:Leaf) -> Result<String, Error>;
}
impl <'a> RustNodeInfo for node::Reader<'a> {
fn parameters_texts(&self, gen:&::codegen::GeneratorContext,
parent_node_id: Option<u64>) -> TypeParameterTexts {
if self.get_is_generic() {
let params = get_type_parameters(&gen, self.get_id(), parent_node_id);
let type_parameters = params.iter().map(|param| {
format!("{}",param)
}).collect::<Vec<String>>().join(",");
let where_clause = "where ".to_string() + &*(params.iter().map(|param| {
format!("{}: for<'c> ::capnp::traits::Owned<'c>", param)
}).collect::<Vec<String>>().join(", ") + " ");
let where_clause_with_send = "where ".to_string() + &*(params.iter().map(|param| {
format!("{}:'static", param)
}).collect::<Vec<String>>().join(", ") + " ");
let pipeline_where_clause = "where ".to_string() + &*(params.iter().map(|param| {
format!("{}: ::capnp::traits::Pipelined, <{} as ::capnp::traits::Pipelined>::Pipeline: ::capnp::capability::FromTypelessPipeline", param, param)
}).collect::<Vec<String>>().join(", ") + " ");
let phantom_data = "_phantom: ::std::marker::PhantomData,".to_string();
TypeParameterTexts {
expanded_list: params,
params: type_parameters,
where_clause: where_clause,
where_clause_with_send: where_clause_with_send,
pipeline_where_clause: pipeline_where_clause,
phantom_data: phantom_data
}
} else {
TypeParameterTexts {
expanded_list: vec!(),
params: "".to_string(),
where_clause: "".to_string(),
where_clause_with_send: "".to_string(),
pipeline_where_clause: "".to_string(),
phantom_data: "".to_string(),
}
}
}
}
impl <'a> RustTypeInfo for type_::Reader<'a> {
fn type_string(&self, gen:&codegen::GeneratorContext, module:Leaf) -> Result<String, ::capnp::Error> {
let local_lifetime = match module {
Leaf::Reader(lt) => lt,
Leaf::Builder(lt) => lt,
_ => "",
};
let lifetime_comma = if local_lifetime == "" { "".to_string() } else {
format!("{},", local_lifetime)
};
match try!(self.which()) {
type_::Void(()) => Ok("()".to_string()),
type_::Bool(()) => Ok("bool".to_string()),
type_::Int8(()) => Ok("i8".to_string()),
type_::Int16(()) => Ok("i16".to_string()),
type_::Int32(()) => Ok("i32".to_string()),
type_::Int64(()) => Ok("i64".to_string()),
type_::Uint8(()) => Ok("u8".to_string()),
type_::Uint16(()) => Ok("u16".to_string()),
type_::Uint32(()) => Ok("u32".to_string()),
type_::Uint64(()) => Ok("u64".to_string()),
type_::Float32(()) => Ok("f32".to_string()),
type_::Float64(()) => Ok("f64".to_string()),
type_::Text(()) => Ok(format!("::capnp::text::{}", module)),
type_::Data(()) => Ok(format!("::capnp::data::{}", module)),
type_::Struct(st) => {
do_branding(gen, st.get_type_id(), try!(st.get_brand()), module,
gen.scope_map[&st.get_type_id()].join("::"), None)
}
type_::Interface(interface) => {
do_branding(gen, interface.get_type_id(), try!(interface.get_brand()), module,
gen.scope_map[&interface.get_type_id()].join("::"), None)
}
type_::List(ot1) => {
let element_type = try!(ot1.get_element_type());
match try!(element_type.which()) {
type_::Struct(_) => {
let inner = try!(element_type.type_string(gen, Leaf::Owned));
Ok(format!("::capnp::struct_list::{}<{}{}>", module.bare_name(), lifetime_comma, inner))
},
type_::Enum(_) => {
let inner = try!(element_type.type_string(gen, Leaf::Owned));
Ok(format!("::capnp::enum_list::{}<{}{}>", module.bare_name(), lifetime_comma, inner))
},
type_::List(_) => {
let inner = try!(element_type.type_string(gen, Leaf::Owned));
Ok(format!("::capnp::list_list::{}<{}{}>", module.bare_name(), lifetime_comma, inner))
},
type_::Text(()) => {
Ok(format!("::capnp::text_list::{}", module))
},
type_::Data(()) => {
Ok(format!("::capnp::data_list::{}", module))
},
type_::Interface(_) => {
let inner = try!(element_type.type_string(gen, Leaf::Client));
Ok(format!("::capnp::capability_list::{}<{}{}>", module.bare_name(), lifetime_comma, inner))
}
type_::AnyPointer(_) => Err(Error::failed("List(AnyPointer) is unsupported".to_string())),
_ => {
let inner = try!(element_type.type_string(gen, Leaf::Owned));
Ok(format!("::capnp::primitive_list::{}<{}{}>", module.bare_name(), lifetime_comma, inner))
},
}
},
type_::Enum(en) => {
let scope = &gen.scope_map[&en.get_type_id()];
Ok(scope.join("::").to_string())
},
type_::AnyPointer(pointer) => {
match try!(pointer.which()) {
type_::any_pointer::Parameter(def) => {
let the_struct = &gen.node_map[&def.get_scope_id()];
let parameters = try!(the_struct.get_parameters());
let parameter = parameters.get(def.get_parameter_index() as u32);
let parameter_name = try!(parameter.get_name());
match module {
Leaf::Owned => Ok(parameter_name.to_string()),
Leaf::Reader(lifetime) => {
Ok(format!(
"<{} as ::capnp::traits::Owned<{}>>::Reader",
parameter_name, lifetime))
}
Leaf::Builder(lifetime) => {
Ok(format!(
"<{} as ::capnp::traits::Owned<{}>>::Builder",
parameter_name, lifetime))
}
Leaf::Pipeline => {
Ok(format!("<{} as ::capnp::traits::Pipelined>::Pipeline", parameter_name))
}
_ => Err(Error::unimplemented("unimplemented any_pointer leaf".to_string())),
}
},
_ => {
match module {
Leaf::Reader(lifetime) => {
Ok(format!("::capnp::any_pointer::Reader<{}>", lifetime))
}
Leaf::Builder(lifetime) => {
Ok(format!("::capnp::any_pointer::Builder<{}>", lifetime))
}
_ => {
Ok(format!("::capnp::any_pointer::{}", module))
}
}
}
}
}
}
}
fn is_parameter(&self) -> Result<bool, Error> {
match try!(self.which()) {
type_::AnyPointer(pointer) => {
match try!(pointer.which()) {
type_::any_pointer::Parameter(_) => Ok(true),
_ => Ok(false),
}
}
_ => Ok(false)
}
}
fn is_branded(&self) -> Result<bool, Error> {
match try!(self.which()) {
type_::Struct(st) => {
let brand = try!(st.get_brand());
let scopes = try!(brand.get_scopes());
Ok(scopes.len() > 0)
}
_ => Ok(false)
}
}
#[inline(always)]
fn is_prim(&self) -> Result<bool, Error> {
match try!(self.which()) {
type_::Int8(()) | type_::Int16(()) | type_::Int32(()) | type_::Int64(()) |
type_::Uint8(()) | type_::Uint16(()) | type_::Uint32(()) | type_::Uint64(()) |
type_::Float32(()) | type_::Float64(()) | type_::Void(()) | type_::Bool(()) => Ok(true),
_ => Ok(false)
}
}
}
///
///
pub fn do_branding(gen: &GeneratorContext,
node_id: u64,
brand: brand::Reader,
leaf: Leaf,
the_mod: String,
mut parent_scope_id: Option<u64>) -> Result<String, Error> {
let scopes = try!(brand.get_scopes());
let mut brand_scopes = HashMap::new();
for scope in scopes.iter() {
brand_scopes.insert(scope.get_scope_id(), scope);
}
let brand_scopes = brand_scopes; // freeze
let mut current_node_id = node_id;
let mut accumulator: Vec<Vec<String>> = Vec::new();
loop {
let current_node = match gen.node_map.get(¤t_node_id) {
None => break,
Some(node) => node,<|fim▁hole|> None => {
for _ in params.iter() {
arguments.push("::capnp::any_pointer::Owned".to_string());
}
},
Some(scope) => {
match try!(scope.which()) {
brand::scope::Inherit(()) => {
for param in params.iter() {
arguments.push(try!(param.get_name()).to_string());
}
}
brand::scope::Bind(bindings_list_opt) => {
let bindings_list = try!(bindings_list_opt);
assert_eq!(bindings_list.len(), params.len());
for binding in bindings_list.iter() {
match try!(binding.which()) {
brand::binding::Unbound(()) => {
arguments.push("::capnp::any_pointer::Owned".to_string());
}
brand::binding::Type(t) => {
arguments.push(try!(try!(t).type_string(gen, Leaf::Owned)));
}
}
}
}
}
}
}
accumulator.push(arguments);
current_node_id = current_node.get_scope_id();
match (current_node_id, parent_scope_id) {
(0, Some(id)) => current_node_id = id,
_ => (),
}
parent_scope_id = None; // Only consider on the first time around.
}
// Now add a lifetime parameter if the leaf has one.
match leaf {
Leaf::Reader(lt) => accumulator.push(vec!(lt.to_string())),
Leaf::Builder(lt) => accumulator.push(vec!(lt.to_string())),
Leaf::ServerDispatch => accumulator.push(vec!["_T".to_string()]), // HACK
_ => (),
}
accumulator.reverse();
let accumulated = accumulator.concat();
let arguments = if accumulated.len() > 0 {
format!("<{}>", accumulated.join(","))
} else {
"".to_string()
};
Ok(format!(
"{mod}::{leaf}{maybe_colons}{arguments}",
mod = the_mod,
leaf = leaf.bare_name().to_string(),
maybe_colons = if leaf == Leaf::ServerDispatch { "::" } else { "" }, // HACK
arguments = arguments))
}
pub fn get_type_parameters(gen: &GeneratorContext,
node_id: u64,
mut parent_scope_id: Option<u64>) -> Vec<String> {
let mut current_node_id = node_id;
let mut accumulator: Vec<Vec<String>> = Vec::new();
loop {
let current_node = match gen.node_map.get(¤t_node_id) {
None => break,
Some(node) => node,
};
let mut params = Vec::new();
for param in current_node.get_parameters().unwrap().iter() {
params.push(param.get_name().unwrap().to_string());
}
accumulator.push(params);
current_node_id = current_node.get_scope_id();
match (current_node_id, parent_scope_id) {
(0, Some(id)) => current_node_id = id,
_ => (),
}
parent_scope_id = None; // Only consider on the first time around.
}
accumulator.reverse();
accumulator.concat()
}<|fim▁end|> | };
let params = try!(current_node.get_parameters());
let mut arguments: Vec<String> = Vec::new();
match brand_scopes.get(¤t_node_id) { |
<|file_name|>issue-49746-unicode-confusable-in-float-literal-expt.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//~| ERROR cannot subtract `{integer}` from `{float}`
fn main() {}<|fim▁end|> | const UNIVERSAL_GRAVITATIONAL_CONSTANT: f64 = 6.674e−11; // m³⋅kg⁻¹⋅s⁻²
//~^ ERROR expected at least one digit in exponent
//~| ERROR unknown start of token: \u{2212} |
<|file_name|>dependency_format.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Resolution of mixing rlibs and dylibs
//!
//! When producing a final artifact, such as a dynamic library, the compiler has
//! a choice between linking an rlib or linking a dylib of all upstream
//! dependencies. The linking phase must guarantee, however, that a library only
//! show up once in the object file. For example, it is illegal for library A to
//! be statically linked to B and C in separate dylibs, and then link B and C
//! into a crate D (because library A appears twice).
//!
//! The job of this module is to calculate what format each upstream crate
//! should be used when linking each output type requested in this session. This
//! generally follows this set of rules:
//!
//! 1. Each library must appear exactly once in the output.
//! 2. Each rlib contains only one library (it's just an object file)
//! 3. Each dylib can contain more than one library (due to static linking),
//! and can also bring in many dynamic dependencies.
//!
//! With these constraints in mind, it's generally a very difficult problem to
//! find a solution that's not "all rlibs" or "all dylibs". I have suspicions
//! that NP-ness may come into the picture here...
//!
//! The current selection algorithm below looks mostly similar to:
//!
//! 1. If static linking is required, then require all upstream dependencies
//! to be available as rlibs. If not, generate an error.
//! 2. If static linking is requested (generating an executable), then
//! attempt to use all upstream dependencies as rlibs. If any are not
//! found, bail out and continue to step 3.
//! 3. Static linking has failed, at least one library must be dynamically
//! linked. Apply a heuristic by greedily maximizing the number of
//! dynamically linked libraries.
//! 4. Each upstream dependency available as a dynamic library is
//! registered. The dependencies all propagate, adding to a map. It is
//! possible for a dylib to add a static library as a dependency, but it
//! is illegal for two dylibs to add the same static library as a
//! dependency. The same dylib can be added twice. Additionally, it is
//! illegal to add a static dependency when it was previously found as a
//! dylib (and vice versa)
//! 5. After all dynamic dependencies have been traversed, re-traverse the
//! remaining dependencies and add them statically (if they haven't been
//! added already).
//!
//! While not perfect, this algorithm should help support use-cases such as leaf
//! dependencies being static while the larger tree of inner dependencies is
//! all dynamic. This isn't currently very well battle tested, so it will likely
//! fall short in some use cases.
//!
//! Currently, there is no way to specify the preference of linkage with a
//! particular library (other than a global dynamic/static switch).
//! Additionally, the algorithm is geared towards finding *any* solution rather
//! than finding a number of solutions (there are normally quite a few).
use syntax::ast;
use session;
use session::config;
use metadata::cstore;
use metadata::csearch;
use middle::ty;
use util::nodemap::FnvHashMap;
/// A list of dependencies for a certain crate type.
///
/// The length of this vector is the same as the number of external crates used.
/// The value is None if the crate does not need to be linked (it was found
/// statically in another dylib), or Some(kind) if it needs to be linked as
/// `kind` (either static or dynamic).
pub type DependencyList = Vec<Option<cstore::LinkagePreference>>;
/// A mapping of all required dependencies for a particular flavor of output.
///
/// This is local to the tcx, and is generally relevant to one session.
pub type Dependencies = FnvHashMap<config::CrateType, DependencyList>;
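// Illustrative sketch (hypothetical crate graph, not from the original source):
// for an executable depending on crate A (available as a dylib) and crate B
// (only available as an rlib), the computed DependencyList could be
//
//     vec![Some(cstore::RequireDynamic), Some(cstore::RequireStatic)]
//
// and a crate already linked statically into one of the dylibs would appear as
// `None`.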
pub fn calculate(tcx: &ty::ctxt) {
let mut fmts = tcx.dependency_formats.borrow_mut();
for &ty in tcx.sess.crate_types.borrow().iter() {
fmts.insert(ty, calculate_type(&tcx.sess, ty));
}
tcx.sess.abort_if_errors();
}<|fim▁hole|>
fn calculate_type(sess: &session::Session,
ty: config::CrateType) -> DependencyList {
match ty {
// If the global prefer_dynamic switch is turned off, first attempt
// static linkage (this can fail).
config::CrateTypeExecutable if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
}
// No linkage happens with rlibs, we just needed the metadata (which we
// got long ago), so don't bother with anything.
config::CrateTypeRlib => return Vec::new(),
// Staticlibs must have all static dependencies. If any fail to be
// found, we generate some nice pretty errors.
config::CrateTypeStaticlib => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.rlib.is_some() { return }
sess.err(format!("dependency `{}` not found in rlib format",
data.name).as_slice());
});
return Vec::new();
}
// Generating a dylib without `-C prefer-dynamic` means that we're going
// to try to eagerly statically link all dependencies. This is normally
// done for end-product dylibs, not intermediate products.
config::CrateTypeDylib if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
}
// Everything else falls through below
config::CrateTypeExecutable | config::CrateTypeDylib => {},
}
let mut formats = FnvHashMap::new();
// Sweep all crates for found dylibs. Add all dylibs, as well as their
// dependencies, ensuring there are no conflicts. The only valid case for a
// dependency to be relied upon twice is for both cases to rely on a dylib.
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.dylib.is_some() {
debug!("adding dylib: {}", data.name);
add_library(sess, cnum, cstore::RequireDynamic, &mut formats);
let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum);
for &(depnum, style) in deps.iter() {
debug!("adding {}: {}", style,
sess.cstore.get_crate_data(depnum).name.clone());
add_library(sess, depnum, style, &mut formats);
}
}
});
// Collect what we've got so far in the return vector.
let mut ret = range(1, sess.cstore.next_crate_num()).map(|i| {
match formats.get(&i).map(|v| *v) {
v @ Some(cstore::RequireDynamic) => v,
_ => None,
}
}).collect::<Vec<_>>();
// Run through the dependency list again, and add any missing libraries as
// static libraries.
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.dylib.is_none() && !formats.contains_key(&cnum) {
assert!(src.rlib.is_some());
debug!("adding staticlib: {}", data.name);
add_library(sess, cnum, cstore::RequireStatic, &mut formats);
ret[cnum as uint - 1] = Some(cstore::RequireStatic);
}
});
// When dylib B links to dylib A, then when using B we must also link to A.
// It could be the case, however, that the rlib for A is present (hence we
// found metadata), but the dylib for A has since been removed.
//
// For situations like this, we perform one last pass over the dependencies,
// making sure that everything is available in the requested format.
for (cnum, kind) in ret.iter().enumerate() {
let cnum = cnum as ast::CrateNum;
let src = sess.cstore.get_used_crate_source(cnum + 1).unwrap();
match *kind {
None => continue,
Some(cstore::RequireStatic) if src.rlib.is_some() => continue,
Some(cstore::RequireDynamic) if src.dylib.is_some() => continue,
Some(kind) => {
let data = sess.cstore.get_crate_data(cnum + 1);
sess.err(format!("crate `{}` required to be available in {}, \
but it was not available in this form",
data.name,
match kind {
cstore::RequireStatic => "rlib",
cstore::RequireDynamic => "dylib",
}).as_slice());
}
}
}
return ret;
}
fn add_library(sess: &session::Session,
cnum: ast::CrateNum,
link: cstore::LinkagePreference,
m: &mut FnvHashMap<ast::CrateNum, cstore::LinkagePreference>) {
match m.get(&cnum) {
Some(&link2) => {
// If the linkages differ, then we'd have two copies of the library
// if we continued linking. If the linkages are both static, then we
// would also have two copies of the library (static from two
// different locations).
//
// This error is probably a little obscure, but I imagine that it
// can be refined over time.
if link2 != link || link == cstore::RequireStatic {
let data = sess.cstore.get_crate_data(cnum);
sess.err(format!("cannot satisfy dependencies so `{}` only \
shows up once",
data.name).as_slice());
sess.help("having upstream crates all available in one format \
will likely make this go away");
}
}
None => { m.insert(cnum, link); }
}
}
fn attempt_static(sess: &session::Session) -> Option<DependencyList> {
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
if crates.iter().all(|&(_, ref p)| p.is_some()) {
Some(crates.into_iter().map(|_| Some(cstore::RequireStatic)).collect())
} else {
None
}
}<|fim▁end|> | |
<|file_name|>schedcover.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This creates a CSV file from the output of the debug output of subtarget:
# llvm-tblgen --gen-subtarget --debug-only=subtarget-emitter
# With thanks to Dave Estes for mentioning the idea at 2014 LLVM Developers' Meeting
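#
# Illustrative invocation (file names are hypothetical; capture the tblgen
# debug output into a file first):
#   llvm-tblgen --gen-subtarget --debug-only=subtarget-emitter X86.td 2> sched.dbg
#   schedcover.py sched.dbg [model-name-regex]
# The optional second argument is a case-insensitive regex that filters the
# machine-model columns (see filter_model below).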
import os;
import sys;
import re;
import operator;
table = {}
models = set()
filt = None
def add(instr, model, resource=None):
global table, models
entry = table.setdefault(instr, dict())
entry[model] = resource
models.add(model)
def filter_model(m):
global filt
if m and filt:
return filt.search(m) != None
else:
return True
def display():
global table, models
# remove default and itinerary so we can control their sort order to make
# them first
models.discard("default")
models.discard("itinerary")
<|fim▁hole|> ordered_models = [m for m in ordered_models if filter_model(m)]
# print header
sys.stdout.write("instruction")
for model in ordered_models:
sys.stdout.write(", {}".format(model))
sys.stdout.write(os.linesep)
for (instr, mapping) in ordered_table:
sys.stdout.write(instr)
for model in ordered_models:
if model in mapping and mapping[model] is not None:
sys.stdout.write(", {}".format(mapping[model]))
else:
sys.stdout.write(", ")
sys.stdout.write(os.linesep)
def machineModelCover(path):
# The interesting bits
re_sched_default = re.compile("SchedRW machine model for ([^ ]*) (.*)\n");
re_sched_no_default = re.compile("No machine model for ([^ ]*)\n");
re_sched_spec = re.compile("InstRW on ([^ ]*) for ([^ ]*) (.*)\n");
re_sched_no_spec = re.compile("No machine model for ([^ ]*) on processor (.*)\n");
re_sched_itin = re.compile("Itinerary for ([^ ]*): ([^ ]*)\n")
# scan the file
with open(path, 'r') as f:
for line in f.readlines():
match = re_sched_default.match(line)
if match: add(match.group(1), "default", match.group(2))
match = re_sched_no_default.match(line)
if match: add(match.group(1), "default")
match = re_sched_spec.match(line)
if match: add(match.group(2), match.group(1), match.group(3))
match = re_sched_no_spec.match(line)
if match: add(match.group(1), match.group(2))
match = re_sched_itin.match(line)
if match: add(match.group(1), "itinerary", match.group(2))
display()
if len(sys.argv) > 2:
filt = re.compile(sys.argv[2], re.IGNORECASE)
machineModelCover(sys.argv[1])<|fim▁end|> | ordered_table = sorted(table.items(), key=operator.itemgetter(0))
ordered_models = ["itinerary", "default"]
ordered_models.extend(sorted(models)) |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
Useful Utils
==============
"""
from setuptools import setup, find_packages
setup(
name='utilitybelt',
version='0.2.6',
author='Halfmoon Labs',
author_email='[email protected]',
description='Generally useful tools. A python utility belt.',
keywords=('dict dictionary scrub to_dict todict json characters charset '
'hex entropy utility'),<|fim▁hole|> url='https://github.com/onenameio/utilitybelt',
license='MIT',
packages=find_packages(),
install_requires=[
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
zip_safe=False,
)<|fim▁end|> | |
<|file_name|>test_factories.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from addressbase.models import Address
from addressbase.tests.factories import AddressFactory, UprnToCouncilFactory
class TestAddressFactory(TestCase):
def test_address_factory(self):
address = AddressFactory()
self.assertEqual(len(address.uprn), 9)
self.assertEqual(address.addressbase_postal, "D")
class TestUprnToCouncilFactory(TestCase):
def test_uprn_to_council_factory(self):
uprn_to_council = UprnToCouncilFactory()<|fim▁hole|><|fim▁end|> | self.assertIsInstance(uprn_to_council.uprn, Address) |
<|file_name|>dstr-named-dflt-obj-ptrn-prop-id-get-value-err.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources:
// - src/dstr-binding/obj-ptrn-prop-id-get-value-err.case
// - src/dstr-binding/error/async-gen-func-named-expr-dflt.template
/*---
description: Error thrown when accessing the corresponding property of the value object (async generator named function expression (default parameter))
esid: sec-asyncgenerator-definitions-evaluation
features: [async-iteration]
flags: [generated]
info: |
AsyncGeneratorExpression : async [no LineTerminator here] function * BindingIdentifier
( FormalParameters ) { AsyncGeneratorBody }
[...]
7. Let closure be ! AsyncGeneratorFunctionCreate(Normal, FormalParameters,
AsyncGeneratorBody, funcEnv, strict).
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
BindingElement : BindingPattern Initializeropt<|fim▁hole|> 2. ReturnIfAbrupt(v).
---*/
var initEvalCount = 0;
var poisonedProperty = Object.defineProperty({}, 'poisoned', {
get: function() {
throw new Test262Error();
}
});
var f;
f = async function* h({ poisoned: x = ++initEvalCount } = poisonedProperty) {
};
assert.throws(Test262Error, function() {
f();
});
assert.sameValue(initEvalCount, 0);<|fim▁end|> |
1. Let v be GetV(value, propertyName). |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.utils.html import format_html
from django.forms.util import flatatt
from django.utils.encoding import force_text
from mezzanine.conf import settings
from cartridge.shop.forms import OrderForm
from cartridge.shop import checkout
from cartridge.shop.utils import make_choices
from cartridge_braintree.countries import get_country_names_list
class DataEncryptedTextInput(forms.TextInput):
def render(self, name, value, attrs=None):
# See django.forms.widgets.py,
# class Input, method render()
if value is None:
value = ''
if attrs is None:
attrs = {}
attrs['name'] = name
attrs['autocomplete'] = 'off'
attrs['data-encrypted-name'] = name
final_attrs = self.build_attrs(attrs, type=self.input_type)
# Never add the value to the HTML rendering, this field
# will be encrypted and should remain blank if the form is
# re-loaded!
final_attrs['value'] = ''
return format_html('<input{0} />', flatatt(final_attrs))
class DataEncryptedPasswordInput(DataEncryptedTextInput):
input_type = 'password'
class BraintreeOrderForm(OrderForm):
"""
The following changes are made to the cartridge order form:
- Shipping and Billing country fields are rendered using
a Select widget. This ensures the country selected can be
converted to a valid code for Braintree's payment processing.
- Credit Card number and CCV fields are rendered using the
DataEncryptedTextInput and DataEncryptedPasswordInput widgets
so that the HTML form inputs match what is required for braintree.js
See https://www.braintreepayments.com/docs/python/guide/getting_paid
"""
def __init__(self, request, step, data=None, initial=None, errors=None):
OrderForm.__init__(self, request, step, data, initial, errors)
is_first_step = step == checkout.CHECKOUT_STEP_FIRST<|fim▁hole|> # Get list of country names
countries = make_choices(get_country_names_list())
if settings.SHOP_CHECKOUT_STEPS_SPLIT:
if is_first_step:
# Change country widgets to a Select widget
self.fields["billing_detail_country"].widget = forms.Select(choices=countries)
self.fields["billing_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
self.fields["shipping_detail_country"].widget = forms.Select(choices=countries)
self.fields["shipping_detail_country"].initial= settings.SHOP_DEFAULT_COUNTRY
if is_payment_step:
# Make card number and cvv fields use the data encrypted widget
self.fields["card_number"].widget = DataEncryptedTextInput()
self.fields["card_ccv"].widget = DataEncryptedPasswordInput()
else:
# Change country widgets to a Select widget
self.fields["billing_detail_country"].widget = forms.Select(choices=countries)
self.fields["billing_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
self.fields["shipping_detail_country"].widget = forms.Select(choices=countries)
self.fields["shipping_detail_country"].initial= settings.SHOP_DEFAULT_COUNTRY
if settings.SHOP_PAYMENT_STEP_ENABLED:
# Make card number and cvv fields use the data encrypted widget
self.fields["card_number"].widget = DataEncryptedTextInput()
self.fields["card_ccv"].widget = DataEncryptedPasswordInput()<|fim▁end|> | is_last_step = step == checkout.CHECKOUT_STEP_LAST
is_payment_step = step == checkout.CHECKOUT_STEP_PAYMENT
|
<|file_name|>t152.py<|end_file_name|><|fim▁begin|>print str(range(5,0,-3))[:5]<|fim▁hole|>print range(5,0,-3)[0]
print range(5,0,-3)[1]
print range(5,0,-3)[-1]<|fim▁end|> | print len(range(5,0,-3)) |
<|file_name|>sol-implicit.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Gofem Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package fem
import (
"math"
"github.com/cpmech/gofem/ele"
"github.com/cpmech/gosl/chk"
"github.com/cpmech/gosl/fun"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/la"
"github.com/cpmech/gosl/mpi"
)
// Implicit solves the FEM problem using an implicit procedure (with the Newton-Raphson method)
type Implicit struct {
doms []*Domain
sum *Summary
dc *ele.DynCoefs
}
// set factory
func init() {
allocators["imp"] = func(doms []*Domain, sum *Summary, dc *ele.DynCoefs) Solver {
solver := new(Implicit)
solver.doms = doms
solver.sum = sum
solver.dc = dc
return solver
}
}
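// Illustrative sketch (not part of the original source): the factory registered
// above is looked up by its name when constructing a solver, e.g.
//
//	solver := allocators["imp"](doms, sum, dc)
//	err := solver.Run(tf, dtFunc, dtoFunc, true, nil)
//
// where doms, sum and dc are assumed to have been prepared by the caller.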
func (o *Implicit) Run(tf float64, dtFunc, dtoFunc fun.TimeSpace, verbose bool, dbgKb DebugKb_t) (err error) {
// auxiliary
md := 1.0 // time step multiplier if divergence control is on
ndiverg := 0 // number of steps diverging
// control
t := o.doms[0].Sol.T
dat := o.doms[0].Sim.Solver
tout := t + dtoFunc.F(t, nil)
steady := o.doms[0].Sim.Data.Steady
// first output
if o.sum != nil {
err = o.sum.SaveDomains(t, o.doms, false)
if err != nil {
return chk.Err("cannot save results:\n%v", err)
}
}
// message
if verbose && !dat.ShowR {
defer func() { io.Pf("\n") }()
}
// time loop
var Δt float64
var lasttimestep bool
for t < tf {
// check for continued divergence
if ndiverg >= dat.NdvgMax {
return chk.Err("continuous divergence after %d steps reached", ndiverg)
}
// time increment
Δt = dtFunc.F(t, nil) * md
if t+Δt >= tf {
lasttimestep = true
}
if Δt < dat.DtMin {
if md < 1 {
return chk.Err("Δt increment is too small: %g < %g", Δt, dat.DtMin)
}
}
t += Δt
// dynamic coefficients
if !steady {
err = o.dc.CalcBoth(Δt)
if err != nil {
return chk.Err("cannot compute dynamic coefficients")
}
}
// message
if verbose && !dat.ShowR {
io.Pf("> Time = %f\r", t)
}
// for all domains
docontinue := false
for _, d := range o.doms {
// backup solution if divergence control is on
if dat.DvgCtrl {
d.backup()
}
// run iterations
d.Sol.T = t
d.Sol.Dt = Δt
diverging, err := run_iterations(t, Δt, d, o.dc, o.sum, dbgKb)
if err != nil {
return chk.Err("run_iterations failed:\n%v", err)
}
// restore solution and reduce time step if divergence control is on
if dat.DvgCtrl {
if diverging {
if verbose {
io.Pfred(". . . iterations diverging (%2d) . . .\n", ndiverg+1)
}
d.restore()
t -= Δt
d.Sol.T = t
md *= 0.5
ndiverg += 1
docontinue = true
break
}
ndiverg = 0
md = 1.0
}
}
if docontinue {
continue
}
// perform output
if t >= tout || lasttimestep {
if o.sum != nil {
err = o.sum.SaveDomains(t, o.doms, false)
if err != nil {
return chk.Err("cannot save results:\n%v", err)
}
}
tout += dtoFunc.F(t, nil)
}
}
return
}
// run_iterations solves the nonlinear problem
func run_iterations(t, Δt float64, d *Domain, dc *ele.DynCoefs, sum *Summary, dbgKb DebugKb_t) (diverging bool, err error) {
// zero accumulated increments
la.VecFill(d.Sol.ΔY, 0)
// calculate global starred vectors and interpolate starred variables from nodes to integration points
β1 := dc.GetBet1()
β2 := dc.GetBet2()
α1 := dc.GetAlp1()
α2 := dc.GetAlp2()
α3 := dc.GetAlp3()
α4 := dc.GetAlp4()
α5 := dc.GetAlp5()
α6 := dc.GetAlp6()
if !d.Sim.Data.Steady {
// compute starred vectors
for _, I := range d.T1eqs {
d.Sol.Psi[I] = β1*d.Sol.Y[I] + β2*d.Sol.Dydt[I]
}
for _, I := range d.T2eqs {
d.Sol.Zet[I] = α1*d.Sol.Y[I] + α2*d.Sol.Dydt[I] + α3*d.Sol.D2ydt2[I]
d.Sol.Chi[I] = α4*d.Sol.Y[I] + α5*d.Sol.Dydt[I] + α6*d.Sol.D2ydt2[I]
}
// set internal starred variables
for _, e := range d.Elems {
err = e.InterpStarVars(d.Sol)
if err != nil {
err = chk.Err("cannot compute starred variables:\n%v", err)
return
}
}
}
// auxiliary variables
var it int
var largFb, largFb0, Lδu float64
var prevFb, prevLδu float64
dat := d.Sim.Solver
// message
if dat.ShowR {
io.Pf("\n%13s%4s%23s%23s\n", "t", "it", "largFb", "Lδu")
defer func() {
io.Pf("%13.6e%4d%23.15e%23.15e\n", t, it, largFb, Lδu)
}()
}
// iterations
for it = 0; it < dat.NmaxIt; it++ {
// assemble right-hand side vector (fb) with negative of residuals
la.VecFill(d.Fb, 0)
for _, e := range d.Elems {
err = e.AddToRhs(d.Fb, d.Sol)
if err != nil {
return
}
}
// join all fb
if d.Distr {
mpi.AllReduceSum(d.Fb, d.Wb) // this must be done here because there might be nodes sharing boundary conditions<|fim▁hole|>
// point natural boundary conditions; e.g. concentrated loads
d.PtNatBcs.AddToRhs(d.Fb, t)
// essential boundary conditioins; e.g. constraints
d.EssenBcs.AddToRhs(d.Fb, d.Sol)
// find largest absolute component of fb
largFb = la.VecLargest(d.Fb, 1)
// save residual
if d.Sim.Data.Stat {
if sum != nil {
sum.Resids.Append(it == 0, largFb)
}
}
// check largFb value
if it == 0 {
// store largest absolute component of fb
largFb0 = largFb
} else {
// check convergence on Lf0
if largFb < dat.FbTol*largFb0 { // converged on fb
break
}
// check convergence on fb_min
if largFb < dat.FbMin { // converged with smallest value of fb
break
}
}
// check divergence on fb
if it > 1 && dat.DvgCtrl {
if largFb > prevFb {
diverging = true
break
}
}
prevFb = largFb
// assemble Jacobian matrix
do_asm_fact := (it == 0 || !dat.CteTg)
if do_asm_fact {
// assemble element matrices
d.Kb.Start()
for _, e := range d.Elems {
err = e.AddToKb(d.Kb, d.Sol, it == 0)
if err != nil {
return
}
}
// debug
if dbgKb != nil {
dbgKb(d, it)
}
// join A and tr(A) matrices into Kb
if d.Proc == 0 {
d.Kb.PutMatAndMatT(&d.EssenBcs.A)
}
// write smat matrix
if d.Sim.Data.WriteSmat {
la.WriteSmat("/tmp/gofem_Kb", d.Kb.ToMatrix(nil).ToDense(), 1e-14)
chk.Panic("file </tmp/gofem_Kb.smat> written. simulation stopped")
}
// initialise linear solver
if d.InitLSol {
err = d.LinSol.InitR(d.Kb, d.Sim.LinSol.Symmetric, d.Sim.LinSol.Verbose, d.Sim.LinSol.Timing)
if err != nil {
err = chk.Err("cannot initialise linear solver:\n%v", err)
return
}
d.InitLSol = false
}
// perform factorisation
err = d.LinSol.Fact()
if err != nil {
err = chk.Err("factorisation failed:\n%v", err)
return
}
}
// solve for wb := δyb
err = d.LinSol.SolveR(d.Wb, d.Fb, false)
if err != nil {
err = chk.Err("solve failed:%v\n", err)
return
}
// update primary variables (y)
for i := 0; i < d.Ny; i++ {
d.Sol.Y[i] += d.Wb[i] // y += δy
d.Sol.ΔY[i] += d.Wb[i] // ΔY += δy
if math.IsNaN(d.Sol.Y[i]) {
err = chk.Err("Solution vector has NaN compoment\n")
return
}
}
if !d.Sim.Data.Steady {
for _, I := range d.T1eqs {
d.Sol.Dydt[I] = β1*d.Sol.Y[I] - d.Sol.Psi[I]
}
for _, I := range d.T2eqs {
d.Sol.Dydt[I] = α4*d.Sol.Y[I] - d.Sol.Chi[I]
d.Sol.D2ydt2[I] = α1*d.Sol.Y[I] - d.Sol.Zet[I]
}
}
// update Lagrange multipliers (λ)
for i := 0; i < d.Nlam; i++ {
d.Sol.L[i] += d.Wb[d.Ny+i] // λ += δλ
}
// backup / restore
if it == 0 {
// create backup copy of all secondary variables
for _, e := range d.ElemIntvars {
e.BackupIvs(false)
}
} else {
// recover last converged state from backup copy
for _, e := range d.ElemIntvars {
e.RestoreIvs(false)
}
}
// update secondary variables
err = d.UpdateElems()
if err != nil {
break
}
        // compute RMS norm of δu and check convergence on δu
Lδu = la.VecRmsErr(d.Wb[:d.Ny], dat.Atol, dat.Rtol, d.Sol.Y[:d.Ny])
// message
if dat.ShowR {
io.Pf("%13.6e%4d%23.15e%23.15e\n", t, it, largFb, Lδu)
}
// stop if converged on δu
if Lδu < dat.Itol {
break
}
// check divergence on Lδu
if it > 1 && dat.DvgCtrl {
if Lδu > prevLδu {
diverging = true
break
}
}
prevLδu = Lδu
}
// check if iterations diverged
if it == dat.NmaxIt {
err = chk.Err("max number of iterations reached: it = %d\n", it)
}
return
}<|fim▁end|> | } |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use prost_build::Config;
fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut config = Config::new();
config.bytes(&["."]);
tonic_build::configure()
.build_client(true)
.build_server(true)
.compile_with_config(
config,
&[
"protos/bazelbuild_remote-apis/build/bazel/remote/execution/v2/remote_execution.proto",
"protos/bazelbuild_remote-apis/build/bazel/semver/semver.proto",
"protos/buildbarn/cas.proto",
"protos/googleapis/google/bytestream/bytestream.proto",
"protos/googleapis/google/rpc/code.proto",<|fim▁hole|> "protos/standard/google/protobuf/empty.proto",
],
&[
"protos/bazelbuild_remote-apis",
"protos/buildbarn",
"protos/googleapis",
"protos/standard",
],
)?;
Ok(())
}<|fim▁end|> | "protos/googleapis/google/rpc/error_details.proto",
"protos/googleapis/google/rpc/status.proto",
"protos/googleapis/google/longrunning/operations.proto", |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>'use strict'
var test = require('tape')
var createDate = require('./')
test(function (t) {
t.ok(createDate('1-1-2000') instanceof Date)<|fim▁hole|><|fim▁end|> | t.end()
}) |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package miwax.java_conf.gr.jp.frugalitycalc.view;
import android.app.AlertDialog;
import android.databinding.DataBindingUtil;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import miwax.java_conf.gr.jp.frugalitycalc.R;
import miwax.java_conf.gr.jp.frugalitycalc.databinding.ActivityMainBinding;
import miwax.java_conf.gr.jp.frugalitycalc.util.messenger.ShowAlertDialogMessage;
import miwax.java_conf.gr.jp.frugalitycalc.viewmodel.MainViewModel;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.subscriptions.CompositeSubscription;
public class MainActivity extends AppCompatActivity {
private final String VIEW_MODEL = "VIEW_MODEL";
private ActivityMainBinding binding;
private MainViewModel mainViewModel;
private CompositeSubscription subscriptions = new CompositeSubscription();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
binding = DataBindingUtil.setContentView(this, R.layout.activity_main);
if (savedInstanceState != null) {
mainViewModel = savedInstanceState.getParcelable(VIEW_MODEL);
} else {
mainViewModel = new MainViewModel();
}
binding.setViewModel(mainViewModel);
        // Receive messages requesting that an alert dialog be shown
subscriptions.add(
mainViewModel.getMessenger().register(ShowAlertDialogMessage.class)
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Action1<ShowAlertDialogMessage>() {
@Override
public void call(ShowAlertDialogMessage message) {
showAlertDialog(message.getTitleId(), message.getTextId());
}<|fim▁hole|> );
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putParcelable(VIEW_MODEL, mainViewModel);
}
@Override
protected void onDestroy() {
mainViewModel.unsubscribe();
super.onDestroy();
}
private void showAlertDialog(int titleId, int textId) {
new AlertDialog.Builder(this)
.setTitle(this.getString(titleId))
.setMessage(this.getString(textId))
.setPositiveButton("OK", null)
.show();
}
}<|fim▁end|> | }) |
<|file_name|>qgsvectortilemvtencoder.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgsvectortilemvtencoder.cpp
--------------------------------------
Date : April 2020
Copyright : (C) 2020 by Martin Dobias
Email : wonder dot sk at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgsvectortilemvtencoder.h"
#include "qgsfeedback.h"
#include "qgslinestring.h"
#include "qgslogger.h"
#include "qgsmultilinestring.h"
#include "qgsmultipoint.h"
#include "qgsmultipolygon.h"
#include "qgspolygon.h"
#include "qgsvectorlayer.h"
#include "qgsvectortilemvtutils.h"
//! Helper class for writing of geometry commands
struct MVTGeometryWriter
{
vector_tile::Tile_Feature *feature = nullptr;
int resolution;
double tileXMin, tileYMax, tileDX, tileDY;
QPoint cursor;
MVTGeometryWriter( vector_tile::Tile_Feature *f, int res, const QgsRectangle &tileExtent )
: feature( f )
, resolution( res )
, tileXMin( tileExtent.xMinimum() )
, tileYMax( tileExtent.yMaximum() )
, tileDX( tileExtent.width() )
, tileDY( tileExtent.height() )
{
}
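  // The helpers below emit MVT "command integers": the low three bits carry the
  // command id (MoveTo = 1, LineTo = 2, ClosePath = 7) and the remaining bits the
  // repeat count, i.e. value = id | (count << 3).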
void addMoveTo( int count )
{
feature->add_geometry( 1 | ( count << 3 ) );
}
void addLineTo( int count )
{
feature->add_geometry( 2 | ( count << 3 ) );
}
void addClosePath()
{
feature->add_geometry( 7 | ( 1 << 3 ) );
}
void addPoint( const QgsPoint &pt )
{
addPoint( mapToTileCoordinates( pt.x(), pt.y() ) );
}
void addPoint( const QPoint &pt )
{
const qint32 vx = pt.x() - cursor.x();
const qint32 vy = pt.y() - cursor.y();
// (quint32)(-(qint32)((quint32)vx >> 31)) is a C/C++ compliant way
// of doing vx >> 31, which is undefined behavior since vx is signed
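    // (this is the MVT zigzag encoding: 0, -1, 1, -2, 2, ... map to 0, 1, 2, 3, 4, ...
    // so small deltas of either sign stay small)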
feature->add_geometry( ( ( quint32 )vx << 1 ) ^ ( ( quint32 )( -( qint32 )( ( quint32 )vx >> 31 ) ) ) );
feature->add_geometry( ( ( quint32 )vy << 1 ) ^ ( ( quint32 )( -( qint32 )( ( quint32 )vy >> 31 ) ) ) );
cursor = pt;
}
QPoint mapToTileCoordinates( double x, double y )
{
return QPoint( static_cast<int>( round( ( x - tileXMin ) * resolution / tileDX ) ),
static_cast<int>( round( ( tileYMax - y ) * resolution / tileDY ) ) );
}
};
static void encodeLineString( const QgsLineString *lineString, bool isRing, bool reversed, MVTGeometryWriter &geomWriter )
{
int count = lineString->numPoints();
const double *xData = lineString->xData();
const double *yData = lineString->yData();
if ( isRing )
count--; // the last point in linear ring is repeated - but not in MVT
// de-duplicate points
QVector<QPoint> tilePoints;
QPoint last( -9999, -9999 );
tilePoints.reserve( count );
for ( int i = 0; i < count; ++i )
{
const QPoint pt = geomWriter.mapToTileCoordinates( xData[i], yData[i] );
if ( pt == last )
continue;
tilePoints << pt;
last = pt;
}
count = tilePoints.count();
geomWriter.addMoveTo( 1 );
geomWriter.addPoint( tilePoints[0] );
geomWriter.addLineTo( count - 1 );
if ( reversed )
{
for ( int i = count - 1; i >= 1; --i )
geomWriter.addPoint( tilePoints[i] );
}
else
{
for ( int i = 1; i < count; ++i )
geomWriter.addPoint( tilePoints[i] );
}
}
static void encodePolygon( const QgsPolygon *polygon, MVTGeometryWriter &geomWriter )
{
const QgsLineString *exteriorRing = qgsgeometry_cast<const QgsLineString *>( polygon->exteriorRing() );
encodeLineString( exteriorRing, true, !QgsVectorTileMVTUtils::isExteriorRing( exteriorRing ), geomWriter );
geomWriter.addClosePath();
for ( int i = 0; i < polygon->numInteriorRings(); ++i )
{
const QgsLineString *interiorRing = qgsgeometry_cast<const QgsLineString *>( polygon->interiorRing( i ) );
encodeLineString( interiorRing, true, QgsVectorTileMVTUtils::isExteriorRing( interiorRing ), geomWriter );
geomWriter.addClosePath();
}
}
//
QgsVectorTileMVTEncoder::QgsVectorTileMVTEncoder( QgsTileXYZ tileID )
: mTileID( tileID )
{
const QgsTileMatrix tm = QgsTileMatrix::fromWebMercator( mTileID.zoomLevel() );
mTileExtent = tm.tileExtent( mTileID );
}
void QgsVectorTileMVTEncoder::addLayer( QgsVectorLayer *layer, QgsFeedback *feedback, QString filterExpression, QString layerName )
{
if ( feedback && feedback->isCanceled() )
return;
const QgsCoordinateTransform ct( layer->crs(), QgsCoordinateReferenceSystem( "EPSG:3857" ), mTransformContext );
QgsRectangle layerTileExtent = mTileExtent;
try
{
layerTileExtent = ct.transformBoundingBox( layerTileExtent, QgsCoordinateTransform::ReverseTransform );
if ( !layerTileExtent.intersects( layer->extent() ) )
{
      return; // tile is completely outside of the layer's extent
}
}
catch ( const QgsCsException & )
{
QgsDebugMsg( "Failed to reproject tile extent to the layer" );
return;
}
if ( layerName.isEmpty() )
layerName = layer->name();
// add buffer to both filter extent in layer CRS (for feature request) and tile extent in target CRS (for clipping)
const double bufferRatio = static_cast<double>( mBuffer ) / mResolution;
QgsRectangle tileExtent = mTileExtent;
tileExtent.grow( bufferRatio * mTileExtent.width() );
layerTileExtent.grow( bufferRatio * std::max( layerTileExtent.width(), layerTileExtent.height() ) );
QgsFeatureRequest request;
request.setFilterRect( layerTileExtent );
if ( !filterExpression.isEmpty() )
request.setFilterExpression( filterExpression );
QgsFeatureIterator fit = layer->getFeatures( request );
QgsFeature f;
if ( !fit.nextFeature( f ) )
{
return; // nothing to write - do not add the layer at all
}
vector_tile::Tile_Layer *tileLayer = tile.add_layers();
tileLayer->set_name( layerName.toUtf8() );
tileLayer->set_version( 2 ); // 2 means MVT spec version 2.1
tileLayer->set_extent( static_cast<::google::protobuf::uint32>( mResolution ) );
const QgsFields fields = layer->fields();
for ( int i = 0; i < fields.count(); ++i )
{
tileLayer->add_keys( fields[i].name().toUtf8() );
}
do
{
if ( feedback && feedback->isCanceled() )
break;
QgsGeometry g = f.geometry();
// reproject
try
{
g.transform( ct );
}
catch ( const QgsCsException & )
{
QgsDebugMsg( "Failed to reproject geometry " + QString::number( f.id() ) );
continue;
}
// clip
g = g.clipped( tileExtent );
f.setGeometry( g );
addFeature( tileLayer, f );
}
while ( fit.nextFeature( f ) );
mKnownValues.clear();
}
void QgsVectorTileMVTEncoder::addFeature( vector_tile::Tile_Layer *tileLayer, const QgsFeature &f )
{
QgsGeometry g = f.geometry();
const QgsWkbTypes::GeometryType geomType = g.type();
const double onePixel = mTileExtent.width() / mResolution;
if ( geomType == QgsWkbTypes::LineGeometry )
{
if ( g.length() < onePixel )
return; // too short
}
else if ( geomType == QgsWkbTypes::PolygonGeometry )
{
if ( g.area() < onePixel * onePixel )
return; // too small
}
vector_tile::Tile_Feature *feature = tileLayer->add_features();
feature->set_id( static_cast<quint64>( f.id() ) );
//
// encode attributes
//
const QgsAttributes attrs = f.attributes();
for ( int i = 0; i < attrs.count(); ++i )
{
const QVariant v = attrs.at( i );
if ( !v.isValid() || v.isNull() )
continue;
int valueIndex;
if ( mKnownValues.contains( v ) )
{
valueIndex = mKnownValues[v];
}
else
{<|fim▁hole|> valueIndex = tileLayer->values_size() - 1;
mKnownValues[v] = valueIndex;
if ( v.type() == QVariant::Double )
value->set_double_value( v.toDouble() );
else if ( v.type() == QVariant::Int )
value->set_int_value( v.toInt() );
else if ( v.type() == QVariant::Bool )
value->set_bool_value( v.toBool() );
else
value->set_string_value( v.toString().toUtf8().toStdString() );
}
feature->add_tags( static_cast<quint32>( i ) );
feature->add_tags( static_cast<quint32>( valueIndex ) );
}
//
// encode geometry
//
vector_tile::Tile_GeomType mvtGeomType = vector_tile::Tile_GeomType_UNKNOWN;
if ( geomType == QgsWkbTypes::PointGeometry )
mvtGeomType = vector_tile::Tile_GeomType_POINT;
else if ( geomType == QgsWkbTypes::LineGeometry )
mvtGeomType = vector_tile::Tile_GeomType_LINESTRING;
else if ( geomType == QgsWkbTypes::PolygonGeometry )
mvtGeomType = vector_tile::Tile_GeomType_POLYGON;
feature->set_type( mvtGeomType );
if ( QgsWkbTypes::isCurvedType( g.wkbType() ) )
{
g = QgsGeometry( g.get()->segmentize() );
}
MVTGeometryWriter geomWriter( feature, mResolution, mTileExtent );
const QgsAbstractGeometry *geom = g.constGet();
switch ( QgsWkbTypes::flatType( g.wkbType() ) )
{
case QgsWkbTypes::Point:
{
const QgsPoint *pt = static_cast<const QgsPoint *>( geom );
geomWriter.addMoveTo( 1 );
geomWriter.addPoint( *pt );
}
break;
case QgsWkbTypes::LineString:
{
      encodeLineString( qgsgeometry_cast<const QgsLineString *>( geom ), false, false, geomWriter );
}
break;
case QgsWkbTypes::Polygon:
{
encodePolygon( static_cast<const QgsPolygon *>( geom ), geomWriter );
}
break;
case QgsWkbTypes::MultiPoint:
{
const QgsMultiPoint *mpt = static_cast<const QgsMultiPoint *>( geom );
geomWriter.addMoveTo( mpt->numGeometries() );
for ( int i = 0; i < mpt->numGeometries(); ++i )
geomWriter.addPoint( *mpt->pointN( i ) );
}
break;
case QgsWkbTypes::MultiLineString:
{
const QgsMultiLineString *mls = qgsgeometry_cast<const QgsMultiLineString *>( geom );
for ( int i = 0; i < mls->numGeometries(); ++i )
{
        encodeLineString( mls->lineStringN( i ), false, false, geomWriter );
}
}
break;
case QgsWkbTypes::MultiPolygon:
{
const QgsMultiPolygon *mp = qgsgeometry_cast<const QgsMultiPolygon *>( geom );
for ( int i = 0; i < mp->numGeometries(); ++i )
{
encodePolygon( mp->polygonN( i ), geomWriter );
}
}
break;
default:
break;
}
}
QByteArray QgsVectorTileMVTEncoder::encode() const
{
return QByteArray::fromStdString( tile.SerializeAsString() );
}<|fim▁end|> | vector_tile::Tile_Value *value = tileLayer->add_values(); |
<|file_name|>test_file_with_long_levels.py<|end_file_name|><|fim▁begin|>import sure
import tempfile
from contents import contents
def test_file_with_long_levels():
content = '''/**
* Project X
* Author: Jean Pimentel
* Date: August, 2013
*/
/* > Intro */
Toc toc! Penny! Toc toc! Penny! Toc toc! Penny!
/* >> The Big Bang Theory << */
The Big Bang Theory is an American sitcom created by Chuck Lorre and Bill Prady.
/* ==>>> Characters ========================================================= */
Leonard Hofstadter, Sheldon Cooper, Howard Wolowitz, Rajesh Koothrappali, Penny
/* >>>> Production
============================================================================= */
Executive producer(s): Chuck Lorre, Bill Prady, Steven Molaro
Producer(s): Faye Oshima Belyeu
/* =>>>>> Info section: number of seasons - number of episodes
============================================================================= */
No. of seasons: 5
No. of episodes: 111
/* =>>>>>> A collection of our favorite quotes from the show <=============== */
* Sheldon: Scissors cuts paper, paper covers rock, rock crushes lizard, lizard poisons Spock, Spock smashes scissors, scissors decapitates lizard, lizard eats paper, paper disproves Spock, Spock vaporizes rock, and as it always has, rock crushes scissors.
* Sheldon: I'm not insane, my mother had me tested!
'''
new_content = '''/* TABLE OF CONTENTS
Intro ............................................................... 17
The Big Bang Theory ............................................. 20
Characters .................................................. 23
Production .............................................. 26
Info section: number of seasons - number of e[...] .. 31
A collection of our favorite quotes from [...] .. 36
============================================================================= */
/**
* Project X
* Author: Jean Pimentel
* Date: August, 2013
*/
/* > Intro */
Toc toc! Penny! Toc toc! Penny! Toc toc! Penny!
/* >> The Big Bang Theory << */
The Big Bang Theory is an American sitcom created by Chuck Lorre and Bill Prady.
/* ==>>> Characters ========================================================= */
Leonard Hofstadter, Sheldon Cooper, Howard Wolowitz, Rajesh Koothrappali, Penny
<|fim▁hole|>============================================================================= */
Executive producer(s): Chuck Lorre, Bill Prady, Steven Molaro
Producer(s): Faye Oshima Belyeu
/* =>>>>> Info section: number of seasons - number of episodes
============================================================================= */
No. of seasons: 5
No. of episodes: 111
/* =>>>>>> A collection of our favorite quotes from the show <=============== */
* Sheldon: Scissors cuts paper, paper covers rock, rock crushes lizard, lizard poisons Spock, Spock smashes scissors, scissors decapitates lizard, lizard eats paper, paper disproves Spock, Spock vaporizes rock, and as it always has, rock crushes scissors.
* Sheldon: I'm not insane, my mother had me tested!
'''
temp = tempfile.NamedTemporaryFile()
try:
temp.write(content)
temp.seek(0)
contents(temp.name)
temp.seek(0)
temp.read().should.be.equal(new_content)
finally:
temp.close()<|fim▁end|> | /* >>>> Production |
<|file_name|>deployer_configuration_utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
##############################################################################################
# Copyright (C) 2014 Pier Luigi Ventre - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2014 Giuseppe Siracusano, Stefano Salsano - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Deployer Configuration Utils.
#
# @author Pier Luigi Ventre <[email protected]>
# @author Giuseppe Siracusano <[email protected]>
# @author Stefano Salsano <[email protected]>
#
#
from mininet.node import Node
def convert_port_name_to_number(oshi, port):<|fim▁hole|> p = oshi.cmd("ovs-ofctl dump-ports-desc br-%s | grep %s |awk -F '(' '{print $1}'| cut -d ' ' -f 2" %(oshi.name, port ))
return str(int(p))
def configure_standalone_sw(switches):
print "*** Configuring L2 Switches"
root = Node( 'root', inNamespace=False )
for sw in switches:
print "*** Configuring", sw.name, "As Learning Switch"
root.cmd("ovs-vsctl set-fail-mode %s standalone" % sw.name)<|fim▁end|> | |
<|file_name|>tp_timings.go<|end_file_name|><|fim▁begin|>/*
Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
Copyright (C) ITsysCOM GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package migrator
import (
"github.com/cgrates/cgrates/engine"
"github.com/cgrates/cgrates/utils"
)
func (m *Migrator) migrateCurrentTPTiming() (err error) {
tpids, err := m.storDBIn.StorDB().GetTpIds(utils.TBLTPTimings)
if err != nil {
return err
}
for _, tpid := range tpids {
ids, err := m.storDBIn.StorDB().GetTpTableIds(tpid, utils.TBLTPTimings,
utils.TPDistinctIds{"tag"}, map[string]string{}, nil)
if err != nil {
return err<|fim▁hole|> for _, id := range ids {
tm, err := m.storDBIn.StorDB().GetTPTimings(tpid, id)
if err != nil {
return err
}
if tm != nil {
if !m.dryRun {
if err := m.storDBOut.StorDB().SetTPTimings(tm); err != nil {
return err
}
for _, timing := range tm {
if err := m.storDBIn.StorDB().RemTpData(utils.TBLTPTimings,
timing.TPid, map[string]string{"tag": timing.ID}); err != nil {
return err
}
}
m.stats[utils.TpTiming] += 1
}
}
}
}
return
}
func (m *Migrator) migrateTpTimings() (err error) {
var vrs engine.Versions
current := engine.CurrentStorDBVersions()
if vrs, err = m.getVersions(utils.TpTiming); err != nil {
return
}
switch vrs[utils.TpTiming] {
case current[utils.TpTiming]:
if m.sameStorDB {
break
}
if err := m.migrateCurrentTPTiming(); err != nil {
return err
}
}
return m.ensureIndexesStorDB(utils.TBLTPTimings)
}<|fim▁end|> | } |
<|file_name|>Thermostat.js<|end_file_name|><|fim▁begin|>var Thermostat = function() {
this.temp = 20;
this.mode = 'power saving';
this.min = 10;
this.max = 25;
};
<|fim▁hole|>Thermostat.prototype.setPowerSaving = function(value) {
this.mode = (value ? 'power saving' : 'normal');
this.max = (value ? 25 : 32);
};
Thermostat.prototype.increase = function() {
if(this.temp < this.max) {this.temp++}
};
Thermostat.prototype.decrease = function() {
if(this.temp > this.min) {this.temp--}
};
Thermostat.prototype.resetTemp = function() {
this.temp = 20;
};<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
import Chart from 'chart.js';
import Controller from './controller';
import Scale, {defaults} from './scale';
// Register the Controller and Scale
Chart.controllers.smith = Controller;
Chart.defaults.smith = {
aspectRatio: 1,<|fim▁hole|> callbacks: {
title: () => null,
label: (bodyItem, data) => {
const dataset = data.datasets[bodyItem.datasetIndex];
const d = dataset.data[bodyItem.index];
return dataset.label + ': ' + d.real + ' + ' + d.imag + 'i';
}
}
}
};
Chart.scaleService.registerScaleType('smith', Scale, defaults);<|fim▁end|> | scale: {
type: 'smith',
},
tooltips: { |
<|file_name|>zombieHS.py<|end_file_name|><|fim▁begin|>import random
class Character:
def __init__(self):
self.name = ""
self.life = 20
        self.health = random.randint(0, self.life)
self.zombie_life = 10
        self.zombie_health = random.randint(0, self.zombie_life)
def attack(self, zombie):
self.hit = self.health - self.zombie_health
        zombie.health -= self.hit
if self.hit == 0:
print ("..like a nimble sloth, %s evades %s's attack." % (zombie.name, self.name))
else:
print ("%s inflicts debilitating damage on %s!!" % (self.name, zombie.name))
return zombie.health <= 0
class Zombie(Character):
def __init__(self, player):
Character.__init__(self)
        ran_adj = random.choice(['wretched', 'filthy', 'disgusting', 'oozing'])
        self.name = "a " + ran_adj + " zombie"
        self.health = random.randint(1, player.health)
class Player(Character):
def __init__(self):
Character.__init__(self)
self.level = 'normal'<|fim▁hole|> def quit(self):
print ("The zombie virus has infected %s. You are now undead and crave brains.") % self.name
self.health = 0
def help(self):
print Commands.keys()
def status(self):
print ("%s's health: %d/%d" % (self.name, self.health, self.health_max))
    def tired(self):
print ("%s is cold, hungry and tired.") % self.name
self.health = max(1, self.health - 1)
def rest(self):
        ran_adj = random.choice(['under a desk', 'in a locker', 'in a closet'])
if self.state != 'normal':
            print ("keep moving %s, zombies are coming in hot!") % self.name
else:
            print ("%s hides " + ran_adj + " and takes a breather.") % self.name
            if random.randint(0, 1):
self.zombie = Zombie(self)
                print ("%s is surprised by %s") % (self.name, self.zombie.name)
self.state = 'fight'
self.zombie_attacks()
def attack(self):
        if random.randint(0, 1):
self.zombie = Zombie(self)
            print ("Look out %s! -%s appears!") % (self.name, self.zombie.name)
self.state = 'fight'
self.zombie_attacks()
else:
if self.health < self.health_max:
self.health = self.health + 1
else:
print ("%s has hidden too long.") % self.name
self.health -= 1
def look_around(self):
if self.state != 'normal':
print ("%s runs into %s") % (self.name, self.zombie.name)
self.zombie_attacks()
else:
            look = random.choice(["gymnasium", "library", "metal shop", "cafeteria"])
            print ("%s runs into the " + look) % self.name
if random.randint(0, 1):
self.zombie = Zombie(self)
print "%s encounters %s!" % (self.name, self.zombie.name)
self.state = 'fight'
else:
if random.randint(0, 1):
self.tired()
def flee(self):
if self.state != 'fight':
print "%s runs down a corridor" % self.name
self.tired()
else:
if random.randint(1, self.health + 5) > random.randint(1, self.zombie.health):
print "%s flees from %s." % (self.name, self.zombie.name)
self.zombie = None
self.state = 'normal'
else:
print "%s couldn't escape from %s!" % (self.name, self.zombie.name);
self.zombie_attacks()
def attack(self):
if self.state != 'fight':
print "%s flails in the air like a twit." % self.name;
self.tired()
else:
if self.do_damage(self.zombie):
print ("%s decapitates %s!") % (self.name, self.zombie.name)
self.zombie = None
self.state = 'normal'
                if self.health < 10:
self.health += 1
self.health_max += 1
print "%s is rejuvenated" % self.name
else:
self.zombie_attacks()
def zombie_attacks(self):
if self.zombie.attack(self):
            print ("%s's brains were devoured by %s!!!\nyou are undead and crave BRAINS!!\nunless you're a vegetarian then seek GRAINS!!") % (self.name, self.zombie.name)
def menu():
Commands = {
'quit': Player.quit,
'help': Player.help,
'status': Player.status,
'rest': Player.rest,
'look around': Player.look_around,
'flee': Player.flee,
'attack': Player.attack,
}
hero = Player()
hero.name = raw_input("What is your character's name? ")
print "(type help to get a list of actions)\n"
print """When %s leaves homeroom, they
notice a strange stench in the air
maybe we are dissecting a frog in biology today...""" % hero.name
    while (hero.health > 0):
line = raw_input("> ")
args = line.split()
if len(args) > 0:
commandFound = False
for c in Commands.keys():
if args[0] == c[:len(args[0])]:
                    Commands[c](hero)
commandFound = True
break
if not commandFound:
                print "%s is confused, enter a command" % hero.name
"""
living on the outskirts of a government national lab
has its pros and cons. when the kids in school
say that a rogue virus has started to infect people
and turn them into zombies, you laugh it off.
"""<|fim▁end|> | self.health = 10
self.health_max = 10
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import PropTypes from 'prop-types';
const Soon = () => (
<div>
<article className="glitch">
<span>ALWAYS Ɐ WIP</span>
</article>
</div><|fim▁hole|>
export default Soon;<|fim▁end|> | ); |
<|file_name|>ggi.py<|end_file_name|><|fim▁begin|># ####################################################################
# gofed - set of tools to automize packaging of golang devel codes
# Copyright (C) 2014 Jan Chaloupka, [email protected]
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
import sys
import re
import os
import urllib2
import optparse
from subprocess import Popen, PIPE
from modules.Utils import GREEN, RED, ENDC
from modules.Packages import packageInPkgdb
from modules.Utils import FormatedPrint
from modules.ImportPath import ImportPath
from modules.ImportPathsDecomposer import ImportPathsDecomposer
from modules.GoSymbolsExtractor import GoSymbolsExtractor
from modules.Config import Config
if __name__ == "__main__":
parser = optparse.OptionParser("%prog [-a] [-c] [-d [-v]] [directory]")
parser.add_option_group( optparse.OptionGroup(parser, "directory", "Directory to inspect. If empty, current directory is used.") )
parser.add_option(
"", "-a", "--all", dest="all", action = "store_true", default = False,
help = "Display all imports including golang native"
)
parser.add_option(
"", "-c", "--classes", dest="classes", action = "store_true", default = False,
help = "Decompose imports into classes"
)
parser.add_option(
"", "-d", "--pkgdb", dest="pkgdb", action = "store_true", default = False,
help = "Check if a class is in the PkgDB (only with -c option)"
)
parser.add_option(
"", "-v", "--verbose", dest="verbose", action = "store_true", default = False,
help = "Show all packages if -d option is on"
)
parser.add_option(
"", "-s", "--short", dest="short", action = "store_true", default = False,
help = "Display just classes without its imports"
)
parser.add_option(
"", "", "--spec", dest="spec", action = "store_true", default = False,
help = "Display import path for spec file"
)
parser.add_option(
"", "-r", "--requires", dest="requires", action = "store_true", default = False,
help = "Use Requires instead of BuildRequires. Used only with --spec option."
)
parser.add_option(
"", "", "--skip-errors", dest="skiperrors", action = "store_true", default = False,
help = "Skip all errors during Go symbol parsing"
)
parser.add_option(
"", "", "--importpath", dest="importpath", default = "",
help = "Don't display class belonging to IMPORTPATH prefix"
)
parser.add_option(
"", "", "--scan-all-dirs", dest="scanalldirs", action = "store_true", default = False,
help = "Scan all dirs, including Godeps directory"
)
parser.add_option(
"", "", "--skip-dirs", dest="skipdirs", default = "",
help = "Scan all dirs except specified via SKIPDIRS. Directories are comma separated list."
)
parser.add_option(
"", "", "--all-occurrences", dest="alloccurrences", action = "store_true", default = False,
		help = "List imported paths in all packages including main. Default is to skip main packages."
)
parser.add_option(
"", "", "--show-occurrence", dest="showoccurrence", action = "store_true", default = False,
		help = "Show occurrence of import paths."
)
options, args = parser.parse_args()
path = "."
if len(args):
path = args[0]
fmt_obj = FormatedPrint()
if not options.scanalldirs:
noGodeps = Config().getSkippedDirectories()
else:
noGodeps = []
if options.skipdirs:
for dir in options.skipdirs.split(','):<|fim▁hole|> continue
noGodeps.append(dir)
gse_obj = GoSymbolsExtractor(path, imports_only=True, skip_errors=options.skiperrors, noGodeps=noGodeps)
if not gse_obj.extract():
fmt_obj.printError(gse_obj.getError())
exit(1)
package_imports_occurence = gse_obj.getPackageImportsOccurences()
ip_used = gse_obj.getImportedPackages()
ipd = ImportPathsDecomposer(ip_used)
if not ipd.decompose():
fmt_obj.printError(ipd.getError())
exit(1)
warn = ipd.getWarning()
if warn != "":
fmt_obj.printWarning("Warning: %s" % warn)
classes = ipd.getClasses()
sorted_classes = sorted(classes.keys())
# get max length of all imports
max_len = 0
for element in sorted_classes:
if element == "Native":
continue
# class name starts with prefix => filter out
if options.importpath != "" and element.startswith(options.importpath):
continue
gimports = []
for gimport in classes[element]:
if options.importpath != "" and gimport.startswith(options.importpath):
continue
gimports.append(gimport)
for gimport in gimports:
import_len = len(gimport)
if import_len > max_len:
max_len = import_len
if options.spec and options.showoccurrence:
print "# THIS IS NOT A VALID SPEC FORMAT"
		print "# COMMENTS HAVE TO START AT THE BEGINNING OF A LINE"
for element in sorted_classes:
if not options.all and element == "Native":
continue
if not options.alloccurrences:
one_class = []
for gimport in classes[element]:
# does it occur only in main package?
# remove it from classes[element]
skip = True
if gimport in package_imports_occurence:
for occurrence in package_imports_occurence[gimport]:
if not occurrence.endswith(":main"):
skip = False
break
if skip:
continue
one_class.append(gimport)
classes[element] = sorted(one_class)
# class name starts with prefix => filter out
if options.importpath != "" and element.startswith(options.importpath):
continue
# filter out all members of a class prefixed by prefix
gimports = []
for gimport in classes[element]:
if options.importpath != "" and gimport.startswith(options.importpath):
continue
gimports.append(gimport)
if gimports == []:
continue
if options.classes:
# Native class is just printed
if options.all and element == "Native":
# does not make sense to check Native class in PkgDB
if options.pkgdb:
continue
print "Class: %s" % element
if not options.short:
for gimport in gimports:
if options.showoccurrence:
print "\t%s (%s)" % (gimport, ", ".join(package_imports_occurence[gimport]))
else:
print "\t%s" % gimport
continue
# Translate non-native class into package name (if -d option)
if options.pkgdb:
ip_obj = ImportPath(element)
if not ip_obj.parse():
fmt_obj.printWarning("Unable to translate %s to package name" % element)
continue
pkg_name = ip_obj.getPackageName()
if pkg_name == "":
fmt_obj.printWarning(ip_obj.getError())
pkg_in_pkgdb = packageInPkgdb(pkg_name)
if pkg_in_pkgdb:
if options.verbose:
print (GREEN + "Class: %s (%s) PkgDB=%s" + ENDC) % (element, pkg_name, pkg_in_pkgdb)
else:
print (RED + "Class: %s (%s) PkgDB=%s" + ENDC ) % (element, pkg_name, pkg_in_pkgdb)
continue
# Print class
print "Class: %s" % element
if not options.short:
for gimport in sorted(gimports):
if options.showoccurrence:
print "\t%s (%s)" % (gimport, ", ".join(package_imports_occurence[gimport]))
else:
print "\t%s" % gimport
continue
# Spec file BR
if options.spec:
for gimport in sorted(classes[element]):
if options.requires:
if options.showoccurrence:
import_len = len(gimport)
print "Requires: golang(%s) %s# %s" % (gimport, (max_len - import_len)*" ", ", ".join(package_imports_occurence[gimport]))
else:
print "Requires: golang(%s)" % gimport
else:
if options.showoccurrence:
import_len = len(gimport)
print "BuildRequires: golang(%s) %s# %s" % (gimport, (max_len - import_len)*" ", ", ".join(package_imports_occurence[gimport]))
else:
print "BuildRequires: golang(%s)" % gimport
continue
# Just a list of all import paths
for gimport in sorted(classes[element]):
if options.showoccurrence:
import_len = len(gimport)
print "\t%s %s(%s)" % (gimport, (max_len - import_len)*" ", ", ".join(package_imports_occurence[gimport]))
else:
print "\t%s" % gimport<|fim▁end|> | dir = dir.strip()
if dir == "": |
<|file_name|>stlWRT.py<|end_file_name|><|fim▁begin|># $Id$
from module_base import ModuleBase
from module_mixins import FilenameViewModuleMixin
import module_utils
import vtk
class stlWRT(FilenameViewModuleMixin, ModuleBase):
def __init__(self, module_manager):
# call parent constructor
ModuleBase.__init__(self, module_manager)
# need to make sure that we're all happy triangles and stuff
self._cleaner = vtk.vtkCleanPolyData()
self._tf = vtk.vtkTriangleFilter()
self._tf.SetInput(self._cleaner.GetOutput())
self._writer = vtk.vtkSTLWriter()
self._writer.SetInput(self._tf.GetOutput())
# sorry about this, but the files get REALLY big if we write them
# in ASCII - I'll make this a gui option later.<|fim▁hole|>
# following is the standard way of connecting up the devide progress
# callback to a VTK object; you should do this for all objects in
mm = self._module_manager
for textobj in (('Cleaning data', self._cleaner),
('Converting to triangles', self._tf),
('Writing STL data', self._writer)):
module_utils.setup_vtk_object_progress(self, textobj[1],
textobj[0])
# ctor for this specific mixin
FilenameViewModuleMixin.__init__(
self,
'Select a filename',
'STL data (*.stl)|*.stl|All files (*)|*',
{'vtkSTLWriter': self._writer},
fileOpen=False)
# set up some defaults
self._config.filename = ''
self.sync_module_logic_with_config()
def close(self):
# we should disconnect all inputs
self.set_input(0, None)
del self._writer
FilenameViewModuleMixin.close(self)
def get_input_descriptions(self):
return ('vtkPolyData',)
def set_input(self, idx, input_stream):
self._cleaner.SetInput(input_stream)
def get_output_descriptions(self):
return ()
def get_output(self, idx):
raise Exception
def logic_to_config(self):
filename = self._writer.GetFileName()
if filename == None:
filename = ''
self._config.filename = filename
def config_to_logic(self):
self._writer.SetFileName(self._config.filename)
def view_to_config(self):
self._config.filename = self._getViewFrameFilename()
def config_to_view(self):
self._setViewFrameFilename(self._config.filename)
def execute_module(self):
if len(self._writer.GetFileName()):
self._writer.Write()<|fim▁end|> | #self._writer.SetFileTypeToBinary() |
<|file_name|>test_multi_types.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2021 Satoru SATOH <[email protected]>
# License: MIT
#
# pylint: disable=missing-docstring, relative-beyond-top-level<|fim▁hole|>from ..multi_load import test_multi_types as multi
from ..single_load import test_multi_types as single
from . import common
class SingleTestCase(common.SingleBase, single.TestCase):
pass
class MultiTestCase(common.MultiBase, multi.TestCase):
pass
# vim:sw=4:ts=4:et:<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rand;
use std::io;<|fim▁hole|>use rand::Rng;
fn main(){
let secret_number = rand::thread_rng().gen_range(1, 101);
    println!("Guess the number!");
loop {
        println!("Please input your guess.");
let mut guess = String::new();
io::stdin().read_line(&mut guess)
            .expect("Failed to read line");
let guess: u32 = guess.trim().parse()
.expect("Please type a number!");
        println!("You guessed: {}", guess);
match guess.cmp(&secret_number) {
            Ordering::Less => println!("Too small!"),
            Ordering::Greater => println!("Too big!"),
            Ordering::Equal => {
                println!("You win!");
break;
}
}
}
}<|fim▁end|> | use std::cmp::Ordering; |
<|file_name|>gae_models.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base model class."""
__author__ = 'Sean Lip'
import feconf
import utils
from core.platform import models
transaction_services = models.Registry.import_transaction_services()
from google.appengine.ext import ndb
class BaseModel(ndb.Model):
"""Base model for all persistent object storage classes."""
# When this entity was first created.
created_on = ndb.DateTimeProperty(auto_now_add=True)
# When this entity was last updated.
last_updated = ndb.DateTimeProperty(auto_now=True)
# Whether the current version of the file is deleted.
deleted = ndb.BooleanProperty(indexed=True, default=False)
@property
def id(self):
"""A unique id for this model instance."""
return self.key.id()
def _pre_put_hook(self):
"""This is run before model instances are saved to the datastore.
Subclasses of BaseModel should override this method.
"""
pass
class EntityNotFoundError(Exception):
"""Raised when no entity for a given id exists in the datastore."""
pass
@classmethod
def get(cls, entity_id, strict=True):
"""Gets an entity by id. Fails noisily if strict == True.
Args:
entity_id: str. The id of the entity.
strict: bool. Whether to fail noisily if no entity with the given id
exists in the datastore.
Returns:
None, if strict == False and no undeleted entity with the given id
exists in the datastore. Otherwise, the entity instance that
corresponds to the given id.
Raises:
- base_models.BaseModel.EntityNotFoundError: if strict == True and
no undeleted entity with the given id exists in the datastore.
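        Example (hypothetical subclass): ExampleModel.get('some_id', strict=False)
        returns None instead of raising this error when the id is absent.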
"""
entity = cls.get_by_id(entity_id)
if entity and entity.deleted:
entity = None
if strict and entity is None:
raise cls.EntityNotFoundError(
'Entity for class %s with id %s not found' %
(cls.__name__, entity_id))
return entity
def put(self):
super(BaseModel, self).put()
@classmethod
def get_multi(cls, entity_ids):
entity_keys = [ndb.Key(cls, entity_id) for entity_id in entity_ids]
return ndb.get_multi(entity_keys)
@classmethod
def put_multi(cls, entities):
return ndb.put_multi(entities)
def delete(self):
super(BaseModel, self).key.delete()
@classmethod
def get_all(cls, include_deleted_entities=False):
"""Returns a filterable iterable of all entities of this class.
If include_deleted_entities is True then entities that have been marked
deleted are returned as well.
"""
query = cls.query()
if not include_deleted_entities:
query = query.filter(cls.deleted == False)
return query
@classmethod
def get_new_id(cls, entity_name):
"""Gets a new id for an entity, based on its name.
The returned id is guaranteed to be unique among all instances of this
entity.
Args:
entity_name: the name of the entity. Coerced to a utf-8 encoded
string. Defaults to ''.
Returns:
str: a new unique id for this entity class.
Raises:
- Exception: if an id cannot be generated within a reasonable number
of attempts.
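        Example (hypothetical): ExampleModel.get_new_id('My Entity') returns a
        12-character hash-based id not used by any existing instance of the class.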
"""
try:
entity_name = unicode(entity_name).encode('utf-8')
except Exception:
entity_name = ''
MAX_RETRIES = 10
RAND_RANGE = 127 * 127
ID_LENGTH = 12
for i in range(MAX_RETRIES):
new_id = utils.convert_to_hash(
'%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),
ID_LENGTH)
if not cls.get_by_id(new_id):
return new_id
raise Exception('New id generator is producing too many collisions.')
class VersionedModel(BaseModel):
"""Model that handles storage of the version history of model instances.
To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a
SNAPSHOT_CONTENT_CLASS. The former must contain the String fields
'committer_id', 'commit_type' and 'commit_message', and a JSON field for
the Python list of dicts, 'commit_cmds'. The latter must contain the JSON
field 'content'. The item that is being versioned must be serializable to a
JSON blob.
Note that commit() should be used for VersionedModels, as opposed to put()
for direct subclasses of BaseModel.
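    Rough usage sketch (hypothetical subclass names, not defined in this module):

        class ThingSnapshotMetadataModel(BaseSnapshotMetadataModel): pass
        class ThingSnapshotContentModel(BaseSnapshotContentModel): pass

        class ThingModel(VersionedModel):
            SNAPSHOT_METADATA_CLASS = ThingSnapshotMetadataModel
            SNAPSHOT_CONTENT_CLASS = ThingSnapshotContentModel

        ThingModel(id='thing1').commit(committer_id, 'Created thing', [{'cmd': 'create'}])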
"""
# The class designated as the snapshot model. This should be a subclass of
# BaseSnapshotMetadataModel.
SNAPSHOT_METADATA_CLASS = None
# The class designated as the snapshot content model. This should be a
# subclass of BaseSnapshotContentModel.
SNAPSHOT_CONTENT_CLASS = None
# Whether reverting is allowed. Default is False.
ALLOW_REVERT = False
### IMPORTANT: Subclasses should only overwrite things above this line. ###
# The possible commit types.
_COMMIT_TYPE_CREATE = 'create'
_COMMIT_TYPE_REVERT = 'revert'
_COMMIT_TYPE_EDIT = 'edit'
_COMMIT_TYPE_DELETE = 'delete'
# A list containing the possible commit types.
COMMIT_TYPE_CHOICES = [
_COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT,
_COMMIT_TYPE_DELETE
]
# The delimiter used to separate the version number from the model instance
# id. To get the instance id from a snapshot id, use Python's rfind()
# method to find the location of this delimiter.
_VERSION_DELIMITER = '-'
# The reserved prefix for keys that are automatically inserted into a
# commit_cmd dict by this model.
_AUTOGENERATED_PREFIX = 'AUTO'
# The current version number of this instance. In each PUT operation,
# this number is incremented and a snapshot of the modified instance is
# stored in the snapshot metadata and content models. The snapshot
# version number starts at 1 when the model instance is first created.
# All data in this instance represents the version at HEAD; data about the
# previous versions is stored in the snapshot models.
version = ndb.IntegerProperty(default=0)
def _require_not_marked_deleted(self):
if self.deleted:
raise Exception('This model instance has been deleted.')
def _compute_snapshot(self):
"""Generates a snapshot (a Python dict) from the model fields."""
return self.to_dict(exclude=['created_on', 'last_updated'])
def _reconstitute(self, snapshot_dict):
"""Makes this instance into a reconstitution of the given snapshot."""
self.populate(**snapshot_dict)
return self
def _reconstitute_from_snapshot_id(self, snapshot_id):
"""Makes this instance into a reconstitution of the given snapshot."""
snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id)
snapshot_dict = snapshot_model.content
return self._reconstitute(snapshot_dict)
@classmethod
def _get_snapshot_id(cls, instance_id, version_number):
return '%s%s%s' % (
instance_id, cls._VERSION_DELIMITER, version_number)
def _trusted_commit(
self, committer_id, commit_type, commit_message, commit_cmds):
if self.SNAPSHOT_METADATA_CLASS is None:
raise Exception('No snapshot metadata class defined.')
if self.SNAPSHOT_CONTENT_CLASS is None:
raise Exception('No snapshot content class defined.')
if not isinstance(commit_cmds, list):
raise Exception(
'Expected commit_cmds to be a list of dicts, received %s'
% commit_cmds)
for item in commit_cmds:
if not isinstance(item, dict):
raise Exception(
'Expected commit_cmds to be a list of dicts, received %s'
% commit_cmds)
self.version += 1
snapshot = self._compute_snapshot()
snapshot_id = self._get_snapshot_id(self.id, self.version)
snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS(
id=snapshot_id, committer_id=committer_id, commit_type=commit_type,
commit_message=commit_message, commit_cmds=commit_cmds)
snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS(
id=snapshot_id, content=snapshot)
transaction_services.run_in_transaction(
ndb.put_multi,
[snapshot_metadata_instance, snapshot_content_instance, self])
def delete(self, committer_id, commit_message, force_deletion=False):
if force_deletion:
current_version = self.version
version_numbers = [str(num + 1) for num in range(current_version)]
snapshot_ids = [
self._get_snapshot_id(self.id, version_number)
for version_number in version_numbers]
metadata_keys = [
ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id)
for snapshot_id in snapshot_ids]
ndb.delete_multi(metadata_keys)
content_keys = [
ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id)
for snapshot_id in snapshot_ids]
ndb.delete_multi(content_keys)
super(VersionedModel, self).delete()
else:
self._require_not_marked_deleted()
self.deleted = True
CMD_DELETE = '%s_mark_deleted' % self._AUTOGENERATED_PREFIX
commit_cmds = [{
'cmd': CMD_DELETE
}]
self._trusted_commit(
committer_id, self._COMMIT_TYPE_DELETE, commit_message,
commit_cmds)
def put(self, *args, **kwargs):
"""For VersionedModels, this method is replaced with commit()."""
raise NotImplementedError
def commit(self, committer_id, commit_message, commit_cmds):
"""Saves a version snapshot and updates the model.
commit_cmds should give sufficient information to reconstruct the
commit.
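        A hypothetical example of a commit_cmds value:
            [{'cmd': 'edit_title', 'new_value': 'A new title'}]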
"""
self._require_not_marked_deleted()
for commit_cmd in commit_cmds:
if 'cmd' not in commit_cmd:
raise Exception(
'Invalid commit_cmd: %s. Expected a \'cmd\' key.'
% commit_cmd)
if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX):
raise Exception(
                    'Invalid change list command: %s' % commit_cmd['cmd'])
commit_type = (
self._COMMIT_TYPE_CREATE if self.version == 0 else
self._COMMIT_TYPE_EDIT)<|fim▁hole|>
def revert(self, committer_id, commit_message, version_number):
self._require_not_marked_deleted()
if not self.ALLOW_REVERT:
raise Exception(
'Reverting of objects of type %s is not allowed.'
% self.__class__.__name__)
CMD_REVERT = '%s_revert_version_number' % self._AUTOGENERATED_PREFIX
commit_cmds = [{
'cmd': CMD_REVERT,
'version_number': version_number
}]
# Do not overwrite the version number.
current_version = self.version
snapshot_id = self._get_snapshot_id(self.id, version_number)
self._reconstitute_from_snapshot_id(snapshot_id)
self.version = current_version
self._trusted_commit(
committer_id, self._COMMIT_TYPE_REVERT, commit_message,
commit_cmds)
@classmethod
def get_version(cls, model_instance_id, version_number):
"""Returns a model instance representing the given version.
The snapshot content is used to populate this model instance. The
snapshot metadata is not used.
"""
cls.get(model_instance_id)._require_not_marked_deleted()
snapshot_id = cls._get_snapshot_id(model_instance_id, version_number)
return cls(id=model_instance_id)._reconstitute_from_snapshot_id(
snapshot_id)
@classmethod
def get(cls, entity_id, strict=True, version=None):
"""Gets an entity by id. Fails noisily if strict == True."""
if version is None:
return super(VersionedModel, cls).get(entity_id, strict=strict)
else:
return cls.get_version(entity_id, version)
@classmethod
def get_snapshots_metadata(cls, model_instance_id, version_numbers):
"""Returns a list of dicts, each representing a model snapshot.
One dict is returned for each version number in the list of version
numbers requested. If any of the version numbers does not exist, an
error is raised.
"""
cls.get(model_instance_id)._require_not_marked_deleted()
snapshot_ids = [
cls._get_snapshot_id(model_instance_id, version_number)
for version_number in version_numbers]
metadata_keys = [
ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id)
for snapshot_id in snapshot_ids]
returned_models = ndb.get_multi(metadata_keys)
for ind, model in enumerate(returned_models):
if model is None:
raise Exception(
'Invalid version number %s for model %s with id %s'
% (version_numbers[ind], cls.__name__, model_instance_id))
return [{
'committer_id': model.committer_id,
'commit_message': model.commit_message,
'commit_cmds': model.commit_cmds,
'commit_type': model.commit_type,
'version_number': version_numbers[ind],
'created_on': model.created_on.strftime(
feconf.HUMAN_READABLE_DATETIME_FORMAT),
} for (ind, model) in enumerate(returned_models)]
class BaseSnapshotMetadataModel(BaseModel):
"""Base class for snapshot metadata classes.
The id of this model is computed using VersionedModel.get_snapshot_id().
"""
# The id of the user who committed this revision.
committer_id = ndb.StringProperty(required=True)
# The type of the commit associated with this snapshot.
commit_type = ndb.StringProperty(
required=True, choices=VersionedModel.COMMIT_TYPE_CHOICES)
# The commit message associated with this snapshot.
commit_message = ndb.TextProperty(indexed=False)
# A sequence of commands that can be used to describe this commit.
# Represented as a list of dicts.
commit_cmds = ndb.JsonProperty(indexed=False)
class BaseSnapshotContentModel(BaseModel):
"""Base class for snapshot content classes.
The id of this model is computed using VersionedModel.get_snapshot_id().
"""
# The snapshot content, as a JSON blob.
content = ndb.JsonProperty(indexed=False)<|fim▁end|> |
self._trusted_commit(
committer_id, commit_type, commit_message, commit_cmds) |
<|file_name|>vprEmulatorPatientModel.js<|end_file_name|><|fim▁begin|>'use strict';
const _ = require('underscore');
const Handlebars = require('handlebars');
const moment = require('moment');
const VPREmulatorModel = require('./vprEmulatorModel');
const toFMDateTime = require('../vdmUtils').toFMDateTime;
class VPRPatientEmulator extends VPREmulatorModel {
template() {
return [
"<results version='{{vprDataVersion}}' timeZone='-0500' >",
"<demographics total='{{total}}' >",
'<patient>',
"<bid value='{{bid}}' />",
"<dob value='{{toDate dateOfBirth}}' />",
'{{{getFacilities}}}',
"<familyName value='{{toFamilyName name}}' />",
"<fullName value='{{name}}' />",
"<gender value='{{toGender sex}}' />",
"<givenNames value='{{toGivenName name}}' />",
"<id value='{{patientId}}' />",
"<inpatient value='{{getInpatient}}' />",
"<lrdfn value='{{getLrdfn}}' />",
"<sc value='{{sc}}' />",
"<ssn value='{{socialSecurityNumber}}' />",
"<veteran value='{{veteran}}' />",
'</patient>',
'</demographics>',
'</results>',
];
}
compileTemplate(mvdmForm) {
Handlebars.registerHelper('toDate', date => toFMDateTime(date.value));
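        // toFMDateTime yields a VA FileMan-style date (years counted from 1700), so a
        // date like 1990-05-17 would become 2900517 (assumption based on FileMan conventions).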
Handlebars.registerHelper('toFamilyName', name => name.substring(0, name.indexOf(',')));
Handlebars.registerHelper('toGender', (sex) => {
const map = {
MALE: 'M',
FEMALE: 'F',<|fim▁hole|> return map[sex];
});
Handlebars.registerHelper('toGivenName', name => name.substring(name.indexOf(',') + 1, name.length));
Handlebars.registerHelper('getInpatient', () => {
if (mvdmForm.currentAdmission) {
return 'true';
}
return 'false';
});
Handlebars.registerHelper('getLrdfn', () => {
if (_.has(mvdmForm, 'laboratoryReference')) {
return mvdmForm.laboratoryReference.id.split('-')[1];
}
return 0;
});
Handlebars.registerHelper('getFacilities', () => {
/*
* Facilities visited by the Patient
*
* In the general case there may be many facilities (MPI can be called). For an
* isolate VISTA, there will be at most one, this VISTA.
*
* TODO: replace with a Javascript Utility or a computed property in Patient
*/
const dateStr = moment().format('YYYY-MM-DD');
return `${'<facilities>' +
"<facility code='050' name='SOFTWARE SERVICE' latestDate='"}${toFMDateTime(dateStr)}' domain='FOIA.DOMAIN.EXT' />` +
'</facilities>';
});
mvdmForm.bid = `C${mvdmForm.socialSecurityNumber.substring(5, 9)}`;
mvdmForm.patientId = mvdmForm.id.replace('2-', '');
mvdmForm.sc = mvdmForm.isServiceConnected ? 1 : 0;
mvdmForm.veteran = mvdmForm.isVeteran ? 1 : 0;
const temp = Handlebars.compile(this.template().join(''));
const res = temp(mvdmForm);
return res;
}
getOnePatientDetail(res) {
if (res === 'error') {
return `<results version='${this.vprDataVersion}' timeZone='-0500' ><demographics total='0' ></demographics></results>`;
}
res.vprDataVersion = this.vprDataVersion;
res.total = 1;
const result = this.compileTemplate(res);
return result.replace(/"/g, '\'');
}
toReturnValue(invokeResult) {
return this.getOnePatientDetail(invokeResult);
}
transformIEN(ien) {
return `2-${ien}`;
}
}
module.exports = VPRPatientEmulator;<|fim▁end|> | }; |
<|file_name|>controllers.py<|end_file_name|><|fim▁begin|><<<<<<< HEAD
from flask import Blueprint, render_template, request, url_for, jsonify
from config import mongo
import pandas as pd
import json
from bson import json_util
import retrieve_model as rmodel
from collections import Counter
main = Blueprint('main', __name__, template_folder='templates')
@main.route('/')
def index():
#mongo.db.visits.insert_one({"no":"way"})
#visits = mongo.db.visits.find_one()
#return str(visits)
return render_template('index.html')
@main.route('/predict/')
def get_started():
down_list = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
quarter_list = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
clock_list = [{'value':15,'name':'<15'}, {'value':14,'name':'<14'}, {'value':13,'name':'<13'},
{'value':12,'name':'<12'}, {'value':11,'name':'<11'}, {'value':10,'name':'<10'},
{'value':9,'name':'<9'}, {'value':8,'name':'<8'}, {'value':7,'name':'<7'},
{'value':6,'name':'<6'}, {'value':5,'name':'<5'}, {'value':4,'name':'<4'},
{'value':3,'name':'<3'}, {'value':2,'name':'<2'}, {'value':1,'name':'<1'}]
yards_list = [{'value':0,'name':'inches'}, {'value':1,'name':'1'},
{'value':2,'name':'2'}, {'value':3,'name':'3'}, {'value':4,'name':'4'},
{'value':5,'name':'5'}, {'value':6,'name':'6'}, {'value':7,'name':'7'},
{'value':8,'name':'8'}, {'value':9,'name':'9'}, {'value':10,'name':'10'},
{'value':11,'name':'11'}, {'value':12,'name':'12'}, {'value':13,'name':'13'},
{'value':14,'name':'14'}, {'value':15,'name':'15'}, {'value':16,'name':'16'},
{'value':17,'name':'17'}, {'value':18,'name':'18'}, {'value':19,'name':'19'},
{'value':20,'name':'20'}, {'value':21,'name':'21'}, {'value':22,'name':'22'},
{'value':23,'name':'23'}, {'value':24,'name':'24'}, {'value':25,'name':'25'}]
field_list = range(0,101,1)
score_list = range(0,61,1)
down_dict = [{'value':1,'name':'1st'},{'value':2,'name':'2nd'},{'value':3,'name':'3rd'},{'value':4,'name':'4th'}]
return render_template('predict.html',
=======
from flask import Blueprint, render_template, request, url_for
from config import mongo
main = Blueprint('main', __name__, template_folder='templates')
@main.route('/')
def index():
mongo.db.visits.insert_one({"foo":"bar"})
visits = mongo.db.visits.find_one()
return str(visits)
#return render_template('index.html')
@main.route('/getstarted/')
def get_started():
down_list = ['1st','2nd','3rd','4th']
quarter_list = ['1st','2nd','3rd','4th']
clock_list = ['> 15 min', '> 10 min', '> 5 min', '> 2 min', '< 2 min', '< 1 min']
yards_list = ['inches', 'goal', '1', '2', '3', '4', '5', '6', '7' ,'8', '9', '10', '> 10']
field_list = range(0,105,5)
score_list = range(-60,61,1)
return render_template('getstarted.html',
>>>>>>> master
down_list=down_list,
quarter_list=quarter_list,
clock_list=clock_list,
yards_list=yards_list,
field_list=field_list,
<<<<<<< HEAD
score_list=score_list,
down_dict=down_dict
)
@main.route('/results/', methods=['POST'])
def results():
=======
score_list=score_list
)
@main.route('/run/', methods=['POST'])
def run():
>>>>>>> master
down = request.form['down']
quarter = request.form['quarter']
clock = request.form['clock']
yards = request.form['yards']
field = request.form['field']
score = request.form['score']
<<<<<<< HEAD
sign = request.form['sign']
guess = request.form['guess']
score = str(int(score) * int(sign))
# Store scenario in mongodb
scenario = {
'down': int(down),
'quarter': int(quarter),
'clock': int(clock),
'yards': int(yards),
'field': int(field),
'score': int(score),
'guess': guess
}
# Insert the current user's guess into the DB
    print('Putting this into db:', scenario)
mongo.db.scenarios.insert_one(scenario)
# Pull User guesses from MongoDB
#scenarios = mongo.db.scenarios.find()
# Pull NFL Stats from MongoDB
#nflstats = mongo.db.nfldata.find()
guesses = {'pass':'Pass', 'run':'Run', 'punt':'Punt', 'fg':'Field Goal', 'kneel': 'QB Kneel'}
try:
return render_template('results.html',
guess_title = guesses[guess],
=======
guess = request.form['guess']
# Store scenario in mongodb
scenario = {
'down': down,
'quarter': quarter,
'clock': clock,
'yards': yards,
'field': field,
'score': score,
'guess': guess
}
mongo.db.scenarios.insert_one(scenario)
scenarios = mongo.db.scenarios.find()
try:<|fim▁hole|> clock=clock,
yards=yards,
field=field,
score=score,
guess=guess,
<<<<<<< HEAD
scenarios=[None],#scenarios,
nflstats=[None]#nflstats
)
except Exception as e:
return "Something went wrong..." + str(e)
@main.route('/stats/')
def tables():
title = 'Test Table'
title = rmodel.predict_proba(4,4,1,20,-1)
table = title
return render_template('stats.html', table=table, title=title)
@main.route('/data/guesses/')
def guessData():
guess = request.args.get('guess')
down = request.args.get('down')
quarter = request.args.get('quarter')
clock = request.args.get('clock')
yards = request.args.get('yards')
field = request.args.get('field')
score = request.args.get('score')
search_dict = request.args.to_dict()
for key in search_dict:
#if key != 'guess':
try:
search_dict[key] = int(search_dict[key])
except:
pass
print(search_dict)
s=[data['guess'] for data in mongo.db.scenarios.find(search_dict)]
options = ['pass', 'run', 'punt', 'fg', 'kneel']
count = {option:s.count(option) for option in options}
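    # Illustrative result shape (hypothetical numbers): {'pass': 3, 'run': 1, 'punt': 0, 'fg': 2, 'kneel': 0}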
print(count)
return json.dumps(count, default=json_util.default)
@main.route('/data/nfl/')
def nflData():
playtype = request.args.get('PlayType')
down = request.args.get('down')
quarter = request.args.get('quarter')
clock = request.args.get('clock')
yards = request.args.get('yards')
field = request.args.get('field')
score = request.args.get('score')
search_dict = request.args.to_dict()
for key in search_dict:
if key != 'playtype':
try:
search_dict[key] = int(search_dict[key])
except:
pass
s=[data["PlayType"] for data in mongo.db.nfldata.find(search_dict)]
print(s)
options = ['pass', 'run', 'punt', 'fg', 'kneel']
count = {option:s.count(option) for option in options}
print(count)
return json.dumps(count, default=json_util.default)
@main.route('/api/predict/')
def apiPredict():
arg_dict = request.args.to_dict()
for key in arg_dict:
try:
arg_dict[key] = int(arg_dict[key])
except:
pass
calculations = [
{name:rmodel.predict_group_proba(
arg_dict['quarter'],
arg_dict['down'],
arg_dict['yards'],
arg_dict['clock'],
arg_dict['field'],
arg_dict['score'],
name)
} for name in ['quarter', 'down', 'yards', 'timeunder', 'yrdline100', 'scorediff']
]
calculations.append({'request':rmodel.predict_proba(
arg_dict['quarter'],
arg_dict['down'],
arg_dict['yards'],
arg_dict['clock'],
arg_dict['field'],
arg_dict['score'],
False)
})
return jsonify(calculations)
=======
scenarios=scenarios
)
except:
return "fail"
>>>>>>> master<|fim▁end|> | return render_template('results.html',
>>>>>>> master
down=down,
quarter=quarter, |
<|file_name|>move_vm.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::FuzzTargetImpl;
use anyhow::{bail, Result};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use diem_proptest_helpers::ValueGenerator;
use move_core_types::value::MoveTypeLayout;
use move_vm_types::values::{prop::layout_and_value_strategy, Value};
use std::io::Cursor;
#[derive(Clone, Debug, Default)]
pub struct ValueTarget;
impl FuzzTargetImpl for ValueTarget {
fn description(&self) -> &'static str {
"VM values + types (custom deserializer)"
}
fn generate(&self, _idx: usize, gen: &mut ValueGenerator) -> Option<Vec<u8>> {
let (layout, value) = gen.generate(layout_and_value_strategy());
// Values as currently serialized are not self-describing, so store a serialized form of the
// layout + kind info along with the value as well.
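        // Resulting blob layout (sketch, for illustration only):
        //   [ 8-byte big-endian layout length | BCS-encoded MoveTypeLayout | serialized Value ]
        // `deserialize` below reads the pieces back in the same order.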
let layout_blob = bcs::to_bytes(&layout).unwrap();
let value_blob = value.simple_serialize(&layout).expect("must serialize");
let mut blob = vec![];
// Prefix the layout blob with its length.
blob.write_u64::<BigEndian>(layout_blob.len() as u64)
.expect("writing should work");
blob.extend_from_slice(&layout_blob);
blob.extend_from_slice(&value_blob);
Some(blob)
}
fn fuzz(&self, data: &[u8]) {
let _ = deserialize(data);
}
}
fn is_valid_layout(layout: &MoveTypeLayout) -> bool {
use MoveTypeLayout as L;
match layout {
L::Bool | L::U8 | L::U64 | L::U128 | L::Address | L::Signer => true,
L::Vector(layout) => is_valid_layout(layout),
L::Struct(struct_layout) => {
if struct_layout.fields().is_empty() {
return false;
}
struct_layout.fields().iter().all(is_valid_layout)
}
}
}
fn deserialize(data: &[u8]) -> Result<()> {
let mut data = Cursor::new(data);
// Read the length of the layout blob.
let layout_len = data.read_u64::<BigEndian>()? as usize;
let position = data.position() as usize;
let data = &data.into_inner()[position..];
if data.len() < layout_len {
bail!("too little data");
}
let layout_data = &data[..layout_len];
let value_data = &data[layout_len..];
let layout: MoveTypeLayout = bcs::from_bytes(layout_data)?;
// The fuzzer may alter the raw bytes, resulting in invalid layouts that will not
// pass the bytecode verifier. We need to filter these out as they can show up as
// false positives.
if !is_valid_layout(&layout) {
bail!("bad layout")
}
let _ = Value::simple_deserialize(value_data, &layout);<|fim▁hole|><|fim▁end|> | Ok(())
} |
<|file_name|>run_test.py<|end_file_name|><|fim▁begin|>import faker
import faker.providers
import faker.providers.address
import faker.providers.address.cs_CZ
import faker.providers.address.de_DE
import faker.providers.address.el_GR
import faker.providers.address.en
import faker.providers.address.en_AU
import faker.providers.address.en_CA
import faker.providers.address.en_GB
import faker.providers.address.en_US
import faker.providers.address.es
import faker.providers.address.es_ES
import faker.providers.address.es_MX
import faker.providers.address.fa_IR
import faker.providers.address.fi_FI
import faker.providers.address.fr_CH
import faker.providers.address.fr_FR
import faker.providers.address.hi_IN
import faker.providers.address.hr_HR
import faker.providers.address.it_IT
import faker.providers.address.ja_JP
import faker.providers.address.ko_KR
import faker.providers.address.ne_NP
import faker.providers.address.nl_BE
import faker.providers.address.nl_NL
import faker.providers.address.no_NO
import faker.providers.address.pl_PL
import faker.providers.address.pt_BR
import faker.providers.address.pt_PT
import faker.providers.address.ru_RU
import faker.providers.address.sk_SK
import faker.providers.address.sl_SI
import faker.providers.address.sv_SE
import faker.providers.address.uk_UA
import faker.providers.address.zh_CN
import faker.providers.address.zh_TW
import faker.providers.barcode
import faker.providers.barcode.en_US
import faker.providers.color
import faker.providers.color.en_US
import faker.providers.color.uk_UA
import faker.providers.company
import faker.providers.company.bg_BG
import faker.providers.company.cs_CZ
import faker.providers.company.de_DE
import faker.providers.company.en_US
import faker.providers.company.es_MX
import faker.providers.company.fa_IR
import faker.providers.company.fi_FI
import faker.providers.company.fr_CH
import faker.providers.company.fr_FR
import faker.providers.company.hr_HR
import faker.providers.company.it_IT
import faker.providers.company.ja_JP
import faker.providers.company.ko_KR
import faker.providers.company.no_NO
import faker.providers.company.pt_BR
import faker.providers.company.pt_PT
import faker.providers.company.ru_RU
import faker.providers.company.sk_SK
import faker.providers.company.sl_SI
import faker.providers.company.sv_SE
import faker.providers.company.zh_CN
import faker.providers.company.zh_TW
import faker.providers.credit_card
import faker.providers.credit_card.en_US
import faker.providers.currency
import faker.providers.currency.en_US
import faker.providers.date_time
import faker.providers.date_time.en_US
import faker.providers.file
import faker.providers.file.en_US
import faker.providers.internet
import faker.providers.internet.bg_BG
import faker.providers.internet.bs_BA
import faker.providers.internet.cs_CZ
import faker.providers.internet.de_AT
import faker.providers.internet.de_DE
import faker.providers.internet.el_GR
import faker.providers.internet.en_AU
import faker.providers.internet.en_US
import faker.providers.internet.fa_IR
import faker.providers.internet.fi_FI
import faker.providers.internet.fr_CH
import faker.providers.internet.fr_FR
import faker.providers.internet.hr_HR
import faker.providers.internet.ja_JP
import faker.providers.internet.ko_KR
import faker.providers.internet.no_NO
import faker.providers.internet.pt_BR
import faker.providers.internet.pt_PT
import faker.providers.internet.ru_RU
import faker.providers.internet.sk_SK
import faker.providers.internet.sl_SI
import faker.providers.internet.sv_SE
import faker.providers.internet.uk_UA
import faker.providers.internet.zh_CN
import faker.providers.job
import faker.providers.job.en_US
import faker.providers.job.fa_IR
import faker.providers.job.fr_CH
import faker.providers.job.fr_FR
import faker.providers.job.hr_HR
import faker.providers.job.pl_PL
import faker.providers.job.ru_RU
import faker.providers.job.uk_UA
import faker.providers.job.zh_TW
import faker.providers.lorem
import faker.providers.lorem.el_GR
import faker.providers.lorem.la
import faker.providers.lorem.ru_RU
import faker.providers.misc
import faker.providers.misc.en_US
import faker.providers.person
import faker.providers.person.bg_BG
import faker.providers.person.cs_CZ
import faker.providers.person.de_AT
import faker.providers.person.de_DE
import faker.providers.person.dk_DK
import faker.providers.person.el_GR
import faker.providers.person.en
import faker.providers.person.en_GB
import faker.providers.person.en_US
import faker.providers.person.es_ES
import faker.providers.person.es_MX
import faker.providers.person.fa_IR
import faker.providers.person.fi_FI
import faker.providers.person.fr_CH
import faker.providers.person.fr_FR
import faker.providers.person.hi_IN
import faker.providers.person.hr_HR
import faker.providers.person.it_IT
import faker.providers.person.ja_JP
import faker.providers.person.ko_KR
import faker.providers.person.lt_LT
import faker.providers.person.lv_LV
import faker.providers.person.ne_NP
import faker.providers.person.nl_NL
import faker.providers.person.no_NO
import faker.providers.person.pl_PL
import faker.providers.person.pt_BR
import faker.providers.person.pt_PT
import faker.providers.person.ru_RU
import faker.providers.person.sl_SI
import faker.providers.person.sv_SE
import faker.providers.person.tr_TR
import faker.providers.person.uk_UA
import faker.providers.person.zh_CN
import faker.providers.person.zh_TW
import faker.providers.phone_number
import faker.providers.phone_number.bg_BG
import faker.providers.phone_number.bs_BA
import faker.providers.phone_number.cs_CZ
import faker.providers.phone_number.de_DE
import faker.providers.phone_number.dk_DK
import faker.providers.phone_number.el_GR
import faker.providers.phone_number.en_AU
import faker.providers.phone_number.en_CA
import faker.providers.phone_number.en_GB
import faker.providers.phone_number.en_US
import faker.providers.phone_number.es_ES
import faker.providers.phone_number.es_MX
import faker.providers.phone_number.fa_IR
import faker.providers.phone_number.fi_FI
import faker.providers.phone_number.fr_CH
import faker.providers.phone_number.fr_FR
import faker.providers.phone_number.hi_IN
import faker.providers.phone_number.hr_HR
import faker.providers.phone_number.it_IT
import faker.providers.phone_number.ja_JP
import faker.providers.phone_number.ko_KR
import faker.providers.phone_number.lt_LT
import faker.providers.phone_number.lv_LV
import faker.providers.phone_number.ne_NP
import faker.providers.phone_number.nl_BE
import faker.providers.phone_number.nl_NL
import faker.providers.phone_number.no_NO
import faker.providers.phone_number.pl_PL
import faker.providers.phone_number.pt_BR
import faker.providers.phone_number.pt_PT
import faker.providers.phone_number.ru_RU
import faker.providers.phone_number.sk_SK
import faker.providers.phone_number.sl_SI
import faker.providers.phone_number.sv_SE
import faker.providers.phone_number.tr_TR
import faker.providers.phone_number.uk_UA
import faker.providers.phone_number.zh_CN
import faker.providers.phone_number.zh_TW
import faker.providers.profile
import faker.providers.profile.en_US
import faker.providers.python
import faker.providers.python.en_US
import faker.providers.ssn
import faker.providers.ssn.en_CA
import faker.providers.ssn.en_US
import faker.providers.ssn.fi_FI
import faker.providers.ssn.fr_CH
import faker.providers.ssn.hr_HR
import faker.providers.ssn.it_IT
import faker.providers.ssn.ko_KR
import faker.providers.ssn.nl_BE
import faker.providers.ssn.nl_NL
import faker.providers.ssn.pt_BR
import faker.providers.ssn.ru_RU<|fim▁hole|>import faker.providers.ssn.zh_CN
import faker.providers.ssn.zh_TW
import faker.providers.user_agent
import faker.providers.user_agent.en_US
import faker.utils<|fim▁end|> | import faker.providers.ssn.sv_SE
import faker.providers.ssn.uk_UA |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('staticblog.views',
(r'^$', 'archive'),<|fim▁hole|>)<|fim▁end|> | (r'^([\-\w]+)$', 'render_post'),
(r'^git/receive', 'handle_hook'), |
<|file_name|>releases.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script retrieves the history of all V8 branches and trunk revisions and
# their corresponding Chromium revisions.
# Requires a chromium checkout with branch heads:
# gclient sync --with_branch_heads
# gclient fetch
import argparse
import csv
import itertools
import json
import os
import re
import sys
from common_includes import *
CONFIG = {
"BRANCHNAME": "retrieve-v8-releases",
"PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile",
}
# Expression for retrieving the bleeding edge revision from a commit message.
PUSH_MSG_SVN_RE = re.compile(r".* \(based on bleeding_edge revision r(\d+)\)$")
PUSH_MSG_GIT_RE = re.compile(r".* \(based on ([a-fA-F0-9]+)\)$")
# Expression for retrieving the merged patches from a merge commit message
# (old and new format).
MERGE_MESSAGE_RE = re.compile(r"^.*[M|m]erged (.+)(\)| into).*$", re.M)
CHERRY_PICK_TITLE_GIT_RE = re.compile(r"^.* \(cherry\-pick\)\.?$")
# New git message for cherry-picked CLs. One message per line.
MERGE_MESSAGE_GIT_RE = re.compile(r"^Merged ([a-fA-F0-9]+)\.?$")
# Expression for retrieving reverted patches from a commit message (old and
# new format).
ROLLBACK_MESSAGE_RE = re.compile(r"^.*[R|r]ollback of (.+)(\)| in).*$", re.M)
# New git message for reverted CLs. One message per line.
ROLLBACK_MESSAGE_GIT_RE = re.compile(r"^Rollback of ([a-fA-F0-9]+)\.?$")
# Expression for retrieving the code review link.
REVIEW_LINK_RE = re.compile(r"^Review URL: (.+)$", re.M)
# Expression with three versions (historical) for extracting the v8 revision
# from the chromium DEPS file.
DEPS_RE = re.compile(r"""^\s*(?:["']v8_revision["']: ["']"""
"""|\(Var\("googlecode_url"\) % "v8"\) \+ "\/trunk@"""
"""|"http\:\/\/v8\.googlecode\.com\/svn\/trunk@)"""
"""([^"']+)["'].*$""", re.M)
# Expression to pick tag and revision for bleeding edge tags. To be used with
# output of 'svn log'.
BLEEDING_EDGE_TAGS_RE = re.compile(
r"A \/tags\/([^\s]+) \(from \/branches\/bleeding_edge\:(\d+)\)")
def SortBranches(branches):
"""Sort branches with version number names."""
return sorted(branches, key=SortingKey, reverse=True)
<|fim▁hole|>def FilterDuplicatesAndReverse(cr_releases):
"""Returns the chromium releases in reverse order filtered by v8 revision
duplicates.
cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev.
"""
last = ""
result = []
for release in reversed(cr_releases):
if last == release[1]:
continue
last = release[1]
result.append(release)
return result
def BuildRevisionRanges(cr_releases):
"""Returns a mapping of v8 revision -> chromium ranges.
The ranges are comma-separated, each range has the form R1:R2. The newest
entry is the only one of the form R1, as there is no end range.
cr_releases is a list of [cr_rev, v8_rev] reverse-sorted by cr_rev.
cr_rev either refers to a chromium svn revision or a chromium branch number.
"""
range_lists = {}
cr_releases = FilterDuplicatesAndReverse(cr_releases)
# Visit pairs of cr releases from oldest to newest.
for cr_from, cr_to in itertools.izip(
cr_releases, itertools.islice(cr_releases, 1, None)):
# Assume the chromium revisions are all different.
assert cr_from[0] != cr_to[0]
# TODO(machenbach): Subtraction is not git friendly.
ran = "%s:%d" % (cr_from[0], int(cr_to[0]) - 1)
# Collect the ranges in lists per revision.
range_lists.setdefault(cr_from[1], []).append(ran)
# Add the newest revision.
if cr_releases:
range_lists.setdefault(cr_releases[-1][1], []).append(cr_releases[-1][0])
# Stringify and comma-separate the range lists.
return dict((rev, ", ".join(ran)) for rev, ran in range_lists.iteritems())
def MatchSafe(match):
if match:
return match.group(1)
else:
return ""
class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
self.CommonPrepare()
self.PrepareBranch()
class RetrieveV8Releases(Step):
MESSAGE = "Retrieve all V8 releases."
def ExceedsMax(self, releases):
return (self._options.max_releases > 0
and len(releases) > self._options.max_releases)
def GetBleedingEdgeFromPush(self, title):
return MatchSafe(PUSH_MSG_SVN_RE.match(title))
def GetBleedingEdgeGitFromPush(self, title):
return MatchSafe(PUSH_MSG_GIT_RE.match(title))
def GetMergedPatches(self, body):
patches = MatchSafe(MERGE_MESSAGE_RE.search(body))
if not patches:
patches = MatchSafe(ROLLBACK_MESSAGE_RE.search(body))
if patches:
# Indicate reverted patches with a "-".
patches = "-%s" % patches
return patches
def GetMergedPatchesGit(self, body):
patches = []
for line in body.splitlines():
patch = MatchSafe(MERGE_MESSAGE_GIT_RE.match(line))
if patch:
patches.append(patch)
patch = MatchSafe(ROLLBACK_MESSAGE_GIT_RE.match(line))
if patch:
patches.append("-%s" % patch)
return ", ".join(patches)
def GetReleaseDict(
self, git_hash, bleeding_edge_rev, bleeding_edge_git, branch, version,
patches, cl_body):
revision = self.vc.GitSvn(git_hash)
return {
# The SVN revision on the branch.
"revision": revision,
# The git revision on the branch.
"revision_git": git_hash,
# The SVN revision on bleeding edge (only for newer trunk pushes).
"bleeding_edge": bleeding_edge_rev,
# The same for git.
"bleeding_edge_git": bleeding_edge_git,
# The branch name.
"branch": branch,
# The version for displaying in the form 3.26.3 or 3.26.3.12.
"version": version,
# The date of the commit.
"date": self.GitLog(n=1, format="%ci", git_hash=git_hash),
# Merged patches if available in the form 'r1234, r2345'.
"patches_merged": patches,
# Default for easier output formatting.
"chromium_revision": "",
# Default for easier output formatting.
"chromium_branch": "",
# Link to the CL on code review. Trunk pushes are not uploaded, so this
# field will be populated below with the recent roll CL link.
"review_link": MatchSafe(REVIEW_LINK_RE.search(cl_body)),
# Link to the commit message on google code.
"revision_link": ("https://code.google.com/p/v8/source/detail?r=%s"
% revision),
}
def GetRelease(self, git_hash, branch):
self.ReadAndPersistVersion()
base_version = [self["major"], self["minor"], self["build"]]
version = ".".join(base_version)
body = self.GitLog(n=1, format="%B", git_hash=git_hash)
patches = ""
if self["patch"] != "0":
version += ".%s" % self["patch"]
if CHERRY_PICK_TITLE_GIT_RE.match(body.splitlines()[0]):
patches = self.GetMergedPatchesGit(body)
else:
patches = self.GetMergedPatches(body)
title = self.GitLog(n=1, format="%s", git_hash=git_hash)
bleeding_edge_revision = self.GetBleedingEdgeFromPush(title)
bleeding_edge_git = ""
if bleeding_edge_revision:
bleeding_edge_git = self.vc.SvnGit(bleeding_edge_revision,
self.vc.RemoteMasterBranch())
else:
bleeding_edge_git = self.GetBleedingEdgeGitFromPush(title)
return self.GetReleaseDict(
git_hash, bleeding_edge_revision, bleeding_edge_git, branch, version,
patches, body), self["patch"]
def GetReleasesFromMaster(self):
tag_text = self.SVN("log https://v8.googlecode.com/svn/tags -v --limit 20")
releases = []
for (tag, revision) in re.findall(BLEEDING_EDGE_TAGS_RE, tag_text):
git_hash = self.vc.SvnGit(revision)
# Add bleeding edge release. It does not contain patches or a code
# review link, as tags are not uploaded.
releases.append(self.GetReleaseDict(
git_hash, revision, git_hash, self.vc.MasterBranch(), tag, "", ""))
return releases
def GetReleasesFromBranch(self, branch):
self.GitReset(self.vc.RemoteBranch(branch))
if branch == self.vc.MasterBranch():
return self.GetReleasesFromMaster()
releases = []
try:
for git_hash in self.GitLog(format="%H").splitlines():
if VERSION_FILE not in self.GitChangedFiles(git_hash):
continue
if self.ExceedsMax(releases):
break # pragma: no cover
if not self.GitCheckoutFileSafe(VERSION_FILE, git_hash):
break # pragma: no cover
release, patch_level = self.GetRelease(git_hash, branch)
releases.append(release)
# Follow branches only until their creation point.
# TODO(machenbach): This omits patches if the version file wasn't
# manipulated correctly. Find a better way to detect the point where
# the parent of the branch head leads to the trunk branch.
if branch != self.vc.CandidateBranch() and patch_level == "0":
break
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up checked-out version file.
self.GitCheckoutFileSafe(VERSION_FILE, "HEAD")
return releases
def RunStep(self):
self.GitCreateBranch(self._config["BRANCHNAME"])
branches = self.vc.GetBranches()
releases = []
if self._options.branch == 'recent':
# Get only recent development on trunk, beta and stable.
if self._options.max_releases == 0: # pragma: no cover
self._options.max_releases = 10
beta, stable = SortBranches(branches)[0:2]
releases += self.GetReleasesFromBranch(stable)
releases += self.GetReleasesFromBranch(beta)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
elif self._options.branch == 'all': # pragma: no cover
# Retrieve the full release history.
for branch in branches:
releases += self.GetReleasesFromBranch(branch)
releases += self.GetReleasesFromBranch(self.vc.CandidateBranch())
releases += self.GetReleasesFromBranch(self.vc.MasterBranch())
else: # pragma: no cover
# Retrieve history for a specified branch.
assert self._options.branch in (branches +
[self.vc.CandidateBranch(), self.vc.MasterBranch()])
releases += self.GetReleasesFromBranch(self._options.branch)
self["releases"] = sorted(releases,
key=lambda r: SortingKey(r["version"]),
reverse=True)
class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
cwd = self._options.chromium
# Check for a clean workdir.
if not self.GitIsWorkdirClean(cwd=cwd): # pragma: no cover
self.Die("Workspace is not clean. Please commit or undo your changes.")
# Assert that the DEPS file is there.
if not os.path.exists(os.path.join(cwd, "DEPS")): # pragma: no cover
self.Die("DEPS file not present.")
class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
cwd = self._options.chromium
self.GitCheckout("master", cwd=cwd)
self.GitPull(cwd=cwd)
self.GitCreateBranch(self.Config("BRANCHNAME"), cwd=cwd)
def ConvertToCommitNumber(step, revision):
# Simple check for git hashes.
if revision.isdigit() and len(revision) < 8:
return revision
return step.GitConvertToSVNRevision(
revision, cwd=os.path.join(step._options.chromium, "v8"))
class RetrieveChromiumV8Releases(Step):
MESSAGE = "Retrieve V8 releases from Chromium DEPS."
def RunStep(self):
cwd = self._options.chromium
releases = filter(
lambda r: r["branch"] in [self.vc.CandidateBranch(),
self.vc.MasterBranch()],
self["releases"])
if not releases: # pragma: no cover
print "No releases detected. Skipping chromium history."
return True
# Update v8 checkout in chromium.
self.GitFetchOrigin(cwd=os.path.join(cwd, "v8"))
oldest_v8_rev = int(releases[-1]["revision"])
cr_releases = []
try:
for git_hash in self.GitLog(
format="%H", grep="V8", cwd=cwd).splitlines():
if "DEPS" not in self.GitChangedFiles(git_hash, cwd=cwd):
continue
if not self.GitCheckoutFileSafe("DEPS", git_hash, cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
cr_rev = self.GetCommitPositionNumber(git_hash, cwd=cwd)
if cr_rev:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_releases.append([cr_rev, v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium ranges to the v8 trunk and bleeding_edge releases.
all_ranges = BuildRevisionRanges(cr_releases)
releases_dict = dict((r["revision"], r) for r in releases)
for revision, ranges in all_ranges.iteritems():
releases_dict.get(revision, {})["chromium_revision"] = ranges
# TODO(machenbach): Unify common code with method above.
class RetrieveChromiumBranches(Step):
MESSAGE = "Retrieve Chromium branch information."
def RunStep(self):
cwd = self._options.chromium
trunk_releases = filter(lambda r: r["branch"] == self.vc.CandidateBranch(),
self["releases"])
if not trunk_releases: # pragma: no cover
print "No trunk releases detected. Skipping chromium history."
return True
oldest_v8_rev = int(trunk_releases[-1]["revision"])
# Filter out irrelevant branches.
branches = filter(lambda r: re.match(r"branch-heads/\d+", r),
self.GitRemotes(cwd=cwd))
# Transform into pure branch numbers.
branches = map(lambda r: int(re.match(r"branch-heads/(\d+)", r).group(1)),
branches)
branches = sorted(branches, reverse=True)
cr_branches = []
try:
for branch in branches:
if not self.GitCheckoutFileSafe("DEPS",
"branch-heads/%d" % branch,
cwd=cwd):
break # pragma: no cover
deps = FileToText(os.path.join(cwd, "DEPS"))
match = DEPS_RE.search(deps)
if match:
v8_rev = ConvertToCommitNumber(self, match.group(1))
cr_branches.append([str(branch), v8_rev])
# Stop after reaching beyond the last v8 revision we want to update.
# We need a small buffer for possible revert/reland frenzies.
# TODO(machenbach): Subtraction is not git friendly.
if int(v8_rev) < oldest_v8_rev - 100:
break # pragma: no cover
# Allow Ctrl-C interrupt.
except (KeyboardInterrupt, SystemExit): # pragma: no cover
pass
# Clean up.
self.GitCheckoutFileSafe("DEPS", "HEAD", cwd=cwd)
# Add the chromium branches to the v8 trunk releases.
all_ranges = BuildRevisionRanges(cr_branches)
trunk_dict = dict((r["revision"], r) for r in trunk_releases)
for revision, ranges in all_ranges.iteritems():
trunk_dict.get(revision, {})["chromium_branch"] = ranges
class CleanUp(Step):
MESSAGE = "Clean up."
def RunStep(self):
self.GitCheckout("master", cwd=self._options.chromium)
self.GitDeleteBranch(self.Config("BRANCHNAME"), cwd=self._options.chromium)
self.CommonCleanup()
class WriteOutput(Step):
MESSAGE = "Print output."
def Run(self):
if self._options.csv:
with open(self._options.csv, "w") as f:
writer = csv.DictWriter(f,
["version", "branch", "revision",
"chromium_revision", "patches_merged"],
restval="",
extrasaction="ignore")
for release in self["releases"]:
writer.writerow(release)
if self._options.json:
with open(self._options.json, "w") as f:
f.write(json.dumps(self["releases"]))
if not self._options.csv and not self._options.json:
print self["releases"] # pragma: no cover
class Releases(ScriptsBase):
def _PrepareOptions(self, parser):
parser.add_argument("-b", "--branch", default="recent",
help=("The branch to analyze. If 'all' is specified, "
"analyze all branches. If 'recent' (default) "
"is specified, track beta, stable and trunk."))
parser.add_argument("-c", "--chromium",
help=("The path to your Chromium src/ "
"directory to automate the V8 roll."))
parser.add_argument("--csv", help="Path to a CSV file for export.")
parser.add_argument("-m", "--max-releases", type=int, default=0,
help="The maximum number of releases to track.")
parser.add_argument("--json", help="Path to a JSON file for export.")
def _ProcessOptions(self, options): # pragma: no cover
return True
def _Config(self):
return {
"BRANCHNAME": "retrieve-v8-releases",
"PERSISTFILE_BASENAME": "/tmp/v8-releases-tempfile",
}
def _Steps(self):
return [
Preparation,
RetrieveV8Releases,
SwitchChromium,
UpdateChromiumCheckout,
RetrieveChromiumV8Releases,
      RetrieveChromiumBranches,
CleanUp,
WriteOutput,
]
if __name__ == "__main__": # pragma: no cover
sys.exit(Releases().Run())<|fim▁end|> | |
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>//! Client Requests
use std::marker::PhantomData;
use std::io::{self, Write, BufWriter};
use std::net::Shutdown;
use url::Url;
use method::{self, Method};
use header::Headers;
use header::{self, Host};
use net::{NetworkStream, NetworkConnector, HttpConnector, Fresh, Streaming};
use http::{self, HttpWriter, LINE_ENDING};
use http::HttpWriter::{ThroughWriter, ChunkedWriter, SizedWriter, EmptyWriter};
use version;
use client::{Response, get_host_and_port};
/// A client request to a remote server.
pub struct Request<W> {
/// The target URI for this request.
pub url: Url,
/// The HTTP version of this request.
pub version: version::HttpVersion,
body: HttpWriter<BufWriter<Box<NetworkStream + Send>>>,
headers: Headers,
method: method::Method,
_marker: PhantomData<W>,
}
impl<W> Request<W> {
/// Read the Request headers.
#[inline]
pub fn headers(&self) -> &Headers { &self.headers }
/// Read the Request method.
#[inline]
pub fn method(&self) -> method::Method { self.method.clone() }
}
impl Request<Fresh> {
/// Create a new client request.
pub fn new(method: method::Method, url: Url) -> ::Result<Request<Fresh>> {
let mut conn = HttpConnector(None);
Request::with_connector(method, url, &mut conn)
}
/// Create a new client request with a specific underlying NetworkStream.
pub fn with_connector<C, S>(method: method::Method, url: Url, connector: &mut C)
-> ::Result<Request<Fresh>> where
C: NetworkConnector<Stream=S>,
S: Into<Box<NetworkStream + Send>> {
let (host, port) = try!(get_host_and_port(&url));
let stream = try!(connector.connect(&*host, port, &*url.scheme)).into();
let stream = ThroughWriter(BufWriter::new(stream));
let mut headers = Headers::new();
headers.set(Host {
hostname: host,
port: Some(port),
});
Ok(Request {
method: method,
headers: headers,
url: url,
version: version::HttpVersion::Http11,
body: stream,
_marker: PhantomData,
})
}
/// Consume a Fresh Request, writing the headers and method,
/// returning a Streaming Request.
pub fn start(mut self) -> ::Result<Request<Streaming>> {
let mut uri = self.url.serialize_path().unwrap();
//TODO: this needs a test
if let Some(ref q) = self.url.query {
uri.push('?');
uri.push_str(&q[..]);
}
debug!("request line: {:?} {:?} {:?}", self.method, uri, self.version);
try!(write!(&mut self.body, "{} {} {}{}",
self.method, uri, self.version, LINE_ENDING));
let stream = match self.method {
Method::Get | Method::Head => {<|fim▁hole|> debug!("headers={:?}", self.headers);
try!(write!(&mut self.body, "{}{}", self.headers, LINE_ENDING));
EmptyWriter(self.body.into_inner())
},
_ => {
let mut chunked = true;
let mut len = 0;
match self.headers.get::<header::ContentLength>() {
Some(cl) => {
chunked = false;
len = **cl;
},
None => ()
};
                // can't do this in the match above, thanks borrowck
if chunked {
let encodings = match self.headers.get_mut::<header::TransferEncoding>() {
Some(&mut header::TransferEncoding(ref mut encodings)) => {
//TODO: check if chunked is already in encodings. use HashSet?
encodings.push(header::Encoding::Chunked);
false
},
None => true
};
if encodings {
self.headers.set::<header::TransferEncoding>(
header::TransferEncoding(vec![header::Encoding::Chunked]))
}
}
debug!("headers={:?}", self.headers);
try!(write!(&mut self.body, "{}{}", self.headers, LINE_ENDING));
if chunked {
ChunkedWriter(self.body.into_inner())
} else {
SizedWriter(self.body.into_inner(), len)
}
}
};
Ok(Request {
method: self.method,
headers: self.headers,
url: self.url,
version: self.version,
body: stream,
_marker: PhantomData,
})
}
/// Get a mutable reference to the Request headers.
#[inline]
pub fn headers_mut(&mut self) -> &mut Headers { &mut self.headers }
}
impl Request<Streaming> {
/// Completes writing the request, and returns a response to read from.
///
/// Consumes the Request.
pub fn send(self) -> ::Result<Response> {
let mut raw = try!(self.body.end()).into_inner().unwrap(); // end() already flushes
if !http::should_keep_alive(self.version, &self.headers) {
try!(raw.close(Shutdown::Write));
}
Response::new(raw)
}
}
impl Write for Request<Streaming> {
#[inline]
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
self.body.write(msg)
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.body.flush()
}
}
#[cfg(test)]
mod tests {
use std::str::from_utf8;
use url::Url;
use method::Method::{Get, Head};
use mock::{MockStream, MockConnector};
use super::Request;
#[test]
fn test_get_empty_body() {
let req = Request::with_connector(
Get, Url::parse("http://example.dom").unwrap(), &mut MockConnector
).unwrap();
let req = req.start().unwrap();
let stream = *req.body.end().unwrap()
.into_inner().unwrap().downcast::<MockStream>().ok().unwrap();
let bytes = stream.write;
let s = from_utf8(&bytes[..]).unwrap();
assert!(!s.contains("Content-Length:"));
assert!(!s.contains("Transfer-Encoding:"));
}
#[test]
fn test_head_empty_body() {
let req = Request::with_connector(
Head, Url::parse("http://example.dom").unwrap(), &mut MockConnector
).unwrap();
let req = req.start().unwrap();
let stream = *req.body.end().unwrap()
.into_inner().unwrap().downcast::<MockStream>().ok().unwrap();
let bytes = stream.write;
let s = from_utf8(&bytes[..]).unwrap();
assert!(!s.contains("Content-Length:"));
assert!(!s.contains("Transfer-Encoding:"));
}
}<|fim▁end|> | |
<|file_name|>realvid.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
<|fim▁hole|> but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from resources.lib.libraries import client
def resolve(url):
try:
url = url.replace('/embed-', '/')
url = re.compile('//.+?/([\w]+)').findall(url)[0]
url = 'http://realvid.net/embed-%s.html' % url
result = client.request(url)
url = re.compile('file *: *"(http.+?)"').findall(result)[-1]
return url
except:
return<|fim▁end|> |
This program is distributed in the hope that it will be useful,
|
<|file_name|>xml_importer.py<|end_file_name|><|fim▁begin|>"""
Each store has slightly different semantics wrt draft v published. XML doesn't officially recognize draft
but does hold it in a subdir. Old mongo has a virtual but not physical draft for every unit in published state.
Split mongo has a physical for every unit in every state.
Given that, here's a table of semantics and behaviors where - means no record and letters indicate values.
For xml, (-, x) means the item is published and can be edited. For split, it means the item's
been deleted from draft and will be deleted from published the next time it gets published. old mongo
can't represent that virtual state (2nd row in table)
In the table body, the tuples represent virtual modulestore result. The row headers represent the pre-import
modulestore state.
Modulestore virtual | XML physical (draft, published)
(draft, published) | (-, -) | (x, -) | (x, x) | (x, y) | (-, x)
----------------------+--------------------------------------------
(-, -) | (-, -) | (x, -) | (x, x) | (x, y) | (-, x)
(-, a) | (-, a) | (x, a) | (x, x) | (x, y) | (-, x) : deleted from draft before import
(a, -) | (a, -) | (x, -) | (x, x) | (x, y) | (a, x)
(a, a) | (a, a) | (x, a) | (x, x) | (x, y) | (a, x)
(a, b) | (a, b) | (x, b) | (x, x) | (x, y) | (a, x)
"""
import logging
import os
import mimetypes
from path import path
import json
import re
from .xml import XMLModuleStore, ImportSystem, ParentTracker
from xblock.runtime import KvsFieldData, DictKeyValueStore
from xmodule.x_module import XModuleDescriptor
from opaque_keys.edx.keys import UsageKey
from xblock.fields import Scope, Reference, ReferenceList, ReferenceValueDict
from xmodule.contentstore.content import StaticContent
from .inheritance import own_metadata
from xmodule.errortracker import make_error_tracker
from .store_utilities import rewrite_nonportable_content_links
import xblock
from xmodule.tabs import CourseTabList
from xmodule.modulestore.django import ASSET_IGNORE_REGEX
from xmodule.modulestore.exceptions import DuplicateCourseError
from xmodule.modulestore.mongo.base import MongoRevisionKey
from xmodule.modulestore import ModuleStoreEnum
log = logging.getLogger(__name__)
def import_static_content(
course_data_path, static_content_store,
target_course_id, subpath='static', verbose=False):
remap_dict = {}
# now import all static assets
static_dir = course_data_path / subpath
try:
with open(course_data_path / 'policies/assets.json') as f:
policy = json.load(f)
except (IOError, ValueError) as err:
# xml backed courses won't have this file, only exported courses;
# so, its absence is not really an exception.
policy = {}
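    # Illustrative shape of policies/assets.json (keys inferred from the lookups below):
    #   {"images/logo.png": {"displayname": "logo.png", "locked": false, "contentType": "image/png"}}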
<|fim▁hole|> mimetypes.add_type('application/octet-stream', '.sjson')
mimetypes.add_type('application/octet-stream', '.srt')
mimetypes_list = mimetypes.types_map.values()
for dirname, _, filenames in os.walk(static_dir):
for filename in filenames:
content_path = os.path.join(dirname, filename)
if re.match(ASSET_IGNORE_REGEX, filename):
if verbose:
log.debug('skipping static content %s...', content_path)
continue
if verbose:
log.debug('importing static content %s...', content_path)
try:
with open(content_path, 'rb') as f:
data = f.read()
except IOError:
if filename.startswith('._'):
# OS X "companion files". See
# http://www.diigo.com/annotated/0c936fda5da4aa1159c189cea227e174
continue
# Not a 'hidden file', then re-raise exception
raise
# strip away leading path from the name
fullname_with_subpath = content_path.replace(static_dir, '')
if fullname_with_subpath.startswith('/'):
fullname_with_subpath = fullname_with_subpath[1:]
asset_key = StaticContent.compute_location(target_course_id, fullname_with_subpath)
policy_ele = policy.get(asset_key.path, {})
displayname = policy_ele.get('displayname', filename)
locked = policy_ele.get('locked', False)
mime_type = policy_ele.get('contentType')
# Check extracted contentType in list of all valid mimetypes
if not mime_type or mime_type not in mimetypes_list:
mime_type = mimetypes.guess_type(filename)[0] # Assign guessed mimetype
content = StaticContent(
asset_key, displayname, mime_type, data,
import_path=fullname_with_subpath, locked=locked
)
# first let's save a thumbnail so we can get back a thumbnail location
thumbnail_content, thumbnail_location = static_content_store.generate_thumbnail(content)
if thumbnail_content is not None:
content.thumbnail_location = thumbnail_location
# then commit the content
try:
static_content_store.save(content)
except Exception as err:
log.exception(u'Error importing {0}, error={1}'.format(
fullname_with_subpath, err
))
# store the remapping information which will be needed
# to subsitute in the module data
remap_dict[fullname_with_subpath] = asset_key
return remap_dict
def import_from_xml(
store, user_id, data_dir, course_dirs=None,
default_class='xmodule.raw_module.RawDescriptor',
load_error_modules=True, static_content_store=None,
target_course_id=None, verbose=False,
do_import_static=True, create_new_course_if_not_present=False):
"""
Import xml-based courses from data_dir into modulestore.
Returns:
list of new course objects
Args:
store: a modulestore implementing ModuleStoreWriteBase in which to store the imported courses.
data_dir: the root directory from which to find the xml courses.
course_dirs: If specified, the list of data_dir subdirectories to load. Otherwise, load
all course dirs
target_course_id: is the CourseKey that all modules should be remapped to
after import off disk. NOTE: this only makes sense if importing only
one course. If there are more than one course loaded from data_dir/course_dirs & you
supply this id, this method will raise an AssertException.
static_content_store: the static asset store
do_import_static: if True, then import the course's static files into static_content_store
This can be employed for courses which have substantial
unchanging static content, which is too inefficient to import every
time the course is loaded. Static content for some courses may also be
served directly by nginx, instead of going through django.
create_new_course_if_not_present: If True, then a new course is created if it doesn't already exist.
Otherwise, it throws an InvalidLocationError if the course does not exist.
default_class, load_error_modules: are arguments for constructing the XMLModuleStore (see its doc)
"""
xml_module_store = XMLModuleStore(
data_dir,
default_class=default_class,
course_dirs=course_dirs,
load_error_modules=load_error_modules,
xblock_mixins=store.xblock_mixins,
xblock_select=store.xblock_select,
)
# If we're going to remap the course_id, then we can only do that with
# a single course
if target_course_id:
assert(len(xml_module_store.modules) == 1)
new_courses = []
for course_key in xml_module_store.modules.keys():
if target_course_id is not None:
dest_course_id = target_course_id
else:
dest_course_id = store.make_course_key(course_key.org, course_key.course, course_key.run)
runtime = None
# Creates a new course if it doesn't already exist
if create_new_course_if_not_present and not store.has_course(dest_course_id, ignore_case=True):
try:
new_course = store.create_course(dest_course_id.org, dest_course_id.course, dest_course_id.run, user_id)
runtime = new_course.runtime
except DuplicateCourseError:
# course w/ same org and course exists
log.debug(
"Skipping import of course with id, %s,"
"since it collides with an existing one", dest_course_id
)
continue
with store.bulk_write_operations(dest_course_id):
source_course = xml_module_store.get_course(course_key)
# STEP 1: find and import course module
course, course_data_path = _import_course_module(
store, runtime, user_id,
data_dir, course_key, dest_course_id, source_course,
do_import_static, verbose
)
new_courses.append(course)
# STEP 2: import static content
_import_static_content_wrapper(
static_content_store, do_import_static, course_data_path, dest_course_id, verbose
)
# STEP 3: import PUBLISHED items
# now loop through all the modules depth first and then orphans
with store.branch_setting(ModuleStoreEnum.Branch.published_only, dest_course_id):
all_locs = set(xml_module_store.modules[course_key].keys())
all_locs.remove(source_course.location)
def depth_first(subtree):
"""
Import top down just so import code can make assumptions about parents always being available
"""
if subtree.has_children:
for child in subtree.get_children():
try:
all_locs.remove(child.location)
except KeyError:
# tolerate same child occurring under 2 parents such as in
# ContentStoreTest.test_image_import
pass
if verbose:
log.debug('importing module location {loc}'.format(loc=child.location))
_import_module_and_update_references(
child,
store,
user_id,
course_key,
dest_course_id,
do_import_static=do_import_static,
runtime=course.runtime
)
depth_first(child)
depth_first(source_course)
for leftover in all_locs:
if verbose:
log.debug('importing module location {loc}'.format(loc=leftover))
_import_module_and_update_references(
xml_module_store.get_item(leftover), store,
user_id,
course_key,
dest_course_id,
do_import_static=do_import_static,
runtime=course.runtime
)
# STEP 4: import any DRAFT items
with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, dest_course_id):
_import_course_draft(
xml_module_store,
store,
user_id,
course_data_path,
course_key,
dest_course_id,
course.runtime
)
return new_courses
def _import_course_module(
store, runtime, user_id, data_dir, course_key, dest_course_id, source_course, do_import_static,
verbose,
):
if verbose:
log.debug("Scanning {0} for course module...".format(course_key))
# Quick scan to get course module as we need some info from there.
# Also we need to make sure that the course module is committed
# first into the store
course_data_path = path(data_dir) / source_course.data_dir
log.debug(u'======> IMPORTING course {course_key}'.format(
course_key=course_key,
))
if not do_import_static:
# for old-style xblock where this was actually linked to kvs
source_course.static_asset_path = source_course.data_dir
source_course.save()
log.debug('course static_asset_path={path}'.format(
path=source_course.static_asset_path
))
log.debug('course data_dir={0}'.format(source_course.data_dir))
with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, dest_course_id):
course = _import_module_and_update_references(
source_course, store, user_id,
course_key,
dest_course_id,
do_import_static=do_import_static,
runtime=runtime,
)
for entry in course.pdf_textbooks:
for chapter in entry.get('chapters', []):
if StaticContent.is_c4x_path(chapter.get('url', '')):
asset_key = StaticContent.get_location_from_path(chapter['url'])
chapter['url'] = StaticContent.get_static_path_from_location(asset_key)
# Original wiki_slugs had value location.course. To make them unique this was changed to 'org.course.name'.
# If we are importing into a course with a different course_id and wiki_slug is equal to either of these default
# values then remap it so that the wiki does not point to the old wiki.
if course_key != course.id:
original_unique_wiki_slug = u'{0}.{1}.{2}'.format(
course_key.org,
course_key.course,
course_key.run
)
if course.wiki_slug == original_unique_wiki_slug or course.wiki_slug == course_key.course:
course.wiki_slug = u'{0}.{1}.{2}'.format(
course.id.org,
course.id.course,
course.id.run,
)
# cdodge: more hacks (what else). Seems like we have a
# problem when importing a course (like 6.002) which
# does not have any tabs defined in the policy file.
# The import goes fine and then displays fine in LMS,
# but if someone tries to add a new tab in the CMS, then
# the LMS barfs because it expects that -- if there are
# *any* tabs -- then there at least needs to be
# some predefined ones
if course.tabs is None or len(course.tabs) == 0:
CourseTabList.initialize_default(course)
store.update_item(course, user_id)
return course, course_data_path
def _import_static_content_wrapper(static_content_store, do_import_static, course_data_path, dest_course_id, verbose):
# then import all the static content
if static_content_store is not None and do_import_static:
# first pass to find everything in /static/
import_static_content(
course_data_path, static_content_store,
dest_course_id, subpath='static', verbose=verbose
)
elif verbose and not do_import_static:
log.debug(
"Skipping import of static content, "
"since do_import_static={0}".format(do_import_static)
)
# no matter what do_import_static is, import "static_import" directory
# This is needed because the "about" pages (eg "overview") are
# loaded via load_extra_content, and do not inherit the lms
# metadata from the course module, and thus do not get
# "static_content_store" properly defined. Static content
# referenced in those extra pages thus need to come through the
# c4x:// contentstore, unfortunately. Tell users to copy that
# content into the "static_import" subdir.
simport = 'static_import'
if os.path.exists(course_data_path / simport):
import_static_content(
course_data_path, static_content_store,
dest_course_id, subpath=simport, verbose=verbose
)
def _import_module_and_update_references(
module, store, user_id,
source_course_id, dest_course_id,
do_import_static=True, runtime=None):
logging.debug(u'processing import of module {}...'.format(module.location.to_deprecated_string()))
if do_import_static and 'data' in module.fields and isinstance(module.fields['data'], xblock.fields.String):
# we want to convert all 'non-portable' links in the module_data
# (if it is a string) to portable strings (e.g. /static/)
module.data = rewrite_nonportable_content_links(
source_course_id,
dest_course_id,
module.data
)
# Move the module to a new course
def _convert_reference_fields_to_new_namespace(reference):
"""
Convert a reference to the new namespace, but only
if the original namespace matched the original course.
Otherwise, returns the input value.
"""
assert isinstance(reference, UsageKey)
if source_course_id == reference.course_key:
return reference.map_into_course(dest_course_id)
else:
return reference
fields = {}
for field_name, field in module.fields.iteritems():
if field.is_set_on(module):
if isinstance(field, Reference):
fields[field_name] = _convert_reference_fields_to_new_namespace(field.read_from(module))
elif isinstance(field, ReferenceList):
references = field.read_from(module)
fields[field_name] = [_convert_reference_fields_to_new_namespace(reference) for reference in references]
elif isinstance(field, ReferenceValueDict):
reference_dict = field.read_from(module)
fields[field_name] = {
key: _convert_reference_fields_to_new_namespace(reference)
for key, reference
in reference_dict.iteritems()
}
elif field_name == 'xml_attributes':
value = field.read_from(module)
# remove any export/import only xml_attributes
# which are used to wire together draft imports
if 'parent_sequential_url' in value:
del value['parent_sequential_url']
if 'index_in_children_list' in value:
del value['index_in_children_list']
fields[field_name] = value
else:
fields[field_name] = field.read_from(module)
return store.import_xblock(user_id, dest_course_id, module.location.category, module.location.block_id, fields, runtime)
def _import_course_draft(
xml_module_store,
store,
user_id,
course_data_path,
source_course_id,
target_course_id,
mongo_runtime
):
'''
This will import all the content inside of the 'drafts' folder, if it exists
NOTE: This is not a full course import, basically in our current
application only verticals (and downwards) can be in draft.
Therefore, we need to use slightly different call points into
the import process_xml as we can't simply call XMLModuleStore() constructor
(like we do for importing public content)
'''
draft_dir = course_data_path + "/drafts"
if not os.path.exists(draft_dir):
return
# create a new 'System' object which will manage the importing
errorlog = make_error_tracker()
# The course_dir as passed to ImportSystem is expected to just be relative, not
# the complete path including data_dir. ImportSystem will concatenate the two together.
data_dir = xml_module_store.data_dir
# Whether or not data_dir ends with a "/" differs in production vs. test.
if not data_dir.endswith("/"):
data_dir += "/"
draft_course_dir = draft_dir.replace(data_dir, '', 1)
system = ImportSystem(
xmlstore=xml_module_store,
course_id=source_course_id,
course_dir=draft_course_dir,
error_tracker=errorlog.tracker,
parent_tracker=ParentTracker(),
load_error_modules=False,
mixins=xml_module_store.xblock_mixins,
field_data=KvsFieldData(kvs=DictKeyValueStore()),
)
# now walk the /vertical directory where each file in there
# will be a draft copy of the Vertical
# First it is necessary to order the draft items by their desired index in the child list
# (order os.walk returns them in is not guaranteed).
drafts = dict()
for dirname, _dirnames, filenames in os.walk(draft_dir + "/vertical"):
for filename in filenames:
module_path = os.path.join(dirname, filename)
with open(module_path, 'r') as f:
try:
# note, on local dev it seems like OSX will put
# some extra files in the directory with "quarantine"
# information. These files are binary files and will
# throw exceptions when we try to parse the file
# as an XML string. Let's make sure we're
# dealing with a string before ingesting
data = f.read()
try:
xml = data.decode('utf-8')
except UnicodeDecodeError, err:
# seems like on OSX localdev, the OS is making
# quarantine files in the unzip directory
# when importing courses so if we blindly try to
# enumerate through the directory, we'll try
# to process a bunch of binary quarantine files
# (which are prefixed with a '._' character which
# will dump a bunch of exceptions to the output,
# although they are harmless.
#
# Reading online docs there doesn't seem to be
# a good means to detect a 'hidden' file that works
# well across all OS environments. So for now, I'm using
# OSX's utilization of a leading '.' in the filename
# to indicate a system hidden file.
#
# Better yet would be a way to figure out if this is
# a binary file, but I haven't found a good way
# to do this yet.
if filename.startswith('._'):
continue
# Not a 'hidden file', then re-raise exception
raise err
# process_xml call below recursively processes all descendants. If
# we call this on all verticals in a course with verticals nested below
# the unit level, we try to import the same content twice, causing naming conflicts.
# Therefore only process verticals at the unit level, assuming that any other
# verticals must be descendants.
if 'index_in_children_list' in xml:
descriptor = system.process_xml(xml)
# HACK: since we are doing partial imports of drafts
# the vertical doesn't have the 'url-name' set in the
# attributes (they are normally in the parent object,
# aka sequential), so we have to replace the location.name
# with the XML filename that is part of the pack
filename, __ = os.path.splitext(filename)
descriptor.location = descriptor.location.replace(name=filename)
index = int(descriptor.xml_attributes['index_in_children_list'])
if index in drafts:
drafts[index].append(descriptor)
else:
drafts[index] = [descriptor]
except Exception:
logging.exception('Error while parsing course xml.')
# For each index_in_children_list key, there is a list of vertical descriptors.
for key in sorted(drafts.iterkeys()):
for descriptor in drafts[key]:
course_key = descriptor.location.course_key
try:
def _import_module(module):
# IMPORTANT: Be sure to update the module location in the NEW namespace
module_location = module.location.map_into_course(target_course_id)
# Update the module's location to DRAFT revision
# We need to call this method (instead of updating the location directly)
# to ensure that pure XBlock field data is updated correctly.
_update_module_location(module, module_location.replace(revision=MongoRevisionKey.draft))
# make sure our parent has us in its list of children
# this is to make sure private only verticals show up
# in the list of children since they would have been
# filtered out from the non-draft store export.
# Note though that verticals nested below the unit level will not have
# a parent_sequential_url and do not need special handling.
if module.location.category == 'vertical' and 'parent_sequential_url' in module.xml_attributes:
sequential_url = module.xml_attributes['parent_sequential_url']
index = int(module.xml_attributes['index_in_children_list'])
seq_location = course_key.make_usage_key_from_deprecated_string(sequential_url)
# IMPORTANT: Be sure to update the sequential in the NEW namespace
seq_location = seq_location.map_into_course(target_course_id)
sequential = store.get_item(seq_location, depth=0)
non_draft_location = module.location.map_into_course(target_course_id)
if not any(child.block_id == module.location.block_id for child in sequential.children):
sequential.children.insert(index, non_draft_location)
store.update_item(sequential, user_id)
_import_module_and_update_references(
module, store, user_id,
source_course_id,
target_course_id,
runtime=mongo_runtime,
)
for child in module.get_children():
_import_module(child)
_import_module(descriptor)
except Exception:
logging.exception('Error while importing draft descriptor %s', descriptor)
def allowed_metadata_by_category(category):
# should this be in the descriptors?!?
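# Maps a module category to the metadata fields that are allowed to be set on
# it at import time; ['*'] (the default) means any metadata is allowed.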
return {
'vertical': [],
'chapter': ['start'],
'sequential': ['due', 'format', 'start', 'graded']
}.get(category, ['*'])
def check_module_metadata_editability(module):
'''
Assert that there is no metadata within a particular module that
we can't support editing. However we always allow 'display_name'
and 'xml_attributes'
'''
allowed = allowed_metadata_by_category(module.location.category)
if '*' in allowed:
# everything is allowed
return 0
allowed = allowed + ['xml_attributes', 'display_name']
err_cnt = 0
illegal_keys = set(own_metadata(module).keys()) - set(allowed)
if len(illegal_keys) > 0:
err_cnt = err_cnt + 1
print(
"ERROR: found non-editable metadata on {url}. "
"These metadata keys are not supported = {keys}".format(
url=module.location.to_deprecated_string(), keys=illegal_keys
)
)
return err_cnt
def validate_no_non_editable_metadata(module_store, course_id, category):
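# Total up the non-editable-metadata errors for every module of the given
# category in the course.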
err_cnt = 0
for module_loc in module_store.modules[course_id]:
module = module_store.modules[course_id][module_loc]
if module.location.category == category:
err_cnt = err_cnt + check_module_metadata_editability(module)
return err_cnt
def validate_category_hierarchy(
module_store, course_id, parent_category, expected_child_category):
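# Check that every child of each parent_category module has the expected
# child category, printing an error for each violation.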
err_cnt = 0
parents = []
# get all modules of parent_category
for module in module_store.modules[course_id].itervalues():
if module.location.category == parent_category:
parents.append(module)
for parent in parents:
for child_loc in parent.children:
if child_loc.category != expected_child_category:
err_cnt += 1
print(
"ERROR: child {child} of parent {parent} was expected to be "
"of category {expected} but was {actual}".format(
child=child_loc, parent=parent.location,
expected=expected_child_category,
actual=child_loc.category
)
)
return err_cnt
def validate_data_source_path_existence(path, is_err=True, extra_msg=None):
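# Print a message and return 1 if the expected path does not exist, else 0.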
_cnt = 0
if not os.path.exists(path):
print(
"{type}: Expected folder at {path}. {extra}".format(
type='ERROR' if is_err else 'WARNING',
path=path,
extra=extra_msg or "",
)
)
_cnt = 1
return _cnt
def validate_data_source_paths(data_dir, course_dir):
# check that there is a '/static/' directory
course_path = data_dir / course_dir
err_cnt = 0
warn_cnt = 0
err_cnt += validate_data_source_path_existence(course_path / 'static')
warn_cnt += validate_data_source_path_existence(
course_path / 'static/subs', is_err=False,
extra_msg='Video captions (if they are used) will not work unless they are in static/subs.'
)
return err_cnt, warn_cnt
def validate_course_policy(module_store, course_id):
"""
Validate that the course explicitly sets values for any fields
whose defaults may have changed between the export and the import.
Does not add to error count as these are just warnings.
"""
# is there a reliable way to get the module location just given the course_id?
warn_cnt = 0
for module in module_store.modules[course_id].itervalues():
if module.location.category == 'course':
if not module._field_data.has(module, 'rerandomize'):
warn_cnt += 1
print(
'WARN: course policy does not specify value for '
'"rerandomize" whose default is now "never". '
'The behavior of your course may change.'
)
if not module._field_data.has(module, 'showanswer'):
warn_cnt += 1
print(
'WARN: course policy does not specify value for '
'"showanswer" whose default is now "finished". '
'The behavior of your course may change.'
)
return warn_cnt
def perform_xlint(
data_dir, course_dirs,
default_class='xmodule.raw_module.RawDescriptor',
load_error_modules=True):
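# Load the courses from XML and run the static validation checks above,
# printing a summary and returning the total error count (warnings are
# reported but not included in the return value).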
err_cnt = 0
warn_cnt = 0
module_store = XMLModuleStore(
data_dir,
default_class=default_class,
course_dirs=course_dirs,
load_error_modules=load_error_modules
)
# check all data source path information
for course_dir in course_dirs:
_err_cnt, _warn_cnt = validate_data_source_paths(path(data_dir), course_dir)
err_cnt += _err_cnt
warn_cnt += _warn_cnt
# first count all errors and warnings as part of the XMLModuleStore import
for err_log in module_store._course_errors.itervalues():
for err_log_entry in err_log.errors:
msg = err_log_entry[0]
if msg.startswith('ERROR:'):
err_cnt += 1
else:
warn_cnt += 1
# then count outright all courses that failed to load at all
for err_log in module_store.errored_courses.itervalues():
for err_log_entry in err_log.errors:
msg = err_log_entry[0]
print(msg)
if msg.startswith('ERROR:'):
err_cnt += 1
else:
warn_cnt += 1
for course_id in module_store.modules.keys():
# constrain that courses only have 'chapter' children
err_cnt += validate_category_hierarchy(
module_store, course_id, "course", "chapter"
)
# constrain that chapters only have 'sequentials'
err_cnt += validate_category_hierarchy(
module_store, course_id, "chapter", "sequential"
)
# constrain that sequentials only have 'verticals'
err_cnt += validate_category_hierarchy(
module_store, course_id, "sequential", "vertical"
)
# validate the course policy overrides any defaults
# which have changed over time
warn_cnt += validate_course_policy(module_store, course_id)
# don't allow metadata on verticals, since we can't edit them in studio
err_cnt += validate_no_non_editable_metadata(
module_store, course_id, "vertical"
)
# don't allow metadata on chapters, since we can't edit them in studio
err_cnt += validate_no_non_editable_metadata(
module_store, course_id, "chapter"
)
# don't allow metadata on sequences that we can't edit
err_cnt += validate_no_non_editable_metadata(
module_store, course_id, "sequential"
)
# check for a presence of a course marketing video
if not module_store.has_item(course_id.make_usage_key('about', 'video')):
print(
"WARN: Missing course marketing video. It is recommended "
"that every course have a marketing video."
)
warn_cnt += 1
print("\n")
print("------------------------------------------")
print("VALIDATION SUMMARY: {err} Errors {warn} Warnings".format(
err=err_cnt, warn=warn_cnt)
)
if err_cnt > 0:
print(
"This course is not suitable for importing. Please fix courseware "
"according to specifications before importing."
)
elif warn_cnt > 0:
print(
"This course can be imported, but some errors may occur "
"during the run of the course. It is recommended that you fix "
"your courseware before importing."
)
else:
print("This course can be imported successfully.")
return err_cnt
def _update_module_location(module, new_location):
"""
Update a module's location.
If the module is a pure XBlock (not an XModule), then its field data
keys will need to be updated to include the new location.
Args:
module (XModuleMixin): The module to update.
new_location (Location): The new location of the module.
Returns:
None
"""
# Retrieve the content and settings fields that have been explicitly set
# to ensure that they are properly re-keyed in the XBlock field data.
if isinstance(module, XModuleDescriptor):
rekey_fields = []
else:
rekey_fields = (
module.get_explicitly_set_fields_by_scope(Scope.content).keys() +
module.get_explicitly_set_fields_by_scope(Scope.settings).keys()
)
module.location = new_location
# Pure XBlocks store the field data in a key-value store
# in which one component of the key is the XBlock's location (equivalent to "scope_ids").
# Since we've changed the XBlock's location, we need to re-save
# all the XBlock's fields so they will be stored using the new location in the key.
# However, since XBlocks only save "dirty" fields, we need to first
# explicitly set each field to its current value before triggering the save.
if len(rekey_fields) > 0:
for rekey_field_name in rekey_fields:
setattr(module, rekey_field_name, getattr(module, rekey_field_name))
module.save()<|fim▁end|> | verbose = True
|
<|file_name|>klog.rs<|end_file_name|><|fim▁begin|>use core::fmt;
<|fim▁hole|> Warning,
Error,
Fatal,
}
fn write_nothing(_: &str) {}
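// Global log sink and level threshold; both are configured once via init()
// and messages below LEVEL are dropped by log().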
static mut WRITE: fn(&str) = write_nothing;
static mut LEVEL: Level = Level::Info;
pub fn init(write: fn(&str), level: Level) {
unsafe {
WRITE = write;
LEVEL = level;
}
}
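// fmt::Write adapter that forwards all formatted output to the registered
// global write function.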
struct Writer;
impl fmt::Write for Writer {
fn write_str(&mut self, s: &str) -> fmt::Result {
unsafe { WRITE(s); }
Ok(())
}
}
pub fn log(level: Level, args: fmt::Arguments) {
unsafe {
if level < LEVEL {
return;
}
let writer: &mut fmt::Write = &mut Writer;
let prefix = match level {
Level::Debug => "d ",
Level::Info => "i ",
Level::Warning => "W ",
Level::Error => "E ",
Level::Fatal => "F ",
};
writer.write_str(prefix).unwrap();
writer.write_fmt(args).unwrap();
writer.write_str("\n").unwrap();
}
}<|fim▁end|> | #[derive(PartialEq, PartialOrd)]
pub enum Level {
Debug,
Info, |
<|file_name|>http_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use brotli::Decompressor;
use connector::{Connector, create_http_connector};
use cookie;
use cookie_storage::CookieStorage;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, HttpRequest as DevtoolsHttpRequest};
use devtools_traits::{HttpResponse as DevtoolsHttpResponse, NetworkEvent};
use fetch::cors_cache::CorsCache;
use fetch::methods::{Data, DoneChannel, FetchContext, Target, is_simple_header, is_simple_method, main_fetch};
use flate2::read::{DeflateDecoder, GzDecoder};
use hsts::HstsList;
use hyper::Error as HttpError;
use hyper::LanguageTag;
use hyper::client::{Pool, Request as HyperRequest, Response as HyperResponse};
use hyper::header::{Accept, AccessControlAllowCredentials, AccessControlAllowHeaders};
use hyper::header::{AccessControlAllowMethods, AccessControlAllowOrigin};
use hyper::header::{AccessControlMaxAge, AccessControlRequestHeaders};
use hyper::header::{AccessControlRequestMethod, AcceptEncoding, AcceptLanguage};
use hyper::header::{Authorization, Basic, CacheControl, CacheDirective};
use hyper::header::{ContentEncoding, ContentLength, Encoding, Header, Headers};
use hyper::header::{Host, Origin as HyperOrigin, IfMatch, IfRange};
use hyper::header::{IfUnmodifiedSince, IfModifiedSince, IfNoneMatch, Location};
use hyper::header::{Pragma, Quality, QualityItem, Referer, SetCookie};
use hyper::header::{UserAgent, q, qitem};
use hyper::method::Method;
use hyper::status::StatusCode;
use hyper_openssl::OpensslClient;
use hyper_serde::Serde;
use log;
use msg::constellation_msg::PipelineId;
use net_traits::{CookieSource, FetchMetadata, NetworkError, ReferrerPolicy};
use net_traits::request::{CacheMode, CredentialsMode, Destination, Origin};
use net_traits::request::{RedirectMode, Referrer, Request, RequestMode};
use net_traits::request::{ResponseTainting, Type};
use net_traits::response::{HttpsState, Response, ResponseBody, ResponseType};
use resource_thread::AuthCache;
use servo_url::{ImmutableOrigin, ServoUrl};
use std::collections::HashSet;
use std::error::Error;
use std::io::{self, Read, Write};
use std::iter::FromIterator;
use std::mem;
use std::ops::Deref;
use std::sync::RwLock;
use std::sync::mpsc::{channel, Sender};
use std::thread;
use time;
use time::Tm;
use unicase::UniCase;
use uuid;
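// Read up to 1 KiB from the response stream, mapping EOF to Data::Done.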
fn read_block<R: Read>(reader: &mut R) -> Result<Data, ()> {
let mut buf = vec![0; 1024];
match reader.read(&mut buf) {
Ok(len) if len > 0 => {
buf.truncate(len);
Ok(Data::Payload(buf))
}
Ok(_) => Ok(Data::Done),
Err(_) => Err(()),
}
}
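// Shared, thread-safe network state (HSTS list, cookie jar, auth cache and
// the hyper connection pool) used by every fetch.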
pub struct HttpState {
pub hsts_list: RwLock<HstsList>,
pub cookie_jar: RwLock<CookieStorage>,
pub auth_cache: RwLock<AuthCache>,
pub ssl_client: OpensslClient,
pub connector: Pool<Connector>,
}
impl HttpState {
pub fn new(ssl_client: OpensslClient) -> HttpState {
HttpState {
hsts_list: RwLock::new(HstsList::new()),
cookie_jar: RwLock::new(CookieStorage::new(150)),
auth_cache: RwLock::new(AuthCache::new()),
ssl_client: ssl_client.clone(),
connector: create_http_connector(ssl_client),
}
}
}
fn precise_time_ms() -> u64 {
time::precise_time_ns() / (1000 * 1000)
}
pub struct WrappedHttpResponse {
pub response: HyperResponse
}
impl Read for WrappedHttpResponse {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.response.read(buf)
}
}
impl WrappedHttpResponse {
fn headers(&self) -> &Headers {
&self.response.headers
}
fn content_encoding(&self) -> Option<Encoding> {
let encodings = match self.headers().get::<ContentEncoding>() {
Some(&ContentEncoding(ref encodings)) => encodings,
None => return None,
};
if encodings.contains(&Encoding::Gzip) {
Some(Encoding::Gzip)
} else if encodings.contains(&Encoding::Deflate) {
Some(Encoding::Deflate)
} else if encodings.contains(&Encoding::EncodingExt("br".to_owned())) {
Some(Encoding::EncodingExt("br".to_owned()))
} else {
None
}
}
}
// Step 3 of https://fetch.spec.whatwg.org/#concept-fetch.
pub fn set_default_accept(type_: Type, destination: Destination, headers: &mut Headers) {
if headers.has::<Accept>() {
return;
}
let value = match (type_, destination) {
// Step 3.2.
(_, Destination::Document) => {
vec![
qitem(mime!(Text / Html)),
qitem(mime!(Application / ("xhtml+xml"))),
QualityItem::new(mime!(Application / Xml), q(0.9)),
QualityItem::new(mime!(_ / _), q(0.8)),
]
},
// Step 3.3.
(Type::Image, _) => {
vec![
qitem(mime!(Image / Png)),
qitem(mime!(Image / ("svg+xml") )),
QualityItem::new(mime!(Image / _), q(0.8)),
QualityItem::new(mime!(_ / _), q(0.5)),
]
},
// Step 3.3.
(Type::Style, _) => {
vec![
qitem(mime!(Text / Css)),
QualityItem::new(mime!(_ / _), q(0.1))
]
},
// Step 3.1.
_ => {
vec![qitem(mime!(_ / _))]
},
};
// Step 3.4.
headers.set(Accept(value));
}
fn set_default_accept_encoding(headers: &mut Headers) {
if headers.has::<AcceptEncoding>() {
return
}
headers.set(AcceptEncoding(vec![
qitem(Encoding::Gzip),
qitem(Encoding::Deflate),
qitem(Encoding::EncodingExt("br".to_owned()))
]));
}
pub fn set_default_accept_language(headers: &mut Headers) {
if headers.has::<AcceptLanguage>() {
return;
}
let mut en_us: LanguageTag = Default::default();
en_us.language = Some("en".to_owned());
en_us.region = Some("US".to_owned());
let mut en: LanguageTag = Default::default();
en.language = Some("en".to_owned());
headers.set(AcceptLanguage(vec![
qitem(en_us),
QualityItem::new(en, Quality(500)),
]));
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-state-no-referrer-when-downgrade
fn no_referrer_when_downgrade_header(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
if referrer_url.scheme() == "https" && url.scheme() != "https" {
return None;
}
return strip_url(referrer_url, false);
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin
fn strict_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
if referrer_url.scheme() == "https" && url.scheme() != "https" {
return None;
}
strip_url(referrer_url, true)
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin-when-cross-origin
fn strict_origin_when_cross_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
if referrer_url.scheme() == "https" && url.scheme() != "https" {
return None;
}
let cross_origin = referrer_url.origin() != url.origin();
strip_url(referrer_url, cross_origin)
}
/// https://w3c.github.io/webappsec-referrer-policy/#strip-url
fn strip_url(mut referrer_url: ServoUrl, origin_only: bool) -> Option<ServoUrl> {
if referrer_url.scheme() == "https" || referrer_url.scheme() == "http" {
{
let referrer = referrer_url.as_mut_url();
referrer.set_username("").unwrap();
referrer.set_password(None).unwrap();
referrer.set_fragment(None);
if origin_only {
referrer.set_path("");
referrer.set_query(None);
}
}
return Some(referrer_url);
}
return None;
}
/// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
/// Steps 4-6.
pub fn determine_request_referrer(headers: &mut Headers,
referrer_policy: ReferrerPolicy,
referrer_source: ServoUrl,
current_url: ServoUrl)
-> Option<ServoUrl> {
assert!(!headers.has::<Referer>());
// FIXME(#14505): this does not seem to be the correct way of checking for
// same-origin requests.
let cross_origin = referrer_source.origin() != current_url.origin();
// FIXME(#14506): some of these cases are expected to consider whether the
// request's client is "TLS-protected", whatever that means.
match referrer_policy {
ReferrerPolicy::NoReferrer => None,
ReferrerPolicy::Origin => strip_url(referrer_source, true),
ReferrerPolicy::SameOrigin => if cross_origin { None } else { strip_url(referrer_source, false) },
ReferrerPolicy::UnsafeUrl => strip_url(referrer_source, false),
ReferrerPolicy::OriginWhenCrossOrigin => strip_url(referrer_source, cross_origin),
ReferrerPolicy::StrictOrigin => strict_origin(referrer_source, current_url),
ReferrerPolicy::StrictOriginWhenCrossOrigin => strict_origin_when_cross_origin(referrer_source, current_url),
ReferrerPolicy::NoReferrerWhenDowngrade => no_referrer_when_downgrade_header(referrer_source, current_url),
}
}
pub fn set_request_cookies(url: &ServoUrl, headers: &mut Headers, cookie_jar: &RwLock<CookieStorage>) {
let mut cookie_jar = cookie_jar.write().unwrap();
if let Some(cookie_list) = cookie_jar.cookies_for_url(url, CookieSource::HTTP) {
let mut v = Vec::new();
v.push(cookie_list.into_bytes());<|fim▁hole|> }
}
fn set_cookie_for_url(cookie_jar: &RwLock<CookieStorage>,
request: &ServoUrl,
cookie_val: String) {
let mut cookie_jar = cookie_jar.write().unwrap();
let source = CookieSource::HTTP;
let header = Header::parse_header(&[cookie_val.into_bytes()]);
if let Ok(SetCookie(cookies)) = header {
for cookie in cookies {
if let Some(cookie) = cookie::Cookie::from_cookie_string(cookie, request, source) {
cookie_jar.push(cookie, request, source);
}
}
}
}
fn set_cookies_from_headers(url: &ServoUrl, headers: &Headers, cookie_jar: &RwLock<CookieStorage>) {
if let Some(cookies) = headers.get_raw("set-cookie") {
for cookie in cookies.iter() {
if let Ok(cookie_value) = String::from_utf8(cookie.clone()) {
set_cookie_for_url(&cookie_jar,
&url,
cookie_value);
}
}
}
}
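// Response body reader that transparently decompresses the stream according
// to its Content-Encoding (gzip, deflate, brotli) or passes it through as-is.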
struct StreamedResponse {
decoder: Decoder,
}
impl Read for StreamedResponse {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self.decoder {
Decoder::Gzip(ref mut d) => d.read(buf),
Decoder::Deflate(ref mut d) => d.read(buf),
Decoder::Brotli(ref mut d) => d.read(buf),
Decoder::Plain(ref mut d) => d.read(buf)
}
}
}
impl StreamedResponse {
fn from_http_response(response: WrappedHttpResponse) -> io::Result<StreamedResponse> {
let decoder = match response.content_encoding() {
Some(Encoding::Gzip) => {
Decoder::Gzip(try!(GzDecoder::new(response)))
}
Some(Encoding::Deflate) => {
Decoder::Deflate(DeflateDecoder::new(response))
}
Some(Encoding::EncodingExt(ref ext)) if ext == "br" => {
Decoder::Brotli(Decompressor::new(response, 1024))
}
_ => {
Decoder::Plain(response)
}
};
Ok(StreamedResponse { decoder: decoder })
}
}
enum Decoder {
Gzip(GzDecoder<WrappedHttpResponse>),
Deflate(DeflateDecoder<WrappedHttpResponse>),
Brotli(Decompressor<WrappedHttpResponse>),
Plain(WrappedHttpResponse)
}
fn prepare_devtools_request(request_id: String,
url: ServoUrl,
method: Method,
headers: Headers,
body: Option<Vec<u8>>,
pipeline_id: PipelineId,
now: Tm,
connect_time: u64,
send_time: u64,
is_xhr: bool) -> ChromeToDevtoolsControlMsg {
let request = DevtoolsHttpRequest {
url: url,
method: method,
headers: headers,
body: body,
pipeline_id: pipeline_id,
startedDateTime: now,
timeStamp: now.to_timespec().sec,
connect_time: connect_time,
send_time: send_time,
is_xhr: is_xhr,
};
let net_event = NetworkEvent::HttpRequest(request);
ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event)
}
fn send_request_to_devtools(msg: ChromeToDevtoolsControlMsg,
devtools_chan: &Sender<DevtoolsControlMsg>) {
devtools_chan.send(DevtoolsControlMsg::FromChrome(msg)).unwrap();
}
fn send_response_to_devtools(devtools_chan: &Sender<DevtoolsControlMsg>,
request_id: String,
headers: Option<Headers>,
status: Option<(u16, Vec<u8>)>,
pipeline_id: PipelineId) {
let response = DevtoolsHttpResponse { headers: headers, status: status, body: None, pipeline_id: pipeline_id };
let net_event_response = NetworkEvent::HttpResponse(response);
let msg = ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event_response);
let _ = devtools_chan.send(DevtoolsControlMsg::FromChrome(msg));
}
fn auth_from_cache(auth_cache: &RwLock<AuthCache>, origin: &ImmutableOrigin) -> Option<Basic> {
if let Some(ref auth_entry) = auth_cache.read().unwrap().entries.get(&origin.ascii_serialization()) {
let user_name = auth_entry.user_name.clone();
let password = Some(auth_entry.password.clone());
Some(Basic { username: user_name, password: password })
} else {
None
}
}
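// Send the request over a pooled hyper connection, retrying with a fresh
// connection if a stale pooled one aborts, and optionally build the devtools
// request event for the caller to dispatch.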
fn obtain_response(connector: &Pool<Connector>,
url: &ServoUrl,
method: &Method,
request_headers: &Headers,
data: &Option<Vec<u8>>,
load_data_method: &Method,
pipeline_id: &Option<PipelineId>,
iters: u32,
request_id: Option<&str>,
is_xhr: bool)
-> Result<(WrappedHttpResponse, Option<ChromeToDevtoolsControlMsg>), NetworkError> {
let null_data = None;
// loop trying connections in connection pool
// they may have grown stale (disconnected), in which case we'll get
// a ConnectionAborted error. this loop tries again with a new
// connection.
loop {
let mut headers = request_headers.clone();
// Avoid automatically sending request body if a redirect has occurred.
//
// TODO - This is the wrong behaviour according to the RFC. However, I'm not
// sure how much "correctness" vs. real-world is important in this case.
//
// https://tools.ietf.org/html/rfc7231#section-6.4
let is_redirected_request = iters != 1;
let request_body;
match data {
&Some(ref d) if !is_redirected_request => {
headers.set(ContentLength(d.len() as u64));
request_body = data;
}
_ => {
if *load_data_method != Method::Get && *load_data_method != Method::Head {
headers.set(ContentLength(0))
}
request_body = &null_data;
}
}
if log_enabled!(log::LogLevel::Info) {
info!("{} {}", method, url);
for header in headers.iter() {
info!(" - {}", header);
}
info!("{:?}", data);
}
let connect_start = precise_time_ms();
let request = HyperRequest::with_connector(method.clone(),
url.clone().into_url(),
&*connector);
let mut request = match request {
Ok(request) => request,
Err(e) => return Err(NetworkError::from_hyper_error(&url, e)),
};
*request.headers_mut() = headers.clone();
let connect_end = precise_time_ms();
let send_start = precise_time_ms();
let mut request_writer = match request.start() {
Ok(streaming) => streaming,
Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
};
if let Some(ref data) = *request_body {
if let Err(e) = request_writer.write_all(&data) {
return Err(NetworkError::Internal(e.description().to_owned()))
}
}
let response = match request_writer.send() {
Ok(w) => w,
Err(HttpError::Io(ref io_error))
if io_error.kind() == io::ErrorKind::ConnectionAborted ||
io_error.kind() == io::ErrorKind::ConnectionReset => {
debug!("connection aborted ({:?}), possibly stale, trying new connection", io_error.description());
continue;
},
Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
};
let send_end = precise_time_ms();
let msg = if let Some(request_id) = request_id {
if let Some(pipeline_id) = *pipeline_id {
Some(prepare_devtools_request(
request_id.into(),
url.clone(), method.clone(), headers,
request_body.clone(), pipeline_id, time::now(),
connect_end - connect_start, send_end - send_start, is_xhr))
} else {
debug!("Not notifying devtools (no pipeline_id)");
None
}
} else {
debug!("Not notifying devtools (no request_id)");
None
};
return Ok((WrappedHttpResponse { response: response }, msg));
}
}
/// [HTTP fetch](https://fetch.spec.whatwg.org#http-fetch)
pub fn http_fetch(request: &mut Request,
cache: &mut CorsCache,
cors_flag: bool,
cors_preflight_flag: bool,
authentication_fetch_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// This is a new async fetch, reset the channel we are waiting on
*done_chan = None;
// Step 1
let mut response: Option<Response> = None;
// Step 2
// nothing to do, since actual_response is a function on response
// Step 3
if !request.skip_service_worker && !request.is_service_worker_global_scope {
// Substep 1
// TODO (handle fetch unimplemented)
if let Some(ref res) = response {
// Substep 2
// nothing to do, since actual_response is a function on response
// Substep 3
if (res.response_type == ResponseType::Opaque &&
request.mode != RequestMode::NoCors) ||
(res.response_type == ResponseType::OpaqueRedirect &&
request.redirect_mode != RedirectMode::Manual) ||
(res.url_list.len() > 1 && request.redirect_mode != RedirectMode::Follow) ||
res.is_network_error() {
return Response::network_error(NetworkError::Internal("Request failed".into()));
}
// Substep 4
// TODO: set response's CSP list on actual_response
}
}
// Step 4
let credentials = match request.credentials_mode {
CredentialsMode::Include => true,
CredentialsMode::CredentialsSameOrigin if request.response_tainting == ResponseTainting::Basic
=> true,
_ => false
};
// Step 5
if response.is_none() {
// Substep 1
if cors_preflight_flag {
let method_cache_match = cache.match_method(&*request,
request.method.clone());
let method_mismatch = !method_cache_match && (!is_simple_method(&request.method) ||
request.use_cors_preflight);
let header_mismatch = request.headers.iter().any(|view|
!cache.match_header(&*request, view.name()) && !is_simple_header(&view)
);
// Sub-substep 1
if method_mismatch || header_mismatch {
let preflight_result = cors_preflight_fetch(&request, cache, context);
// Sub-substep 2
if let Some(e) = preflight_result.get_network_error() {
return Response::network_error(e.clone());
}
}
}
// Substep 2
request.skip_service_worker = true;
// Substep 3
let mut fetch_result = http_network_or_cache_fetch(
request, authentication_fetch_flag, cors_flag, done_chan, context);
// Substep 4
if cors_flag && cors_check(&request, &fetch_result).is_err() {
return Response::network_error(NetworkError::Internal("CORS check failed".into()));
}
fetch_result.return_internal = false;
response = Some(fetch_result);
}
// response is guaranteed to be something by now
let mut response = response.unwrap();
// Step 5
match response.actual_response().status {
// Code 301, 302, 303, 307, 308
status if status.map_or(false, is_redirect_status) => {
response = match request.redirect_mode {
RedirectMode::Error => Response::network_error(NetworkError::Internal("Redirect mode error".into())),
RedirectMode::Manual => {
response.to_filtered(ResponseType::OpaqueRedirect)
},
RedirectMode::Follow => {
// set back to default
response.return_internal = true;
http_redirect_fetch(request, cache, response,
cors_flag, target, done_chan, context)
}
}
},
// Code 401
Some(StatusCode::Unauthorized) => {
// Step 1
// FIXME: Figure out what to do with request window objects
if cors_flag || !credentials {
return response;
}
// Step 2
// TODO: Spec says requires testing on multiple WWW-Authenticate headers
// Step 3
if !request.use_url_credentials || authentication_fetch_flag {
// TODO: Prompt the user for username and password from the window
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
}
// Step 4
return http_fetch(request, cache, cors_flag, cors_preflight_flag,
true, target, done_chan, context);
}
// Code 407
Some(StatusCode::ProxyAuthenticationRequired) => {
// Step 1
// TODO: Figure out what to do with request window objects
// Step 2
// TODO: Spec says requires testing on Proxy-Authenticate headers
// Step 3
// TODO: Prompt the user for proxy authentication credentials
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
// Step 4
// return http_fetch(request, cache,
// cors_flag, cors_preflight_flag,
// authentication_fetch_flag, target,
// done_chan, context);
}
_ => { }
}
// Step 6
if authentication_fetch_flag {
// TODO: Create authentication entry for this request
}
// set back to default
response.return_internal = true;
// Step 7
response
}
/// [HTTP redirect fetch](https://fetch.spec.whatwg.org#http-redirect-fetch)
fn http_redirect_fetch(request: &mut Request,
cache: &mut CorsCache,
response: Response,
cors_flag: bool,
target: Target,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// Step 1
assert!(response.return_internal);
// Step 2
if !response.actual_response().headers.has::<Location>() {
return response;
}
// Step 3
let location = match response.actual_response().headers.get::<Location>() {
Some(&Location(ref location)) => location.clone(),
_ => return Response::network_error(NetworkError::Internal("Location header parsing failure".into()))
};
let response_url = response.actual_response().url().unwrap();
let location_url = response_url.join(&*location);
let location_url = match location_url {
Ok(url) => url,
_ => return Response::network_error(NetworkError::Internal("Location URL parsing failure".into()))
};
// Step 4
match location_url.scheme() {
"http" | "https" => { },
_ => return Response::network_error(NetworkError::Internal("Not an HTTP(S) Scheme".into()))
}
// Step 5
if request.redirect_count >= 20 {
return Response::network_error(NetworkError::Internal("Too many redirects".into()));
}
// Step 6
request.redirect_count += 1;
// Step 7
let same_origin = location_url.origin() == request.current_url().origin();
let has_credentials = has_credentials(&location_url);
if request.mode == RequestMode::CorsMode && !same_origin && has_credentials {
return Response::network_error(NetworkError::Internal("Cross-origin credentials check failed".into()));
}
// Step 8
if cors_flag && has_credentials {
return Response::network_error(NetworkError::Internal("Credentials check failed".into()));
}
// Step 9
if cors_flag && !same_origin {
request.origin = Origin::Origin(ImmutableOrigin::new_opaque());
}
// Step 10
let status_code = response.actual_response().status.unwrap();
if ((status_code == StatusCode::MovedPermanently || status_code == StatusCode::Found) &&
request.method == Method::Post) ||
status_code == StatusCode::SeeOther {
request.method = Method::Get;
request.body = None;
}
// Step 11
request.url_list.push(location_url);
// Step 12
// TODO implement referrer policy
// Step 13
main_fetch(request, cache, cors_flag, true, target, done_chan, context)
}
fn try_immutable_origin_to_hyper_origin(url_origin: &ImmutableOrigin) -> Option<HyperOrigin> {
match *url_origin {
// TODO (servo/servo#15569) Set "Origin: null" when hyper supports it
ImmutableOrigin::Opaque(_) => None,
ImmutableOrigin::Tuple(ref scheme, ref host, ref port) =>
Some(HyperOrigin::new(scheme.clone(), host.to_string(), Some(port.clone())))
}
}
/// [HTTP network or cache fetch](https://fetch.spec.whatwg.org#http-network-or-cache-fetch)
fn http_network_or_cache_fetch(request: &mut Request,
authentication_fetch_flag: bool,
cors_flag: bool,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// TODO: Implement Window enum for Request
let request_has_no_window = true;
// Step 2
let mut http_request;
let http_request = if request_has_no_window &&
request.redirect_mode == RedirectMode::Error {
request
} else {
// Step 3
// TODO Implement body source
http_request = request.clone();
&mut http_request
};
// Step 4
let credentials_flag = match http_request.credentials_mode {
CredentialsMode::Include => true,
CredentialsMode::CredentialsSameOrigin if http_request.response_tainting == ResponseTainting::Basic
=> true,
_ => false
};
let content_length_value = match http_request.body {
None =>
match http_request.method {
// Step 6
Method::Post | Method::Put =>
Some(0),
// Step 5
_ => None
},
// Step 7
Some(ref http_request_body) => Some(http_request_body.len() as u64)
};
// Step 8
if let Some(content_length_value) = content_length_value {
http_request.headers.set(ContentLength(content_length_value));
}
// Step 9 TODO: needs request's client object
// Step 10
match http_request.referrer {
Referrer::NoReferrer => (),
Referrer::ReferrerUrl(ref http_request_referrer) =>
http_request.headers.set(Referer(http_request_referrer.to_string())),
Referrer::Client =>
// it should be impossible for referrer to be anything else during fetching
// https://fetch.spec.whatwg.org/#concept-request-referrer
unreachable!()
};
// Step 11
if cors_flag || (http_request.method != Method::Get && http_request.method != Method::Head) {
debug_assert!(http_request.origin != Origin::Client);
if let Origin::Origin(ref url_origin) = http_request.origin {
if let Some(hyper_origin) = try_immutable_origin_to_hyper_origin(url_origin) {
http_request.headers.set(hyper_origin)
}
}
}
// Step 12
if !http_request.headers.has::<UserAgent>() {
let user_agent = context.user_agent.clone().into_owned();
http_request.headers.set(UserAgent(user_agent));
}
match http_request.cache_mode {
// Step 13
CacheMode::Default if is_no_store_cache(&http_request.headers) => {
http_request.cache_mode = CacheMode::NoStore;
},
// Step 14
CacheMode::NoCache if !http_request.headers.has::<CacheControl>() => {
http_request.headers.set(CacheControl(vec![CacheDirective::MaxAge(0)]));
},
// Step 15
CacheMode::Reload | CacheMode::NoStore => {
// Substep 1
if !http_request.headers.has::<Pragma>() {
http_request.headers.set(Pragma::NoCache);
}
// Substep 2
if !http_request.headers.has::<CacheControl>() {
http_request.headers.set(CacheControl(vec![CacheDirective::NoCache]));
}
},
_ => {}
}
// Step 16
let current_url = http_request.current_url();
let host = Host {
hostname: current_url.host_str().unwrap().to_owned(),
port: current_url.port()
};
http_request.headers.set(host);
// unlike http_loader, we should not set the accept header
// here, according to the fetch spec
set_default_accept_encoding(&mut http_request.headers);
// Step 17
// TODO some of this step can't be implemented yet
if credentials_flag {
// Substep 1
// TODO http://mxr.mozilla.org/servo/source/components/net/http_loader.rs#504
// XXXManishearth http_loader has block_cookies: support content blocking here too
set_request_cookies(¤t_url,
&mut http_request.headers,
&context.state.cookie_jar);
// Substep 2
if !http_request.headers.has::<Authorization<String>>() {
// Substep 3
let mut authorization_value = None;
// Substep 4
if let Some(basic) = auth_from_cache(&context.state.auth_cache, ¤t_url.origin()) {
if !http_request.use_url_credentials || !has_credentials(¤t_url) {
authorization_value = Some(basic);
}
}
// Substep 5
if authentication_fetch_flag && authorization_value.is_none() {
if has_credentials(¤t_url) {
authorization_value = Some(Basic {
username: current_url.username().to_owned(),
password: current_url.password().map(str::to_owned)
})
}
}
// Substep 6
if let Some(basic) = authorization_value {
http_request.headers.set(Authorization(basic));
}
}
}
// Step 18
// TODO If there’s a proxy-authentication entry, use it as appropriate.
// Step 19
let mut response: Option<Response> = None;
// Step 20
let mut revalidation_needed = false;
// Step 21
// TODO have a HTTP cache to check for a completed response
let complete_http_response_from_cache: Option<Response> = None;
if http_request.cache_mode != CacheMode::NoStore &&
http_request.cache_mode != CacheMode::Reload &&
complete_http_response_from_cache.is_some() {
// TODO Substep 1 and 2. Select a response from HTTP cache.
// Substep 3
if let Some(ref response) = response {
revalidation_needed = response_needs_revalidation(&response);
};
// Substep 4
if http_request.cache_mode == CacheMode::ForceCache ||
http_request.cache_mode == CacheMode::OnlyIfCached {
// TODO pull response from HTTP cache
// response = http_request
}
if revalidation_needed {
// Substep 5
// TODO set If-None-Match and If-Modified-Since according to cached
// response headers.
} else {
// Substep 6
// TODO pull response from HTTP cache
// response = http_request
// response.cache_state = CacheState::Local;
}
}
// Step 22
if response.is_none() {
// Substep 1
if http_request.cache_mode == CacheMode::OnlyIfCached {
return Response::network_error(
NetworkError::Internal("Couldn't find response in cache".into()))
}
// Substep 2
let forward_response = http_network_fetch(http_request, credentials_flag,
done_chan, context);
match forward_response.raw_status {
// Substep 3
Some((200...303, _)) |
Some((305...399, _)) => {
if !http_request.method.safe() {
// TODO Invalidate HTTP cache response
}
},
// Substep 4
Some((304, _)) => {
if revalidation_needed {
// TODO update forward_response headers with cached response
// headers
}
},
_ => {}
}
// Substep 5
if response.is_none() {
response = Some(forward_response);
}
}
let response = response.unwrap();
match response.status {
Some(StatusCode::Unauthorized) => {
// Step 23
// FIXME: Figure out what to do with request window objects
if cors_flag && !credentials_flag {
return response;
}
// Substep 1
// TODO: Spec says requires testing on multiple WWW-Authenticate headers
// Substep 2
if http_request.body.is_some() {
// TODO Implement body source
}
// Substep 3
if !http_request.use_url_credentials || authentication_fetch_flag {
// TODO: Prompt the user for username and password from the window
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
}
// Substep 4
return http_network_or_cache_fetch(http_request,
true /* authentication flag */,
cors_flag, done_chan, context);
},
Some(StatusCode::ProxyAuthenticationRequired) => {
// Step 24
// Step 1
// TODO: Figure out what to do with request window objects
// Step 2
// TODO: Spec says requires testing on Proxy-Authenticate headers
// Step 3
// TODO: Prompt the user for proxy authentication credentials
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
// Step 4
// return http_network_or_cache_fetch(request, authentication_fetch_flag,
// cors_flag, done_chan, context);
},
_ => {}
}
// Step 25
if authentication_fetch_flag {
// TODO Create the authentication entry for request and the given realm
}
// Step 26
response
}
/// [HTTP network fetch](https://fetch.spec.whatwg.org/#http-network-fetch)
fn http_network_fetch(request: &Request,
credentials_flag: bool,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// TODO: Implement HTTP network fetch spec
// Step 1
// nothing to do here, since credentials_flag is already a boolean
// Step 2
// TODO be able to create connection using current url's origin and credentials
// Step 3
// TODO be able to tell if the connection is a failure
// Step 4
let url = request.current_url();
let request_id = context.devtools_chan.as_ref().map(|_| {
uuid::Uuid::new_v4().simple().to_string()
});
// XHR uses the default destination; other kinds of fetches (which haven't been implemented yet)
// do not. Once we support other kinds of fetches we'll need to be more fine grained here
// since things like image fetches are classified differently by devtools
let is_xhr = request.destination == Destination::None;
let wrapped_response = obtain_response(&context.state.connector,
&url,
&request.method,
&request.headers,
&request.body, &request.method,
&request.pipeline_id, request.redirect_count + 1,
request_id.as_ref().map(Deref::deref), is_xhr);
let pipeline_id = request.pipeline_id;
let (res, msg) = match wrapped_response {
Ok(wrapped_response) => wrapped_response,
Err(error) => return Response::network_error(error),
};
if log_enabled!(log::LogLevel::Info) {
info!("response for {}", url);
for header in res.response.headers.iter() {
info!(" - {}", header);
}
}
let mut response = Response::new(url.clone());
response.status = Some(res.response.status);
response.raw_status = Some((res.response.status_raw().0,
res.response.status_raw().1.as_bytes().to_vec()));
response.headers = res.response.headers.clone();
response.referrer = request.referrer.to_url().cloned();
let res_body = response.body.clone();
// We're about to spawn a thread to be waited on here
let (done_sender, done_receiver) = channel();
*done_chan = Some((done_sender.clone(), done_receiver));
let meta = match response.metadata().expect("Response metadata should exist at this stage") {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_
};
let devtools_sender = context.devtools_chan.clone();
let meta_status = meta.status.clone();
let meta_headers = meta.headers.clone();
thread::Builder::new().name(format!("fetch worker thread")).spawn(move || {
match StreamedResponse::from_http_response(res) {
Ok(mut res) => {
*res_body.lock().unwrap() = ResponseBody::Receiving(vec![]);
if let Some(ref sender) = devtools_sender {
if let Some(m) = msg {
send_request_to_devtools(m, &sender);
}
// --- Tell devtools that we got a response
// Send an HttpResponse message to devtools with the corresponding request_id
if let Some(pipeline_id) = pipeline_id {
send_response_to_devtools(
&sender, request_id.unwrap(),
meta_headers.map(Serde::into_inner),
meta_status,
pipeline_id);
}
}
loop {
match read_block(&mut res) {
Ok(Data::Payload(chunk)) => {
if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap() {
body.extend_from_slice(&chunk);
let _ = done_sender.send(Data::Payload(chunk));
}
},
Ok(Data::Done) | Err(_) => {
let mut body = res_body.lock().unwrap();
let completed_body = match *body {
ResponseBody::Receiving(ref mut body) => {
mem::replace(body, vec![])
},
_ => vec![],
};
*body = ResponseBody::Done(completed_body);
let _ = done_sender.send(Data::Done);
break;
}
}
}
}
Err(_) => {
// XXXManishearth we should propagate this error somehow
*res_body.lock().unwrap() = ResponseBody::Done(vec![]);
let _ = done_sender.send(Data::Done);
}
}
}).expect("Thread spawning failed");
// TODO these substeps aren't possible yet
// Substep 1
// Substep 2
// TODO Determine if response was retrieved over HTTPS
// TODO Servo needs to decide what ciphers are to be treated as "deprecated"
response.https_state = HttpsState::None;
// TODO Read request
// Step 5-9
// (needs stream bodies)
// Step 10
// TODO when https://bugzilla.mozilla.org/show_bug.cgi?id=1030660
// is resolved, this step will become unnecessary
// TODO this step
if let Some(encoding) = response.headers.get::<ContentEncoding>() {
if encoding.contains(&Encoding::Gzip) {
}
else if encoding.contains(&Encoding::Compress) {
}
};
// Step 11
// TODO this step isn't possible yet (CSP)
// Step 12
if response.is_network_error() && request.cache_mode == CacheMode::NoStore {
// TODO update response in the HTTP cache for request
}
// TODO this step isn't possible yet
// Step 13
// Step 14.
if credentials_flag {
set_cookies_from_headers(&url, &response.headers, &context.state.cookie_jar);
}
// TODO these steps
// Step 15
// Substep 1
// Substep 2
// Sub-substep 1
// Sub-substep 2
// Sub-substep 3
// Sub-substep 4
// Substep 3
// Step 16
response
}
/// [CORS preflight fetch](https://fetch.spec.whatwg.org#cors-preflight-fetch)
fn cors_preflight_fetch(request: &Request,
cache: &mut CorsCache,
context: &FetchContext)
-> Response {
// Step 1
let mut preflight = Request::new(request.current_url(), Some(request.origin.clone()),
request.is_service_worker_global_scope, request.pipeline_id);
preflight.method = Method::Options;
preflight.initiator = request.initiator.clone();
preflight.type_ = request.type_.clone();
preflight.destination = request.destination.clone();
preflight.referrer = request.referrer.clone();
preflight.referrer_policy = request.referrer_policy;
// Step 2
preflight.headers.set::<AccessControlRequestMethod>(
AccessControlRequestMethod(request.method.clone()));
// Step 3, 4
let mut value = request.headers
.iter()
.filter(|view| !is_simple_header(view))
.map(|view| UniCase(view.name().to_owned()))
.collect::<Vec<UniCase<String>>>();
value.sort();
// Step 5
preflight.headers.set::<AccessControlRequestHeaders>(
AccessControlRequestHeaders(value));
// Step 6
let response = http_network_or_cache_fetch(&mut preflight, false, false, &mut None, context);
// Step 7
if cors_check(&request, &response).is_ok() &&
response.status.map_or(false, |status| status.is_success()) {
// Substep 1
let mut methods = if response.headers.has::<AccessControlAllowMethods>() {
match response.headers.get::<AccessControlAllowMethods>() {
Some(&AccessControlAllowMethods(ref m)) => m.clone(),
// Substep 3
None => return Response::network_error(NetworkError::Internal("CORS ACAM check failed".into()))
}
} else {
vec![]
};
// Substep 2
let header_names = if response.headers.has::<AccessControlAllowHeaders>() {
match response.headers.get::<AccessControlAllowHeaders>() {
Some(&AccessControlAllowHeaders(ref hn)) => hn.clone(),
// Substep 3
None => return Response::network_error(NetworkError::Internal("CORS ACAH check failed".into()))
}
} else {
vec![]
};
// Substep 4
if methods.is_empty() && request.use_cors_preflight {
methods = vec![request.method.clone()];
}
// Substep 5
debug!("CORS check: Allowed methods: {:?}, current method: {:?}",
methods, request.method);
if methods.iter().all(|method| *method != request.method) &&
!is_simple_method(&request.method) {
return Response::network_error(NetworkError::Internal("CORS method check failed".into()));
}
// Substep 6
debug!("CORS check: Allowed headers: {:?}, current headers: {:?}", header_names, request.headers);
let set: HashSet<&UniCase<String>> = HashSet::from_iter(header_names.iter());
if request.headers.iter().any(|ref hv| !set.contains(&UniCase(hv.name().to_owned())) && !is_simple_header(hv)) {
return Response::network_error(NetworkError::Internal("CORS headers check failed".into()));
}
// Substep 7, 8
let max_age = response.headers.get::<AccessControlMaxAge>().map(|acma| acma.0).unwrap_or(0);
// TODO: Substep 9 - Need to define what an imposed limit on max-age is
// Substep 11, 12
for method in &methods {
cache.match_method_and_update(&*request, method.clone(), max_age);
}
// Substep 13, 14
for header_name in &header_names {
cache.match_header_and_update(&*request, &*header_name, max_age);
}
// Substep 15
return response;
}
// Step 8
Response::network_error(NetworkError::Internal("CORS check failed".into()))
}
/// [CORS check](https://fetch.spec.whatwg.org#concept-cors-check)
fn cors_check(request: &Request, response: &Response) -> Result<(), ()> {
// Step 1
let origin = response.headers.get::<AccessControlAllowOrigin>().cloned();
// Step 2
let origin = try!(origin.ok_or(()));
// Step 3
if request.credentials_mode != CredentialsMode::Include &&
origin == AccessControlAllowOrigin::Any {
return Ok(());
}
// Step 4
let origin = match origin {
AccessControlAllowOrigin::Value(origin) => origin,
// if it's Any or Null at this point, there's nothing to do but return Err(())
_ => return Err(())
};
match request.origin {
Origin::Origin(ref o) if o.ascii_serialization() == origin => {},
_ => return Err(())
}
// Step 5
if request.credentials_mode != CredentialsMode::Include {
return Ok(());
}
// Step 6
let credentials = response.headers.get::<AccessControlAllowCredentials>().cloned();
// Step 7
if credentials.is_some() {
return Ok(());
}
// Step 8
Err(())
}
fn has_credentials(url: &ServoUrl) -> bool {
!url.username().is_empty() || url.password().is_some()
}
fn is_no_store_cache(headers: &Headers) -> bool {
headers.has::<IfModifiedSince>() | headers.has::<IfNoneMatch>() |
headers.has::<IfUnmodifiedSince>() | headers.has::<IfMatch>() |
headers.has::<IfRange>()
}
fn response_needs_revalidation(_response: &Response) -> bool {
// TODO this function
false
}
/// https://fetch.spec.whatwg.org/#redirect-status
pub fn is_redirect_status(status: StatusCode) -> bool {
match status {
StatusCode::MovedPermanently |
StatusCode::Found |
StatusCode::SeeOther |
StatusCode::TemporaryRedirect |
StatusCode::PermanentRedirect => true,
_ => false,
}
}<|fim▁end|> | headers.set_raw("Cookie".to_owned(), v); |
<|file_name|>dataframe.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
from bokeh import mpl<|fim▁hole|>df = pd.DataFrame(np.random.randn(1000, 4), index=ts.index, columns=list('ABCD'))
df = df.cumsum()
df.plot(legend=False)
mpl.to_bokeh(name="dataframe")<|fim▁end|> |
ts = pd.Series(np.random.randn(1000), index=pd.date_range('1/1/2000', periods=1000))
ts = ts.cumsum()
|
<|file_name|>addGnabFeatures.py<|end_file_name|><|fim▁begin|># -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#<|fim▁hole|>
# these are my local ones
from env import gidgetConfigVars
import tsvIO
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
NA_VALUE = -999999
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def cleanUpName(aName):
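# Normalize a pathway name: uppercase it, strip the species suffix and any
# bracketed text, replace punctuation and spaces with underscores, and
# collapse runs of repeated underscores.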
bName = ''
aName = aName.upper()
## ii = aName.find(" - Homo sapiens (human)")
ii = aName.find(" - HOMO SAPIENS (HUMAN)")
if (ii >= 0):
aName = aName[:ii]
aName = aName.strip()
ii = aName.find("(")
while (ii >= 0):
jj = aName.find(")", ii)
aName = aName[:ii] + aName[jj + 1:]
ii = aName.find("(")
aName = aName.strip()
ii = aName.find("<")
while (ii >= 0):
jj = aName.find(">", ii)
aName = aName[:ii] + aName[jj + 1:]
ii = aName.find("<")
aName = aName.strip()
for ii in range(len(aName)):
if (aName[ii] == ','):
continue
elif (aName[ii] == '('):
bName += '_'
elif (aName[ii] == ')'):
bName += '_'
elif (aName[ii] == '-'):
bName += '_'
elif (aName[ii] == '/'):
bName += '_'
elif (aName[ii] == ';'):
bName += '_'
elif (aName[ii] == '&'):
continue
elif (aName[ii] == '#'):
continue
elif (aName[ii] == ' '):
bName += '_'
else:
bName += aName[ii].upper()
ii = bName.find("__")
while (ii >= 0):
print " ", ii, bName
bName = bName[:ii] + bName[ii + 1:]
print " ", bName
ii = bName.find("__")
return (bName)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def readPathways():
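# Build a {pathwayName: geneList} dictionary from the NCI pathway file, then
# remove duplicate pathways (identical gene lists), keeping the shorter name
# within a source and otherwise preferring NCI-NATURE, then PID, then KEGG,
# and so on down the source list.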
fh = file(
gidgetConfigVars['TCGAFMP_BIOINFORMATICS_REFERENCES'] + "/nci_pid/only_NCI_Nature_ver4.tab", 'r')
pwDict = {}
for aLine in fh:
aLine = aLine.strip()
aLine = aLine.upper()
tokenList = aLine.split('\t')
if (len(tokenList) != 3):
continue
if (tokenList[0] == "pathway"):
continue
longPathwayName = tokenList[0]
shortPathwayName = tokenList[1]
geneTokens = tokenList[2].strip()
geneList = geneTokens.split(',')
geneList.sort()
if (len(geneList) > 0):
while (geneList[0] == ''):
geneList = geneList[1:]
if (len(geneList) == 0):
continue
if (len(geneList) == 0):
continue
pathwayName = cleanUpName(shortPathwayName)
if (pathwayName not in pwDict.keys()):
# print " adding pathway %s (%d) " % ( pathwayName, len(geneList) )
pwDict[pathwayName] = geneList
else:
if (len(pwDict[pathwayName]) < len(geneList)):
# print " substituting shorter list of genes for %s (%d) " % (
# pathwayName, len(geneList) )
pwDict[pathwayName] = geneList
# else:
# print " NOT substituing list for %s " % pathwayName
fh.close()
print " "
print " have pathway dictionary with %d pathways " % len(pwDict)
print " --> now looking for duplicate pathways ... "
pwList = pwDict.keys()
pwList.sort()
delList = []
pairDict = {}
for ii in range(len(pwList) - 1):
iiName = pwList[ii]
iiLen = len(pwDict[iiName])
for jj in range(ii + 1, len(pwList)):
jjName = pwList[jj]
jjLen = len(pwDict[jjName])
if (jjLen != iiLen):
continue
if (pwDict[iiName] == pwDict[jjName]):
print "\n\n SAME !!! "
print iiName, iiLen
print pwDict[iiName]
print jjName, jjLen
print pwDict[jjName]
iiSplit = iiName.split('__')
jjSplit = jjName.split('__')
if (iiSplit[1] <= jjSplit[1]):
pairNames = (iiSplit[1], jjSplit[1])
else:
pairNames = (jjSplit[1], iiSplit[1])
if (pairNames in pairDict.keys()):
pairDict[pairNames] += 1
else:
pairDict[pairNames] = 1
if (iiSplit[1] == jjSplit[1]):
if (len(iiName) <= len(jjName)):
delList += [jjName]
else:
delList += [iiName]
else:
if (iiSplit[1] == "NCI-NATURE"):
delList += [jjName]
elif (jjSplit[1] == "NCI-NATURE"):
delList += [iiName]
elif (iiSplit[1] == "PID"):
delList += [jjName]
elif (jjSplit[1] == "PID"):
delList += [iiName]
elif (iiSplit[1] == "KEGG"):
delList += [jjName]
elif (jjSplit[1] == "KEGG"):
delList += [iiName]
elif (iiSplit[1] == "PWCOMMONS"):
delList += [jjName]
elif (jjSplit[1] == "PWCOMMONS"):
delList += [iiName]
elif (iiSplit[1] == "REACTOME"):
delList += [jjName]
elif (jjSplit[1] == "REACTOME"):
delList += [iiName]
elif (iiSplit[1] == "WIKIPATHWAYS"):
delList += [jjName]
elif (jjSplit[1] == "WIKIPATHWAYS"):
delList += [iiName]
elif (iiSplit[1] == "WIKIPW"):
delList += [jjName]
elif (jjSplit[1] == "WIKIPW"):
delList += [iiName]
elif (iiSplit[1] == "SMPDB"):
delList += [jjName]
elif (jjSplit[1] == "SMPDB"):
delList += [iiName]
elif (iiSplit[1] == "HUMANCYC"):
delList += [jjName]
elif (jjSplit[1] == "HUMANCYC"):
delList += [iiName]
else:
sys.exit(-1)
for aName in delList:
try:
del pwDict[aName]
except:
doNothing = 1
print " "
print " returning pathway dictionary with %d pathways " % len(pwDict)
print " "
for aKey in pairDict.keys():
print aKey, pairDict[aKey]
print " "
print " "
return (pwDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def setFeatBits(rowLabels, featPrefix, doesContainList, notContainList):
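# Return a boolean vector marking the rows whose labels start with featPrefix,
# contain at least one of the doesContain strings, and contain none of the
# notContain strings.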
numSet = 0
numRow = len(rowLabels)
bitVec = numpy.zeros(numRow, dtype=numpy.bool)
for iR in range(numRow):
if (featPrefix != ""):
if (not rowLabels[iR].startswith(featPrefix)): continue
if (len(doesContainList) > 0):
skipFlag = 1
for aStr in doesContainList:
if (rowLabels[iR].find(aStr) >= 0): skipFlag = 0
if (len(notContainList) > 0):
skipFlag = 0
for aStr in notContainList:
if (rowLabels[iR].find(aStr) >= 0): skipFlag = 1
if (skipFlag): continue
## set bit if we get here ...
bitVec[iR] = 1
numSet += 1
print featPrefix, doesContainList, notContainList, numRow, numSet
if (numSet == 0):
print " numSet=0 ... this is probably a problem ... "
# sys.exit(-1)
return (bitVec)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# B:GNAB:ADAM7:chr8:24298509:24384483:+:y_n_somatic y_n y_del
# --> B:GNAB:ADAM7:chr8:24298509:24384483:+:y_del_somatic
def makeNewFeatureName(curFeatName, oldStringList, newStringList):
for jj in range(len(oldStringList)):
oldStr = oldStringList[jj]
newStr = newStringList[jj]
i1 = curFeatName.find(oldStr)
if ( i1 >= 0 ):
i2 = i1 + len(oldStr)
newFeatName = curFeatName[:i1] + newStr + curFeatName[i2:]
return ( newFeatName )
print " ERROR in makeNewFeatureName ???? ", curFeatName, oldStringList, newStringList
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def chooseCountThreshold(dataD):
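# Count the non-NA bits on the TP53 mutation feature and use roughly one
# eleventh of that count as the minimum per-feature count threshold.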
rowLabels = dataD['rowLabels']
dMat = dataD['dataMatrix']
numBits = 0
for ii in range(len(rowLabels)):
if (numBits > 0):
continue
if (rowLabels[ii].find("B:GNAB:TP53:") >= 0):
for jj in range(len(dMat[ii])):
if (dMat[ii][jj] == 0):
numBits += 1
elif (dMat[ii][jj] == 1):
numBits += 1
print " number of bits found for TP53 mutation feature: ", numBits
countThreshold = int(numBits / 11) - 1
return (countThreshold)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def findFeature ( rowLabels, s1, s2 ):
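# Return the index of the first row label containing both substrings, or -1.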
for iR in range(len(rowLabels)):
if ( rowLabels[iR].find(s1) >= 0 ):
if ( rowLabels[iR].find(s2) >= 0 ):
return ( iR )
return ( -1 )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def pathwayGnab(dataD, pathways={}):
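# Build pathway-level binary GNAB features, one per (pathway, mutation type)
# pair, aggregated from the gene-level GNAB features in the input matrix.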
print " "
print " ************************************************************* "
print " ************************************************************* "
print " "
print " in pathwayGnab ... "
# check that the input feature matrix looks ok ...
try:
numRow = len(dataD['rowLabels'])
numCol = len(dataD['colLabels'])
rowLabels = dataD['rowLabels']
print " %d rows x %d columns " % (numRow, numCol)
# print rowLabels[:5]
# print rowLabels[-5:]
except:
print " ERROR in pathwayGnab ??? bad data ??? "
return (dataD)
if (len(pathways) == 0):
print " "
print " WARNING: no pathway information found ... using a few hard-coded pathways for now "
print " "
pathways = {}
pathways[
"TP53_pathway"] = ["E2F1", "TP53", "RB1", "CDK4", "TIMP3", "CDK2", "ATM",
"CCNE1", "CCND1", "CDKN1A", "BCL2", "BAX", "PCNA", "MDM2",
"APAF1", "GADD45A"]
pathways[
"PI3K_AKT_pathway"] = ["FRAP1", "LST8", "PDPK1", "NGF", "NR4A1", "FOXO1", "CHUK",
"THEM4", "PTEN", "CREB1", "BAD", "RHOA", "TRIB3", "PHLPP",
"CASP9", "AKT1S1", "MDM2", "RPS6KB2"]
pathways[
"Wnt_pathway"] = ["PPP2R5B", "PPP2R5A", "PPP2R5D", "BTRC", "WNT3A",
"PPP2R5C", "MMP7", "PRKX", "CTNNB1", "WNT2", "CSNK2A2", "MAP3K7", "PRKACG",
"WNT1", "WNT4", "WNT3", "CSNK2A1", "PRKACA", "PRKACB", "WNT6", "CUL1",
"WNT10A", "WNT10B", "VANGL1", "ROCK1", "ROCK2", "VANGL2", "CHP2", "SKP1",
"EP300", "JUN", "MAPK9", "PPP2R5E", "MAPK8", "LOC728622", "WNT5A", "WNT5B",
"CXXC4", "DAAM1", "DAAM2", "RBX1", "RAC2", "RAC3", "RAC1", "CACYBP",
"AXIN2", "AXIN1", "DVL2", "DVL3", "TCF7", "CREBBP", "SMAD4", "SMAD3",
"SMAD2", "PORCN", "DVL1", "SFRP5", "SFRP1", "PRICKLE1", "SFRP2", "SFRP4",
"PRICKLE2", "WIF1", "PPARD", "PLCB3", "PLCB4", "FRAT1", "RHOA", "FRAT2",
"SOX17", "PLCB1", "FOSL1", "MYC", "PLCB2", "PPP2R1B", "PRKCA", "PPP2R1A",
"TBL1XR1", "CTBP1", "CTBP2", "TP53", "LEF1", "PRKCG", "PRKCB", "CTNNBIP1",
"SENP2", "CCND1", "PSEN1", "CCND3", "CCND2", "WNT9B", "WNT11", "SIAH1",
"RUVBL1", "WNT9A", "CER1", "NKD1", "WNT16", "NKD2", "APC2", "CAMK2G",
"PPP3R1", "PPP3R2", "TCF7L2", "TCF7L1", "CHD8", "PPP2CA", "PPP2CB",
"PPP3CB", "NFAT5", "CAMK2D", "PPP3CC", "NFATC4", "CAMK2B", "CHP",
"PPP3CA", "NFATC2", "NFATC3", "FBXW11", "CAMK2A", "WNT8A", "WNT8B",
"APC", "NFATC1", "CSNK1A1", "FZD9", "FZD8", "NLK", "FZD1", "CSNK2B",
"CSNK1A1L", "FZD3", "FZD2", "MAPK10", "FZD5", "FZD4", "FZD7", "DKK4",
"WNT2B", "FZD6", "DKK2", "FZD10", "WNT7B", "DKK1", "CSNK1E", "GSK3B",
"LRP6", "TBL1X", "WNT7A", "LRP5", "TBL1Y"]
print " "
print " total number of pathways : ", len(pathways)
print " "
mutationTypes = [":y_n_somatic", ":code_potential_somatic",
":missense_somatic",
":y_del_somatic", ":y_amp_somatic"]
numTypes = len(mutationTypes)
pathwayList = pathways.keys()
pathwayList.sort()
numPW = len(pathways)
newNameVec = [0] * (numPW * numTypes)
newDataMat = [0] * (numPW * numTypes)
dMat = dataD['dataMatrix']
min_numON = chooseCountThreshold(dataD)
if (min_numON < (numCol / 100)):
min_numON = int(numCol / 100)
print " minimum count threshold : ", min_numON
kFeat = 0
max_numON = 0
max_fracON = 0.
## outer loop is over pathways ...
for aPathway in pathwayList:
print " "
print " outer loop over pathways ... ", aPathway
## next loop is over mutation types
for aMutType in mutationTypes:
numON = 0
newFeatName = "B:GNAB:" + aPathway + "::::" + aMutType
print " new feature name : ", newFeatName
# first make sure we don't already have a feature with this name
stopNow = 0
for iRow in range(numRow):
if (newFeatName == rowLabels[iRow]):
print " STOPPING ... this feature already exists ??? ", newFeatName
stopNow = 1
if (stopNow): continue
print " tentative new feature #%d ... <%s> " % (kFeat, newFeatName)
newNameVec[kFeat] = newFeatName
newDataMat[kFeat] = numpy.zeros(numCol)
if (0):
print " "
print " "
print aPathway, newFeatName
print len(pathways[aPathway]), pathways[aPathway]
## and now we can loop over the genes in the pathway
for gnabGene in pathways[aPathway]:
print " looking for pathway gene ", gnabGene
## and look for the desired feature
iR = findFeature ( rowLabels, "B:GNAB:"+gnabGene+":", aMutType )
## if we don't find anything, and we are working on y_del or y_amp
## then we can use y_n instead
if ( iR < 0 ):
print " --> failed to find desired feature ", gnabGene, aMutType
if ( (aMutType==":y_del_somatic") or (aMutType==":y_amp_somatic") ):
iR = findFeature ( rowLabels, "B:GNAB:"+gnabGene+":", ":y_n_somatic" )
if ( iR >= 0 ):
print " --> will use this feature instead ", iR, rowLabels[iR]
else:
print " --> failed to find even a backup feature "
else:
print " --> FOUND desired feature ", gnabGene, aMutType, iR, rowLabels[iR]
if ( iR < 0 ): continue
gnabLabel = rowLabels[iR]
for iCol in range(numCol):
if (dMat[iR][iCol] == 1):
print " %d using mutation bit from gene %s, column %d (%s) [%d] " % \
(newDataMat[kFeat][iCol], gnabGene, iCol, gnabLabel, numON)
if (newDataMat[kFeat][iCol] == 0):
numON += 1
newDataMat[kFeat][iCol] = 1
if (numON > min_numON):
kFeat += 1
print " --> keeping this feature ... ", kFeat, newFeatName, numON, min_numON
# keep track of which pathways are the MOST mutated ...
if (max_numON <= numON):
max_numON = numON
max_pathway = newFeatName
print " MOST mutated so far (1) ... ", max_pathway, max_numON, len(pathways[aPathway])
# note that this is not the fraction of the genes in the pathway that are
# mutated, but just a count normalized by the # of genes in the
# pathway
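                # for intuition (hypothetical numbers): numON = 45 samples with at
                # least one mutated pathway gene over a 150-gene pathway gives
                # fracON = 45/150 = 0.30; since the numerator counts samples, this
                # ratio can exceed 1 for small, heavily mutated pathways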
numGenes = len(pathways[aPathway])
fracON = float(numON) / float(len(pathways[aPathway]))
if (numGenes >= 10):
if (max_fracON <= fracON):
max_fracON = fracON
max_pathway2 = newFeatName
print " MOST mutated so far (2) ... ", max_pathway2, max_fracON, len(pathways[aPathway])
else:
print " --> NOT keeping this feature ... ", newFeatName, numON, min_numON
numNewFeat = kFeat
print " "
print " --> number of new features : ", numNewFeat
print len(newDataMat), len(newDataMat[0])
# now we need to append these new features to the input data matrix
newRowLabels = [0] * (numRow + numNewFeat)
newMatrix = [0] * (numRow + numNewFeat)
for iR in range(numRow):
newRowLabels[iR] = rowLabels[iR]
newMatrix[iR] = dMat[iR]
for iR in range(numNewFeat):
newRowLabels[iR + numRow] = newNameVec[iR]
newMatrix[iR + numRow] = newDataMat[iR]
dataD['rowLabels'] = newRowLabels
dataD['dataMatrix'] = newMatrix
print " "
print " --> finished with pathwayGnab ... "
print " "
return (dataD)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def driverGnab(dataD, driverList):
print " "
print " ************************************************************* "
print " ************************************************************* "
print " "
print " in driverGnab ... "
# check that the input feature matrix looks ok ...
try:
numRow = len(dataD['rowLabels'])
numCol = len(dataD['colLabels'])
rowLabels = dataD['rowLabels']
print " %d rows x %d columns " % (numRow, numCol)
# print rowLabels[:5]
# print rowLabels[-5:]
except:
print " ERROR in driverGnab ??? bad data ??? "
return (dataD)
mutationTypes = [":y_n_somatic", ":code_potential_somatic",
":missense_somatic",
":y_del_somatic", ":y_amp_somatic"]
numTypes = len(mutationTypes)
numK = 1
newNameVec = [0] * (numK * numTypes)
newDataMat = [0] * (numK * numTypes)
dMat = dataD['dataMatrix']
kFeat = 0
if (1):
for aMutType in mutationTypes:
newFeatName = "B:GNAB:driverMut" + "::::" + aMutType
# first make sure we don't already have a feature with this name
# ...
stopNow = 0
for iRow in range(numRow):
if (newFeatName == rowLabels[iRow]):
stopNow = 1
if (stopNow):
continue
print " tentative new feature #%d ... <%s> " % (kFeat, newFeatName)
newNameVec[kFeat] = newFeatName
newDataMat[kFeat] = numpy.zeros(numCol)
for iR in range(numRow):
if (iR % 1000 == 0):
print iR, numRow
if (1):
gnabLabel = rowLabels[iR]
if (not gnabLabel.startswith("B:GNAB:")):
continue
if (gnabLabel.find(aMutType) < 0):
continue
try:
gnabTokens = gnabLabel.split(':')
gnabGene = gnabTokens[2].upper()
except:
print " FAILED to parse GNAB feature name ??? ", gnabLabel
continue
print " considering ", iR, gnabTokens, gnabGene
if (gnabGene in driverList):
for iCol in range(numCol):
if (dMat[iR][iCol] == 1):
print " yes! setting bit at ", kFeat, iCol
newDataMat[kFeat][iCol] = 1
if (1):
print " --> keeping this feature ... ", kFeat, newFeatName
kFeat += 1
numNewFeat = kFeat
print " "
print " --> number of new features : ", numNewFeat
print len(newDataMat), len(newDataMat[0])
# now we need to append these new features to the input data matrix
newRowLabels = [0] * (numRow + numNewFeat)
newMatrix = [0] * (numRow + numNewFeat)
for iR in range(numRow):
newRowLabels[iR] = rowLabels[iR]
newMatrix[iR] = dMat[iR]
for iR in range(numNewFeat):
newRowLabels[iR + numRow] = newNameVec[iR]
newMatrix[iR + numRow] = newDataMat[iR]
dataD['rowLabels'] = newRowLabels
dataD['dataMatrix'] = newMatrix
print " "
print " --> finished with driverGnab ... "
print " "
return (dataD)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def combineGnabCnvr(dataD):
print " "
print " ************************************************************* "
print " ************************************************************* "
print " "
print " in combineGnabCnvr ... "
# check that the input feature matrix looks ok ...
try:
numRow = len(dataD['rowLabels'])
numCol = len(dataD['colLabels'])
rowLabels = dataD['rowLabels']
colLabels = dataD['colLabels']
print " %d rows x %d columns " % (numRow, numCol)
# print rowLabels[:5]
# print rowLabels[-5:]
except:
print " ERROR in combineGnabCnvr ??? bad data ??? "
return (dataD)
# next, we need to find all of the GNAB features and all of the CNVR
# features
print " --> assigning gnab / cnvr flags ... "
gnabFeatIncSubstrings = [ ":y_n", ":code_potential" ]
gnabFeatAmpSubstrings = [ ":y_amp", ":cp_amp" ]
gnabFeatDelSubstrings = [ ":y_del", ":cp_del" ]
cnvrFeatExcSubstrings = [ "Gistic" ]
isGnab = setFeatBits(rowLabels, "B:GNAB:", gnabFeatIncSubstrings, [])
isCnvr = setFeatBits(rowLabels, "N:CNVR:", [], cnvrFeatExcSubstrings)
print len(isGnab), max(isGnab)
print len(isCnvr), max(isCnvr)
    if ((not max(isGnab)) or (not max(isCnvr))):
print " missing either GNAB or CNVR features ... "
return (dataD)
# now we need to map each of the GNAB features to one or more CNVR features
mapVec = [0] * numRow
for iR in range(numRow):
if (iR % 1000 == 0):
print iR, numRow
if (isGnab[iR]):
mapVec[iR] = []
gnabLabel = rowLabels[iR]
try:
gnabTokens = gnabLabel.split(':')
gnabChrName = gnabTokens[3].upper()
gnabStart = int(gnabTokens[4])
gnabStop = int(gnabTokens[5])
except:
print " FAILED to parse GNAB feature name ??? ", gnabLabel
continue
# avoid X and Y chromosome genes ...
if (gnabChrName.endswith("X")):
continue
if (gnabChrName.endswith("Y")):
continue
for jR in range(numRow):
if (isCnvr[jR]):
cnvrLabel = rowLabels[jR]
cnvrTokens = cnvrLabel.split(':')
cnvrChrName = cnvrTokens[3].upper()
if (gnabChrName != cnvrChrName):
continue
# print " comparing ... ", gnabLabel, cnvrLabel
cnvrStart = int(cnvrTokens[4])
if (gnabStop < cnvrStart):
continue
cnvrStop = int(cnvrTokens[5])
if (gnabStart > cnvrStop):
continue
mapVec[iR] += [jR]
# print " found match! ", gnabLabel, cnvrLabel, iR, jR,
# mapVec[iR]
if (0):
if (len(mapVec[iR]) > 5):
print iR, gnabLabel, len(mapVec[iR])
for kR in mapVec[iR]:
print " ", kR, rowLabels[kR]
# sys.exit(-1)
# now we need to actually loop over the data ...
dMat = dataD['dataMatrix']
# -------------------------------------------------------------------------
if (0):
# FIRST we want to check for any "adjacent normal" samples and set those to 0 ...
# --> ACTUALLY, deciding NOT to do this anymore ( 31 oct 2012 ) NEW CHANGE
numGNABfeat = 0
numCNVRfeat = 0
for iRow in range(numRow):
curFeature = rowLabels[iRow]
if (curFeature.startswith("B:GNAB:")):
numGNABfeat += 1
elif (curFeature.startswith("N:CNVR:")):
numCNVRfeat += 1
print " number of GNAB features : %d " % (numGNABfeat)
print " number of CNVR features : %d " % (numCNVRfeat)
print " "
numGNABset = 0
numCNVRset = 0
numGNABfeat = 0
numCNVRfeat = 0
numNormalCol = 0
for iCol in range(numCol):
curLabel = colLabels[iCol]
if (curLabel.startswith("TCGA-")):
if (len(curLabel) >= 15):
sampleType = curLabel[13:15]
if (sampleType == '11'):
numNormalCol += 1
# print iCol, curLabel
for iRow in range(numRow):
curFeature = rowLabels[iRow]
if (curFeature.startswith("B:GNAB:")):
# print curFeature, dMat[iRow][iCol]
if (dMat[iRow][iCol] == "NA" or dMat[iRow][iCol] == NA_VALUE):
dMat[iRow][iCol] = 0
numGNABset += 1
elif (curFeature.startswith("N:CNVR:")):
if (curFeature.find(":chrX:") > 0):
continue
if (curFeature.find(":chrY:") > 0):
continue
# print curFeature, dMat[iRow][iCol]
if (dMat[iRow][iCol] == "NA" or dMat[iRow][iCol] == NA_VALUE):
dMat[iRow][iCol] = 0.
numCNVRset += 1
# -------------------------------------------------------------------------
## cnvrThreshold = 2.
## cnvrThreshold = 1.
cnvrAmpThresh = 0.30
cnvrDelThresh = -0.46
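    # if the CNVR values are log2(copy number / 2) ratios -- an assumption, since
    # the units are not stated in this file -- these cutoffs correspond to roughly
    # 2 * 2**(+0.30) ~ 2.5 copies for an amplification call and
    # 2 * 2**(-0.46) ~ 1.45 copies for a deletion call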
print " --> now checking for deletions and amplifications ... ", cnvrAmpThresh, cnvrDelThresh
print " and creating new y_del and y_amp features "
numNewFeat = 0
newNameVec = []
newDataMat = []
for iR in range(numRow):
if (iR % 1000 == 0):
print iR, numRow
if (isGnab[iR]):
print " "
print " having a look at this feature: "
print iR, rowLabels[iR], len(mapVec[iR])
print mapVec[iR]
# how often is this gene mutated?
numYes = 0
numDel = 0
numAmp = 0
numYesDel = 0
numYesAmp = 0
maxCN = -999.
minCN = 999.
for iCol in range(numCol):
mutFlag = 0
ampFlag = 0
delFlag = 0
for jR in mapVec[iR]:
if (dMat[iR][iCol] == 1):
mutFlag = 1
if (dMat[jR][iCol] == NA_VALUE):
continue
if (dMat[jR][iCol] > maxCN):
maxCN = dMat[jR][iCol]
if (dMat[jR][iCol] < minCN):
minCN = dMat[jR][iCol]
if (dMat[jR][iCol] < cnvrDelThresh):
delFlag = 1
if (dMat[jR][iCol] > cnvrAmpThresh):
ampFlag = 1
numYes += mutFlag
numDel += delFlag
numAmp += ampFlag
if (mutFlag or delFlag): numYesDel += 1
if (mutFlag or ampFlag): numYesAmp += 1
addDelFeat = 0
addAmpFeat = 0
fThresh = 0.025
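            # for intuition (hypothetical numbers): fThresh is a fraction of all
            # samples, so with numCol = 400 samples a merged y_del / y_amp feature
            # is kept only if it turns on at least ~10 samples beyond the mutation
            # calls alone and ~10 beyond the CNVR calls alone (delFrac1 / delFrac2)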
if (numYes + numAmp + numDel > 0):
print " --> %3d mutations (%3d mut or del, %3d mut or amp) " % \
( numYes, numYesDel, numYesAmp )
print " %3d deletions " % numDel, minCN
print " %3d amplifications " % numAmp, maxCN
if (numYesDel > 0):
delFrac1 = float(numYesDel-numYes)/float(numCol)
delFrac2 = float(numYesDel-numDel)/float(numCol)
delFrac3 = 0
                if ( numYes > 0 ): delFrac3 += float(numYesDel) / float(numYes)
                if ( numDel > 0 ): delFrac3 += float(numYesDel) / float(numDel)
if ( delFrac1>fThresh and delFrac2>fThresh ):
print " deletion looks significant ", numYesDel, numYes, numDel, numCol, delFrac1, delFrac2, delFrac3
addDelFeat = 1
else:
print " deletion does not seem significant (?) ", numYesDel, numYes, numDel, numCol, delFrac1, delFrac2, delFrac3
if (numYesAmp > 0):
ampFrac1 = float(numYesAmp-numYes)/float(numCol)
ampFrac2 = float(numYesAmp-numAmp)/float(numCol)
ampFrac3 = 0
                if ( numYes > 0 ): ampFrac3 += float(numYesAmp) / float(numYes)
                if ( numAmp > 0 ): ampFrac3 += float(numYesAmp) / float(numAmp)
if ( ampFrac1>fThresh and ampFrac2>fThresh ):
print " amplification looks significant ", numYesAmp, numYes, numAmp, numCol, ampFrac1, ampFrac2, ampFrac3
addAmpFeat = 1
else:
print " amplification does not seem significant (?) ", numYesAmp, numYes, numAmp, numCol, ampFrac1, ampFrac2, ampFrac3
## add the "DEL" feature if appropriate ...
if ( addDelFeat ):
numNewFeat += 1
curFeatName = rowLabels[iR]
newFeatName = makeNewFeatureName(curFeatName, gnabFeatIncSubstrings, gnabFeatDelSubstrings)
print " newFeatName <%s> " % newFeatName
# make sure that there is not already a feature by this name!!!
addFeat = 1
for aLabel in rowLabels:
if (aLabel == newFeatName):
addFeat = 0
print " oops ??? <%s> already exists ??? " % aLabel
if (addFeat):
print " --> adding this new feature: ", newFeatName
newNameVec += [newFeatName]
newDataMat += [numpy.zeros(numCol)]
numBitsOn = 0
for iCol in range(numCol):
# we need to start with NA
newDataMat[-1][iCol] = NA_VALUE
# if we already have a 'yes' for the mutation, that's
# all we need ...
if (dMat[iR][iCol] == 1):
newDataMat[-1][iCol] = 1
numBitsOn += 1
continue
# if not, then check for deletions ...
for jR in mapVec[iR]:
if (dMat[jR][iCol] == NA_VALUE): continue
if (newDataMat[-1][iCol] == 1): continue
if (dMat[jR][iCol] < cnvrDelThresh):
newDataMat[-1][iCol] = 1
numBitsOn += 1
# if we have set this bit we are done ...
if (newDataMat[-1][iCol] == 1): continue
# and otherwise if we have no mutation, set it to 0
if (dMat[iR][iCol] == 0): newDataMat[-1][iCol] = 0
print " number of bits set: ", numBitsOn
## add the "AMP" feature if appropriate ...
if ( addAmpFeat ):
numNewFeat += 1
curFeatName = rowLabels[iR]
newFeatName = makeNewFeatureName(curFeatName, gnabFeatIncSubstrings, gnabFeatAmpSubstrings)
print " newFeatName <%s> " % newFeatName
# make sure that there is not already a feature by this name!!!
addFeat = 1
for aLabel in rowLabels:
if (aLabel == newFeatName):
addFeat = 0
print " oops ??? <%s> already exists ??? " % aLabel
if (addFeat):
print " --> adding this new feature: ", newFeatName
newNameVec += [newFeatName]
newDataMat += [numpy.zeros(numCol)]
numBitsOn = 0
for iCol in range(numCol):
# we need to start with NA
newDataMat[-1][iCol] = NA_VALUE
# if we already have a 'yes' for the mutation, that's
# all we need ...
if (dMat[iR][iCol] == 1):
newDataMat[-1][iCol] = 1
numBitsOn += 1
continue
# if not, then check for amplifications ...
for jR in mapVec[iR]:
if (dMat[jR][iCol] == NA_VALUE): continue
if (newDataMat[-1][iCol] == 1): continue
if (dMat[jR][iCol] > cnvrAmpThresh):
newDataMat[-1][iCol] = 1
numBitsOn += 1
# if we have set this bit we are done ...
if (newDataMat[-1][iCol] == 1): continue
# and otherwise if we have no mutation, set it to 0
if (dMat[iR][iCol] == 0): newDataMat[-1][iCol] = 0
print " number of bits set: ", numBitsOn
# if ( numNewFeat == 0 ):
# print " --> NO new features "
# print " --> finished with combineGnabCnvr ... "
# return ( dataD )
print " "
print " --> number of new features : ", numNewFeat
if ( 0 ):
if (numNewFeat > 0):
print len(newNameVec)
print len(newDataMat), len(newDataMat[0])
for ii in range(numNewFeat):
if (newNameVec[ii].find("CSMD1") > 0):
print newNameVec[ii]
print newDataMat[ii]
print " "
# now we need to append these new features to the input data matrix
newRowLabels = [0] * (numRow + numNewFeat)
newMatrix = [0] * (numRow + numNewFeat)
for iR in range(numRow):
newRowLabels[iR] = rowLabels[iR]
newMatrix[iR] = dMat[iR]
for iR in range(numNewFeat):
newRowLabels[iR + numRow] = newNameVec[iR]
newMatrix[iR + numRow] = newDataMat[iR]
dataD['rowLabels'] = newRowLabels
dataD['dataMatrix'] = newMatrix
print " "
print " --> finished with combineGnabCnvr ... "
print " "
return (dataD)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
if __name__ == "__main__":
if (1):
if (len(sys.argv) == 3):
inFile = sys.argv[1]
outFile = sys.argv[2]
## do_combineGnabCnvr = 1
do_combineGnabCnvr = 0
do_pathwayGnab = 0
do_driverGnab = 0
driverList = ["TP53", "KRAS", "PIK3CA", "PTEN"]
else:
print " "
print " Usage: %s <input TSV file> <output TSV file> "
print " "
print " ERROR -- bad command line arguments "
sys.exit(-1)
print " "
print " Running : %s %s %s " % (sys.argv[0], sys.argv[1], sys.argv[2])
print " "
# read in the input feature matrix first, just in case there
# actually isn't one yet available ...
print " --> reading in feature matrix ... "
testD = tsvIO.readTSV(inFile)
try:
print len(testD['rowLabels']), len(testD['colLabels'])
except:
print " --> invalid / missing input feature matrix "
sys.exit(-1)
# we want to come up with a 'merged' mutation OR deletion feature
if (do_combineGnabCnvr):
print " calling combineGnabCnvr ... "
newD = combineGnabCnvr(testD)
testD = newD
# and then pathway level mutation features
if (do_pathwayGnab):
print " calling pathwayGnab ... "
pwDict = readPathways()
newD = pathwayGnab(testD, pwDict)
testD = newD
# and then a 'driverMut' feature based on the driverList above
# (which is just 4 hardcoded genes for now)
if (do_driverGnab):
print " calling driverGnab ... "
newD = driverGnab(testD, driverList)
testD = newD
# and finally write it out ...
print " --> writing out output feature matrix "
tsvIO.writeTSV_dataMatrix(testD, 0, 0, outFile)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#<|fim▁end|> |
# these are system modules
import numpy
import sys |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for Verisure Smartplugs."""
import logging
from time import monotonic
from homeassistant.components.switch import SwitchEntity
from . import CONF_SMARTPLUGS, HUB as hub
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure switch platform."""
if not int(hub.config.get(CONF_SMARTPLUGS, 1)):
return False
hub.update_overview()
switches = []
switches.extend(
[
VerisureSmartplug(device_label)
for device_label in hub.get("$.smartPlugs[*].deviceLabel")
]
)
add_entities(switches)
class VerisureSmartplug(SwitchEntity):
"""Representation of a Verisure smartplug."""
def __init__(self, device_id):
"""Initialize the Verisure device."""
self._device_label = device_id
self._change_timestamp = 0
self._state = False<|fim▁hole|> @property
def name(self):
"""Return the name or location of the smartplug."""
return hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].area", self._device_label
)
@property
def is_on(self):
"""Return true if on."""
if monotonic() - self._change_timestamp < 10:
return self._state
self._state = (
hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].currentState",
self._device_label,
)
== "ON"
)
return self._state
@property
def available(self):
"""Return True if entity is available."""
return (
hub.get_first("$.smartPlugs[?(@.deviceLabel == '%s')]", self._device_label)
is not None
)
def turn_on(self, **kwargs):
"""Set smartplug status on."""
hub.session.set_smartplug_state(self._device_label, True)
self._state = True
self._change_timestamp = monotonic()
def turn_off(self, **kwargs):
"""Set smartplug status off."""
hub.session.set_smartplug_state(self._device_label, False)
self._state = False
self._change_timestamp = monotonic()
# pylint: disable=no-self-use
def update(self):
"""Get the latest date of the smartplug."""
hub.update_overview()<|fim▁end|> | |
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { HashLocationStrategy, LocationStrategy } from '@angular/common';
import { HttpClientModule } from '@angular/common/http';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { BrowserModule } from '@angular/platform-browser';
import { DocsModule } from 'ng-documentation';
import { AppComponent } from './app.component';
import { DialogDemoModule } from './dialog/dialog.module';
import { DialogComponent } from './dialog/dialog/dialog.component';
import { EllipsisDemoModule } from './ellipsis/ellipsis.module';
import { EllipsisDemoComponent } from './ellipsis/ellipsis/ellipsis.component';
import { NavigationDemoModule } from './navigation/navigation.module';
import { NavigationdDemoComponent } from './navigation/navigation/navigation.component';
import { PopoverDemoComponent } from './popover/popover-demo/popover-demo.component';
import { PopoverDemoModule } from './popover/popover.module';
import { TooltipDemoComponent } from './tooltip/tooltip-demo/tooltip-demo.component';
import { TooltipDemoModule } from './tooltip/tooltip.module';
import { TreeDocsComponent } from './tree-docs/tree-docs.component';
import { TreeDocsModule } from './tree-docs/tree-docs.module';
import { WelcomeComponent } from './welcome/welcome.component';
import { MovableComponent } from './movable/movable/movable.component';
import { MovableModule } from './movable/movable.module';
@NgModule({
declarations: [
AppComponent,
WelcomeComponent
],
imports: [
BrowserModule,
FormsModule,
HttpClientModule,
PopoverDemoModule,
TooltipDemoModule,
NavigationDemoModule,
EllipsisDemoModule,
DialogDemoModule,
TreeDocsModule,
MovableModule,
DocsModule,
DocsModule.configure({
welcomePage: WelcomeComponent,
componentPages: {
popover: { title: 'Popover', component: PopoverDemoComponent },
tooltip: { title: 'Tooltip', component: TooltipDemoComponent },
dialog: { title: 'Dialog', component: DialogComponent },
navigation: { title: 'Navigation', component: NavigationdDemoComponent },
ellipsis: { title: 'Ellipsis', component: EllipsisDemoComponent },
tree: { title: 'Tree', component: TreeDocsComponent },<|fim▁hole|> ],
providers: [{
provide: LocationStrategy,
useClass: HashLocationStrategy
}],
bootstrap: [AppComponent]
})
export class AppModule { }<|fim▁end|> | movable: { title: 'Movable', component: MovableComponent }
}
}) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
/***************************************************************************
GeobricksTRMM
A QGIS plugin
Download TRMM daily data.
-------------------
begin : 2015-10-06
copyright : (C) 2015 by Geobricks
email : [email protected]
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
# noinspection PyPep8Naming
def classFactory(iface): # pylint: disable=invalid-name
"""Load GeobricksTRMM class from file GeobricksTRMM.
:param iface: A QGIS interface instance.
:type iface: QgsInterface<|fim▁hole|> from .geobricks_trmm_qgis import GeobricksTRMM
return GeobricksTRMM(iface)<|fim▁end|> | """
# |
<|file_name|>sitemap.js<|end_file_name|><|fim▁begin|>const express = require('express'),
handler = require('../handlers/sitemap'),
requestHelper = require('../helpers/request-helper'),
router = express.Router();
router.get('/', requestHelper.cache(), (req, res, next) => {
requestHelper.initRouter({
req: req,
res: res,
handler: handler,
endPointParams: {
posts: '?system.type=blog_post&order=elements.date[desc]&elements=url_slug'
},
view: 'sitemap'
});
});
<|fim▁hole|><|fim▁end|> | module.exports = router; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/* eslint-env node */
'use strict';
<|fim▁hole|><|fim▁end|> | module.exports = {
'cowsay': require('./cowsay')
}; |
<|file_name|>HomeResource.java<|end_file_name|><|fim▁begin|>package org.artifactory.ui.rest.resource.home;
import org.artifactory.api.security.AuthorizationService;
import org.artifactory.ui.rest.resource.BaseResource;
import org.artifactory.ui.rest.service.general.GeneralServiceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
* @author Chen keinan
*/
@Path("home")
@RolesAllowed({AuthorizationService.ROLE_ADMIN, AuthorizationService.ROLE_USER})
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class HomeResource extends BaseResource {
@Autowired
GeneralServiceFactory generalFactory;
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getHomeData()
throws Exception {<|fim▁hole|> return runService(generalFactory.getHomePage());
}
}<|fim▁end|> | |
<|file_name|>make-integer.spec.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import 'mocha';
import { Integer, TestContext } from '../../..';
import { makeInteger } from './make-integer';
describe('make-integer', () => {
it('should handle single digits', () => {
const ctx = new TestContext("test");
const delayedValue = makeInteger("5");
const value = delayedValue(ctx) as Integer;
expect(ctx.callCount).to.equal(0);
expect(value.value).to.equal(5);
expect(value.pretties).to.equal("5");
expect(value.dice()).to.equal(0);
expect(value.depth()).to.equal(1);
});
it('should handle multiple digits', () => {
const ctx = new TestContext("test");
const delayedValue = makeInteger("189465");
const value = delayedValue(ctx) as Integer;
expect(ctx.callCount).to.equal(0);
        expect(value.value).to.equal(189465);<|fim▁hole|>
it('should handle negative numbers', () => {
const ctx = new TestContext("test");
const delayedValue = makeInteger("-189465");
const value = delayedValue(ctx) as Integer;
expect(ctx.callCount).to.equal(0);
expect(value.value).to.equal(-189465);
expect(value.pretties).to.equal("-189465");
expect(value.dice()).to.equal(0);
expect(value.depth()).to.equal(1);
});
});<|fim▁end|> | expect(value.pretties).to.equal("189465");
expect(value.dice()).to.equal(0);
expect(value.depth()).to.equal(1);
}); |
<|file_name|>testVersion.js<|end_file_name|><|fim▁begin|>'use strict';
var IoTServer = require("../iot");
var inquirer = require("inquirer");
var chalk = require('chalk');
inquirer.prompt([{
type: "input",
name: "iotBaseURL",
message: "Enter the URL to the IoT Server",
default: "http://iotserver:7101"
}], function(answers) {
var iot = new IoTServer(answers.iotBaseURL);
iot.setPrincipal('iot', 'welcome1');
console.log(chalk.bold("Initial IoT Version: ") + chalk.cyan(iot.getVersion()));
var d = null;
iot.checkVersion()
.then(function (version) {
console.log(chalk.bold("IoT Version: ") + chalk.cyan(version), "[getVersion =", iot.getVersion(), "]");
return iot.createDevice("sharedSecret");
})
.then(function (device) {
d = device;
console.log(chalk.bold("Device created: ") + chalk.cyan(device.getID()));
return device.activate();
})
.then(function (device) {
console.log(chalk.bold("Device Activated: ") + chalk.cyan(device.getState()));
var data = [{temp: 182}, {temp: 213}, {temp: 16}, {temp: 11}];
return device.sendDataMessages("jsclient:temperature", data);
})
.then(function (response) {
console.log(chalk.bold("Messages sent. Response: "), response.body);
return d.delete();
})
.then(function (gateway) {<|fim▁hole|> console.log(chalk.bold.red("*** Error ***"));
console.log(error.body || error);
if (d) d.delete();
});
});<|fim▁end|> | console.log(chalk.bold("Device deleted."));
})
.catch(function (error) { |
<|file_name|>ParagraphContext.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014-2021 Sayi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deepoove.poi.xwpf;
import java.util.List;
import org.apache.poi.xwpf.usermodel.IRunBody;
import org.apache.poi.xwpf.usermodel.XWPFFieldRun;
import org.apache.poi.xwpf.usermodel.XWPFHyperlinkRun;
import org.apache.poi.xwpf.usermodel.XWPFRun;
import org.apache.xmlbeans.XmlObject;<|fim▁hole|>import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTHyperlink;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTR;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSimpleField;
public class ParagraphContext implements RunBodyContext {
private XWPFParagraphWrapper paragraphWrapper;
public ParagraphContext(XWPFParagraphWrapper paragraphWrapper) {
this.paragraphWrapper = paragraphWrapper;
}
@Override
public IRunBody getTarget() {
return paragraphWrapper.getParagraph();
}
@Override
public List<XWPFRun> getRuns() {
return paragraphWrapper.getParagraph().getRuns();
}
@Override
public void setAndUpdateRun(XWPFRun xwpfRun, XWPFRun sourceRun, int insertPostionCursor) {
paragraphWrapper.setAndUpdateRun(xwpfRun, sourceRun, insertPostionCursor);
}
@Override
public XWPFRun insertNewRun(XWPFRun xwpfRun, int insertPostionCursor) {
if (xwpfRun instanceof XWPFHyperlinkRun) {
return paragraphWrapper.insertNewHyperLinkRun(insertPostionCursor, "");
} else if (xwpfRun instanceof XWPFFieldRun) {
return paragraphWrapper.insertNewField(insertPostionCursor);
} else {
return paragraphWrapper.insertNewRun(insertPostionCursor);
}
}
@Override
public XWPFRun createRun(XWPFRun xwpfRun, IRunBody p) {
if (xwpfRun instanceof XWPFHyperlinkRun) {
return new XWPFHyperlinkRun((CTHyperlink) ((XWPFHyperlinkRun) xwpfRun).getCTHyperlink().copy(),
(CTR) ((XWPFHyperlinkRun) xwpfRun).getCTR().copy(), p);
} else if (xwpfRun instanceof XWPFFieldRun) {
return new XWPFFieldRun((CTSimpleField) ((XWPFFieldRun) xwpfRun).getCTField().copy(),
(CTR) ((XWPFFieldRun) xwpfRun).getCTR().copy(), p);
} else {
return new XWPFRun((CTR) xwpfRun.getCTR().copy(), p);
}
}
@Override
public XWPFRun createRun(XmlObject object, IRunBody p) {
if (object instanceof CTHyperlink) {
return new XWPFHyperlinkRun((CTHyperlink) object, ((CTHyperlink) object).getRArray(0), p);
} else if (object instanceof CTSimpleField) {
return new XWPFFieldRun((CTSimpleField) object, ((CTSimpleField) object).getRArray(0), p);
} else {
return new XWPFRun((CTR) object, p);
}
}
@Override
public void removeRun(int pos) {
paragraphWrapper.removeRun(pos);
}
}<|fim▁end|> | |
<|file_name|>partial_core_matrix.hpp<|end_file_name|><|fim▁begin|>// ===========================================================================
//
// PUBLIC DOMAIN NOTICE
// Agricultural Research Service
// United States Department of Agriculture
//
// This software/database is a "United States Government Work" under the
// terms of the United States Copyright Act. It was written as part of
// the author's official duties as a United States Government employee
// and thus cannot be copyrighted. This software/database is freely
// available to the public for use. The Department of Agriculture (USDA)
// and the U.S. Government have not placed any restriction on its use or
// reproduction.
//
// Although all reasonable efforts have been taken to ensure the accuracy
// and reliability of the software and data, the USDA and the U.S.
// Government do not and cannot warrant the performance or results that
// may be obtained by using this software or data. The USDA and the U.S.
// Government disclaim all warranties, express or implied, including
// warranties of performance, merchantability or fitness for any
// particular purpose.
//
// Please cite the author in any work or product based on this material.
//
// =========================================================================
#ifndef _PARTIAL_CORE_MATRIX_H_
#define _PARTIAL_CORE_MATRIX_H_ 1
#include <memory>
#include <map>
#include <exception>
#include "logging.hpp"
#include "distributions.hpp"
#include "continuous_dynamics.hpp"
namespace afidd
{
namespace smv
{
template<typename GSPN, typename State, typename RNG>
class PartialCoreMatrix
{
public:
// Re-advertise the transition key.
typedef typename GSPN::TransitionKey TransitionKey;
typedef ContinuousPropagator<TransitionKey,RNG> Propagator;
using PropagatorVector=std::vector<Propagator*>;
typedef GSPN PetriNet;
PartialCoreMatrix(GSPN& gspn, PropagatorVector pv)
: gspn_(gspn), propagator_{pv} {}
void set_state(State* s) {
state_=s;
}
void MakeCurrent(RNG& rng) {
if (state_->marking.Modified().size()==0) return;
// Check all neighbors of a place to see if they were enabled.
auto lm=state_->marking.GetLocalMarking();
NeighborsOfPlaces(gspn_, state_->marking.Modified(),
[&] (TransitionKey neighbor_id) {
// Was this transition enabled? When?
double enabling_time=0.0;
Propagator* previous_propagator=nullptr;
for (const auto& enable_prop : propagator_) {
          bool previously_enabled=false;
std::tie(previously_enabled, enabling_time)
=enable_prop->Enabled(neighbor_id);
if (previously_enabled) {
previous_propagator=enable_prop;
break;
}
}
if (previous_propagator==nullptr) enabling_time=state_->CurrentTime();
// Set up the local marking.
auto neighboring_places=
InputsOfTransition(gspn_, neighbor_id);
state_->marking.InitLocal(lm, neighboring_places);
bool isEnabled=false;
std::unique_ptr<TransitionDistribution<RNG>> dist;
try {
std::tie(isEnabled, dist)=
Enabled(gspn_, neighbor_id, state_->user, lm,
enabling_time, state_->CurrentTime(), rng);
} catch (const std::exception& e) {
BOOST_LOG_TRIVIAL(error)<<"Exception in Enabled new of "
<< neighbor_id <<": " << e.what();
throw;
}
if (isEnabled) {
Propagator* appropriate=nullptr;
for (const auto& prop_ptr : propagator_) {
if (prop_ptr->Include(*dist)) {
appropriate=prop_ptr;
}
}
BOOST_ASSERT_MSG(appropriate!=nullptr, "No propagator willing to "
"accept this distribution");
// Even if it was already enabled, take the new distribution
// in case it has changed.
if (dist!=nullptr) {
bool was_enabled=previous_propagator!=nullptr;
if (was_enabled) {
if (previous_propagator==appropriate) {
try {
appropriate->Enable(neighbor_id, dist, state_->CurrentTime(),
was_enabled, rng);
} catch (const std::exception& e) {
BOOST_LOG_TRIVIAL(error)<<"Exception in Enabled previous of "
<< neighbor_id <<": " << e.what();
throw;
}
} else {
try {
previous_propagator->Disable(neighbor_id,
state_->CurrentTime());
} catch (const std::exception& e) {
BOOST_LOG_TRIVIAL(error)<<"Exception in Disable of "
<< neighbor_id <<": " << e.what();
throw;
}
try {
appropriate->Enable(neighbor_id, dist, state_->CurrentTime(),
was_enabled, rng);
} catch (const std::exception& e) {
BOOST_LOG_TRIVIAL(error)<<"Exception in Enable wasn't "
<< "noprev of " << neighbor_id <<": " << e.what();
throw;
}
}
} else {
try {
appropriate->Enable(neighbor_id, dist, state_->CurrentTime(),
was_enabled, rng);
} catch (const std::exception& e) {
BOOST_LOG_TRIVIAL(error)<<"Exception in Enable wasn't of "
<< neighbor_id <<": " << e.what();
throw;
}
}
} else {
BOOST_ASSERT_MSG(previous_propagator!=nullptr, "Transition didn't "
"return a distribution, so it thinks it was enabled, but it "
"isn't listed as enabled in any propagator");
}
} else if (!isEnabled && previous_propagator!=nullptr) {
previous_propagator->Disable(neighbor_id, state_->CurrentTime());
} else {
; // not enabled, not becoming enabled.
}
});
SMVLOG(BOOST_LOG_TRIVIAL(trace) << "Marking modified cnt: "<<
state_->marking.Modified().size());
state_->marking.Clear();
}
void Trigger(TransitionKey trans_id, double when, RNG& rng) {<|fim▁hole|> <<trans_id;
}
auto neighboring_places=NeighborsOfTransition(gspn_, trans_id);
auto lm=state_->marking.GetLocalMarking();
state_->marking.InitLocal(lm, neighboring_places);
Fire(gspn_, trans_id, state_->user, lm, when, rng);
state_->marking.ReadLocal(lm, neighboring_places);
SMVLOG(BOOST_LOG_TRIVIAL(trace) << "Fire "<<trans_id <<" neighbors: "<<
neighboring_places.size() << " modifies "
<< state_->marking.Modified().size() << " places.");
state_->SetTime(when);
bool enabled=false;
double previous_when;
for (auto& prop_ptr : propagator_) {
std::tie(enabled, previous_when)=prop_ptr->Enabled(trans_id);
if (enabled) {
prop_ptr->Fire(trans_id, state_->CurrentTime(), rng);
break;
}
}
BOOST_ASSERT_MSG(enabled, "The transition that fired wasn't enabled?");
}
PropagatorVector& Propagators() { return propagator_; }
private:
GSPN& gspn_;
State* state_;
PropagatorVector propagator_;
};
} // smv
} // afidd
#endif // _PARTIAL_CORE_MATRIX_H_<|fim▁end|> | if (when-state_->CurrentTime()<-1e-4) {
BOOST_LOG_TRIVIAL(error) << "Firing negative time "<<when <<
" given current time "<<state_->CurrentTime() <<" for transition " |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
analytics.models
Models for Demand and Supply data
:copyright: (c) 2013 by Openlabs Technologies & Consulting (P) Limited
:license: see LICENSE for more details.
"""
import operator
from django.db import models
import django.contrib.admin
from admin.models import Occupation, Institution, Company, SubSector
__all__ = ['DEGREE_CHOICES', 'REGION_CHOICES', 'State', 'City', 'SupplyBase',
'DemandData', 'CompanyYearData', 'DiversityRatioLevel',
'DiversityRatioSubsector', 'GenderDiversity', 'ITSpend',
'RevenueSubsector', 'RevenueOccupation', 'RevenueTotal',
'TalentSaturation']
DEGREE_CHOICES = (
('UG', 'Undergraduate Degree'),
('PG', 'Postgraduate Degree'),
('DOC', 'Ph.D/M.Phil'),
('PSD', 'Post School Diploma'),
('PGD', 'Post Graduate Diploma'),
('UNK', 'Unknown'),
)
REGION_CHOICES = (
('NORTH', 'North'),
('SOUTH', 'South'),
('EAST', 'East'),
('WEST', 'West'),
('CENTRAL', 'Central'),
)
class State(models.Model):
"""
States
"""
name = models.CharField(max_length=50, default=None, unique=True)
region = models.CharField(max_length=12, choices=REGION_CHOICES)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'region',)
def __unicode__(self):
"""
Returns object display name
"""
return self.name
class City(models.Model):
"""
Cities
"""
name = models.CharField(max_length=50, default=None)
state = models.ForeignKey('State')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'state',)
verbose_name_plural = 'Cities'
def __unicode__(self):
"""
Returns object display name
"""
return "%s,%s" % (self.name, self.state)
class SupplyBase(models.Model):
"""
Demand supply data
"""
year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
institution = models.ForeignKey(Institution)
degree = models.CharField(max_length=3, choices=DEGREE_CHOICES,
default=None)
supply = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'institution',
'degree',)
verbose_name_plural = 'SupplyBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class DemandData(models.Model):
"""
Demand data
"""<|fim▁hole|> demand = models.IntegerField()
headcount = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'company',)
verbose_name_plural = 'DemandBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class CompanyYearData(models.Model):
"""
Revenue, Headcount data for companies annually
"""
year = models.IntegerField()
company = models.ForeignKey(Company)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'company', )
verbose_name_plural = 'Company Annual Data'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.company, )
class DiversityRatioLevel(models.Model):
"""
Diversity ratio for levels
"""
year = models.IntegerField(unique=True)
male_leadership = models.IntegerField(
verbose_name='Percent Male in Leadership roles'
)
male_entry = models.IntegerField(
verbose_name='Percent Male in Entry Level roles'
)
male_middle = models.IntegerField(
verbose_name='Percent Male in Middle Level roles'
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
@property
def female_leadership(self):
"Percent Females in leadership level roles"
return 100 - self.male_leadership
@property
def female_entry(self):
"Percent Females in entry level roles"
return 100 - self.male_entry
@property
def female_middle(self):
"Percent Females in middle level roles"
return 100 - self.male_middle
class Meta:
verbose_name_plural = 'Diversity Ratio for Experience Levels'
def __unicode__(self):
"""
Returns object display name
"""
return "%d" % (self.year, )
class DiversityRatioSubsector(models.Model):
"""
Diversity ratio for subsector
"""
year = models.IntegerField()
subsector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
male = models.IntegerField(verbose_name='Percent males in subsector')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
@property
def female(self):
"Percent Females in subsector"
return 100 - self.male
class Meta:
unique_together = ('year', 'subsector', )
verbose_name_plural = 'Diversity Ratio for Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d, %s" % (self.year, self.subsector, )
class GenderDiversity(models.Model):
"""
Gender diversity as per course
"""
year = models.IntegerField()
category = models.CharField(max_length=60)
male = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'category', )
verbose_name_plural = 'Gender Diversity'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.category, )
class ITSpend(models.Model):
"""
IT Spend data
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
world_spend = models.IntegerField(verbose_name='World IT Spend')
india_revenue = models.IntegerField(verbose_name='Indian IT Revenue')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'IT Spend'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueSubsector(models.Model):
"""
Revenue per subsector
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'Revenue by Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueOccupation(models.Model):
"""
Revenue by occupation
"""
year = models.IntegerField()
occupation = models.ForeignKey(Occupation)
revenue = models.IntegerField()
cagr_next_7_years = models.IntegerField(
verbose_name='CAGR % for next 7 years'
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'occupation', )
verbose_name_plural = 'Revenue by occupation'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.occupation, )
@property
def revenue_after_7year(self):
return int(self.revenue * (1 + self.cagr_next_7_years / 100.0) ** 7)
class RevenueTotal(models.Model):
"""
Total revenue
"""
year = models.IntegerField(unique=True)
revenue = models.IntegerField()
most_likely_growth = models.IntegerField(
verbose_name='Most likely growth percent',
blank=True,
null=True
)
optimistic_growth = models.IntegerField(
        verbose_name='Optimistic growth percent',
blank=True,
null=True
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
verbose_name_plural = 'Total Revenues'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%d" % (self.year, self.revenue, )
@property
def growth_series(self):
"""
Return growth and most likely series
"""
resultset = RevenueTotal.objects.filter(year__lte=self.year)
optimistic_series = []
most_likely_series = []
years = []
for result in resultset:
most_likely_series.append(result.revenue)
optimistic_series.append(result.revenue)
years.append(result.year)
for i in range(7):
optimistic_series.append(
int(optimistic_series[-1] *
(1 + self.optimistic_growth / 100.0))
)
most_likely_series.append(
int(most_likely_series[-1] *
(1 + self.most_likely_growth / 100.0))
)
years.append(years[-1] + 1)
return {
'years': years,
'optimistic_series': optimistic_series,
'most_likely_series': most_likely_series,
}
class TalentSaturation(models.Model):
"""
Model for talent saturation
    We keep a headcount field here because the sum computed from the other
    models does not equal the figure in the worksheet, perhaps due to a lack
    of data from all companies.
"""
year = models.IntegerField(unique=True)
headcount = models.IntegerField()
attrition_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Annual Attrition (%)",
default=5.0,
)
cagr_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="CAGR (%)",
default=8.6
)
fresher_hiring_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Fresher Hiring (%)",
default=95.0
)
need_for_experience_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Need for > 2 years experienced (% of headcount)",
default=45.0
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
verbose_name_plural = 'Talent Saturation'
def __unicode__(self):
"""
Returns object display name
"""
return "%d" % (self.year, )
@property
def quitters(self):
return int(self.headcount * self.attrition_pc / 100)
def series(self):
"Return talent saturation series"
years = []
records = TalentSaturation.objects.filter(year__lte=self.year) \
.order_by('year')
headcounts = [record.headcount for record in records]
years = [record.year for record in records] + \
range(self.year + 1, self.year + 8)
for i in range(7):
headcounts.append(int(headcounts[-1] * (1 + self.cagr_pc / 100)))
# difference between headcounts
hirings = map(
operator.sub, headcounts, [headcounts[0]] + headcounts[:-1],
)
quitters = [record.quitters for record in records]
for i in range(7):
quitters.append(int(quitters[-1] * (1 + self.cagr_pc / 100)))
gross_hiring = map(operator.add, quitters, hirings)
fresher_pcs = [record.fresher_hiring_pc for record in records] + \
[self.fresher_hiring_pc] * 7
fresher_hiring = map(
lambda g, f: int(g * f / 100),
gross_hiring, fresher_pcs
)
experience_need = map(
lambda record: int(
record.headcount * record.need_for_experience_pc / 100
),
records
)
experience_need += map(
lambda x: int(x * self.need_for_experience_pc / 100),
headcounts[-7:]
)
demand = map(
operator.sub,
experience_need, [experience_need[0]] + experience_need[:-1],
)
potential_supply = map(
lambda x: int(x * (self.fresher_hiring_pc / 100) ** 2),
[0, 0] + fresher_hiring[:-2]
)
return {
'years': years[3:],
'demand': demand[3:],
'potential_supply': potential_supply[3:],
}
django.contrib.admin.site.register(State)
django.contrib.admin.site.register(City)
django.contrib.admin.site.register(SupplyBase)
django.contrib.admin.site.register(DemandData)
django.contrib.admin.site.register(CompanyYearData)
django.contrib.admin.site.register(DiversityRatioLevel)
django.contrib.admin.site.register(DiversityRatioSubsector)
django.contrib.admin.site.register(GenderDiversity)
django.contrib.admin.site.register(ITSpend)
django.contrib.admin.site.register(RevenueSubsector)
django.contrib.admin.site.register(RevenueOccupation)
django.contrib.admin.site.register(RevenueTotal)
django.contrib.admin.site.register(TalentSaturation)<|fim▁end|> | year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
company = models.ForeignKey(Company) |
<|file_name|>0002_remove_entry_notes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-18 22:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('about', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='entry',
name='notes',<|fim▁hole|> ),
]<|fim▁end|> | |
<|file_name|>express.js<|end_file_name|><|fim▁begin|>/**
* Express configuration
*/
'use strict';
import express from 'express';
import favicon from 'serve-favicon';
import morgan from 'morgan';
import compression from 'compression';
import bodyParser from 'body-parser';
import methodOverride from 'method-override';
import cookieParser from 'cookie-parser';
import errorHandler from 'errorhandler';
import path from 'path';
import lusca from 'lusca';
import config from './environment';
import passport from 'passport';
import session from 'express-session';
import connectMongo from 'connect-mongo';
import mongoose from 'mongoose';
var MongoStore = connectMongo(session);
export default function(app) {
var env = app.get('env');
if (env === 'development' || env === 'test') {
app.use(express.static(path.join(config.root, '.tmp')));<|fim▁hole|> app.use(favicon(path.join(config.root, 'client', 'favicon.ico')));
}
app.set('appPath', path.join(config.root, 'client'));
app.use(express.static(app.get('appPath')));
app.use(morgan('dev'));
app.set('views', `${config.root}/server/views`);
app.set('view engine', 'jade');
app.use(compression());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use(methodOverride());
app.use(cookieParser());
app.use(passport.initialize());
// Persist sessions with MongoStore / sequelizeStore
// We need to enable sessions for passport-twitter because it's an
// oauth 1.0 strategy, and Lusca depends on sessions
app.use(session({
secret: config.secrets.session,
saveUninitialized: true,
resave: false,
store: new MongoStore({
mongooseConnection: mongoose.connection,
db: 'eecr'
})
}));
/**
* Lusca - express server security
* https://github.com/krakenjs/lusca
*/
if (env !== 'test' && !process.env.SAUCE_USERNAME) {
app.use(lusca({
csrf: {
angular: true
},
xframe: 'SAMEORIGIN',
hsts: {
maxAge: 31536000, //1 year, in seconds
includeSubDomains: true,
preload: true
},
xssProtection: true
}));
}
if ('development' === env) {
const webpackDevMiddleware = require('webpack-dev-middleware');
const stripAnsi = require('strip-ansi');
const webpack = require('webpack');
const makeWebpackConfig = require('../../webpack.make');
const webpackConfig = makeWebpackConfig({ DEV: true });
const compiler = webpack(webpackConfig);
const browserSync = require('browser-sync').create();
/**
* Run Browsersync and use middleware for Hot Module Replacement
*/
browserSync.init({
open: false,
logFileChanges: false,
proxy: 'localhost:' + config.port,
ws: true,
middleware: [
webpackDevMiddleware(compiler, {
noInfo: false,
stats: {
colors: true,
timings: true,
chunks: false
}
})
],
port: config.browserSyncPort,
plugins: ['bs-fullscreen-message']
});
/**
* Reload all devices when bundle is complete
* or send a fullscreen error message to the browser instead
*/
compiler.plugin('done', function(stats) {
console.log('webpack done hook');
if (stats.hasErrors() || stats.hasWarnings()) {
return browserSync.sockets.emit('fullscreen:message', {
title: 'Webpack Error:',
body: stripAnsi(stats.toString()),
timeout: 100000
});
}
browserSync.reload();
});
}
if (env === 'development' || env === 'test') {
app.use(errorHandler()); // Error handler - has to be last
}
}<|fim▁end|> | }
if (env === 'production') { |
<|file_name|>tagger.py<|end_file_name|><|fim▁begin|>from sentence import Sentence
from textblob import TextBlob
from itertools import chain
from collections import Counter
def findSubject(lines):<|fim▁hole|> if len(lines) == 0:
print "messages are empty"
return None
for m in lines:
sentences.append(Sentence(m).nouns)
if len(sentences) != 0:
maxNoun = Counter(list(chain(*sentences))).most_common()[0]
else:
print "No nouns"
return None
if maxNoun[1] >= 2:
return maxNoun[0].replace(" ","_")
else:
return None<|fim▁end|> | sentences = [] |
<|file_name|>WndMainWindow.cpp<|end_file_name|><|fim▁begin|>// NOTE: Apologies for the quality of this code, this is really from pre-opensource Dolphin - that is, 2003.
#include <windows.h>
#include <tchar.h>
#include "base/NativeApp.h"
#include "Globals.h"
#include "shellapi.h"
#include "commctrl.h"
#include "input/input_state.h"
#include "Core/Debugger/SymbolMap.h"
#include "Windows/OpenGLBase.h"
#include "Windows/Debugger/Debugger_Disasm.h"
#include "Windows/Debugger/Debugger_MemoryDlg.h"
#include "main.h"
#include "Core/Core.h"
#include "Core/MemMap.h"
#include "Core/SaveState.h"
#include "Core/System.h"
#include "Core/Config.h"
#include "Windows/EmuThread.h"
#include "resource.h"
#include "Windows/WndMainWindow.h"
#include "Common/LogManager.h"
#include "Common/ConsoleListener.h"
#include "Windows/W32Util/DialogManager.h"
#include "Windows/W32Util/ShellUtil.h"
#include "Windows/W32Util/Misc.h"
#include "GPU/GPUInterface.h"
#include "GPU/GPUState.h"
#include "native/image/png_load.h"
#include "GPU/GLES/TextureScaler.h"
#ifdef THEMES
#include "XPTheme.h"
#endif
#define ENABLE_TOUCH 0
BOOL g_bFullScreen = FALSE;
static RECT g_normalRC = {0};
extern InputState input_state;
#define TIMER_CURSORUPDATE 1
#define CURSORUPDATE_INTERVAL_MS 50
extern unsigned short analog_ctrl_map[];
extern unsigned int key_pad_map[];
extern const char * getVirtualKeyName(unsigned char key);
extern bool saveControlsToFile();
extern bool loadControlsFromFile();
namespace MainWindow
{
HWND hwndMain;
HWND hwndDisplay;
HWND hwndGameList;
static HMENU menu;
static HINSTANCE hInst;
//W32Util::LayeredWindow *layer;
#define MAX_LOADSTRING 100
const TCHAR *szTitle = TEXT("PPSSPP");
const TCHAR *szWindowClass = TEXT("PPSSPPWnd");
const TCHAR *szDisplayClass = TEXT("PPSSPPDisplay");
// Forward declarations of functions included in this code module:
LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM);
LRESULT CALLBACK DisplayProc(HWND, UINT, WPARAM, LPARAM);
LRESULT CALLBACK About(HWND, UINT, WPARAM, LPARAM);
LRESULT CALLBACK Controls(HWND, UINT, WPARAM, LPARAM);
HWND GetHWND()
{
return hwndMain;
}
HWND GetDisplayHWND()
{
return hwndDisplay;
}
void Init(HINSTANCE hInstance)
{
#ifdef THEMES
WTL::CTheme::IsThemingSupported();
#endif
//Register classes
WNDCLASSEX wcex;
wcex.cbSize = sizeof(WNDCLASSEX);
wcex.style = CS_HREDRAW | CS_VREDRAW;
wcex.lpfnWndProc = (WNDPROC)WndProc;
wcex.cbClsExtra = 0;
wcex.cbWndExtra = 0;
wcex.hInstance = hInstance;
wcex.hIcon = LoadIcon(hInstance, (LPCTSTR)IDI_PPSSPP);
wcex.hCursor = LoadCursor(NULL, IDC_ARROW);
wcex.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH);
wcex.lpszMenuName = (LPCSTR)IDR_MENU1;
wcex.lpszClassName = szWindowClass;
wcex.hIconSm = (HICON)LoadImage(hInstance, (LPCTSTR)IDI_PPSSPP, IMAGE_ICON, 16,16,LR_SHARED);
RegisterClassEx(&wcex);
		wcex.style = CS_HREDRAW | CS_VREDRAW;
wcex.lpfnWndProc = (WNDPROC)DisplayProc;
wcex.hIcon = 0;
wcex.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH);
wcex.lpszMenuName = 0;
wcex.lpszClassName = szDisplayClass;
wcex.hIconSm = 0;
RegisterClassEx(&wcex);
}
void GetWindowRectAtZoom(int zoom, RECT &rcInner, RECT &rcOuter) {
// GetWindowRect(hwndMain, &rcInner);
rcInner.left = 0;
rcInner.top = 0;
rcInner.right=480*zoom;//+client edge
rcInner.bottom=272*zoom; //+client edge
rcOuter=rcInner;
AdjustWindowRect(&rcOuter, WS_OVERLAPPEDWINDOW, TRUE);
rcOuter.right += g_Config.iWindowX - rcOuter.left;
rcOuter.bottom += g_Config.iWindowY - rcOuter.top;
rcOuter.left = g_Config.iWindowX;
rcOuter.top = g_Config.iWindowY;
}
void SavePosition() {
WINDOWPLACEMENT placement;
GetWindowPlacement(hwndMain, &placement);
if (placement.showCmd == SW_SHOWNORMAL) {
RECT rc;
GetWindowRect(hwndMain, &rc);
g_Config.iWindowX = rc.left;
g_Config.iWindowY = rc.top;
}
}
void ResizeDisplay(bool noWindowMovement = false) {
RECT rc;
GetClientRect(hwndMain, &rc);
if (!noWindowMovement) {
if ((rc.right - rc.left) == PSP_CoreParameter().pixelWidth &&
(rc.bottom - rc.top) == PSP_CoreParameter().pixelHeight)
return;
PSP_CoreParameter().pixelWidth = rc.right - rc.left;
PSP_CoreParameter().pixelHeight = rc.bottom - rc.top;
MoveWindow(hwndDisplay, 0, 0, PSP_CoreParameter().pixelWidth, PSP_CoreParameter().pixelHeight, TRUE);
}
// round up to a zoom factor for the render size.
int zoom = (rc.right - rc.left + 479) / 480;
if (g_Config.SSAntiAliasing) zoom *= 2;
PSP_CoreParameter().renderWidth = 480 * zoom;
PSP_CoreParameter().renderHeight = 272 * zoom;
PSP_CoreParameter().outputWidth = 480 * zoom;
PSP_CoreParameter().outputHeight = 272 * zoom;
if (gpu)
gpu->Resized();
}
void SetZoom(float zoom) {
if (zoom < 5)
g_Config.iWindowZoom = (int) zoom;
RECT rc, rcOuter;
GetWindowRectAtZoom((int) zoom, rc, rcOuter);
MoveWindow(hwndMain, rcOuter.left, rcOuter.top, rcOuter.right - rcOuter.left, rcOuter.bottom - rcOuter.top, TRUE);
ResizeDisplay();
}
void setTexScalingLevel(int num) {
g_Config.iTexScalingLevel = num;
if(gpu) gpu->ClearCacheNextFrame();
}
void setTexScalingType(int num) {
g_Config.iTexScalingType = num;
if(gpu) gpu->ClearCacheNextFrame();
}
BOOL Show(HINSTANCE hInstance, int nCmdShow)
{
hInst = hInstance; // Store instance handle in our global variable
int zoom = g_Config.iWindowZoom;
if (zoom < 1) zoom = 1;
if (zoom > 4) zoom = 4;
RECT rc,rcOrig;
GetWindowRectAtZoom(zoom, rcOrig, rc);
u32 style = WS_OVERLAPPEDWINDOW;
hwndMain = CreateWindowEx(0,szWindowClass, "", style,
rc.left, rc.top, rc.right-rc.left, rc.bottom-rc.top, NULL, NULL, hInstance, NULL);
SetTimer(hwndMain, TIMER_CURSORUPDATE, CURSORUPDATE_INTERVAL_MS, 0);
SetPlaying(0);
if (!hwndMain)
return FALSE;
menu = GetMenu(hwndMain);
#ifdef FINAL
RemoveMenu(menu,2,MF_BYPOSITION);
RemoveMenu(menu,2,MF_BYPOSITION);
#endif
MENUINFO info;
ZeroMemory(&info,sizeof(MENUINFO));
info.cbSize = sizeof(MENUINFO);
info.cyMax = 0;
info.dwStyle = MNS_CHECKORBMP;
info.fMask = MIM_STYLE;
for (int i = 0; i < GetMenuItemCount(menu); i++)
{
SetMenuInfo(GetSubMenu(menu,i),&info);
}
hwndDisplay = CreateWindowEx(0,szDisplayClass,TEXT(""),
WS_CHILD|WS_VISIBLE,
0,0,/*rcOrig.left,rcOrig.top,*/rcOrig.right-rcOrig.left,rcOrig.bottom-rcOrig.top,hwndMain,0,hInstance,0);
ShowWindow(hwndMain, nCmdShow);
//accept dragged files
DragAcceptFiles(hwndMain, TRUE);
#if ENABLE_TOUCH
RegisterTouchWindow(hwndDisplay, TWF_WANTPALM);
#endif
SetFocus(hwndMain);
SetFocus(hwndDisplay);
return TRUE;
}
void BrowseAndBoot(std::string defaultPath)
{
std::string fn;
std::string filter = "";
filter += "PSP";
filter += "|";
filter += "*.pbp;*.elf;*.iso;*.cso;*.prx";
filter += "|";
filter += "|";
for (int i=0; i<(int)filter.length(); i++)
{
if (filter[i] == '|')
filter[i] = '\0';
}
if (W32Util::BrowseForFileName(true, GetHWND(), "Load File", defaultPath.size() ? defaultPath.c_str() : 0, filter.c_str(),"*.pbp;*.elf;*.iso;*.cso;",fn))
{
// decode the filename with fullpath
std::string fullpath = fn;
char drive[MAX_PATH];
char dir[MAX_PATH];
char fname[MAX_PATH];
char ext[MAX_PATH];
_splitpath(fullpath.c_str(), drive, dir, fname, ext);
std::string executable = std::string(drive) + std::string(dir) + std::string(fname) + std::string(ext);
NativeMessageReceived("boot", executable.c_str());
}
}<|fim▁hole|> {
case WM_ACTIVATE:
break;
case WM_SETFOCUS:
break;
case WM_SIZE:
break;
case WM_ERASEBKGND:
return DefWindowProc(hWnd, message, wParam, lParam);
case WM_LBUTTONDOWN:
{
lock_guard guard(input_state.lock);
input_state.mouse_valid = true;
input_state.pointer_down[0] = true;
input_state.pointer_x[0] = GET_X_LPARAM(lParam);
input_state.pointer_y[0] = GET_Y_LPARAM(lParam);
if (g_Config.iWindowZoom == 1)
{
input_state.pointer_x[0] *= 2;
input_state.pointer_y[0] *= 2;
}
}
break;
case WM_MOUSEMOVE:
{
lock_guard guard(input_state.lock);
input_state.pointer_x[0] = GET_X_LPARAM(lParam);
input_state.pointer_y[0] = GET_Y_LPARAM(lParam);
if (g_Config.iWindowZoom == 1)
{
input_state.pointer_x[0] *= 2;
input_state.pointer_y[0] *= 2;
}
}
break;
case WM_LBUTTONUP:
{
lock_guard guard(input_state.lock);
input_state.pointer_down[0] = false;
input_state.pointer_x[0] = GET_X_LPARAM(lParam);
input_state.pointer_y[0] = GET_Y_LPARAM(lParam);
if (g_Config.iWindowZoom == 1)
{
input_state.pointer_x[0] *= 2;
input_state.pointer_y[0] *= 2;
}
}
break;
case WM_TOUCH:
{
// TODO: Enabling this section will probably break things on Windows XP.
// We probably need to manually fetch pointers to GetTouchInputInfo and CloseTouchInputHandle.
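			// Sketch of that approach (assumption only, not wired up here): resolve the touch
			// APIs at runtime so the executable still loads on Windows XP, e.g.:
			//   typedef BOOL (WINAPI *GetTouchInputInfoProc)(HTOUCHINPUT, UINT, PTOUCHINPUT, int);
			//   HMODULE user32 = GetModuleHandle(TEXT("user32.dll"));
			//   GetTouchInputInfoProc pGetTouchInputInfo = user32 ?
			//       (GetTouchInputInfoProc)GetProcAddress(user32, "GetTouchInputInfo") : NULL;
			//   if (pGetTouchInputInfo) { /* touch input can be used safely */ }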
#if ENABLE_TOUCH
UINT inputCount = LOWORD(wParam);
TOUCHINPUT *inputs = new TOUCHINPUT[inputCount];
if (GetTouchInputInfo((HTOUCHINPUT)lParam,
inputCount,
inputs,
sizeof(TOUCHINPUT)))
{
for (int i = 0; i < inputCount; i++) {
// TODO: process inputs here!
}
if (!CloseTouchInputHandle((HTOUCHINPUT)lParam))
{
// error handling
}
}
else
{
// GetLastError() and error handling
}
delete [] inputs;
return DefWindowProc(hWnd, message, wParam, lParam);
#endif
}
case WM_PAINT:
return DefWindowProc(hWnd, message, wParam, lParam);
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
return 0;
}
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
int wmId, wmEvent;
std::string fn;
switch (message)
{
case WM_CREATE:
loadControlsFromFile();
break;
case WM_MOVE:
SavePosition();
ResizeDisplay();
break;
case WM_SIZE:
SavePosition();
ResizeDisplay();
break;
case WM_TIMER:
// Hack: Take the opportunity to also show/hide the mouse cursor in fullscreen mode.
if (g_bFullScreen && globalUIState == UISTATE_INGAME) {
ShowCursor(FALSE);
} else {
ShowCursor(TRUE);
SetCursor(LoadCursor(NULL, IDC_ARROW));
}
SetTimer(hWnd, TIMER_CURSORUPDATE, CURSORUPDATE_INTERVAL_MS, 0);
return 0;
case WM_COMMAND:
wmId = LOWORD(wParam);
wmEvent = HIWORD(wParam);
// Parse the menu selections:
switch (wmId)
{
case ID_FILE_LOAD:
BrowseAndBoot("");
break;
case ID_FILE_LOAD_MEMSTICK:
{
std::string memStickDir, flash0dir;
GetSysDirectories(memStickDir, flash0dir);
memStickDir += "PSP\\GAME\\";
BrowseAndBoot(memStickDir);
}
break;
case ID_FILE_REFRESHGAMELIST:
break;
case ID_FILE_MEMSTICK:
{
std::string memStickDir, flash0dir;
GetSysDirectories(memStickDir, flash0dir);
ShellExecuteA(NULL, "open", memStickDir.c_str(), 0, 0, SW_SHOW);
}
break;
case ID_EMULATION_RUN:
if (Core_IsStepping()) {
Core_EnableStepping(false);
} else {
NativeMessageReceived("run", "");
}
if (disasmWindow[0])
SendMessage(disasmWindow[0]->GetDlgHandle(), WM_COMMAND, IDC_GO, 0);
break;
case ID_EMULATION_STOP:
if (memoryWindow[0]) {
SendMessage(memoryWindow[0]->GetDlgHandle(), WM_CLOSE, 0, 0);
}
if (disasmWindow[0]) {
SendMessage(disasmWindow[0]->GetDlgHandle(), WM_CLOSE, 0, 0);
}
if (Core_IsStepping()) {
Core_EnableStepping(false);
}
NativeMessageReceived("stop", "");
SetPlaying(0);
Update();
break;
case ID_EMULATION_PAUSE:
if (disasmWindow[0])
{
SendMessage(disasmWindow[0]->GetDlgHandle(), WM_COMMAND, IDC_STOP, 0);
} else if (globalUIState == UISTATE_INGAME) {
Core_EnableStepping(true);
}
break;
case ID_EMULATION_RESET:
NativeMessageReceived("reset", "");
break;
case ID_EMULATION_SPEEDLIMIT:
g_Config.bSpeedLimit = !g_Config.bSpeedLimit;
break;
case ID_FILE_LOADSTATEFILE:
if (W32Util::BrowseForFileName(true, hWnd, "Load state",0,"Save States (*.ppst)\0*.ppst\0All files\0*.*\0\0","ppst",fn))
{
SetCursor(LoadCursor(0,IDC_WAIT));
SaveState::Load(fn, SaveStateActionFinished);
}
break;
case ID_FILE_SAVESTATEFILE:
if (W32Util::BrowseForFileName(false, hWnd, "Save state",0,"Save States (*.ppst)\0*.ppst\0All files\0*.*\0\0","ppst",fn))
{
SetCursor(LoadCursor(0,IDC_WAIT));
SaveState::Save(fn, SaveStateActionFinished);
}
break;
// TODO: Add UI for multiple slots
case ID_FILE_QUICKLOADSTATE:
SetCursor(LoadCursor(0,IDC_WAIT));
SaveState::LoadSlot(0, SaveStateActionFinished);
break;
case ID_FILE_QUICKSAVESTATE:
SetCursor(LoadCursor(0,IDC_WAIT));
SaveState::SaveSlot(0, SaveStateActionFinished);
break;
case ID_OPTIONS_SCREEN1X:
SetZoom(1);
break;
case ID_OPTIONS_SCREEN2X:
SetZoom(2);
break;
case ID_OPTIONS_SCREEN3X:
SetZoom(3);
break;
case ID_OPTIONS_SCREEN4X:
SetZoom(4);
break;
case ID_OPTIONS_MIPMAP:
g_Config.bMipMap = !g_Config.bMipMap;
break;
case ID_TEXTURESCALING_OFF:
setTexScalingLevel(1);
break;
case ID_TEXTURESCALING_2X:
setTexScalingLevel(2);
break;
case ID_TEXTURESCALING_3X:
setTexScalingLevel(3);
break;
case ID_TEXTURESCALING_4X:
setTexScalingLevel(4);
break;
case ID_TEXTURESCALING_5X:
setTexScalingLevel(5);
break;
case ID_TEXTURESCALING_XBRZ:
setTexScalingType(TextureScaler::XBRZ);
break;
case ID_TEXTURESCALING_HYBRID:
setTexScalingType(TextureScaler::HYBRID);
break;
case ID_TEXTURESCALING_BICUBIC:
setTexScalingType(TextureScaler::BICUBIC);
break;
case ID_TEXTURESCALING_HYBRID_BICUBIC:
setTexScalingType(TextureScaler::HYBRID_BICUBIC);
break;
case ID_TEXTURESCALING_DEPOSTERIZE:
g_Config.bTexDeposterize = !g_Config.bTexDeposterize;
if(gpu) gpu->ClearCacheNextFrame();
break;
case ID_OPTIONS_BUFFEREDRENDERING:
g_Config.bBufferedRendering = !g_Config.bBufferedRendering;
if (gpu)
gpu->Resized(); // easy way to force a clear...
break;
case ID_OPTIONS_SHOWDEBUGSTATISTICS:
g_Config.bShowDebugStats = !g_Config.bShowDebugStats;
break;
case ID_OPTIONS_HARDWARETRANSFORM:
g_Config.bHardwareTransform = !g_Config.bHardwareTransform;
break;
case ID_OPTIONS_STRETCHDISPLAY:
g_Config.bStretchToDisplay = !g_Config.bStretchToDisplay;
if (gpu)
gpu->Resized(); // easy way to force a clear...
break;
case ID_OPTIONS_FRAMESKIP:
g_Config.iFrameSkip = g_Config.iFrameSkip == 0 ? 1 : 0;
break;
case ID_OPTIONS_USEMEDIAENGINE:
g_Config.bUseMediaEngine = !g_Config.bUseMediaEngine;
break;
case ID_FILE_EXIT:
DestroyWindow(hWnd);
break;
case ID_CPU_DYNAREC:
g_Config.bJit = true;
break;
case ID_CPU_INTERPRETER:
g_Config.bJit = false;
break;
case ID_EMULATION_RUNONLOAD:
g_Config.bAutoRun = !g_Config.bAutoRun;
break;
case ID_DEBUG_DUMPNEXTFRAME:
if (gpu)
gpu->DumpNextFrame();
break;
case ID_DEBUG_LOADMAPFILE:
if (W32Util::BrowseForFileName(true, hWnd, "Load .MAP",0,"Maps\0*.map\0All files\0*.*\0\0","map",fn)) {
symbolMap.LoadSymbolMap(fn.c_str());
// HLE_PatchFunctions();
if (disasmWindow[0])
disasmWindow[0]->NotifyMapLoaded();
if (memoryWindow[0])
memoryWindow[0]->NotifyMapLoaded();
}
break;
case ID_DEBUG_SAVEMAPFILE:
if (W32Util::BrowseForFileName(false, hWnd, "Save .MAP",0,"Maps\0*.map\0All files\0*.*\0\0","map",fn))
symbolMap.SaveSymbolMap(fn.c_str());
break;
case ID_DEBUG_RESETSYMBOLTABLE:
symbolMap.ResetSymbolMap();
for (int i=0; i<numCPUs; i++)
if (disasmWindow[i])
disasmWindow[i]->NotifyMapLoaded();
for (int i=0; i<numCPUs; i++)
if (memoryWindow[i])
memoryWindow[i]->NotifyMapLoaded();
break;
case ID_DEBUG_DISASSEMBLY:
if (disasmWindow[0])
disasmWindow[0]->Show(true);
break;
case ID_DEBUG_MEMORYVIEW:
if (memoryWindow[0])
memoryWindow[0]->Show(true);
break;
case ID_DEBUG_LOG:
LogManager::GetInstance()->GetConsoleListener()->Show(LogManager::GetInstance()->GetConsoleListener()->Hidden());
break;
case ID_OPTIONS_IGNOREILLEGALREADS:
g_Config.bIgnoreBadMemAccess = !g_Config.bIgnoreBadMemAccess;
break;
case ID_OPTIONS_FULLSCREEN:
g_Config.bFullScreen = !g_Config.bFullScreen ;
if(g_bFullScreen) {
_ViewNormal(hWnd);
} else {
_ViewFullScreen(hWnd);
}
break;
case ID_OPTIONS_WIREFRAME:
g_Config.bDrawWireframe = !g_Config.bDrawWireframe;
break;
case ID_OPTIONS_VERTEXCACHE:
g_Config.bVertexCache = !g_Config.bVertexCache;
break;
case ID_OPTIONS_SHOWFPS:
g_Config.bShowFPSCounter = !g_Config.bShowFPSCounter;
break;
case ID_OPTIONS_DISPLAYRAWFRAMEBUFFER:
g_Config.bDisplayFramebuffer = !g_Config.bDisplayFramebuffer;
break;
case ID_OPTIONS_FASTMEMORY:
g_Config.bFastMemory = !g_Config.bFastMemory;
break;
case ID_OPTIONS_USEVBO:
g_Config.bUseVBO = !g_Config.bUseVBO;
break;
case ID_OPTIONS_LINEARFILTERING:
g_Config.bLinearFiltering = !g_Config.bLinearFiltering;
break;
case ID_OPTIONS_SIMPLE2XSSAA:
g_Config.SSAntiAliasing = !g_Config.SSAntiAliasing;
ResizeDisplay(true);
break;
case ID_OPTIONS_CONTROLS:
DialogManager::EnableAll(FALSE);
DialogBox(hInst, (LPCTSTR)IDD_CONTROLS, hWnd, (DLGPROC)Controls);
DialogManager::EnableAll(TRUE);
break;
case ID_EMULATION_SOUND:
g_Config.bEnableSound = !g_Config.bEnableSound;
break;
case ID_HELP_OPENWEBSITE:
ShellExecute(NULL, "open", "http://www.ppsspp.org/", NULL, NULL, SW_SHOWNORMAL);
break;
case ID_HELP_ABOUT:
DialogManager::EnableAll(FALSE);
DialogBox(hInst, (LPCTSTR)IDD_ABOUTBOX, hWnd, (DLGPROC)About);
DialogManager::EnableAll(TRUE);
break;
default:
{
MessageBox(hwndMain,"Unimplemented","Sorry",0);
}
break;
}
break;
case WM_KEYDOWN:
return 0;
case WM_DROPFILES:
{
HDROP hdrop = (HDROP)wParam;
int count = DragQueryFile(hdrop,0xFFFFFFFF,0,0);
if (count != 1)
{
MessageBox(hwndMain,"You can only load one file at a time","Error",MB_ICONINFORMATION);
}
else
{
TCHAR filename[512];
DragQueryFile(hdrop,0,filename,512);
TCHAR *type = filename+_tcslen(filename)-3;
SendMessage(hWnd, WM_COMMAND, ID_EMULATION_STOP, 0);
// Ugly, need to wait for the stop message to process in the EmuThread.
Sleep(20);
MainWindow::SetPlaying(filename);
MainWindow::Update();
NativeMessageReceived("boot", filename);
}
}
break;
case WM_CLOSE:
Core_Stop();
Core_WaitInactive(200);
EmuThread_Stop();
/*
if (g_Config.bConfirmOnQuit && CCore::IsRunning())
{
if (IDNO==MessageBox(hwndMain,"A game is in progress. Are you sure you want to exit?","Are you sure?",MB_YESNO|MB_ICONQUESTION))
return 1;//or 1?
else
return DefWindowProc(hWnd,message,wParam,lParam);
break;
}
else
*/
return DefWindowProc(hWnd,message,wParam,lParam);
case WM_DESTROY:
PostQuitMessage(0);
break;
case WM_USER+1:
if (disasmWindow[0])
SendMessage(disasmWindow[0]->GetDlgHandle(), WM_CLOSE, 0, 0);
if (memoryWindow[0])
SendMessage(memoryWindow[0]->GetDlgHandle(), WM_CLOSE, 0, 0);
disasmWindow[0] = new CDisasm(MainWindow::GetHInstance(), MainWindow::GetHWND(), currentDebugMIPS);
DialogManager::AddDlg(disasmWindow[0]);
disasmWindow[0]->Show(g_Config.bShowDebuggerOnLoad);
if (g_Config.bFullScreen)
_ViewFullScreen(hWnd);
memoryWindow[0] = new CMemoryDlg(MainWindow::GetHInstance(), MainWindow::GetHWND(), currentDebugMIPS);
DialogManager::AddDlg(memoryWindow[0]);
if (disasmWindow[0])
disasmWindow[0]->NotifyMapLoaded();
if (memoryWindow[0])
memoryWindow[0]->NotifyMapLoaded();
SetForegroundWindow(hwndMain);
break;
case WM_MENUSELECT:
// Unfortunately, accelerate keys (hotkeys) shares the same enabled/disabled states
// with corresponding menu items.
UpdateMenus();
break;
// Turn off the screensaver.
// Note that if there's a screensaver password, this simple method
// doesn't work on Vista or higher.
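	// A hedged alternative (not used in this code): keep the display awake instead of
	// swallowing these messages, e.g.
	//   SetThreadExecutionState(ES_CONTINUOUS | ES_DISPLAY_REQUIRED);   // while emulation runs
	//   SetThreadExecutionState(ES_CONTINUOUS);                         // restore default on exit
	// which also behaves sensibly when a screensaver password is set.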
case WM_SYSCOMMAND:
{
switch (wParam)
{
case SC_SCREENSAVE:
return 0;
case SC_MONITORPOWER:
return 0;
}
return DefWindowProc(hWnd, message, wParam, lParam);
}
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
return 0;
}
void UpdateMenus()
{
HMENU menu = GetMenu(GetHWND());
#define CHECKITEM(item,value) CheckMenuItem(menu,item,MF_BYCOMMAND | ((value) ? MF_CHECKED : MF_UNCHECKED));
CHECKITEM(ID_EMULATION_SPEEDLIMIT,g_Config.bSpeedLimit);
// CHECK(ID_OPTIONS_ENABLEFRAMEBUFFER,g_Config.bEnableFrameBuffer);
// CHECK(ID_OPTIONS_EMULATESYSCALL,g_bEmulateSyscall);
CHECKITEM(ID_OPTIONS_DISPLAYRAWFRAMEBUFFER, g_Config.bDisplayFramebuffer);
CHECKITEM(ID_OPTIONS_IGNOREILLEGALREADS,g_Config.bIgnoreBadMemAccess);
CHECKITEM(ID_CPU_INTERPRETER,g_Config.bJit == false);
CHECKITEM(ID_CPU_DYNAREC,g_Config.bJit == true);
CHECKITEM(ID_OPTIONS_BUFFEREDRENDERING, g_Config.bBufferedRendering);
CHECKITEM(ID_OPTIONS_SHOWDEBUGSTATISTICS, g_Config.bShowDebugStats);
CHECKITEM(ID_OPTIONS_WIREFRAME, g_Config.bDrawWireframe);
CHECKITEM(ID_OPTIONS_HARDWARETRANSFORM, g_Config.bHardwareTransform);
CHECKITEM(ID_OPTIONS_FASTMEMORY, g_Config.bFastMemory);
CHECKITEM(ID_OPTIONS_LINEARFILTERING, g_Config.bLinearFiltering);
CHECKITEM(ID_OPTIONS_SIMPLE2XSSAA, g_Config.SSAntiAliasing);
CHECKITEM(ID_OPTIONS_STRETCHDISPLAY, g_Config.bStretchToDisplay);
CHECKITEM(ID_EMULATION_RUNONLOAD, g_Config.bAutoRun);
CHECKITEM(ID_OPTIONS_USEVBO, g_Config.bUseVBO);
CHECKITEM(ID_OPTIONS_VERTEXCACHE, g_Config.bVertexCache);
CHECKITEM(ID_OPTIONS_SHOWFPS, g_Config.bShowFPSCounter);
CHECKITEM(ID_OPTIONS_FRAMESKIP, g_Config.iFrameSkip != 0);
CHECKITEM(ID_OPTIONS_USEMEDIAENGINE, g_Config.bUseMediaEngine);
CHECKITEM(ID_OPTIONS_MIPMAP, g_Config.bMipMap);
CHECKITEM(ID_EMULATION_SOUND, g_Config.bEnableSound);
CHECKITEM(ID_TEXTURESCALING_DEPOSTERIZE, g_Config.bTexDeposterize);
static const int zoomitems[4] = {
ID_OPTIONS_SCREEN1X,
ID_OPTIONS_SCREEN2X,
ID_OPTIONS_SCREEN3X,
ID_OPTIONS_SCREEN4X,
};
for (int i = 0; i < 4; i++) {
CheckMenuItem(menu, zoomitems[i], MF_BYCOMMAND | ((i == g_Config.iWindowZoom - 1) ? MF_CHECKED : MF_UNCHECKED));
}
static const int texscalingitems[] = {
ID_TEXTURESCALING_OFF,
ID_TEXTURESCALING_2X,
ID_TEXTURESCALING_3X,
ID_TEXTURESCALING_4X,
ID_TEXTURESCALING_5X,
};
for (int i = 0; i < 5; i++) {
CheckMenuItem(menu, texscalingitems[i], MF_BYCOMMAND | ((i == g_Config.iTexScalingLevel-1) ? MF_CHECKED : MF_UNCHECKED));
}
static const int texscalingtypeitems[] = {
ID_TEXTURESCALING_XBRZ,
ID_TEXTURESCALING_HYBRID,
ID_TEXTURESCALING_BICUBIC,
ID_TEXTURESCALING_HYBRID_BICUBIC,
};
for (int i = 0; i < 4; i++) {
CheckMenuItem(menu, texscalingtypeitems[i], MF_BYCOMMAND | ((i == g_Config.iTexScalingType) ? MF_CHECKED : MF_UNCHECKED));
}
UpdateCommands();
}
void UpdateCommands()
{
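		// Cache the last seen UI/core state so menu items are only re-enabled or
		// grayed out when something actually changed (this runs on every WM_MENUSELECT).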
static GlobalUIState lastGlobalUIState = UISTATE_PAUSEMENU;
static CoreState lastCoreState = CORE_ERROR;
if (lastGlobalUIState == globalUIState && lastCoreState == coreState)
return;
lastCoreState = coreState;
lastGlobalUIState = globalUIState;
HMENU menu = GetMenu(GetHWND());
EnableMenuItem(menu,ID_EMULATION_RUN, (Core_IsStepping() || globalUIState == UISTATE_PAUSEMENU) ? MF_ENABLED : MF_GRAYED);
UINT ingameEnable = globalUIState == UISTATE_INGAME ? MF_ENABLED : MF_GRAYED;
EnableMenuItem(menu,ID_EMULATION_PAUSE, ingameEnable);
EnableMenuItem(menu,ID_EMULATION_STOP, ingameEnable);
EnableMenuItem(menu,ID_EMULATION_RESET, ingameEnable);
UINT menuEnable = globalUIState == UISTATE_MENU ? MF_ENABLED : MF_GRAYED;
EnableMenuItem(menu,ID_FILE_LOAD, menuEnable);
EnableMenuItem(menu,ID_FILE_LOAD_MEMSTICK, menuEnable);
EnableMenuItem(menu,ID_FILE_SAVESTATEFILE, !menuEnable);
EnableMenuItem(menu,ID_FILE_LOADSTATEFILE, !menuEnable);
EnableMenuItem(menu,ID_FILE_QUICKSAVESTATE, !menuEnable);
EnableMenuItem(menu,ID_FILE_QUICKLOADSTATE, !menuEnable);
EnableMenuItem(menu,ID_CPU_DYNAREC, menuEnable);
EnableMenuItem(menu,ID_CPU_INTERPRETER, menuEnable);
EnableMenuItem(menu,ID_EMULATION_STOP, !menuEnable);
}
// Message handler for about box.
LRESULT CALLBACK About(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam)
{
switch (message)
{
case WM_INITDIALOG:
W32Util::CenterWindow(hDlg);
{
HWND versionBox = GetDlgItem(hDlg, IDC_VERSION);
char temp[256];
sprintf(temp, "PPSSPP %s", PPSSPP_GIT_VERSION);
SetWindowText(versionBox, temp);
}
return TRUE;
case WM_COMMAND:
if (LOWORD(wParam) == IDOK || LOWORD(wParam) == IDCANCEL)
{
EndDialog(hDlg, LOWORD(wParam));
return TRUE;
}
break;
}
return FALSE;
}
static const char *controllist[] = {
"TURBO MODE\tHold TAB",
"Start\tSpace",
"Select\tV",
"Square\tA",
"Triangle\tS",
"Circle\tX",
"Cross\tZ",
"Left Trigger\tQ",
"Right Trigger\tW",
"Up\tArrow Up",
"Down\tArrow Down",
"Left\tArrow Left",
"Right\tArrow Right",
"Analog Up\tI",
"Analog Down\tK",
"Analog Left\tJ",
"Analog Right\tL",
"Rapid Fire\tShift",
};
static HHOOK pKeydownHook;
static const int control_map_size = IDC_EDIT_KEY_ANALOG_RIGHT - IDC_EDIT_KEY_TURBO + 1;
static u8 control_map[control_map_size];
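	// Translates a control's window rectangle from screen coordinates into its
	// parent dialog's client coordinates.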
RECT getRedrawRect(HWND hWnd) {
RECT rc;
HWND hDlg = GetParent(hWnd);
GetWindowRect(hWnd, &rc);
POINT pt = {0, 0};
ScreenToClient(hDlg, &pt);
rc.left += pt.x;
rc.right += pt.x;
rc.top += pt.y;
rc.bottom += pt.y;
return rc;
}
LRESULT CALLBACK KeyboardProc(int nCode, WPARAM wParam, LPARAM lParam)
{
HWND hEdit = GetFocus();
UINT nCtrlID = GetDlgCtrlID(hEdit);
if (nCtrlID < IDC_EDIT_KEY_TURBO || nCtrlID > IDC_EDIT_KEY_ANALOG_RIGHT) {
return CallNextHookEx(pKeydownHook, nCode, wParam, lParam);
}
		if (!(lParam&(1<<31))) {
			// Bit 31 of lParam is the transition state; 0 means this is a key press.
HWND hDlg = GetParent(hEdit);
const char *str = getVirtualKeyName(wParam);
if (str) {
control_map[nCtrlID - IDC_EDIT_KEY_TURBO] = wParam;
SetWindowTextA(hEdit, str);
RECT rc = getRedrawRect(hEdit);
InvalidateRect(hDlg, &rc, false);
}
else
MessageBoxA(hDlg, "Not supported!", "controller", MB_OK);
}
return 1;
}
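	// Decodes a PNG stored as a Win32 resource and returns it as a 32-bit HBITMAP,
	// or 0 on failure.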
HBITMAP LoadImageFromResource(HINSTANCE hInstance,LPCTSTR pszResourceName, LPCTSTR lpType)
{
HRSRC hrsrc = FindResource(hInstance, pszResourceName, lpType);
if (!hrsrc)
return FALSE;
DWORD dwlen = SizeofResource(hInstance, hrsrc);
BYTE *lpRsrc = (BYTE*)LoadResource(hInstance, hrsrc);
if (!lpRsrc)
return FALSE;
int width, height;
unsigned char *image_data = 0;
bool bResult = pngLoadPtr(lpRsrc, dwlen, &width, &height, &image_data, false) != 0;
FreeResource(lpRsrc);
if (!bResult)
return 0;
HBITMAP hbm = CreateBitmap(width, height, 1, 32, image_data);
free(image_data);
return hbm;
}
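	// Helper overload: blits a region of an HBITMAP onto dstDC by selecting the
	// bitmap into a temporary compatible DC.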
void BitBlt(HBITMAP hbm, HDC dstDC, int dstX, int dstY, int width, int height, int srcX, int srcY)
{
HDC hCompDC = CreateCompatibleDC(dstDC);
HBITMAP oldbm = (HBITMAP)SelectObject(hCompDC, hbm);
BitBlt(dstDC, dstX, dstY, width, height, hCompDC, srcX, srcY, SRCCOPY);
SelectObject(hCompDC, oldbm);
DeleteObject(hCompDC);
}
// Message handler for control box.
LRESULT CALLBACK Controls(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam)
{
static HBITMAP hbm = 0;
switch (message)
{
case WM_INITDIALOG:
W32Util::CenterWindow(hDlg);
{
// TODO: connect to keyboard device instead
{
HBITMAP hResBM = LoadImageFromResource(hInst, MAKEINTRESOURCE(IDB_IMAGE_PSP), "IMAGE");
HDC hDC = GetDC(hDlg);
RECT clientRect;
GetClientRect(hDlg, &clientRect);
HBITMAP hMemBM = CreateCompatibleBitmap(hDC, clientRect.right, clientRect.bottom);
HDC hResDC = CreateCompatibleDC(hDC);
HDC hMemDC = CreateCompatibleDC(hDC);
SelectObject(hResDC, hResBM);
SelectObject(hMemDC, hMemBM);
BITMAP bm;
GetObject(hResBM, sizeof(BITMAP), &bm);
SetStretchBltMode(hMemDC, HALFTONE);
StretchBlt(hMemDC, 0, 0, clientRect.right, clientRect.bottom, hResDC, 0, 0, bm.bmWidth, bm.bmHeight, SRCCOPY);
if (hbm)
DeleteObject(hbm);
hbm = hMemBM;
DeleteDC(hMemDC);
DeleteDC(hResDC);
ReleaseDC(hDlg, hDC);
DeleteObject(hResBM);
}
int key_pad_size = (IDC_EDIT_KEYRIGHT - IDC_EDIT_KEY_TURBO + 1);
for (u32 i = 0; i <= IDC_EDIT_KEY_ANALOG_RIGHT - IDC_EDIT_KEY_TURBO; i++) {
HWND hEdit = GetDlgItem(hDlg, IDC_EDIT_KEY_TURBO + i);
if (IDC_EDIT_KEY_TURBO + i <= IDC_EDIT_KEYRIGHT)
control_map[i] = key_pad_map[i * 2];
else
control_map[i] = analog_ctrl_map[(i - key_pad_size) * 2];
SetWindowTextA(hEdit, getVirtualKeyName(control_map[i]));
}
ComboBox_AddString(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE), "None");
ComboBox_AddString(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE), "XInput");
ComboBox_AddString(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE), "DirectInput");
if ((g_Config.iForceInputDevice < 0) || (g_Config.iForceInputDevice > 1))
{
ComboBox_SetCurSel(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE), 0);
}
else
{
ComboBox_SetCurSel(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE), (g_Config.iForceInputDevice + 1));
}
DWORD dwThreadID = GetWindowThreadProcessId(hDlg, NULL);
pKeydownHook = SetWindowsHookEx(WH_KEYBOARD,KeyboardProc, NULL, dwThreadID);
}
return TRUE;
case WM_PAINT:
{
PAINTSTRUCT pst;
HDC hdc = BeginPaint(hDlg, &pst);
BITMAP bm;
GetObject(hbm, sizeof(BITMAP), &bm);
int width = bm.bmWidth;
int height = bm.bmHeight;
BitBlt(hbm, hdc, 0, 0, width, height, 0 , 0);
EndPaint(hDlg, &pst);
return TRUE;
}
case WM_CTLCOLORSTATIC:
{
HDC hdc=(HDC)wParam;
SetBkMode(hdc, TRANSPARENT);
return (LRESULT)GetStockObject(NULL_BRUSH);
}
case WM_CTLCOLOREDIT:
{
if ((HWND)lParam == GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE))
return FALSE;
HDC hdc = (HDC)wParam;
SetBkMode(hdc, TRANSPARENT);
SetTextColor(hdc, RGB(255, 0, 0));
HWND hEdit = (HWND)lParam;
RECT rc = getRedrawRect(hEdit);
RECT clientrc;
GetClientRect(hEdit, &clientrc);
BitBlt(hbm, hdc, 0, 0, rc.right - rc.left, rc.bottom - rc.top, rc.left, rc.top);
char str[11];
GetWindowTextA(hEdit, str, 10);
DrawTextA(hdc, str, (int)strlen(str), &clientrc, DT_CENTER|DT_SINGLELINE);
return (LRESULT)GetStockObject(NULL_BRUSH);
}
case WM_COMMAND:
if (LOWORD(wParam) == IDOK || LOWORD(wParam) == IDCANCEL)
{
if (LOWORD(wParam) == IDOK) {
g_Config.iForceInputDevice = (ComboBox_GetCurSel(GetDlgItem(hDlg, IDC_FORCE_INPUT_DEVICE)) - 1);
int key_pad_size = (IDC_EDIT_KEYRIGHT - IDC_EDIT_KEY_TURBO + 1);
for (u32 i = 0; i <= IDC_EDIT_KEY_ANALOG_RIGHT - IDC_EDIT_KEY_TURBO; i++) {
if (IDC_EDIT_KEY_TURBO + i <= IDC_EDIT_KEYRIGHT)
key_pad_map[i * 2] = control_map[i];
else
analog_ctrl_map[(i - key_pad_size) * 2] = control_map[i];
}
saveControlsToFile();
}
UnhookWindowsHookEx(pKeydownHook);
EndDialog(hDlg, LOWORD(wParam));
if (hbm) {
DeleteObject(hbm);
hbm = 0;
}
return TRUE;
}
break;
}
return FALSE;
}
void Update()
{
InvalidateRect(hwndDisplay,0,0);
UpdateWindow(hwndDisplay);
SendMessage(hwndMain,WM_SIZE,0,0);
}
void Redraw()
{
InvalidateRect(hwndDisplay,0,0);
}
void _ViewNormal(HWND hWnd)
{
ShowCursor(TRUE);
SetCursor(LoadCursor(NULL, IDC_ARROW));
// put caption and border styles back
DWORD dwOldStyle = ::GetWindowLong(hWnd, GWL_STYLE);
DWORD dwNewStyle = dwOldStyle | WS_CAPTION | WS_THICKFRAME;
::SetWindowLong(hWnd, GWL_STYLE, dwNewStyle);
// put back the menu bar
::SetMenu(hWnd, menu);
// resize to normal view
// NOTE: use SWP_FRAMECHANGED to force redraw non-client
const int x = g_normalRC.left;
const int y = g_normalRC.top;
const int cx = g_normalRC.right - g_normalRC.left;
const int cy = g_normalRC.bottom - g_normalRC.top;
::SetWindowPos(hWnd, HWND_NOTOPMOST, x, y, cx, cy, SWP_FRAMECHANGED);
// reset full screen indicator
g_bFullScreen = FALSE;
ResizeDisplay();
}
void _ViewFullScreen(HWND hWnd)
{
if (globalUIState == UISTATE_INGAME)
ShowCursor(FALSE);
// keep in mind normal window rectangle
::GetWindowRect(hWnd, &g_normalRC);
// remove caption and border styles
DWORD dwOldStyle = ::GetWindowLong(hWnd, GWL_STYLE);
DWORD dwNewStyle = dwOldStyle & ~(WS_CAPTION | WS_THICKFRAME);
::SetWindowLong(hWnd, GWL_STYLE, dwNewStyle);
// remove the menu bar
::SetMenu(hWnd, NULL);
// resize to full screen view
// NOTE: use SWP_FRAMECHANGED to force redraw non-client
const int x = 0;
const int y = 0;
const int cx = ::GetSystemMetrics(SM_CXSCREEN);
const int cy = ::GetSystemMetrics(SM_CYSCREEN);
::SetWindowPos(hWnd, HWND_TOPMOST, x, y, cx, cy, SWP_FRAMECHANGED);
// set full screen indicator
g_bFullScreen = TRUE;
ResizeDisplay();
}
void SetPlaying(const char *text)
{
char temp[256];
if (text == 0)
snprintf(temp, 256, "PPSSPP %s", PPSSPP_GIT_VERSION);
else
snprintf(temp, 256, "%s - PPSSPP %s", text, PPSSPP_GIT_VERSION);
temp[255] = '\0';
SetWindowText(hwndMain, temp);
}
void SaveStateActionFinished(bool result, void *userdata)
{
if (!result)
MessageBox(0, "Savestate failure. Using savestates between different PPSSPP versions is not supported.", "Sorry", MB_OK);
SetCursor(LoadCursor(0, IDC_ARROW));
}
HINSTANCE GetHInstance()
{
return hInst;
}
}<|fim▁end|> |
LRESULT CALLBACK DisplayProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
switch (message) |
<|file_name|>test_explode.py<|end_file_name|><|fim▁begin|>import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
def test_basic():
s = pd.Series([[0, 1, 2], np.nan, [], (3, 4)], index=list("abcd"), name="foo")
result = s.explode()
expected = pd.Series(
[0, 1, 2, np.nan, np.nan, 3, 4], index=list("aaabcdd"), dtype=object, name="foo"
)
tm.assert_series_equal(result, expected)
def test_mixed_type():
s = pd.Series(
[[0, 1, 2], np.nan, None, np.array([]), pd.Series(["a", "b"])], name="foo"
)
result = s.explode()
expected = pd.Series(
[0, 1, 2, np.nan, None, np.nan, "a", "b"],
index=[0, 0, 0, 1, 2, 3, 4, 4],
dtype=object,
name="foo",
)
tm.assert_series_equal(result, expected)
def test_empty():
s = pd.Series(dtype=object)
result = s.explode()
expected = s.copy()
tm.assert_series_equal(result, expected)
def test_nested_lists():
s = pd.Series([[[1, 2, 3]], [1, 2], 1])
result = s.explode()
expected = pd.Series([[1, 2, 3], 1, 2, 1], index=[0, 1, 1, 2])
tm.assert_series_equal(result, expected)
def test_multi_index():
s = pd.Series(
[[0, 1, 2], np.nan, [], (3, 4)],
name="foo",
index=pd.MultiIndex.from_product([list("ab"), range(2)], names=["foo", "bar"]),
)
result = s.explode()
index = pd.MultiIndex.from_tuples(
[("a", 0), ("a", 0), ("a", 0), ("a", 1), ("b", 0), ("b", 1), ("b", 1)],
names=["foo", "bar"],
)
expected = pd.Series(
[0, 1, 2, np.nan, np.nan, 3, 4], index=index, dtype=object, name="foo"
)
tm.assert_series_equal(result, expected)
def test_large():
s = pd.Series([range(256)]).explode()
result = s.explode()
tm.assert_series_equal(result, s)
def test_invert_array():
df = pd.DataFrame({"a": pd.date_range("20190101", periods=3, tz="UTC")})
listify = df.apply(lambda x: x.array, axis=1)
result = listify.explode()
tm.assert_series_equal(result, df["a"].rename())
@pytest.mark.parametrize(
"s", [pd.Series([1, 2, 3]), pd.Series(pd.date_range("2019", periods=3, tz="UTC"))]
)
def test_non_object_dtype(s):
result = s.explode()
tm.assert_series_equal(result, s)
def test_typical_usecase():
df = pd.DataFrame(
[{"var1": "a,b,c", "var2": 1}, {"var1": "d,e,f", "var2": 2}],
columns=["var1", "var2"],
)
exploded = df.var1.str.split(",").explode()
result = df[["var2"]].join(exploded)
expected = pd.DataFrame(
{"var2": [1, 1, 1, 2, 2, 2], "var1": list("abcdef")},
columns=["var2", "var1"],
index=[0, 0, 0, 1, 1, 1],
)
tm.assert_frame_equal(result, expected)
def test_nested_EA():
# a nested EA array
s = pd.Series(
[
pd.date_range("20170101", periods=3, tz="UTC"),
pd.date_range("20170104", periods=3, tz="UTC"),
]
)
result = s.explode()
expected = pd.Series(<|fim▁hole|> )
tm.assert_series_equal(result, expected)
def test_duplicate_index():
# GH 28005
s = pd.Series([[1, 2], [3, 4]], index=[0, 0])
result = s.explode()
expected = pd.Series([1, 2, 3, 4], index=[0, 0, 0, 0], dtype=object)
tm.assert_series_equal(result, expected)
def test_ignore_index():
# GH 34932
s = pd.Series([[1, 2], [3, 4]])
result = s.explode(ignore_index=True)
expected = pd.Series([1, 2, 3, 4], index=[0, 1, 2, 3], dtype=object)
tm.assert_series_equal(result, expected)
def test_explode_sets():
# https://github.com/pandas-dev/pandas/issues/35614
s = pd.Series([{"a", "b", "c"}], index=[1])
result = s.explode().sort_values()
expected = pd.Series(["a", "b", "c"], index=[1, 1, 1])
tm.assert_series_equal(result, expected)
def test_explode_scalars_can_ignore_index():
# https://github.com/pandas-dev/pandas/issues/40487
s = pd.Series([1, 2, 3], index=["a", "b", "c"])
result = s.explode(ignore_index=True)
expected = pd.Series([1, 2, 3])
tm.assert_series_equal(result, expected)<|fim▁end|> | pd.date_range("20170101", periods=6, tz="UTC"), index=[0, 0, 0, 1, 1, 1] |
<|file_name|>add_swift_support.js<|end_file_name|><|fim▁begin|>var child_process = require('child_process'),
fs = require('fs'),
path = require('path');
module.exports = function(context) {
var IOS_DEPLOYMENT_TARGET = '8.0',
SWIFT_VERSION = '3.0',
COMMENT_KEY = /_comment$/,
CORDOVA_VERSION = process.env.CORDOVA_VERSION;
run();
function run() {
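        // Locates the generated Xcode project, ensures a Swift bridging header exists
        // and imports the plugins' bridging headers, then applies the Swift-related
        // build settings before writing the project back out.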
var cordova_util = context.requireCordovaModule('cordova-lib/src/cordova/util'),
ConfigParser = CORDOVA_VERSION >= 6.0
? context.requireCordovaModule('cordova-common').ConfigParser
: context.requireCordovaModule('cordova-lib/src/configparser/ConfigParser'),
projectRoot = cordova_util.isCordova(),
platform_ios,
xml = cordova_util.projectConfig(projectRoot),
cfg = new ConfigParser(xml),
projectName = cfg.name(),
iosPlatformPath = path.join(projectRoot, 'platforms', 'ios'),
iosProjectFilesPath = path.join(iosPlatformPath, projectName),
xcconfigPath = path.join(iosPlatformPath, 'cordova', 'build.xcconfig'),
xcconfigContent,
projectFile,
xcodeProject,
bridgingHeaderPath;
if(CORDOVA_VERSION < 7.0) {
platform_ios = CORDOVA_VERSION < 5.0
? context.requireCordovaModule('cordova-lib/src/plugman/platforms')['ios']
: context.requireCordovaModule('cordova-lib/src/plugman/platforms/ios')
projectFile = platform_ios.parseProjectFile(iosPlatformPath);
} else {
var project_files = context.requireCordovaModule('glob').sync(path.join(iosPlatformPath, '*.xcodeproj', 'project.pbxproj'));
if (project_files.length === 0) {
throw new Error('Can\'t found xcode project file');
}
var pbxPath = project_files[0];
var xcodeproj = context.requireCordovaModule('xcode').project(pbxPath);
xcodeproj.parseSync();
projectFile = {
'xcode': xcodeproj,
write: function () {
var fs = context.requireCordovaModule('fs');
var frameworks_file = path.join(iosPlatformPath, 'frameworks.json');
var frameworks = {};
try {
frameworks = context.requireCordovaModule(frameworks_file);
console.log(JSON.stringify(frameworks));
} catch(e) {}
fs.writeFileSync(pbxPath, xcodeproj.writeSync());
                        fs.writeFileSync(frameworks_file, JSON.stringify(frameworks, null, 4));
}
};
}
xcodeProject = projectFile.xcode;
if (fs.existsSync(xcconfigPath)) {
xcconfigContent = fs.readFileSync(xcconfigPath, 'utf-8');
}
bridgingHeaderPath = getBridgingHeader(projectName, xcconfigContent, xcodeProject);
if(bridgingHeaderPath) {
bridgingHeaderPath = path.join(iosPlatformPath, bridgingHeaderPath);
} else {
bridgingHeaderPath = createBridgingHeader(xcodeProject, projectName, iosProjectFilesPath);
}
getExistingBridgingHeaders(iosProjectFilesPath, function (headers) {
importBridgingHeaders(bridgingHeaderPath, headers);
var configurations = nonComments(xcodeProject.pbxXCBuildConfigurationSection()),
config, buildSettings;
for (config in configurations) {
buildSettings = configurations[config].buildSettings;
buildSettings['IPHONEOS_DEPLOYMENT_TARGET'] = IOS_DEPLOYMENT_TARGET;
buildSettings['SWIFT_VERSION'] = SWIFT_VERSION;
buildSettings['EMBEDDED_CONTENT_CONTAINS_SWIFT'] = "YES";
buildSettings['LD_RUNPATH_SEARCH_PATHS'] = '"@executable_path/Frameworks"';
}
console.log('IOS project now has deployment target set as:[' + IOS_DEPLOYMENT_TARGET + '] ...');
console.log('IOS project option EMBEDDED_CONTENT_CONTAINS_SWIFT set as:[YES] ...');
console.log('IOS project swift_objc Bridging-Header set to:[' + bridgingHeaderPath + '] ...');
console.log('IOS project Runpath Search Paths set to: @executable_path/Frameworks ...');
<|fim▁hole|> });
}
function getBridgingHeader(projectName, xcconfigContent, xcodeProject) {
var configurations,
config,
buildSettings,
bridgingHeader;
if (xcconfigContent) {
var regex = /^SWIFT_OBJC_BRIDGING_HEADER *=(.*)$/m,
match = xcconfigContent.match(regex);
if (match) {
bridgingHeader = match[1];
bridgingHeader = bridgingHeader
.replace("$(PROJECT_DIR)/", "")
.replace("$(PROJECT_NAME)", projectName)
.trim();
return bridgingHeader;
}
}
configurations = nonComments(xcodeProject.pbxXCBuildConfigurationSection());
for (config in configurations) {
buildSettings = configurations[config].buildSettings;
bridgingHeader = buildSettings['SWIFT_OBJC_BRIDGING_HEADER'];
if (bridgingHeader) {
return unquote(bridgingHeader);
}
}
}
function createBridgingHeader(xcodeProject, projectName, xcodeProjectRootPath) {
var newBHPath = path.join(xcodeProjectRootPath, "Plugins", "Bridging-Header.h"),
content = ["//",
"// Use this file to import your target's public headers that you would like to expose to Swift.",
"//",
"#import <Cordova/CDV.h>"]
//fs.openSync(newBHPath, 'w');
console.log('Creating new Bridging-Header.h at path: ', newBHPath);
fs.writeFileSync(newBHPath, content.join("\n"), { encoding: 'utf-8', flag: 'w' });
xcodeProject.addHeaderFile("Bridging-Header.h");
setBridgingHeader(xcodeProject, path.join(projectName, "Plugins", "Bridging-Header.h"));
return newBHPath;
}
function setBridgingHeader(xcodeProject, headerPath) {
var configurations = nonComments(xcodeProject.pbxXCBuildConfigurationSection()),
config, buildSettings, bridgingHeader;
for (config in configurations) {
buildSettings = configurations[config].buildSettings;
buildSettings['SWIFT_OBJC_BRIDGING_HEADER'] = '"' + headerPath + '"';
}
}
function getExistingBridgingHeaders(xcodeProjectRootPath, callback) {
var searchPath = path.join(xcodeProjectRootPath, 'Plugins');
child_process.exec('find . -name "*Bridging-Header*.h"', { cwd: searchPath }, function (error, stdout, stderr) {
var headers = stdout.toString().split('\n').map(function (filePath) {
return path.basename(filePath);
});
callback(headers);
});
}
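    // Appends an #import line to the main bridging header for every plugin
    // bridging header that is not referenced in it yet.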
function importBridgingHeaders(mainBridgingHeader, headers) {
var content = fs.readFileSync(mainBridgingHeader, 'utf-8'),
mainHeaderName = path.basename(mainBridgingHeader);
headers.forEach(function (header) {
if(header !== mainHeaderName && content.indexOf(header) < 0) {
if (content.charAt(content.length - 1) != '\n') {
content += "\n";
}
content += "#import \""+header+"\"\n"
console.log('Importing ' + header + ' into main bridging-header at: ' + mainBridgingHeader);
}
});
fs.writeFileSync(mainBridgingHeader, content, 'utf-8');
}
function nonComments(obj) {
var keys = Object.keys(obj),
newObj = {},
i = 0;
for (i; i < keys.length; i++) {
if (!COMMENT_KEY.test(keys[i])) {
newObj[keys[i]] = obj[keys[i]];
}
}
return newObj;
}
function unquote(str) {
if (str) return str.replace(/^"(.*)"$/, "$1");
}
}<|fim▁end|> | projectFile.write(); |
<|file_name|>menu.py<|end_file_name|><|fim▁begin|>from django.utils.translation import ugettext_lazy as _<|fim▁hole|>from oioioi.contests.utils import contest_exists
top_links_registry = MenuRegistry(_("Top Links Menu"), contest_exists & not_anonymous)<|fim▁end|> |
from oioioi.base.menu import MenuRegistry
from oioioi.base.permissions import not_anonymous |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Connectors documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 4 11:35:44 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
sys.path.append(os.path.abspath('_themes'))
sys.path.append(os.path.abspath('../../../odoo'))
# Load OpenERP with correct addons-path so the doc can be built even if
# the addon import modules from other branches
import openerp
BASE_PATH = os.path.abspath(os.path.join(os.getcwd(), '../../..'))
# You may need to change these to your own paths
ADDONS_PATHS = ('odoo/openerp/addons',
'odoo/addons',
'connector',
'connector-ecommerce',
'e-commerce',
'sale-workflow',
'product-attribute',
'connector-magento')
pathes = [os.path.join(BASE_PATH, path) for path in ADDONS_PATHS]
options = ['--addons-path', ','.join(pathes)]
openerp.tools.config.parse_config(options)
os.environ['TZ'] = 'UTC'
openerp.service.start_internal()
# -- General configuration --------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.viewcode']
todo_include_todos = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OpenERP Magento Connector'
copyright = u'2013, Camptocamp SA'
# The version info for the project you're documenting, acts as
# replacement for |version| and |release|, also used in various other
# places throughout the built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to
# documentation for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:<|fim▁hole|>
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in
# the output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation
# for a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see
# the documentation.
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
'navbar_title': "OpenERP Magento Connector",
# Tab name for entire site. (Default: "Site")
'navbar_site_name': "Site",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 2,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the
# build will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "true",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
'navbar_class': "navbar",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "footer",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
#
# Note that this is served off CDN, so won't be available offline.
'bootswatch_theme': "united",
}
# Add any paths that contain custom themes here, relative to this
# directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default
# is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is
# True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'openerp-magento-connector-doc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples (source
# start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'openerp-magento-connector.tex',
u'OpenERP Magento Connector Documentation',
u'Camptocamp SA', 'manual'),
]
# The name of an image file (relative to this directory) to place at the
# top of the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are
# parts, not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output -----------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'openerp-magento-connector',
u'OpenERP Magento Connector Documentation',
[u'Camptocamp SA'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OpenERP Magento Connector',
u'OpenERP Magento Connector Documentation',
u'Camptocamp SA', 'OpenERP Magento Connector',
'Connector between OpenERP and Magento',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard
# library.
intersphinx_mapping = {
'python': ('http://docs.python.org/2.6', None),
'openerpweb': ('http://doc.openerp.com/trunk/developers/web', None),
'openerpdev': ('http://doc.openerp.com/trunk/developers', None),
'openerpconnector': ('http://www.openerp-connector.com', None),
}<|fim▁end|> | # today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y' |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Document'
db.create_table('witness_document', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('creation_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('last_update_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=128)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50)),
))
db.send_create_signal('witness', ['Document'])
# Adding model 'DocumentVersion'
db.create_table('witness_documentversion', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('creation_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('last_update_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('document', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['witness.Document'])),
('number', self.gf('django.db.models.fields.CharField')(max_length=64)),
('title', self.gf('django.db.models.fields.CharField')(max_length=128)),
('text', self.gf('django.db.models.fields.TextField')()),
('yes_action_text', self.gf('django.db.models.fields.CharField')(max_length=64)),
('no_action_text', self.gf('django.db.models.fields.CharField')(max_length=64)),
('is_retired', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('witness', ['DocumentVersion'])
# Adding model 'Decision'
db.create_table('witness_decision', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('creation_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('last_update_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('document_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['witness.DocumentVersion'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
('full_name', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('ip_address', self.gf('django.db.models.fields.CharField')(max_length=64)),
('text_hash', self.gf('django.db.models.fields.CharField')(max_length=128)),
('action_text', self.gf('django.db.models.fields.CharField')(max_length=64)),
('is_yes', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_no', self.gf('django.db.models.fields.BooleanField')(default=False)),
))<|fim▁hole|> # Deleting model 'Document'
db.delete_table('witness_document')
# Deleting model 'DocumentVersion'
db.delete_table('witness_documentversion')
# Deleting model 'Decision'
db.delete_table('witness_decision')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'witness.decision': {
'Meta': {'object_name': 'Decision'},
'action_text': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'document_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['witness.DocumentVersion']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'is_no': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_yes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'text_hash': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'witness.document': {
'Meta': {'object_name': 'Document'},
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'witness.documentversion': {
'Meta': {'object_name': 'DocumentVersion'},
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['witness.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_retired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'no_action_text': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'yes_action_text': ('django.db.models.fields.CharField', [], {'max_length': '64'})
}
}
complete_apps = ['witness']<|fim▁end|> | db.send_create_signal('witness', ['Decision'])
def backwards(self, orm): |
<|file_name|>py2_text.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Tue Dec 13 23:10:40 2016
@author: zhouyu
"""
#%%
import pandas as pd
import numpy as np
import os
import re
import nltk
from nltk.corpus import stopwords
from bs4 import BeautifulSoup
os.chdir('/Users/zhouyu/Documents/Zhou_Yu/DS/kaggle_challenge/text processing')
#%% step1: import data
import glob
alltrainfiles = glob.glob("*.csv")
raw_text =pd.concat((pd.read_csv(f,index_col = None, header =0) for f in alltrainfiles),ignore_index = True)
#raw_text = pd.read_csv("crypto.csv",index_col = None)
#%% step2: clean data, remove HTML, symbols and stopwords
def text_to_words(rawtext):
#split into individual words, remove HTML, only keep letters and number
# convert letters to lower case
reg_c = re.compile('[^a-zA-Z0-9_\\+\\-]')
words = [word for word in reg_c.split(rawtext.lower()) if word!='']
stops = set(stopwords.words("english"))
#take out stop words
meaningful_words = [w for w in words if not w in stops]
return(" ".join(meaningful_words))
def target_to_words(rawtext):
#only return the first target word
reg_c = re.compile('[^a-zA-Z0-9_\\+\\-]')
words = [word for word in reg_c.split(rawtext.lower()) if word!='']
stops = set(stopwords.words("english"))
#take out stop words
meaningful_words = [w for w in words if not w in stops]
return(meaningful_words[0])
#%%
cleaned_post = []
cleaned_target = []
sz = raw_text.shape[0]
for i in range(0,sz):
raw_post = raw_text['title'][i]+' '+raw_text['content'][i]
raw_post = BeautifulSoup(raw_post).get_text()
cleaned_post.append(text_to_words(raw_post))
cleaned_target.append(target_to_words(raw_text['tags'][i]))
if((i+1)%1000==0):
print "Cleanning %d of %d\n" % (i+1,sz)
#print cleaned_post[1]
#%% step3: creating features from a bag of words
from sklearn.feature_extraction.text import CountVectorizer
count_vect = CountVectorizer(analyzer = "word", \
tokenizer = None, \
preprocessor = None, \
stop_words = None, \
max_features = 5000)
X_train_counts = count_vect.fit_transform(cleaned_post)
#X_target_counts = count_vect.fit_transform(cleaned_target)
from sklearn.feature_extraction.text import TfidfTransformer
tf_transformer = TfidfTransformer(use_idf = False).fit(X_train_counts)
X_train_tf = tf_transformer.transform(X_train_counts)
#%% train a classifier on the tf-idf features
# METHOD 1: BUILD randomforestclassifier...
from sklearn.ensemble import RandomForestClassifier
rf = RandomForestClassifier(n_estimators = 10)
forest = rf.fit(X_train_tf, cleaned_target)
#%% examine the result produced by METHOD 1:
pred = rf.predict(X_train_tf)
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
import matplotlib.pyplot as plt
import itertools
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
cnf_matrix = confusion_matrix(cleaned_target,pred)
#target_names = set(cleaned_target)
#np.set_printoptions(precision = 2)
#plt.figure()
#plot_confusion_matrix(cnf_matrix,classes = target_names,normalize = True,title='Normalized confusion matrix')
#plt.show()
target_names = list(OrderedDict.fromkeys(cleaned_target))
print(classification_report(cleaned_target,pred,target_names = target_names))
#######
#%% Method 2: directly predicted as the highest frequency element
# find the highest tf-idf
#step1: select a random sample
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from collections import OrderedDict
sample = np.random.choice(87000,1000,replace = False)
tf_pred = []
tf_target = []
for i in range(0,1000):
r = sample[i];
tf_target.append(cleaned_target[r])
tf_post = X_train_tf.getrow(r).toarray()
tf_post_max = tf_post.argmax()
tf_pred.append(count_vect.get_feature_names()[tf_post_max])
tf_cnf_matrix = confusion_matrix(tf_target,tf_pred)
target_names = list(OrderedDict.fromkeys(tf_pred+tf_target))
print(classification_report(tf_target, tf_pred,target_names =target_names))
#%% evaluate test set
test = pd.read_csv('test/test.csv')
cleaned_test = []
test_sz = test.shape[0]
for i in range(0,test_sz):
test_post = test['title'][i]+' '+test['content'][i]
test_post = BeautifulSoup(test_post).get_text()
cleaned_test.append(text_to_words(test_post))
if((i+1)%1000==0):
print "Cleanning %d of %d\n" % (i+1,test_sz)
#%% use random forest
X_test_counts = count_vect.transform(cleaned_test)
X_test_tf = tf_transformer.transform(X_test_counts)
result = forest.predict(X_test_tf)
# use max tf-idf
#%%
test_pred = []
for i in range(0,test_sz):
tf_test = X_test_tf.getrow(i).toarray()
# just return one tag
#tf_test_max = tf_test.argmax()
#test_pred.append(count_vect.get_feature_names()[tf_test_max])<|fim▁hole|> pred_tags = [count_vect.get_feature_names()[j] for j in ind[0,:].tolist()]
test_pred.append( " ".join(pred_tags))
if((i+1)%1000==0):
print "Predicting %d of %d\n" % (i+1,test_sz)
result = test_pred
#%% prepare submission
submission = pd.read_csv('test/sample_submission.csv')
submission.iloc[:,1] = result
submission.to_csv('test/submission.csv',index = None)
#%% try an NMF topic model; its topics can not be mapped back to a specific question...
n_features = 5000
n_topics = 10
n_samples = test_sz
n_top_words = 4
def get_top_words(model, feature_names, n_top_words):
res = []
for topic_idx, topic in enumerate(model.components_):
tags = " ".join([feature_names[i]
for i in topic.argsort()[:-n_top_words - 1:-1]])
res.append(tags)
return res
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF
from time import time
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2,
max_features=n_features,
stop_words='english')
tfidf = tfidf_vectorizer.fit_transform(cleaned_test)
# Fit the NMF model
print("Fitting the NMF model (Frobenius norm) with tf-idf features, "
"n_samples=%d and n_features=%d..."
% (n_samples, n_features))
t0 = time()
nmf = NMF(n_components=n_topics, random_state=1,
alpha=.1, l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
print("\nTopics in NMF model (Frobenius norm):")
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
#print_top_words(nmf, tfidf_feature_names, n_top_words)
result = get_top_words(nmf,tfidf_feature_names,n_top_words)<|fim▁end|> | ind = np.argpartition(tf_test,-4)[:,-4:] |
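Illustrative sketch (not from the original script): the same bag-of-words plus tf-idf featurisation as step 3 above, run on a few made-up documents.
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
sample_docs = ["python pandas dataframe question",
               "rust borrow checker question",
               "python decorator question"]
sample_counts = CountVectorizer(max_features=5000).fit_transform(sample_docs)
sample_tf = TfidfTransformer(use_idf=False).fit_transform(sample_counts)
print sample_tf.shape  # (3, number_of_distinct_terms)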
<|file_name|>views.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from help.models import ConditionsChapter, FAQ
def faqs(request):
extra_context = {}
extra_context['faqs'] = FAQ.objects.all()
<|fim▁hole|> context_instance=RequestContext(request))
def terms(request):
extra_context = {}
extra_context['termsandconditions'] = ConditionsChapter.objects.all()
return render_to_response('help/terms-and-conditions.html',
extra_context,
context_instance=RequestContext(request))
def about(request):
extra_context = {}
return render_to_response('help/about.html',
extra_context,
context_instance=RequestContext(request))<|fim▁end|> |
return render_to_response('help/faqs.html',
extra_context,
|
<|file_name|>truthy.rs<|end_file_name|><|fim▁begin|>//!
//! A type to represent a truthy value.
//!
//! In Elixir, a term which does not equal `:false` or `:nil` is considered to be
//! truthy. This does not cleanly map to Rust's `bool` type. To distinguish between
//! `bool` and a truthy value, the newtype `Truthy` can be used.
//!
use crate::types::atom;
use crate::{Decoder, Encoder, Env, NifResult, Term};
pub struct Truthy(bool);
impl Encoder for Truthy {<|fim▁hole|> self.0.encode(env)
}
}
impl<'a> Decoder<'a> for Truthy {
fn decode(term: Term<'a>) -> NifResult<Truthy> {
Ok(Truthy(atom::is_truthy(term)))
}
}<|fim▁end|> | fn encode<'a>(&self, env: Env<'a>) -> Term<'a> { |
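Illustrative sketch (not from the original file): how Truthy could be used as a NIF argument and return type; it assumes a rustler-style #[rustler::nif] macro is available, and the function name is made up.
#[rustler::nif]
fn echo_truthiness(value: Truthy) -> Truthy {
    // `false` and `nil` decode to Truthy(false); any other Elixir term decodes to Truthy(true)
    value
}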
<|file_name|>invalidThisEmitInContextualObjectLiteral.js<|end_file_name|><|fim▁begin|>//// [invalidThisEmitInContextualObjectLiteral.ts]
interface IDef {
p1: (e:string) => void;
p2: () => (n: number) => any;
}
class TestController {
public m(def: IDef) { }
public p = this.m({
p1: e => { },
p2: () => { return vvvvvvvvv => this; },
});
}
//// [invalidThisEmitInContextualObjectLiteral.js]
var TestController = (function () {
function TestController() {
var _this = this;
this.p = this.m({
p1: function (e) { },
p2: function () { return function (vvvvvvvvv) { return _this; }; }
});
}
TestController.prototype.m = function (def) { };
return TestController;
<|fim▁hole|><|fim▁end|> | }()); |
<|file_name|>eventsocket.py<|end_file_name|><|fim▁begin|>"""
A socket wrapper that uses Event IO.
"""
import socket
import event
import time
import logging
import errno
import traceback
import os
from collections import deque
# TODO: Use new io objects from 2.6
# 26 July 10 - I looked into this and a potential problem with io.StringIO is
# that it assumes all text is unicode. Without a full test and probably lots
# of code updated elsewhere, the older StringIO is probably the better choice
# to fix the bug @AW
# https://agora.lighthouseapp.com/projects/47111/tickets/628-odd-amqp-error
from cStringIO import StringIO
class EventSocket(object):
"""
A socket wrapper which uses libevent.
"""
def __init__( self, family=socket.AF_INET, type=socket.SOCK_STREAM, \
protocol=socket.IPPROTO_IP, read_cb=None, accept_cb=None, \
close_cb=None, error_cb=None, output_empty_cb=None, sock=None, \
debug=False, logger=None, max_read_buffer=0, **kwargs):
"""
    Initialize the socket. If no read_cb is defined, the socket will only be used
    for writing. If this socket will be used for accepting new connections,
set read_cb here and it will be passed to new sockets. You can also set
accept_cb and be notified with an EventSocket object on accept(). The
error_cb will be called if there are any errors on the socket. The args
to it will be this socket, an error message, and an optional exception.
The close_cb will be called when this socket closes, with this socket as
its argument. If needed, you can wrap an existing socket by setting the
sock argument to a socket object.
"""
self._debug = debug
self._logger = logger
if self._debug and not self._logger:
print 'WARNING: to debug EventSocket, must provide a logger'
self._debug = False
    # There are various events we may or may not schedule
self._read_event = None
self._write_event = None
self._accept_event = None
self._connect_event = None
self._pending_read_cb_event = None
# Cache the peername so we can include it in logs even if the socket
# is closed. Note that connect() and bind() have to be the ones to do
# that work.
self._peername = 'unknown'
if sock:
self._sock = sock
try:
self._peername = "%s:%d"%self._sock.getpeername()
# Like connect(), only initialize these if the socket is already connected.
self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
except socket.error, e:
# unconnected
pass
else:
self._sock = socket.socket(family, type, protocol)
# wholesale binding of stuff we don't need to alter or intercept
self.listen = self._sock.listen
self.setsockopt = self._sock.setsockopt
self.fileno = self._sock.fileno
self.getpeername = self._sock.getpeername
self.getsockname = self._sock.getsockname
self.getsockopt = self._sock.getsockopt
self.setblocking = self._sock.setblocking # is this correct?
self.settimeout = self._sock.settimeout
self.gettimeout = self._sock.gettimeout
self.shutdown = self._sock.shutdown
self._max_read_buffer = max_read_buffer
#self._write_buf = []
self._write_buf = deque()
#self._read_buf = StringIO()
self._read_buf = bytearray()
self._parent_accept_cb = accept_cb
self._parent_read_cb = read_cb
self._parent_error_cb = error_cb
self._parent_close_cb = close_cb
self._parent_output_empty_cb = output_empty_cb
# This is the pending global error message. It's sort of a hack, but it's
# used for __protected_cb in much the same way as errno. This prevents
# having to pass an error message around, when the best way to do that is
# via kwargs that the event lib is itself trying to interpret and won't
# allow to pass to __protected_cb.
self._error_msg = None
self._closed = False
self._inactive_event = None
self.set_inactive_timeout( 0 )
@property
def closed(self):
'''
Return whether this socket is closed.
'''
return self._closed
def close(self):
"""
Close the socket.
"""
# if self._debug:
# self._logger.debug(\
# "closing connection %s to %s"%(self._sock.getsockname(), self._peername) )
# Unload all our events
if self._read_event:
self._read_event.delete()
self._read_event = None
if self._accept_event:
self._accept_event.delete()
self._accept_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
if self._write_event:
self._write_event.delete()
self._write_event = None
if self._connect_event:
self._connect_event.delete()
self._connect_event = None
if self._sock:
self._sock.close()
self._sock = None
# Flush any pending data to the read callbacks as appropriate. Do this
# manually as there is a chance for the following race condition to occur:
# pending data read by cb
# callback reads 1.1 messages, re-buffers .1 msg back
# callback disconnects from socket based on message, calling close()
# we get back to this code and find there's still data in the input buffer
# and the read cb hasn't been cleared. ruh roh.
#if self._parent_read_cb and self._read_buf.tell()>0:
if self._parent_read_cb and len(self._read_buf)>0:
cb = self._parent_read_cb
self._parent_read_cb = None
self._error_msg = "error processing remaining socket input buffer"
self._protected_cb( cb, self )
# Only mark as closed after socket is really closed, we've flushed buffered
# input, and we're calling back to close handlers.
self._closed = True
if self._parent_close_cb:
self._parent_close_cb( self )
if self._pending_read_cb_event:
self._pending_read_cb_event.delete()
self._pending_read_cb_event = None
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
# Delete references to callbacks to help garbage collection
self._parent_accept_cb = None
self._parent_read_cb = None
self._parent_error_cb = None
self._parent_close_cb = None
self._parent_output_empty_cb = None
# Clear buffers
self._write_buf = None
self._read_buf = None
def accept(self):
"""
No-op as we no longer perform blocking accept calls.
"""
pass
def _set_read_cb(self, cb):
"""
    Set the read callback. If there's data in the read buffer, immediately
    set up a call.
"""
self._parent_read_cb = cb
#if self._read_buf.tell()>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
if len(self._read_buf)>0 and self._parent_read_cb!=None and self._pending_read_cb_event==None:
self._pending_read_cb_event = \
event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )
# Allow someone to change the various callbacks.
read_cb = property( fset=_set_read_cb )
accept_cb = property( fset=lambda self,func: setattr(self, '_parent_accept_cb', func ) )
close_cb = property( fset=lambda self,func: setattr(self, '_parent_close_cb', func ) )
error_cb = property( fset=lambda self,func: setattr(self, '_parent_error_cb', func ) )
output_empty_cb = property( fset=lambda self,func: setattr(self, '_parent_output_empty_cb',func) )
def bind(self, *args):
"""
Bind the socket.
"""
if self._debug:
self._logger.debug( "binding to %s", str(args) )
self._sock.bind( *args )
self._peername = "%s:%d"%self.getsockname()
self._accept_event = event.read( self, self._protected_cb, self._accept_cb )
def connect(self, *args, **kwargs):
'''
Connect to the socket. If currently non-blocking, will return immediately
and call close_cb when the timeout is reached. If timeout_at is a float,
will wait until that time and then call the close_cb. Otherwise, it will
set timeout_at as time()+timeout, where timeout is a float argument or the
current timeout value of the socket. The check interval for successful
connection on a non-blocking socket is 100ms.
IMPORTANT: If you want the socket to timeout at all in non-blocking mode,
you *must* pass in either a relative timout in seconds, or an absolute
value in timeout_at. Otherwise, the socket will forever try to connect. <|fim▁hole|> control of `timeout` and `timeout_at`.
'''
timeout_at = kwargs.get('timeout_at')
timeout = kwargs.get('timeout')
if not isinstance(timeout_at, float):
if not isinstance(timeout,(int,long,float)):
timeout = self._sock.gettimeout()
if timeout>0:
timeout_at = time.time()+timeout
self._connect_cb(timeout_at, *args, immediate_raise=True)
def _connect_cb(self, timeout_at, *args, **kwargs):
'''
Local support for synch and asynch connect. Required because
`event.timeout` doesn't support kwargs. They are spec'd though so that
we can branch how exceptions are handled.
'''
err = self._sock.connect_ex( *args )
if not err:
self._peername = "%s:%d"%self._sock.getpeername()
self._read_event = event.read( self._sock, self._protected_cb, self._read_cb )
self._write_event = event.write( self._sock, self._protected_cb, self._write_cb )
if self._connect_event:
self._connect_event.delete()
self._connect_event = None
elif err in (errno.EINPROGRESS,errno.EALREADY):
# Only track timeout if we're about to re-schedule. Should only receive
# these on a non-blocking socket.
if isinstance(timeout_at,float) and time.time()>timeout_at:
self._error_msg = 'timeout connecting to %s'%str(args)
self.close()
return
if self._connect_event:
self._connect_event.delete()
# Checking every 100ms seems to be a reasonable amount of frequency. If
# requested this too can be configurable.
self._connect_event = event.timeout(0.1, self._connect_cb,
timeout_at, *args)
else:
if self._connect_event:
self._connect_event.delete()
self._error_msg = os.strerror(err)
serr = socket.error( err, self._error_msg )
if kwargs.get('immediate_raise'):
raise serr
else:
self._handle_error( serr )
def set_inactive_timeout(self, t):
"""
Set the inactivity timeout. If is None or 0, there is no activity timeout.
If t>0 then socket will automatically close if there has been no activity
after t seconds (float supported). Will raise TypeError if <t> is invalid.
"""
if t==None or t==0:
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = None
self._inactive_timeout = 0
elif isinstance(t,(int,long,float)):
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = event.timeout( t, self._inactive_cb )
self._inactive_timeout = t
else:
raise TypeError( "invalid timeout %s"%(str(t)) )
### Private support methods
def _handle_error(self, exc):
'''
Gracefully handle errors.
'''
if self._parent_error_cb:
if self._error_msg!=None:
self._parent_error_cb( self, self._error_msg, exc )
else:
self._parent_error_cb( self, "unknown error", exc )
else:
if self._error_msg!=None:
msg = "unhandled error %s"%(self._error_msg)
else:
msg = "unhandled unknown error"
if self._logger:
self._logger.error( msg, exc_info=True )
else:
traceback.print_exc()
def _protected_cb(self, cb, *args, **kwargs):
"""
Wrap any callback from libevent so that we can be sure that exceptions are
handled and errors forwarded to error_cb.
"""
rval = None
try:
rval = cb(*args, **kwargs)
except Exception, e:
self._handle_error( e )
self._error_msg = None
return rval
def _accept_cb(self):
"""
Accept callback from libevent.
"""
self._error_msg = "error accepting new socket"
(conn, addr) = self._sock.accept()
if self._debug:
self._logger.debug("accepted connection from %s"%(str(addr)))
evsock = EventSocket( read_cb=self._parent_read_cb,
error_cb=self._parent_error_cb,
close_cb=self._parent_close_cb, sock=conn,
debug=self._debug, logger=self._logger,
max_read_buffer=self._max_read_buffer )
if self._parent_accept_cb:
# 31 march 09 aaron - We can't call accept callback asynchronously in the
# event that the socket is quickly opened and closed. What happens is
# that a read event gets scheduled before __parent_accept_cb is run, and
# since the socket is closed, it calls the __parent_close_cb. If the
# socket has not been correctly initialized though, we may encounter
# errors if the close_cb is expected to be changed during the accept
# callback. This is arguably an application-level problem, but handling
# that situation entirely asynchronously would be a giant PITA and prone
# to bugs. We'll avoid that.
self._protected_cb( self._parent_accept_cb, evsock )
# Still reschedule event even if there was an error.
return True
def _read_cb(self):
"""
Read callback from libevent.
"""
# We should be able to use recv_into for speed and efficiency, but sadly
# this was broken after 2.6.1 http://bugs.python.org/issue7827
self._error_msg = "error reading from socket"
data = self._sock.recv( self.getsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF) )
if len(data)>0:
if self._debug:
self._logger.debug( "read %d bytes from %s"%(len(data), self._peername) )
# 23 Feb 09 aaron - There are cases where the client will have started
# pushing data right away, and there's a chance that async handling of
# accept will cause data to be read before the callback function has been
# set. I prefer to ignore data if no read callback defined, but it's
# better to just limit the overall size of the input buffer then to use
# a synchronous callback to __parent_accept_cb.
# TODO: So what is the best way of handling this problem, and if sticking
# with a max input buffer size, what's the correct algorithm? Maybe better
# approach is to raise a notice to a callback and let the callback decide
# what to do.
self._flag_activity()
self._read_buf.extend( data )
if self._max_read_buffer and len(self._read_buf) > self._max_read_buffer:
if self._debug:
self._logger.debug( "buffer for %s overflowed!"%(self._peername) )
# Clear the input buffer so that the callback flush code isn't called in close
self._read_buf = bytearray()
self.close()
return None
# Callback asynchronously so that priority is given to libevent to
# allocate time slices.
if self._parent_read_cb!=None and self._pending_read_cb_event==None:
self._pending_read_cb_event = \
event.timeout( 0, self._protected_cb, self._parent_read_timer_cb )
else:
self.close()
return None
return True
def _parent_read_timer_cb(self):
"""
Callback when we want the parent to read buffered data.
"""
# Shouldn't need to check closed state because all events should be
# cancelled, but there seems to be a case where that can happen so deal
# with it gracefully. Possibly a bug or edge case in libevent when tons
# of events are in play as this only happened during extreme testing.
if not self._closed:
self._error_msg = "error processing socket input buffer"
# allow for __close_cb and __read_cb to do their thing.
self._pending_read_cb_event = None
# Catch edge case where this could have been cleared after _read_cb
if self._parent_read_cb:
self._parent_read_cb( self )
# never reschedule
return None
def _write_cb(self):
"""
Write callback from libevent.
"""
self._error_msg = "error writing socket output buffer"
# If no data, don't reschedule
if len(self._write_buf)==0:
return None
# 7 April 09 aaron - Changed this algorithm so that we continually send
# data from the buffer until the socket didn't accept all of it, then
# break. This should be a bit faster.
if self._debug:
total_sent = 0
total_len = sum( map(len,self._write_buf) )
while len(self._write_buf)>0:
cur = self._write_buf.popleft()
# Catch all env errors since that should catch OSError, IOError and
# socket.error.
try:
bytes_sent = self._sock.send( cur )
except EnvironmentError, e:
# For now this seems to be the only error that isn't fatal. It seems
# to be used only for nonblocking sockets and implies that it can't
# buffer any more data right now.
if e.errno==errno.EAGAIN:
self._write_buf.appendleft( cur )
if self._debug:
self._logger.debug( '"%s" raised, waiting to flush to %s', e, self._peername )
break
else:
raise
if self._debug:
total_sent += bytes_sent
if bytes_sent < len(cur):
# keep the first entry and set to all remaining bytes.
self._write_buf.appendleft( cur[bytes_sent:] )
break
if self._debug:
self._logger.debug( "wrote %d/%d bytes to %s", total_sent,total_len,self._peername )
# also flag activity here? might not be necessary, but in some cases the
# timeout could still be small enough to trigger between accesses to the
# socket output.
self._flag_activity()
if len(self._write_buf)>0:
return True
if self._parent_output_empty_cb!=None:
self._parent_output_empty_cb( self )
return None
def _inactive_cb(self):
"""
Timeout when a socket has been inactive for a long time.
"""
self._error_msg = "error closing inactive socket"
self.close()
def _flag_activity(self):
"""
Flag that this socket is active.
"""
    # is there a better way of resetting a timer?
if self._inactive_event:
self._inactive_event.delete()
self._inactive_event = event.timeout( self._inactive_timeout, self._protected_cb, self._inactive_cb )
def write(self, data):
"""
Write some data. Will raise socket.error if connection is closed.
"""
if self._closed:
raise socket.error('write error: socket is closed')
# Always append the data to the write buffer, even if we're not connected
# yet.
self._write_buf.append( data )
# 21 July 09 aaron - I'm not sure if this has a significant benefit, but in
# trying to improve throughput I confirmed that this doesn't break anything
# and keeping the event queue cleaner is certainly good.
if self._write_event and not self._write_event.pending():
self._write_event.add()
if self._debug > 1:
self._logger.debug("buffered %d bytes (%d total) to %s",
len(data), sum(map(len,self._write_buf)), self._peername )
# Flag activity here so we don't timeout in case that event is ready to
# fire and we're just now writing.
self._flag_activity()
def read(self):
"""
Return the current read buffer. Will return a bytearray object.
"""
if self._closed:
raise socket.error('read error: socket is closed')
rval = self._read_buf
self._read_buf = bytearray()
return rval
def buffer(self, s):
'''
    Re-buffer some data. If it's a bytearray, it is assigned directly as the current
    input buffer; otherwise it is appended to the current buffer. Assumes that re-buffered
data is happening in the same cycle as read() was called, as anything other
than that would be nearly impossible to handle inside an application.
'''
if isinstance(s, bytearray):
self._read_buf = s
else:
self._read_buf.extend( s )<|fim▁end|> |
Passes *args on to socket.connect_ex, and **kwargs are used for local |
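Illustrative sketch (not from the original module): wiring up the callback API described in __init__() and connect() above. The host, port and callback names are made up, and it assumes the pyevent loop is driven elsewhere via event.dispatch().
def on_read(sock):
    data = sock.read()                     # bytearray of everything buffered so far
    sock.write('echo: %s\r\n' % str(data))

def on_error(sock, msg, exc=None):
    print 'socket error:', msg, exc

conn = EventSocket(read_cb=on_read, error_cb=on_error)
conn.connect(('127.0.0.1', 5672), timeout=5.0)  # gives up after 5s on a non-blocking socket
conn.set_inactive_timeout(30)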
<|file_name|>benchmark.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
'use strict';
var fs = require('fs'),
path = require('path'),
exec = require('child_process').exec,
chalk = require('chalk'),
Table = require('cli-table');
var fileNames = [
'abc',
'amazon',
//'eloquentjavascript',
//'es6-draft',
'es6-table',
'google',
'html-minifier',
'msn',
'newyorktimes',
'stackoverflow',
'wikipedia',
'es6'
];
fileNames = fileNames.sort().reverse();
var table = new Table({
head: ['File', 'Before', 'After', 'Savings', 'Time'],<|fim▁hole|>function toKb(size) {
return (size / 1024).toFixed(2);
}
function redSize(size) {
return chalk.red.bold(size) + chalk.white(' (' + toKb(size) + ' KB)');
}
function greenSize(size) {
return chalk.green.bold(size) + chalk.white(' (' + toKb(size) + ' KB)');
}
function blueSavings(oldSize, newSize) {
var savingsPercent = (1 - newSize / oldSize) * 100;
var savings = (oldSize - newSize) / 1024;
return chalk.cyan.bold(savingsPercent.toFixed(2)) + chalk.white('% (' + savings.toFixed(2) + ' KB)');
}
function blueTime(time) {
return chalk.cyan.bold(time) + chalk.white(' ms');
}
function test(fileName, done) {
if (!fileName) {
console.log('\n' + table.toString());
return;
}
console.log('Processing...', fileName);
var filePath = path.join('benchmarks/', fileName + '.html');
var minifiedFilePath = path.join('benchmarks/generated/', fileName + '.min.html');
var gzFilePath = path.join('benchmarks/generated/', fileName + '.html.gz');
var gzMinifiedFilePath = path.join('benchmarks/generated/', fileName + '.min.html.gz');
var command = path.normalize('./cli.js') + ' ' + filePath + ' -c benchmark.conf' + ' -o ' + minifiedFilePath;
// Open and read the size of the original input
fs.stat(filePath, function (err, stats) {
if (err) {
throw new Error('There was an error reading ' + filePath);
}
var originalSize = stats.size;
exec('gzip --keep --force --best --stdout ' + filePath + ' > ' + gzFilePath, function () {
// Open and read the size of the gzipped original
fs.stat(gzFilePath, function (err, stats) {
if (err) {
throw new Error('There was an error reading ' + gzFilePath);
}
var gzOriginalSize = stats.size;
// Begin timing after gzipped fixtures have been created
var startTime = new Date();
exec('node ' + command, function () {
// Open and read the size of the minified output
fs.stat(minifiedFilePath, function (err, stats) {
if (err) {
throw new Error('There was an error reading ' + minifiedFilePath);
}
var minifiedSize = stats.size;
var minifiedTime = new Date() - startTime;
// Gzip the minified output
exec('gzip --keep --force --best --stdout ' + minifiedFilePath + ' > ' + gzMinifiedFilePath, function () {
// Open and read the size of the minified+gzipped output
fs.stat(gzMinifiedFilePath, function (err, stats) {
if (err) {
throw new Error('There was an error reading ' + gzMinifiedFilePath);
}
var gzMinifiedSize = stats.size;
var gzMinifiedTime = new Date() - startTime;
table.push([
[fileName, '+ gzipped'].join('\n'),
[redSize(originalSize), redSize(gzOriginalSize)].join('\n'),
[greenSize(minifiedSize), greenSize(gzMinifiedSize)].join('\n'),
[blueSavings(originalSize, minifiedSize), blueSavings(gzOriginalSize, gzMinifiedSize)].join('\n'),
[blueTime(minifiedTime), blueTime(gzMinifiedTime)].join('\n')
]);
done();
});
});
});
});
});
});
});
}
(function run() {
test(fileNames.pop(), run);
})();<|fim▁end|> | colWidths: [20, 25, 25, 20, 20]
});
|
<|file_name|>HapticRenderLeaf.cpp<|end_file_name|><|fim▁begin|>/* -*-c++-*- $Id: Version,v 1.2 2004/04/20 12:26:04 andersb Exp $ */
/**
* OsgHaptics - OpenSceneGraph Haptic Library
* Copyright (C) 2006 VRlab, Umeå University
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*/
#include <osgHaptics/HapticRenderLeaf.h>
#include <osgHaptics/RenderTriangleOperator.h>
#include <osgHaptics/HapticRenderBin.h>
#include <osgHaptics/Shape.h>
#include <osgUtil/StateGraph>
#include <osg/Geometry>
#include <osg/TriangleFunctor>
using namespace osgHaptics;
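// Summary (not in the original source): render() applies the leaf's projection/modelview
// matrices and state, looks up the osgHaptics::Shape for the current device in the
// HapticRenderBin, skips leaves whose shape was already drawn or whose device has shape
// rendering disabled, and feeds Geometry through RenderTriangleOperator between
// shape->preDraw() and shape->postDraw().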
void HapticRenderLeaf::render(osg::RenderInfo& renderInfo,osgUtil::RenderLeaf* previous)<|fim▁hole|> // don't draw this leaf if the abort rendering flag has been set.
if (renderInfo.getState()->getAbortRendering())
{
//cout << "early abort"<<endl;
return;
}
if (previous)
{
// apply matrices if required.
renderInfo.getState()->applyProjectionMatrix(_projection.get());
renderInfo.getState()->applyModelViewMatrix(_modelview.get());
// apply state if required.
osgUtil::StateGraph* prev_rg = previous->_parent;
osgUtil::StateGraph* prev_rg_parent = prev_rg->_parent;
osgUtil::StateGraph* rg = _parent;
if (prev_rg_parent!=rg->_parent)
{
osgUtil::StateGraph::moveStateGraph(*renderInfo.getState(),prev_rg_parent,rg->_parent);
// send state changes and matrix changes to OpenGL.
renderInfo.getState()->apply(rg->_stateset.get());
}
else if (rg!=prev_rg)
{
// send state changes and matrix changes to OpenGL.
renderInfo.getState()->apply(rg->_stateset.get());
}
const osgHaptics::Shape *shape = m_renderbin->getShape(renderInfo);
//--by SophiaSoo/CUHK: for two arms
// Does this shape contain the device currently rendered?
if (!shape || !shape->containCurrentDevice()) {
return;
}
bool render_shape=false;
render_shape = !m_renderbin->hasBeenDrawn(renderInfo);
      // If we have a shape but its haptic device is not set to render
      // haptic shapes, bail out and skip the rendering of this HapticRenderLeaf
if (shape && !shape->getHapticDevice()->getEnableShapeRender())
return;
if (shape && render_shape) {
//shape = static_cast<const osgHaptics::Shape*> (sa);
shape->preDraw();
}
#ifdef OSGUTIL_RENDERBACKEND_USE_REF_PTR
osg::Geometry* geom = dynamic_cast<osg::Geometry *>(_drawable.get());
#else
osg::Geometry* geom = dynamic_cast<osg::Geometry *>(_drawable);
#endif
if (geom) {
RenderTriangleOperator op;
geom->accept(op);
}
else
// draw the drawable
{
_drawable->draw(renderInfo);
}
if (shape && render_shape) {
shape->postDraw();
}
}
else
{
std::cerr << "!!!!!!!!!!!!!!!!!!!!!!!!!!!" << std::endl;
// apply matrices if required.
renderInfo.getState()->applyProjectionMatrix(_projection.get());
renderInfo.getState()->applyModelViewMatrix(_modelview.get());
// apply state if required.
osgUtil::StateGraph::moveStateGraph(*renderInfo.getState(),NULL,_parent->_parent);
#ifdef OSGUTIL_RENDERBACKEND_USE_REF_PTR
renderInfo.getState()->apply(_parent->_stateset.get());
#else
renderInfo.getState()->apply(_parent->_stateset);
#endif
// draw the drawable
_drawable->draw(renderInfo);
}
}<|fim▁end|> | { |
<|file_name|>tables.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
class AddProtocol(policy.PolicyTargetMixin, tables.LinkAction):
name = "create"
verbose_name = _("Add Protocol")
url = "horizon:identity:identity_providers:protocols:create"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("identity", "identity:create_protocol"),)
def get_link_url(self, datum=None):
idp_id = self.table.kwargs['identity_provider_id']
return reverse(self.url, args=(idp_id,))
class RemoveProtocol(policy.PolicyTargetMixin, tables.DeleteAction):
@staticmethod
def action_present(count):
return ngettext_lazy(
"Delete Protocol",
"Delete Protocols",
count
)
@staticmethod
def action_past(count):
return ngettext_lazy(<|fim▁hole|> "Deleted Protocol",
"Deleted Protocols",
count
)
policy_rules = (("identity", "identity:delete_protocol"),)
def delete(self, request, obj_id):
identity_provider = self.table.kwargs['identity_provider_id']
protocol = obj_id
api.keystone.protocol_delete(request, identity_provider, protocol)
class ProtocolsTable(tables.DataTable):
protocol = tables.Column("id",
verbose_name=_("Protocol ID"))
mapping = tables.Column("mapping_id",
verbose_name=_("Mapping ID"))
def get_object_display(self, datum):
return datum.id
class Meta(object):
name = "idp_protocols"
verbose_name = _("Protocols")
table_actions = (AddProtocol, RemoveProtocol)
row_actions = (RemoveProtocol, )<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os<|fim▁hole|>import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vgid.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>dwemthysarray.rs<|end_file_name|><|fim▁begin|>use Monster::ScubaArgentine;
use Monster::IndustrialRaverMonkey;
enum Monster {
ScubaArgentine(int, int, int, int),
IndustrialRaverMonkey(int, int, int, int)
}
<|fim▁hole|>impl Monster {
fn attack(&self) {
match *self {
ScubaArgentine(l, s, c, w) => println!("The monster attacks for {} damage.", w),
IndustrialRaverMonkey(l, s, c, w) => println!("The monster attacks for {} damage.", w)
}
}
}
fn main() {
let irm = IndustrialRaverMonkey(46, 35, 91, 2);
irm.attack();
}<|fim▁end|> | |
<|file_name|>linker.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::path::Path;
use std::fs::File;
use std::io::{Read, Write};
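// Summary (not in the original source): this binary dumps the arguments it was invoked
// with, writing literal arguments verbatim and file arguments as "path: fnv-hash-of-contents";
// the first run writes linker-arguments1, a later run writes linker-arguments2.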
fn main() {
let mut dst = env::current_exe().unwrap();
dst.pop();
dst.push("linker-arguments1");
if dst.exists() {
dst.pop();
dst.push("linker-arguments2");<|fim▁hole|> let mut out = String::new();
for arg in env::args().skip(1) {
let path = Path::new(&arg);
if !path.is_file() {
out.push_str(&arg);
out.push_str("\n");
continue
}
let mut contents = Vec::new();
File::open(path).unwrap().read_to_end(&mut contents).unwrap();
out.push_str(&format!("{}: {}\n", arg, hash(&contents)));
}
File::create(dst).unwrap().write_all(out.as_bytes()).unwrap();
}
// fnv hash for now
fn hash(contents: &[u8]) -> u64 {
let mut hash = 0xcbf29ce484222325;
for byte in contents {
hash = hash ^ (*byte as u64);
hash = hash.wrapping_mul(0x100000001b3);
}
hash
}<|fim▁end|> | assert!(!dst.exists());
}
|
<|file_name|>BasePackage.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = {
name: 'base',
configure: function(config) {
config.addCommand(require('./SwitchTextTypeCommand'));
config.addCommand(require('./UndoCommand'));
config.addCommand(require('./RedoCommand'));
config.addTool(require('./UndoTool'));
config.addTool(require('./RedoTool'));
config.addTool(require('./SwitchTextTypeTool'));
// Icons
config.addIcon('undo', { 'fontawesome': 'fa-undo' });
config.addIcon('redo', { 'fontawesome': 'fa-repeat' });
config.addIcon('edit', { 'fontawesome': 'fa-cog' });
config.addIcon('delete', { 'fontawesome': 'fa-times' });
config.addIcon('expand', { 'fontawesome': 'fa-arrows-h' });
config.addIcon('truncate', { 'fontawesome': 'fa-arrows-h' });
// Labels
config.addLabel('undo', {
en: 'Undo',
de: 'Rückgängig'<|fim▁hole|> config.addLabel('redo', {
en: 'Redo',
de: 'Wiederherstellen'
});
config.addLabel('container-selection', {
en: 'Container',
de: 'Container'
});
config.addLabel('container', {
en: 'Container',
de: 'Container'
});
config.addLabel('insert-container', {
en: 'Insert Container',
de: 'Container einfügen'
});
}
};<|fim▁end|> | }); |
<|file_name|>DictionaryTwoLists.js<|end_file_name|><|fim▁begin|>// Description:
// There are two lists of different length. The first one consists of keys,
// the second one consists of values. Write a function
//createDict(keys, values) that returns a dictionary created from keys and
// values. If there are not enough values, the rest of keys should have a
//None (JS null)value. If there not enough keys, just ignore the rest of values.
// Example 1:
// keys = ['a', 'b', 'c', 'd']
// values = [1, 2, 3]
// createDict(keys, values) // returns {'a': 1, 'b': 2, 'c': 3, 'd': null}
// Example 2:
// keys = ['a', 'b', 'c']
// values = [1, 2, 3, 4]
// createDict(keys, values) // returns {'a': 1, 'b': 2, 'c': 3}
function createDict(keys, values){
var result = {};
for(var i = 0;i<keys.length;i++){
result[keys[i]] = values[i]!=undefined ? values[i] : null;<|fim▁hole|>}<|fim▁end|> | }
return result; |
<|file_name|>mulx.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
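// Summary (not in the original source): each function below builds one MULX instruction
// description and checks, via run_test, the byte sequence it encodes to for the given
// operand size.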
fn mulx_1() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(EDX)), operand2: Some(Direct(EBX)), operand3: Some(Direct(EBX)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 99, 246, 211], OperandSize::Dword)
}<|fim▁hole|>}
fn mulx_3() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(ECX)), operand2: Some(Direct(ESP)), operand3: Some(Direct(ESI)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 91, 246, 206], OperandSize::Qword)
}
fn mulx_4() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(EBX)), operand2: Some(Direct(ECX)), operand3: Some(IndirectScaledIndexedDisplaced(RCX, RCX, Two, 1205253571, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 115, 246, 156, 73, 195, 181, 214, 71], OperandSize::Qword)
}
fn mulx_5() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(RBX)), operand2: Some(Direct(RSI)), operand3: Some(Direct(RCX)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 203, 246, 217], OperandSize::Qword)
}
fn mulx_6() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(RSI)), operand2: Some(Direct(RDI)), operand3: Some(Indirect(RBX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 195, 246, 51], OperandSize::Qword)
}<|fim▁end|> |
fn mulx_2() {
run_test(&Instruction { mnemonic: Mnemonic::MULX, operand1: Some(Direct(ECX)), operand2: Some(Direct(EBP)), operand3: Some(IndirectScaledDisplaced(EDX, Eight, 1059438892, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 83, 246, 12, 213, 44, 193, 37, 63], OperandSize::Dword) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate hyper;
use std::io::Write;
use hyper::Server;
use hyper::server::Request;
use hyper::server::Response;
use hyper::net::Fresh;
fn hello(_: Request, res: Response<Fresh>) {
res.send(b"Hello World!").unwrap();
}<|fim▁hole|>}<|fim▁end|> |
fn main() {
Server::http("127.0.0.1:3000").unwrap().handle(hello); |
<|file_name|>availability_set.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class AvailabilitySet(Resource):
"""Create or update availability set parameters.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:param platform_update_domain_count: Update Domain count.
:type platform_update_domain_count: int
:param platform_fault_domain_count: Fault Domain count.
:type platform_fault_domain_count: int
:param virtual_machines: A list of references to all virtual machines in
the availability set.
:type virtual_machines: list of :class:`SubResource
<azure.mgmt.compute.models.SubResource>`
:ivar statuses: The resource status information.
:vartype statuses: list of :class:`InstanceViewStatus
<azure.mgmt.compute.models.InstanceViewStatus>`
:param managed: If the availability set supports managed disks.
:type managed: bool
:param sku: Sku of the availability set
:type sku: :class:`Sku <azure.mgmt.compute.models.Sku>`
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'statuses': {'readonly': True},
}
_attribute_map = {<|fim▁hole|> 'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'platform_update_domain_count': {'key': 'properties.platformUpdateDomainCount', 'type': 'int'},
'platform_fault_domain_count': {'key': 'properties.platformFaultDomainCount', 'type': 'int'},
'virtual_machines': {'key': 'properties.virtualMachines', 'type': '[SubResource]'},
'statuses': {'key': 'properties.statuses', 'type': '[InstanceViewStatus]'},
'managed': {'key': 'properties.managed', 'type': 'bool'},
'sku': {'key': 'sku', 'type': 'Sku'},
}
def __init__(self, location, tags=None, platform_update_domain_count=None, platform_fault_domain_count=None, virtual_machines=None, managed=None, sku=None):
super(AvailabilitySet, self).__init__(location=location, tags=tags)
self.platform_update_domain_count = platform_update_domain_count
self.platform_fault_domain_count = platform_fault_domain_count
self.virtual_machines = virtual_machines
self.statuses = None
self.managed = managed
self.sku = sku<|fim▁end|> | 'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}, |
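Illustrative sketch (not from the generated file): constructing the model documented above; all field values are placeholders.
example_set = AvailabilitySet(
    location='westus',
    tags={'environment': 'test'},
    platform_update_domain_count=5,
    platform_fault_domain_count=3,
    managed=True,
)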
<|file_name|>colors.py<|end_file_name|><|fim▁begin|>class Color:
''' print() wrappers for console colors<|fim▁hole|> def green(*args, **kwargs): print("\033[92m{}\033[0m".format(" ".join(map(str,args))), **kwargs)
def yellow(*args, **kwargs): print("\033[93m{}\033[0m".format(" ".join(map(str,args))), **kwargs)<|fim▁end|> | '''
def red(*args, **kwargs): print("\033[91m{}\033[0m".format(" ".join(map(str,args))), **kwargs) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate capnp;
extern crate capnp_rpc;
use std::error::Error;
use std::io;
use capnp_rpc::{RpcSystem,twoparty,rpc_twoparty_capnp};
use capnp_rpc::capability::{InitRequest,LocalClient,WaitForContent};
use gj::{EventLoop,Promise,TaskReaper,TaskSet};
use gj::io::tcp;
use freestack::identity3;
pub fn accept_loop(listener: tcp::Listener,
mut task_set: TaskSet<(), Box<Error>>,
                   client: Client,
) -> Promise<(), io::Error>
{
listener.accept().lift().then(move |(listener, stream)| {
let (reader, writer) = stream.split();
let mut network =
twoparty::VatNetwork::new(reader, writer,
rpc_twoparty_capnp::Side::Server, Default::default());
let disconnect_promise = network.on_disconnect();<|fim▁hole|>
let rpc_system = RpcSystem::new(Box::new(network), Some(client.clone().client));
task_set.add(disconnect_promise.attach(rpc_system).lift());
accept_loop(listener, task_set, client)
})
}
struct Reaper;
impl TaskReaper<(), Box<Error>> for Reaper {
fn task_failed(&mut self, error: Box<Error>) {
// FIXME: log instead
println!("Task failed: {}", error);
}
}
fn main() {
println!("Starting up");
let bind_addr = "localhost:1234";
let etcd_url = "http://localhost:2379";
let store = Rc::new(RefCell::new(
kvstore::etcd::Etcd::new(etcd_url)
.expect("Error connecting to etcd")));
let identity3_server = identity3::bootstrap_interface(store);
EventLoop::top_level(move |wait_scope| {
use std::net::ToSocketAddrs;
let addr = try!(bind_addr.to_socket_addrs()).next().expect("could ot parse address");
let listener = try!(tcp::Listener::bind(addr));
let task_set = TaskSet::new(Box::new(Reaper));
try!(accept_loop(listener, task_set, identity3_server).wait(wait_scope));
Ok(())
}).expect("top level error");
}<|fim▁end|> | |
<|file_name|>recursion.py<|end_file_name|><|fim▁begin|>def countup(n):
if n >= 10:
print "Blastoff!"
else:
print n
countup(n+1)
def main():
countup(1)
main()
def countdown_from_to(start,stop):
if start == stop:
print "Blastoff!"
elif start <= stop:
print "Invalid pair"
else:
print start
countdown_from_to(start - 1,stop)
def main():
countdown_from_to(89,53)
main()
def adder(sum_):
number = (raw_input("Next Number"))
if (number) == "":
print "The Sum Is {}".format(sum_)
    elif not number.lstrip('-').replace('.', '', 1).isdigit():
        print "Not a number: {}".format(number)
else:
sum_ += float(number)
print "Running total: {}".format(sum_)<|fim▁hole|> adder(sum_)
def main():
sum_ = 0
adder(sum_)
main()<|fim▁end|> | |
<|file_name|>matcher.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate rff;
extern crate test;
use test::Bencher;
use rff::matcher::matches;
#[bench]
fn bench_matches(b: &mut Bencher) {
b.iter(|| matches("amor", "app/models/order.rb"))
}
#[bench]
fn bench_matches_utf8(b: &mut Bencher) {
b.iter(|| matches("ß", "WEIẞ"))
}
#[bench]
fn bench_matches_mixed(b: &mut Bencher) {
b.iter(|| matches("abc", "abØ"))
}
#[bench]
fn bench_matches_more_specific(b: &mut Bencher) {
b.iter(|| matches("app/models", "app/models/order.rb"))
}
#[bench]
fn bench_matches_mixed_case(b: &mut Bencher) {
b.iter(|| matches("AMOr", "App/Models/Order.rb"))
}
<|fim▁hole|> b.iter(|| {
matches("amor", "app/models/order.rb");
matches("amor", "spec/models/order_spec.rb");
matches("amor", "other_garbage.rb");
matches("amor", "Gemfile");
matches("amor", "node_modules/test/a/thing.js");
matches("amor", "vendor/bundle/ruby/gem.rb")
})
}
#[bench]
fn bench_matches_eq(b: &mut Bencher) {
b.iter(|| {
matches("Gemfile", "Gemfile");
matches("gemfile", "Gemfile")
})
}<|fim▁end|> | #[bench]
fn bench_matches_multiple(b: &mut Bencher) { |
<|file_name|>fake_ingress.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
<|fim▁hole|>You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fake
import (
api "k8s.io/kubernetes/pkg/api"
unversioned "k8s.io/kubernetes/pkg/api/unversioned"
extensions "k8s.io/kubernetes/pkg/apis/extensions"
core "k8s.io/kubernetes/pkg/client/testing/core"
labels "k8s.io/kubernetes/pkg/labels"
watch "k8s.io/kubernetes/pkg/watch"
)
// FakeIngresses implements IngressInterface
type FakeIngresses struct {
Fake *FakeExtensions
ns string
}
var ingressesResource = unversioned.GroupVersionResource{Group: "extensions", Version: "", Resource: "ingresses"}
func (c *FakeIngresses) Create(ingress *extensions.Ingress) (result *extensions.Ingress, err error) {
obj, err := c.Fake.
Invokes(core.NewCreateAction(ingressesResource, c.ns, ingress), &extensions.Ingress{})
if obj == nil {
return nil, err
}
return obj.(*extensions.Ingress), err
}
func (c *FakeIngresses) Update(ingress *extensions.Ingress) (result *extensions.Ingress, err error) {
obj, err := c.Fake.
Invokes(core.NewUpdateAction(ingressesResource, c.ns, ingress), &extensions.Ingress{})
if obj == nil {
return nil, err
}
return obj.(*extensions.Ingress), err
}
func (c *FakeIngresses) UpdateStatus(ingress *extensions.Ingress) (*extensions.Ingress, error) {
obj, err := c.Fake.
Invokes(core.NewUpdateSubresourceAction(ingressesResource, "status", c.ns, ingress), &extensions.Ingress{})
if obj == nil {
return nil, err
}
return obj.(*extensions.Ingress), err
}
func (c *FakeIngresses) Delete(name string, options *api.DeleteOptions) error {
_, err := c.Fake.
Invokes(core.NewDeleteAction(ingressesResource, c.ns, name), &extensions.Ingress{})
return err
}
func (c *FakeIngresses) DeleteCollection(options *api.DeleteOptions, listOptions api.ListOptions) error {
action := core.NewDeleteCollectionAction(ingressesResource, c.ns, listOptions)
_, err := c.Fake.Invokes(action, &extensions.IngressList{})
return err
}
func (c *FakeIngresses) Get(name string) (result *extensions.Ingress, err error) {
obj, err := c.Fake.
Invokes(core.NewGetAction(ingressesResource, c.ns, name), &extensions.Ingress{})
if obj == nil {
return nil, err
}
return obj.(*extensions.Ingress), err
}
func (c *FakeIngresses) List(opts api.ListOptions) (result *extensions.IngressList, err error) {
obj, err := c.Fake.
Invokes(core.NewListAction(ingressesResource, c.ns, opts), &extensions.IngressList{})
if obj == nil {
return nil, err
}
label, _, _ := core.ExtractFromListOptions(opts)
if label == nil {
label = labels.Everything()
}
list := &extensions.IngressList{}
for _, item := range obj.(*extensions.IngressList).Items {
if label.Matches(labels.Set(item.Labels)) {
list.Items = append(list.Items, item)
}
}
return list, err
}
// Watch returns a watch.Interface that watches the requested ingresses.
func (c *FakeIngresses) Watch(opts api.ListOptions) (watch.Interface, error) {
return c.Fake.
InvokesWatch(core.NewWatchAction(ingressesResource, c.ns, opts))
}
// Patch applies the patch and returns the patched ingress.
func (c *FakeIngresses) Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *extensions.Ingress, err error) {
obj, err := c.Fake.
Invokes(core.NewPatchSubresourceAction(ingressesResource, c.ns, name, data, subresources...), &extensions.Ingress{})
if obj == nil {
return nil, err
}
return obj.(*extensions.Ingress), err
}<|fim▁end|> | Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. |
<|file_name|>ghost-config.js<|end_file_name|><|fim▁begin|>var ConfigInitializer = {
name: 'config',
initialize: function (container, application) {
var apps = $('body').data('apps'),<|fim▁hole|> fileStorage = $('body').data('filestorage'),
blogUrl = $('body').data('blogurl'),
blogTitle = $('body').data('blogtitle');
application.register(
'ghost:config', {apps: apps, fileStorage: fileStorage, blogUrl: blogUrl, tagsUI: tagsUI, blogTitle: blogTitle}, {instantiate: false}
);
application.inject('route', 'config', 'ghost:config');
application.inject('controller', 'config', 'ghost:config');
application.inject('component', 'config', 'ghost:config');
}
};
export default ConfigInitializer;<|fim▁end|> | tagsUI = $('body').data('tagsui'), |
<|file_name|>JavaXmlRequests.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package javaguide.xml;
import org.w3c.dom.Document;
import play.libs.XPath;
import play.mvc.BodyParser;
import play.mvc.Controller;
import play.mvc.Result;
public class JavaXmlRequests extends Controller {
//#xml-hello
public Result sayHello() {
Document dom = request().body().asXml();
if (dom == null) {
return badRequest("Expecting Xml data");
} else {<|fim▁hole|> return badRequest("Missing parameter [name]");
} else {
return ok("Hello " + name);
}
}
}
//#xml-hello
//#xml-hello-bodyparser
@BodyParser.Of(BodyParser.Xml.class)
public Result sayHelloBP() {
Document dom = request().body().asXml();
if (dom == null) {
return badRequest("Expecting Xml data");
} else {
String name = XPath.selectText("//name", dom);
if (name == null) {
return badRequest("Missing parameter [name]");
} else {
return ok("Hello " + name);
}
}
}
//#xml-hello-bodyparser
//#xml-reply
@BodyParser.Of(BodyParser.Xml.class)
public Result replyHello() {
Document dom = request().body().asXml();
if (dom == null) {
return badRequest("Expecting Xml data");
} else {
String name = XPath.selectText("//name", dom);
if (name == null) {
return badRequest("<message \"status\"=\"KO\">Missing parameter [name]</message>").as("application/xml");
} else {
return ok("<message \"status\"=\"OK\">Hello " + name + "</message>").as("application/xml");
}
}
}
//#xml-reply
}<|fim▁end|> | String name = XPath.selectText("//name", dom);
if (name == null) { |
<|file_name|>test_login_flow.py<|end_file_name|><|fim▁begin|>"""Tests for the login flow."""
from . import async_setup_auth
from tests.async_mock import patch
from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI
async def test_fetch_auth_providers(hass, aiohttp_client):
"""Test fetching auth providers."""
client = await async_setup_auth(hass, aiohttp_client)
resp = await client.get("/auth/providers")
assert resp.status == 200
assert await resp.json() == [
{"name": "Example", "type": "insecure_example", "id": None}
]
async def test_fetch_auth_providers_onboarding(hass, aiohttp_client):
"""Test fetching auth providers."""
client = await async_setup_auth(hass, aiohttp_client)
with patch(
"homeassistant.components.onboarding.async_is_user_onboarded",
return_value=False,
):
resp = await client.get("/auth/providers")
assert resp.status == 400
assert await resp.json() == {
"message": "Onboarding not finished",
"code": "onboarding_required",
}
async def test_cannot_get_flows_in_progress(hass, aiohttp_client):
"""Test we cannot get flows in progress."""
client = await async_setup_auth(hass, aiohttp_client, [])
resp = await client.get("/auth/login_flow")
assert resp.status == 405
async def test_invalid_username_password(hass, aiohttp_client):
"""Test we cannot get flows in progress."""
client = await async_setup_auth(hass, aiohttp_client)
resp = await client.post(
"/auth/login_flow",
json={
"client_id": CLIENT_ID,
"handler": ["insecure_example", None],
"redirect_uri": CLIENT_REDIRECT_URI,
},
)
assert resp.status == 200
step = await resp.json()
# Incorrect username
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={
"client_id": CLIENT_ID,
"username": "wrong-user",
"password": "test-pass",
},
)
assert resp.status == 200
step = await resp.json()
assert step["step_id"] == "init"
assert step["errors"]["base"] == "invalid_auth"
# Incorrect password
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={
"client_id": CLIENT_ID,
"username": "test-user",
"password": "wrong-pass",
},
)
assert resp.status == 200
step = await resp.json()
assert step["step_id"] == "init"
assert step["errors"]["base"] == "invalid_auth"
async def test_login_exist_user(hass, aiohttp_client):
"""Test logging in with exist user."""
client = await async_setup_auth(hass, aiohttp_client, setup_api=True)
cred = await hass.auth.auth_providers[0].async_get_or_create_credentials(
{"username": "test-user"}
)
await hass.auth.async_get_or_create_user(cred)
resp = await client.post(
"/auth/login_flow",
json={
"client_id": CLIENT_ID,
"handler": ["insecure_example", None],
"redirect_uri": CLIENT_REDIRECT_URI,
},<|fim▁hole|> resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={"client_id": CLIENT_ID, "username": "test-user", "password": "test-pass"},
)
assert resp.status == 200
step = await resp.json()
assert step["type"] == "create_entry"
assert len(step["result"]) > 1<|fim▁end|> | )
assert resp.status == 200
step = await resp.json()
|
<|file_name|>setJobState.ts<|end_file_name|><|fim▁begin|>/**
* Set the Job state data
* @param {string} id - the job id
* @param {IState} state - the state document
*/<|fim▁hole|> let response: IResponse = getContext().getResponse();
let collectionLink: string = collection.getAltLink();
let documentLink: string = `${collectionLink}/docs/${id}`;
// default response
response.setBody(false);
let isAccepted: boolean = collection.readDocument(documentLink, (error: IRequestCallbackError, job: IJob) => {
if (error) {
throw error;
}
job.state_id = state.id;
job.state_name = state.name;
let options: IReplaceOptions = { etag: job._etag };
let success: boolean = collection.replaceDocument(job._self, job, options, (err: IRequestCallbackError) => {
if (err) {
throw err;
}
response.setBody(true);
});
if (!success) {
throw new Error("The call was not accepted");
}
});
if (!isAccepted) {
throw new Error("The call was not accepted");
}
}<|fim▁end|> | function setJobState(id: string, state: IState) {
let context: IContext = getContext();
let collection: ICollection = context.getCollection(); |
<|file_name|>statusHeader.js<|end_file_name|><|fim▁begin|>let fs = require('fs');
let path = require('path');
let moviesData = require('../config/database');
module.exports = (req, res) => {
if(req.headers.statusheader === "Full") {
fs.readFile("./views/status.html", (err, data) => {
if(err) {
console.log(err);
res.writeHead(404);
res.write('404 Not Found');<|fim▁hole|> 'Content-Type': 'text/html'
});
let imagesCount = moviesData.getMovies().length;
data = data.toString().replace('{content}', `There are currently ${imagesCount} images.`);
res.write(data);
res.end();
})
} else {
return true;
}
}<|fim▁end|> | res.end();
}
res.writeHead(200, { |
<|file_name|>test_pymeta.py<|end_file_name|><|fim▁begin|>import operator
from textwrap import dedent
from twisted.trial import unittest
from ometa.grammar import OMeta, TermOMeta, TreeTransformerGrammar
from ometa.compat import OMeta1
from ometa.runtime import (ParseError, OMetaBase, OMetaGrammarBase, EOFError,
expected, TreeTransformerBase)
from ometa.interp import GrammarInterpreter, TrampolinedGrammarInterpreter
from terml.parser import parseTerm as term
class HandyWrapper(object):
"""
Convenient grammar wrapper for parsing strings.
"""
def __init__(self, klass):
"""
@param klass: The grammar class to be wrapped.
"""
self.klass = klass
def __getattr__(self, name):
"""
Return a function that will instantiate a grammar and invoke the named
rule.
@param: Rule name.
"""
def doIt(s):
"""
@param s: The string to be parsed by the wrapped grammar.
"""
obj = self.klass(s)
ret, err = obj.apply(name)
try:
extra, _ = obj.input.head()
except EOFError:
try:
return ''.join(ret)
except TypeError:
return ret
else:
raise err
return doIt
class OMeta1TestCase(unittest.TestCase):
"""
Tests of OMeta grammar compilation, with v1 syntax.
"""
classTested = OMeta1
def compile(self, grammar):
"""
Produce an object capable of parsing via this grammar.
@param grammar: A string containing an OMeta grammar.
"""
m = self.classTested.makeGrammar(dedent(grammar), 'TestGrammar')
g = m.createParserClass(OMetaBase, globals())
return HandyWrapper(g)
def test_literals(self):
"""
Input matches can be made on literal characters.
"""
g = self.compile("digit ::= '1'")
self.assertEqual(g.digit("1"), "1")
self.assertRaises(ParseError, g.digit, "4")
def test_multipleRules(self):
"""
Grammars with more than one rule work properly.
"""
g = self.compile("""
digit ::= '1'
aLetter ::= 'a'
""")
self.assertEqual(g.digit("1"), "1")
self.assertRaises(ParseError, g.digit, "4")
def test_escapedLiterals(self):
"""
Input matches can be made on escaped literal characters.
"""
g = self.compile(r"newline ::= '\n'")
self.assertEqual(g.newline("\n"), "\n")
def test_integers(self):
"""
Input matches can be made on literal integers.
"""
g = self.compile("stuff ::= 17 0x1F -2 0177")
self.assertEqual(g.stuff([17, 0x1f, -2, 0177]), 0177)
self.assertRaises(ParseError, g.stuff, [1, 2, 3])
def test_star(self):
"""
Input matches can be made on zero or more repetitions of a pattern.
"""
g = self.compile("xs ::= 'x'*")
self.assertEqual(g.xs(""), "")
self.assertEqual(g.xs("x"), "x")
self.assertEqual(g.xs("xxxx"), "xxxx")
self.assertRaises(ParseError, g.xs, "xy")
def test_plus(self):
"""
Input matches can be made on one or more repetitions of a pattern.
"""
g = self.compile("xs ::= 'x'+")
self.assertEqual(g.xs("x"), "x")
self.assertEqual(g.xs("xxxx"), "xxxx")
self.assertRaises(ParseError, g.xs, "xy")
self.assertRaises(ParseError, g.xs, "")
def test_sequencing(self):
"""
Input matches can be made on a sequence of patterns.
"""
g = self.compile("twelve ::= '1' '2'")
self.assertEqual(g.twelve("12"), "2");
self.assertRaises(ParseError, g.twelve, "1")
def test_alternatives(self):
"""
Input matches can be made on one of a set of alternatives.
"""
g = self.compile("digit ::= '0' | '1' | '2'")
self.assertEqual(g.digit("0"), "0")
self.assertEqual(g.digit("1"), "1")
self.assertEqual(g.digit("2"), "2")
self.assertRaises(ParseError, g.digit, "3")
def test_optional(self):
"""
Subpatterns can be made optional.
"""
g = self.compile("foo ::= 'x' 'y'? 'z'")
self.assertEqual(g.foo("xyz"), 'z')
self.assertEqual(g.foo("xz"), 'z')
def test_apply(self):
"""
Other productions can be invoked from within a production.
"""
g = self.compile("""
digit ::= '0' | '1'
bits ::= <digit>+
""")
self.assertEqual(g.bits('0110110'), '0110110')
def test_negate(self):
"""
Input can be matched based on its failure to match a pattern.
"""
g = self.compile("foo ::= ~'0' <anything>")
self.assertEqual(g.foo("1"), "1")
self.assertRaises(ParseError, g.foo, "0")
def test_ruleValue(self):
"""
Productions can specify a Python expression that provides the result
of the parse.
"""
g = self.compile("foo ::= '1' => 7")
self.assertEqual(g.foo('1'), 7)
def test_ruleValueEscapeQuotes(self):
"""
Escaped quotes are handled properly in Python expressions.
"""
g = self.compile(r"""escapedChar ::= '\'' => '\\\''""")
self.assertEqual(g.escapedChar("'"), "\\'")
def test_ruleValueEscapeSlashes(self):
"""
Escaped slashes are handled properly in Python expressions.
"""
g = self.compile(r"""escapedChar ::= '\\' => '\\'""")
self.assertEqual(g.escapedChar("\\"), "\\")
def test_lookahead(self):
"""
Doubled negation does lookahead.
"""
g = self.compile("""
foo ::= ~~(:x) <bar x>
bar :x ::= :a :b ?(x == a == b) => x
""")
self.assertEqual(g.foo("11"), '1')
self.assertEqual(g.foo("22"), '2')
def test_binding(self):
"""
The result of a parsing expression can be bound to a name.
"""
g = self.compile("foo ::= '1':x => int(x) * 2")
self.assertEqual(g.foo("1"), 2)
def test_bindingAccess(self):
"""
Bound names in a rule can be accessed on the grammar's "locals" dict.
"""
G = self.classTested.makeGrammar(
"stuff ::= '1':a ('2':b | '3':c)", 'TestGrammar').createParserClass(OMetaBase, {})
g = G("12")
self.assertEqual(g.apply("stuff")[0], '2')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['b'], '2')
g = G("13")
self.assertEqual(g.apply("stuff")[0], '3')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['c'], '3')
def test_predicate(self):
"""
Python expressions can be used to determine the success or failure of a
parse.
"""
g = self.compile("""
digit ::= '0' | '1'
double_bits ::= <digit>:a <digit>:b ?(a == b) => int(b)
""")
self.assertEqual(g.double_bits("00"), 0)
self.assertEqual(g.double_bits("11"), 1)
self.assertRaises(ParseError, g.double_bits, "10")
self.assertRaises(ParseError, g.double_bits, "01")
def test_parens(self):
"""
Parens can be used to group subpatterns.
"""
g = self.compile("foo ::= 'a' ('b' | 'c')")
self.assertEqual(g.foo("ab"), "b")
self.assertEqual(g.foo("ac"), "c")
def test_action(self):
"""
Python expressions can be run as actions with no effect on the result
of the parse.
"""
g = self.compile("""foo ::= ('1'*:ones !(False) !(ones.insert(0, '0')) => ''.join(ones))""")
self.assertEqual(g.foo("111"), "0111")
def test_bindNameOnly(self):
"""
A pattern consisting of only a bind name matches a single element and
binds it to that name.
"""
g = self.compile("foo ::= '1' :x '2' => x")
self.assertEqual(g.foo("132"), "3")
def test_args(self):
"""
Productions can take arguments.
"""
g = self.compile("""
digit ::= ('0' | '1' | '2'):d => int(d)
foo :x :ignored ::= (?(x > 1) '9' | ?(x <= 1) '8'):d => int(d)
baz ::= <digit>:a <foo a None>:b => [a, b]
""")
self.assertEqual(g.baz("18"), [1, 8])
self.assertEqual(g.baz("08"), [0, 8])
self.assertEqual(g.baz("29"), [2, 9])
self.assertRaises(ParseError, g.foo, "28")
def test_patternMatch(self):
"""
Productions can pattern-match on arguments.
Also, multiple definitions of a rule can be done in sequence.
"""
g = self.compile("""
fact 0 => 1
fact :n ::= <fact (n - 1)>:m => n * m
""")
self.assertEqual(g.fact([3]), 6)
def test_listpattern(self):
"""
Brackets can be used to match contents of lists.
"""
g = self.compile("""
digit ::= :x ?(x.isdigit()) => int(x)
interp ::= [<digit>:x '+' <digit>:y] => x + y
""")
self.assertEqual(g.interp([['3', '+', '5']]), 8)
def test_listpatternresult(self):
"""
The result of a list pattern is the entire list.
"""
g = self.compile("""
digit ::= :x ?(x.isdigit()) => int(x)
interp ::= [<digit>:x '+' <digit>:y]:z => (z, x + y)
""")
e = ['3', '+', '5']
self.assertEqual(g.interp([e]), (e, 8))
def test_recursion(self):
"""
Rules can call themselves.
"""
g = self.compile("""
interp ::= (['+' <interp>:x <interp>:y] => x + y
| ['*' <interp>:x <interp>:y] => x * y
| :x ?(isinstance(x, str) and x.isdigit()) => int(x))
""")
self.assertEqual(g.interp([['+', '3', ['*', '5', '2']]]), 13)
def test_leftrecursion(self):
"""
Left-recursion is detected and compiled appropriately.
"""
g = self.compile("""
num ::= (<num>:n <digit>:d => n * 10 + d
| <digit>)
digit ::= :x ?(x.isdigit()) => int(x)
""")
self.assertEqual(g.num("3"), 3)
self.assertEqual(g.num("32767"), 32767)
def test_characterVsSequence(self):
"""
Characters (in single-quotes) are not regarded as sequences.
"""
g = self.compile("""
interp ::= ([<interp>:x '+' <interp>:y] => x + y
| [<interp>:x '*' <interp>:y] => x * y
| :x ?(isinstance(x, basestring) and x.isdigit()) => int(x))
""")
self.assertEqual(g.interp([['3', '+', ['5', '*', '2']]]), 13)
self.assertEqual(g.interp([[u'3', u'+', [u'5', u'*', u'2']]]), 13)
def test_string(self):
"""
Strings in double quotes match string objects.
"""
g = self.compile("""
interp ::= ["Foo" 1 2] => 3
""")
self.assertEqual(g.interp([["Foo", 1, 2]]), 3)
def test_argEscape(self):
"""
Regression test for bug #239344.
"""
g = self.compile("""
memo_arg :arg ::= <anything> ?(False)
trick ::= <letter> <memo_arg 'c'>
broken ::= <trick> | <anything>*
""")
self.assertEqual(g.broken('ab'), 'ab')
def test_comments(self):
"""
Comments in grammars are accepted and ignored.
"""
g = self.compile("""
#comment here
digit ::= ( '0' #second comment
| '1') #another one
#comments after rules are cool too
bits ::= <digit>+ #last one
""")
self.assertEqual(g.bits('0110110'), '0110110')
def test_accidental_bareword(self):
"""
Accidental barewords are treated as syntax errors in the grammar.
"""
self.assertRaises(ParseError,
self.compile, """
atom ::= ~('|') :a => Regex_Atom(a)
| ' ' atom:a
""")
class OMetaTestCase(unittest.TestCase):
"""
Tests of OMeta grammar compilation.
"""
classTested = OMeta
def compile(self, grammar, globals=None):
"""
Produce an object capable of parsing via this grammar.
@param grammar: A string containing an OMeta grammar.
"""
g = self.classTested.makeGrammar(grammar, 'TestGrammar').createParserClass(OMetaBase, globals or {})
return HandyWrapper(g)
def test_literals(self):
"""
Input matches can be made on literal characters.
"""
g = self.compile("digit = '1'")
self.assertEqual(g.digit("1"), "1")
self.assertRaises(ParseError, g.digit, "4")
def test_escaped_char(self):
"""
Hex escapes are supported in strings in grammars.
"""
g = self.compile(r"bel = '\x07'")
self.assertEqual(g.bel("\x07"), "\x07")
def test_literals_multi(self):
"""
Input matches can be made on multiple literal characters at
once.
"""
g = self.compile("foo = 'foo'")
self.assertEqual(g.foo("foo"), "foo")
self.assertRaises(ParseError, g.foo, "for")
def test_token(self):
"""
Input matches can be made on tokens, which default to
consuming leading whitespace.
"""
g = self.compile('foo = "foo"')
self.assertEqual(g.foo(" foo"), "foo")
self.assertRaises(ParseError, g.foo, "fog")
def test_multipleRules(self):
"""
Grammars with more than one rule work properly.
"""
g = self.compile("""
digit = '1'
aLetter = 'a'
""")
self.assertEqual(g.digit("1"), "1")
self.assertRaises(ParseError, g.digit, "4")
def test_escapedLiterals(self):
"""
Input matches can be made on escaped literal characters.
"""
g = self.compile(r"newline = '\n'")
self.assertEqual(g.newline("\n"), "\n")
def test_integers(self):
"""
Input matches can be made on literal integers.
"""
g = self.compile("stuff = 17 0x1F -2 0177")
self.assertEqual(g.stuff([17, 0x1f, -2, 0177]), 0177)
self.assertRaises(ParseError, g.stuff, [1, 2, 3])
def test_star(self):
"""
Input matches can be made on zero or more repetitions of a pattern.
"""
g = self.compile("xs = 'x'*")
self.assertEqual(g.xs(""), "")
self.assertEqual(g.xs("x"), "x")
self.assertEqual(g.xs("xxxx"), "xxxx")
self.assertRaises(ParseError, g.xs, "xy")
def test_plus(self):
"""
Input matches can be made on one or more repetitions of a pattern.
"""
g = self.compile("xs = 'x'+")
self.assertEqual(g.xs("x"), "x")
self.assertEqual(g.xs("xxxx"), "xxxx")
self.assertRaises(ParseError, g.xs, "xy")
self.assertRaises(ParseError, g.xs, "")
def test_repeat(self):
"""
Match repetitions can be specifically numbered.
"""
g = self.compile("xs = 'x'{2, 4}:n 'x'* -> n")
self.assertEqual(g.xs("xx"), "xx")
self.assertEqual(g.xs("xxxx"), "xxxx")
self.assertEqual(g.xs("xxxxxx"), "xxxx")
self.assertRaises(ParseError, g.xs, "x")
self.assertRaises(ParseError, g.xs, "")
def test_repeat_single(self):
"""
Match repetitions can be specifically numbered.
"""
g = self.compile("xs = 'x'{3}:n 'x'* -> n")
self.assertEqual(g.xs("xxx"), "xxx")
self.assertEqual(g.xs("xxxxxx"), "xxx")
self.assertRaises(ParseError, g.xs, "xx")
def test_repeat_zero(self):
"""
Match repetitions can be specifically numbered.
"""
g = self.compile("xs = 'x'{0}:n 'y' -> n")
self.assertEqual(g.xs("y"), "")
self.assertRaises(ParseError, g.xs, "xy")
def test_repeat_zero_n(self):
"""
Match repetitions can be specifically numbered.
"""
g = self.compile("""
xs :n = 'x'{n}:a 'y' -> a
start = xs(0)
""")
self.assertEqual(g.start("y"), "")
self.assertRaises(ParseError, g.start, "xy")
def test_repeat_var(self):
"""
Match repetitions can be variables.
"""
g = self.compile("xs = (:v -> int(v)):n 'x'{n}:xs 'x'* -> xs")
self.assertEqual(g.xs("2xx"), "xx")
self.assertEqual(g.xs("4xxxx"), "xxxx")
self.assertEqual(g.xs("3xxxxxx"), "xxx")
self.assertRaises(ParseError, g.xs, "2x")
self.assertRaises(ParseError, g.xs, "1")
def test_sequencing(self):
"""
Input matches can be made on a sequence of patterns.
"""
g = self.compile("twelve = '1' '2'")
self.assertEqual(g.twelve("12"), "2");
self.assertRaises(ParseError, g.twelve, "1")
def test_alternatives(self):
"""
Input matches can be made on one of a set of alternatives.
"""
g = self.compile("digit = '0' | '1' | '2'")
self.assertEqual(g.digit("0"), "0")
self.assertEqual(g.digit("1"), "1")
self.assertEqual(g.digit("2"), "2")
self.assertRaises(ParseError, g.digit, "3")
def test_optional(self):
"""
Subpatterns can be made optional.
"""
g = self.compile("foo = 'x' 'y'? 'z'")
self.assertEqual(g.foo("xyz"), 'z')
self.assertEqual(g.foo("xz"), 'z')
def test_apply(self):
"""
Other productions can be invoked from within a production.
"""
g = self.compile("""
digit = '0' | '1'
bits = digit+
""")
self.assertEqual(g.bits('0110110'), '0110110')
def test_negate(self):
"""
Input can be matched based on its failure to match a pattern.
"""
g = self.compile("foo = ~'0' anything")
self.assertEqual(g.foo("1"), "1")
self.assertRaises(ParseError, g.foo, "0")
def test_ruleValue(self):
"""
Productions can specify a Python expression that provides the result
of the parse.
"""
g = self.compile("foo = '1' -> 7")
self.assertEqual(g.foo('1'), 7)
def test_lookahead(self):
"""
Doubled negation does lookahead.
"""
g = self.compile("""
foo = ~~(:x) bar(x)
bar :x = :a :b ?(x == a == b) -> x
""")
self.assertEqual(g.foo("11"), '1')
self.assertEqual(g.foo("22"), '2')
def test_binding(self):
"""
The result of a parsing expression can be bound to a name.
"""
g = self.compile("foo = '1':x -> int(x) * 2")
self.assertEqual(g.foo("1"), 2)
def test_bindingAccess(self):
"""
Bound names in a rule can be accessed on the grammar's "locals" dict.
"""
G = self.classTested.makeGrammar(
"stuff = '1':a ('2':b | '3':c)", 'TestGrammar').createParserClass(OMetaBase, {})
g = G("12")
self.assertEqual(g.apply("stuff")[0], '2')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['b'], '2')
g = G("13")
self.assertEqual(g.apply("stuff")[0], '3')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['c'], '3')
def test_predicate(self):
"""
Python expressions can be used to determine the success or
failure of a parse.
"""
g = self.compile("""
digit = '0' | '1'
double_bits = digit:a digit:b ?(a == b) -> int(b)
""")
self.assertEqual(g.double_bits("00"), 0)
self.assertEqual(g.double_bits("11"), 1)
self.assertRaises(ParseError, g.double_bits, "10")
self.assertRaises(ParseError, g.double_bits, "01")
def test_parens(self):
"""
Parens can be used to group subpatterns.
"""
g = self.compile("foo = 'a' ('b' | 'c')")
self.assertEqual(g.foo("ab"), "b")
self.assertEqual(g.foo("ac"), "c")
def test_action(self):
"""
Python expressions can be run as actions with no effect on the
result of the parse.
"""
g = self.compile("""foo = ('1'*:ones !(False) !(ones.insert(0, '0')) -> ''.join(ones))""")
self.assertEqual(g.foo("111"), "0111")
def test_bindNameOnly(self):
"""
A pattern consisting of only a bind name matches a single element and
binds it to that name.
"""
g = self.compile("foo = '1' :x '2' -> x")
self.assertEqual(g.foo("132"), "3")
def test_args(self):
"""
Productions can take arguments.
"""
g = self.compile("""
digit = ('0' | '1' | '2'):d -> int(d)
foo :x = (?(x > 1) '9' | ?(x <= 1) '8'):d -> int(d)
baz = digit:a foo(a):b -> [a, b]
""")
self.assertEqual(g.baz("18"), [1, 8])
self.assertEqual(g.baz("08"), [0, 8])
self.assertEqual(g.baz("29"), [2, 9])
self.assertRaises(ParseError, g.foo, "28")
def test_patternMatch(self):
"""
Productions can pattern-match on arguments.
Also, multiple definitions of a rule can be done in sequence.
"""
g = self.compile("""
fact 0 -> 1
fact :n = fact((n - 1)):m -> n * m
""")
self.assertEqual(g.fact([3]), 6)
def test_listpattern(self):
"""
Brackets can be used to match contents of lists.
"""
g = self.compile("""
digit = :x ?(x.isdigit()) -> int(x)
interp = [digit:x '+' digit:y] -> x + y
""")
self.assertEqual(g.interp([['3', '+', '5']]), 8)
def test_listpatternresult(self):
"""
The result of a list pattern is the entire list.
"""
g = self.compile("""
digit = :x ?(x.isdigit()) -> int(x)
interp = [digit:x '+' digit:y]:z -> (z, x + y)
""")
e = ['3', '+', '5']
self.assertEqual(g.interp([e]), (e, 8))
def test_recursion(self):
"""
Rules can call themselves.
"""
g = self.compile("""
interp = (['+' interp:x interp:y] -> x + y
| ['*' interp:x interp:y] -> x * y
| :x ?(isinstance(x, str) and x.isdigit()) -> int(x))
""")
self.assertEqual(g.interp([['+', '3', ['*', '5', '2']]]), 13)
def test_leftrecursion(self):
"""
Left-recursion is detected and compiled appropriately.
"""
g = self.compile("""
num = (num:n digit:d -> n * 10 + d
| digit)
digit = :x ?(x.isdigit()) -> int(x)
""")
self.assertEqual(g.num("3"), 3)
self.assertEqual(g.num("32767"), 32767)
def test_characterVsSequence(self):
"""
Characters (in single-quotes) are not regarded as sequences.
"""
g = self.compile("""
interp = ([interp:x '+' interp:y] -> x + y
| [interp:x '*' interp:y] -> x * y
| :x ?(isinstance(x, basestring) and x.isdigit()) -> int(x))
""")
self.assertEqual(g.interp([['3', '+', ['5', '*', '2']]]), 13)
self.assertEqual(g.interp([[u'3', u'+', [u'5', u'*', u'2']]]), 13)
def test_stringConsumedBy(self):
"""
OMeta2's "consumed-by" operator works on strings.
"""
g = self.compile("""
ident = <letter (letter | digit)*>
""")
self.assertEqual(g.ident("a"), "a")
self.assertEqual(g.ident("abc"), "abc")
self.assertEqual(g.ident("a1z"), "a1z")
self.assertRaises(ParseError, g.ident, "1a")
def test_listConsumedBy(self):
"""
OMeta2's "consumed-by" operator works on lists.
"""
g = self.compile("""
ands = [<"And" (ors | vals)*>:x] -> x
ors = [<"Or" vals*:x>] -> x
vals = 1 | 0
""")
self.assertEqual(g.ands([["And", ["Or", 1, 0], 1]]),
["And", ["Or", 1, 0], 1])
def test_string(self):
"""
Strings in double quotes match string objects.
"""
g = self.compile("""
interp = ["Foo" 1 2] -> 3
""")
self.assertEqual(g.interp([["Foo", 1, 2]]), 3)
def test_argEscape(self):
"""
Regression test for bug #239344.
"""
g = self.compile("""
memo_arg :arg = anything ?(False)
trick = letter memo_arg('c')
broken = trick | anything*
""")
self.assertEqual(g.broken('ab'), 'ab')
class TermActionGrammarTests(OMetaTestCase):
classTested = TermOMeta
def test_binding(self):
"""
The result of a parsing expression can be bound to a name.
"""
g = self.compile("foo = '1':x -> mul(int(x), 2)",
{"mul": operator.mul})
self.assertEqual(g.foo("1"), 2)
def test_bindingAccess(self):
"""
Bound names in a rule can be accessed on the grammar's "locals" dict.
"""
G = self.classTested.makeGrammar(
"stuff = '1':a ('2':b | '3':c)", 'TestGrammar').createParserClass(OMetaBase, {})
g = G("12")
self.assertEqual(g.apply("stuff")[0], '2')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['b'], '2')
g = G("13")
self.assertEqual(g.apply("stuff")[0], '3')
self.assertEqual(g.locals['stuff']['a'], '1')
self.assertEqual(g.locals['stuff']['c'], '3')
def test_predicate(self):
"""
Term actions can be used to determine the success or
failure of a parse.
"""
g = self.compile("""
digit = '0' | '1'
double_bits = digit:a digit:b ?(equal(a, b)) -> int(b)
""", {"equal": operator.eq})
self.assertEqual(g.double_bits("00"), 0)
self.assertEqual(g.double_bits("11"), 1)
self.assertRaises(ParseError, g.double_bits, "10")
self.assertRaises(ParseError, g.double_bits, "01")
def test_action(self):
"""
Term actions can be run as actions with no effect on the
result of the parse.
"""
g = self.compile(
"""foo = ('1'*:ones !(False)
!(nconc(ones, '0')) -> join(ones))""",
{"nconc": lambda lst, val: lst.insert(0, val),
"join": ''.join})
self.assertEqual(g.foo("111"), "0111")
def test_patternMatch(self):
"""
Productions can pattern-match on arguments.
Also, multiple definitions of a rule can be done in sequence.
"""
g = self.compile("""
fact 0 -> 1
fact :n = fact(decr(n)):m -> mul(n, m)
""", {"mul": operator.mul, "decr": lambda x: x -1})
self.assertEqual(g.fact([3]), 6)
def test_listpattern(self):
"""
Brackets can be used to match contents of lists.
"""
g = self.compile("""
digit = :x ?(x.isdigit()) -> int(x)
interp = [digit:x '+' digit:y] -> add(x, y)
""", {"add": operator.add})
self.assertEqual(g.interp([['3', '+', '5']]), 8)
def test_listpatternresult(self):
"""
The result of a list pattern is the entire list.
"""
g = self.compile("""
digit = :x ?(x.isdigit()) -> int(x)
interp = [digit:x '+' digit:y]:z -> [z, plus(x, y)]
""", {"plus": operator.add})
e = ['3', '+', '5']
self.assertEqual(g.interp([e]), [e, 8])
def test_recursion(self):
"""
Rules can call themselves.
"""
g = self.compile("""
interp = (['+' interp:x interp:y] -> add(x, y)
| ['*' interp:x interp:y] -> mul(x, y)
| :x ?(isdigit(x)) -> int(x))
""", {"mul": operator.mul,
"add": operator.add,
"isdigit": lambda x: str(x).isdigit()})
self.assertEqual(g.interp([['+', '3', ['*', '5', '2']]]), 13)
def test_leftrecursion(self):
"""
Left-recursion is detected and compiled appropriately.
"""
g = self.compile("""
num = (num:n digit:d -> makeInt(n, d)
| digit)
digit = :x ?(isdigit(x)) -> int(x)
""", {"makeInt": lambda x, y: x * 10 + y,
"isdigit": lambda x: x.isdigit()})
self.assertEqual(g.num("3"), 3)
self.assertEqual(g.num("32767"), 32767)
def test_characterVsSequence(self):
"""
Characters (in single-quotes) are not regarded as sequences.
"""
g = self.compile(
"""
interp = ([interp:x '+' interp:y] -> add(x, y)
| [interp:x '*' interp:y] -> mul(x, y)
| :x ?(isdigit(x)) -> int(x))
""",
{"add": operator.add, "mul": operator.mul,
"isdigit": lambda x: isinstance(x, basestring) and x.isdigit()})
self.assertEqual(g.interp([['3', '+', ['5', '*', '2']]]), 13)
self.assertEqual(g.interp([[u'3', u'+', [u'5', u'*', u'2']]]), 13)
def test_string(self):
"""
Strings in double quotes match string objects.
"""
g = self.compile("""
interp = ["Foo" 1 2] -> 3
""")
self.assertEqual(g.interp([["Foo", 1, 2]]), 3)
def test_argEscape(self):
"""
Regression test for bug #239344.
"""
g = self.compile("""
memo_arg :arg = anything ?(False)
trick = letter memo_arg('c')
broken = trick | anything*
""")
self.assertEqual(g.broken('ab'), 'ab')
def test_lookahead(self):
"""
Doubled negation does lookahead.
"""
g = self.compile("""
foo = ~~(:x) bar(x)
bar :x = :a :b ?(equal(x, a, b)) -> x
""",
{"equal": lambda i, j, k: i == j == k})
self.assertEqual(g.foo("11"), '1')
self.assertEqual(g.foo("22"), '2')
def test_args(self):
"""
Productions can take arguments.
"""
g = self.compile("""
digit = ('0' | '1' | '2'):d -> int(d)
foo :x = (?(gt(x, 1)) '9' | ?(lte(x, 1)) '8'):d -> int(d)
baz = digit:a foo(a):b -> [a, b]
""", {"lte": operator.le, "gt": operator.gt})
self.assertEqual(g.baz("18"), [1, 8])
self.assertEqual(g.baz("08"), [0, 8])
self.assertEqual(g.baz("29"), [2, 9])
self.assertRaises(ParseError, g.foo, "28")
class PyExtractorTest(unittest.TestCase):
"""
Tests for finding Python expressions in OMeta grammars.
"""
def findInGrammar(self, expr):
"""
L{OMeta.pythonExpr()} can extract a single Python expression from a
string, ignoring the text following it.
"""
o = OMetaGrammarBase(expr + "\nbaz = ...\n")
self.assertEqual(o.pythonExpr()[0][0], expr)
def test_expressions(self):
"""
L{OMeta.pythonExpr()} can recognize various paired delimiters properly
and include newlines in expressions where appropriate.
"""
self.findInGrammar("x")
self.findInGrammar("(x + 1)")
self.findInGrammar("{x: (y)}")
self.findInGrammar("x, '('")
self.findInGrammar('x, "("')
self.findInGrammar('x, """("""')
self.findInGrammar('(x +\n 1)')
self.findInGrammar('[x, "]",\n 1]')
self.findInGrammar('{x: "]",\ny: "["}')
o = OMetaGrammarBase("foo(x[1]])\nbaz = ...\n")
self.assertRaises(ParseError, o.pythonExpr)
o = OMetaGrammarBase("foo(x[1]\nbaz = ...\n")
self.assertRaises(ParseError, o.pythonExpr)
class MakeGrammarTest(unittest.TestCase):
"""
Test the definition of grammars via the 'makeGrammar' method.
"""
def test_makeGrammar(self):
results = []
grammar = """
digit = :x ?('0' <= x <= '9') -> int(x)
num = (num:n digit:d !(results.append(True)) -> n * 10 + d
| digit)
"""
TestGrammar = OMeta.makeGrammar(grammar, "G").createParserClass(OMetaBase, {'results':results})
g = TestGrammar("314159")
self.assertEqual(g.apply("num")[0], 314159)
self.assertNotEqual(len(results), 0)
def test_brokenGrammar(self):
grammar = """
andHandler = handler:h1 'and handler:h2 -> And(h1, h2)
"""
e = self.assertRaises(ParseError, OMeta.makeGrammar, grammar,
"Foo")
self.assertEquals(e.position, 56)<|fim▁hole|>
def test_subclassing(self):
"""
A subclass of an OMeta subclass should be able to call rules on its
parent, and access variables in its scope.
"""
grammar1 = """
dig = :x ?(a <= x <= b) -> int(x)
"""
TestGrammar1 = OMeta.makeGrammar(grammar1, "G").createParserClass(OMetaBase, {'a':'0', 'b':'9'})
grammar2 = """
num = (num:n dig:d -> n * base + d
| dig)
"""
TestGrammar2 = OMeta.makeGrammar(grammar2, "G2").createParserClass(TestGrammar1, {'base':10})
g = TestGrammar2("314159")
self.assertEqual(g.apply("num")[0], 314159)
grammar3 = """
dig = :x ?(a <= x <= b or c <= x <= d) -> int(x, base)
"""
TestGrammar3 = OMeta.makeGrammar(grammar3, "G3").createParserClass(
TestGrammar2, {'c':'a', 'd':'f', 'base':16})
g = TestGrammar3("abc123")
self.assertEqual(g.apply("num")[0], 11256099)
def test_super(self):
"""
Rules can call the implementation in a superclass.
"""
grammar1 = "expr = letter"
TestGrammar1 = OMeta.makeGrammar(grammar1, "G").createParserClass(OMetaBase, {})
grammar2 = "expr = super | digit"
TestGrammar2 = OMeta.makeGrammar(grammar2, "G2").createParserClass(TestGrammar1, {})
self.assertEqual(TestGrammar2("x").apply("expr")[0], "x")
self.assertEqual(TestGrammar2("3").apply("expr")[0], "3")
def test_foreign(self):
"""
Rules can call the implementation in a superclass.
"""
grammar_letter = "expr = letter"
GrammarLetter = OMeta.makeGrammar(grammar_letter, "G").createParserClass(OMetaBase, {})
grammar_digit = "expr '5' = digit"
GrammarDigit = OMeta.makeGrammar(grammar_digit, "H").createParserClass(OMetaBase, {})
grammar = ("expr = !(grammar_digit_global):grammar_digit "
"grammar_letter.expr | grammar_digit.expr('5')")
TestGrammar = OMeta.makeGrammar(grammar, "I").createParserClass(
OMetaBase,
{"grammar_letter": GrammarLetter,
"grammar_digit_global": GrammarDigit
})
self.assertEqual(TestGrammar("x").apply("expr")[0], "x")
self.assertEqual(TestGrammar("3").apply("expr")[0], "3")
class HandyInterpWrapper(object):
"""
Convenient grammar wrapper for parsing strings.
"""
def __init__(self, interp):
self._interp = interp
def __getattr__(self, name):
"""
Return a function that will instantiate a grammar and invoke the named
rule.
@param: Rule name.
"""
def doIt(s):
"""
@param s: The string to be parsed by the wrapped grammar.
"""
# totally cheating
tree = not isinstance(s, basestring)
input, ret, err = self._interp.apply(s, name, tree)
try:
extra, _ = input.head()
except EOFError:
try:
return ''.join(ret)
except TypeError:
return ret
else:
raise err
return doIt
class InterpTestCase(OMetaTestCase):
def compile(self, grammar, globals=None):
"""
Produce an object capable of parsing via this grammar.
@param grammar: A string containing an OMeta grammar.
"""
g = OMeta(grammar)
tree = g.parseGrammar('TestGrammar')
g = GrammarInterpreter(tree, OMetaBase, globals)
return HandyInterpWrapper(g)
class TrampolinedInterpWrapper(object):
"""
Convenient grammar wrapper for parsing strings.
"""
def __init__(self, tree, globals):
self._tree = tree
self._globals = globals
def __getattr__(self, name):
"""
Return a function that will instantiate a grammar and invoke the named
rule.
@param: Rule name.
"""
def doIt(s):
"""
@param s: The string to be parsed by the wrapped grammar.
"""
tree = not isinstance(s, basestring)
if tree:
raise unittest.SkipTest("Not applicable for push parsing")
results = []
def whenDone(val, err):
results.append(val)
parser = TrampolinedGrammarInterpreter(self._tree, name, whenDone,
self._globals)
for i, c in enumerate(s):
assert len(results) == 0
parser.receive(c)
parser.end()
if results and parser.input.position == len(parser.input.data):
try:
return ''.join(results[0])
except TypeError:
return results[0]
else:
raise parser.currentError
return doIt
class TrampolinedInterpreterTestCase(OMetaTestCase):
def compile(self, grammar, globals=None):
g = OMeta(grammar)
tree = g.parseGrammar('TestGrammar')
return TrampolinedInterpWrapper(tree, globals)
def test_failure(self):
g = OMeta("""
foo = 'a':one baz:two 'd'+ 'e' -> (one, two)
baz = 'b' | 'c'
""", {})
tree = g.parseGrammar('TestGrammar')
i = TrampolinedGrammarInterpreter(
tree, 'foo', callback=lambda x: setattr(self, 'result', x))
e = self.assertRaises(ParseError, i.receive, 'foobar')
self.assertEqual(str(e),
"\nfoobar\n^\nParse error at line 2, column 0:"
" expected the character 'a'. trail: []\n")
def test_stringConsumedBy(self):
called = []
grammarSource = "rule = <'x'+>:y -> y"
grammar = OMeta(grammarSource).parseGrammar("Parser")
def interp(result, error):
called.append(result)
trampoline = TrampolinedGrammarInterpreter(grammar, "rule", interp)
trampoline.receive("xxxxx")
trampoline.end()
self.assertEqual(called, ["xxxxx"])
class TreeTransformerTestCase(unittest.TestCase):
def compile(self, grammar, namespace=None):
"""
Produce an object capable of parsing via this grammar.
@param grammar: A string containing an OMeta grammar.
"""
if namespace is None:
namespace = globals()
g = TreeTransformerGrammar.makeGrammar(
dedent(grammar), 'TestGrammar').createParserClass(
TreeTransformerBase, namespace)
return g
def test_termForm(self):
g = self.compile("Foo(:left :right) -> left.data + right.data")
self.assertEqual(g.transform(term("Foo(1, 2)"))[0], 3)
def test_termFormNest(self):
g = self.compile("Foo(:left Baz(:right)) -> left.data + right.data")
self.assertEqual(g.transform(term("Foo(1, Baz(2))"))[0], 3)
def test_listForm(self):
g = self.compile("Foo(:left [:first :second]) -> left.data + first.data + second.data")
self.assertEqual(g.transform(term("Foo(1, [2, 3])"))[0], 6)
def test_emptyList(self):
g = self.compile("Foo([]) -> 6")
self.assertEqual(g.transform(term("Foo([])"))[0], 6)
def test_emptyArgs(self):
g = self.compile("Foo() -> 6")
self.assertEqual(g.transform(term("Foo()"))[0], 6)
def test_emptyArgsMeansEmpty(self):
g = self.compile("""
Foo() -> 6
Foo(:x) -> x
""")
self.assertEqual(g.transform(term("Foo(3)"))[0].data, 3)
def test_subTransform(self):
g = self.compile("""
Foo(:left @right) -> left.data + right
Baz(:front :back) -> front.data * back.data
""")
self.assertEqual(g.transform(term("Foo(1, Baz(2, 3))"))[0], 7)
def test_defaultExpand(self):
g = self.compile("""
Foo(:left @right) -> left.data + right
Baz(:front :back) -> front.data * back.data
""")
self.assertEqual(g.transform(term("Blee(Foo(1, 2), Baz(2, 3))"))[0],
term("Blee(3, 6)"))
def test_wide_template(self):
g = self.compile(
"""
Pair(@left @right) --> $left, $right
Name(@n) = ?(n == "a") --> foo
| --> baz
""")
self.assertEqual(g.transform(term('Pair(Name("a"), Name("b"))'))[0],
"foo, baz")
def test_tall_template(self):
g = self.compile(
"""
Name(@n) = ?(n == "a") --> foo
| --> baz
Pair(@left @right) {{{
$left
also, $right
}}}
""")
self.assertEqual(g.transform(term('Pair(Name("a"), Name("b"))'))[0],
"foo\nalso, baz")
def test_tall_template_suite(self):
g = self.compile(
"""
Name(@n) -> n
If(@test @suite) {{{
if $test:
$suite
}}}
""")
self.assertEqual(g.transform(term('If(Name("a"), [Name("foo"), Name("baz")])'))[0],
"if a:\n foo\n baz")
def test_foreign(self):
"""
Rules can call the implementation in a superclass.
"""
grammar_letter = "expr = letter"
GrammarLetter = self.compile(grammar_letter, {})
grammar_digit = "expr '5' = digit"
GrammarDigit = self.compile(grammar_digit, {})
grammar = ("expr = !(grammar_digit_global):grammar_digit "
"GrammarLetter.expr | grammar_digit.expr('5')")
TestGrammar = self.compile(grammar, {
"GrammarLetter": GrammarLetter,
"grammar_digit_global": GrammarDigit
})
self.assertEqual(TestGrammar("x").apply("expr")[0], "x")
self.assertEqual(TestGrammar("3").apply("expr")[0], "3")
class ErrorReportingTests(unittest.TestCase):
def compile(self, grammar):
"""
Produce an object capable of parsing via this grammar.
@param grammar: A string containing an OMeta grammar.
"""
g = OMeta.makeGrammar(grammar, 'TestGrammar').createParserClass(OMetaBase, {})
return HandyWrapper(g)
def test_rawReporting(self):
"""
Errors from parsing contain enough info to figure out what was
expected and where.
"""
g = self.compile("""
start = ( (person feeling target)
| (adjective animal feeling token("some") target))
adjective = token("crazy") | token("clever") | token("awesome")
feeling = token("likes") | token("loves") | token("hates")
animal = token("monkey") | token("horse") | token("unicorn")
person = token("crazy horse") | token("hacker")
target = (token("bananas") | token("robots") | token("americans")
| token("bacon"))
""")
#some warmup
g.start("clever monkey hates some robots")
g.start("awesome unicorn loves some bacon")
g.start("crazy horse hates americans")
g.start("hacker likes robots")
e = self.assertRaises(ParseError, g.start,
"clever hacker likes bacon")
self.assertEqual(e.position, 8)
self.assertEqual(e.error, [('expected', "token", "horse")])
e = self.assertRaises(ParseError, g.start,
"crazy horse likes some grass")
#matching "some" means second branch of 'start' is taken
self.assertEqual(e.position, 23)
self.assertEqual(e.error, [('expected', "token", "bananas"),
('expected', 'token', "bacon"),
('expected', "token", "robots"),
('expected', "token", "americans")])
e = self.assertRaises(ParseError, g.start,
"crazy horse likes mountains")
#no "some" means first branch of 'start' is taken...
#but second is also viable
self.assertEqual(e.position, 18)
self.assertEqual(e.error, [('expected', "token", "some"),
('expected', "token", "bananas"),
('expected', 'token', "bacon"),
('expected', "token", "robots"),
('expected', "token", "americans")])
def test_formattedReporting(self):
"""
Parse errors can be formatted into a nice human-readable view
containing the erroneous input and possible fixes.
"""
g = self.compile("""
dig = '1' | '2' | '3'
bits = <dig>+
""")
input = "123x321"
e = self.assertRaises(ParseError, g.bits, input)
self.assertEqual(e.formatError(),
dedent("""
123x321
^
Parse error at line 1, column 3: expected one of '1', '2', or '3'. trail: [dig]
"""))
input = "foo\nbaz\nboz\ncharlie\nbuz"
e = ParseError(input, 12, expected('token', 'foo') + expected(None, 'b'))
self.assertEqual(e.formatError(),
dedent("""
charlie
^
Parse error at line 4, column 0: expected one of 'b', or token 'foo'. trail: []
"""))
input = '123x321'
e = ParseError(input, 3, expected('digit'))
self.assertEqual(e.formatError(),
dedent("""
123x321
^
Parse error at line 1, column 3: expected a digit. trail: []
"""))<|fim▁end|> | self.assertEquals(e.error, [("expected", None, "\r\n"), ("message", "end of input")])
|
<|file_name|>uks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <[email protected]>
#<|fim▁hole|>Non-relativistic nuclear spin-rotation tensors for UKS
'''
from pyscf.prop.nsr import uhf as uhf_nsr
from pyscf.prop.nmr import uks as uks_nmr
class NSR(uhf_nsr.NSR):
'''Nuclear-spin rotation tensors for UKS'''
get_fock = uks_nmr.get_fock
solve_mo1 = uks_nmr.solve_mo1
from pyscf import lib
from pyscf import dft
dft.uks.UKS.NSR = dft.uks_symm.UKS.NSR = lib.class_as_method(NSR)
if __name__ == '__main__':
from pyscf import gto
from pyscf import dft
from pyscf import lib
mol = gto.Mole()
mol.verbose = 7
mol.output = '/dev/null'
mol.atom = '''h , 0. 0. 0.917
f , 0. 0. 0.
'''
mol.basis = 'dzp'
mol.build()
mf = dft.UKS(mol).run(xc='b3lyp')
rotg = mf.NSR()
m = rotg.kernel()
print(m[1,0,0] - -301.49652448221707)
print(lib.finger(m) - 28.57893850199683)
rotg.gauge_orig = (0,0,.917/lib.param.BOHR)
m = rotg.kernel()
print(m[0,0,0] - 277.173892536396)
print(lib.finger(m) - 96.92616726791988)
mol.atom = '''C , 0. 0. 0.
O , 0. 0. 1.1283
'''
mol.basis = 'ccpvdz'
mol.nucprop = {'C': {'mass': 13}}
mol.build()
mf = dft.UKS(mol).run(xc='bp86')
rotg = NSR(mf)
m = rotg.kernel()
print(m[0,0,0] - -32.23298865237305)
print(lib.finger(m) - -11.278686427378966)
mol.atom='''O 0. 0. 0.
H 0. -0.757 0.587
H 0. 0.757 0.587'''
mol.basis = 'ccpvdz'
mol.build()
mf = dft.UKS(mol).run()
rotg = NSR(mf)
m = rotg.kernel()
print(lib.finger(m) - -66.94250282318671)<|fim▁end|> |
''' |
<|file_name|>order.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
def run_277853s():
N = 1
while N<16384:<|fim▁hole|>def run_277853(N):
cnt = 0
#for (int i = 1; i*i <= N; i = i*2)
i = 1
while i*i <= N:
cnt += 1
print(N, i, cnt)
i = i*4
#print "{:>5}=N {:>5}=cnt".format(N, cnt)
def run_605062s():
N = 1
while N<4096:
run_605062(N)
N = N*2
def run_605062(N):
"""N^(1/2).
The body of inner loop is executed 1 + 2 + 4 + 8 + ... + sqrt(N) ~ 2 sqrt(N)
"""
cnt = 0
#for (int i = 1; i*i <= N; i = i*2)
i = 1
while i <= N:
#for (int j = 0; j < i; j++)
for j in range(i):
cnt += 1
#print i, j, cnt
#print i
i = i*2
print("{:>5}=N {:>5}=cnt".format(N, cnt))
if __name__ == '__main__':
run_277853s()<|fim▁end|> | run_277853(N)
N = N*2
|
<|file_name|>lev_distance.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> uint {
if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); }
let mut dcol: Vec<_> = range(0, t.len() + 1).collect();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc {
dcol[j + 1] = current;
} else {
dcol[j + 1] = cmp::min(current, next);
dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
}
current = next;
t_last = j;
}
}
dcol[t_last + 1]
}
#[test]
fn test_lev_distance() {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
for c in range(0u32, MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[], &c[]), 0);
}
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
let c = "Mary häd ä little lämb\n\nLittle lämb\n";
assert_eq!(lev_distance(a, b), 1);
assert_eq!(lev_distance(b, a), 1);
assert_eq!(lev_distance(a, c), 2);
assert_eq!(lev_distance(c, a), 2);
assert_eq!(lev_distance(b, c), 1);
assert_eq!(lev_distance(c, b), 1);
}<|fim▁end|> | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
<|file_name|>thread-view.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input, OnInit } from '@angular/core';
import { ActivatedRoute, Params } from '@angular/router';
import { Location } from '@angular/common';
import { Thread } from './thread';
import { Post } from './post';
import { QueryService } from './query-service';
import { CredentialsService } from './credentials-service';
import { Subscription } from 'rxjs/Subscription';
import 'rxjs/add/operator/switchMap';
@Component({
moduleId: module.id,
selector: 'my-thread-view',
template: `
<div *ngIf=posts >
<h2 *ngIf=thread class="w3-center w3-padding-16"><b>{{thread.name}}</b></h2>
<div *ngFor="let post of posts; let isFirstPost = first">
<div *ngIf="!isFirstPost" class="w3-card-4 w3-margin-top w3-margin-bottom mt-margin-left-48 w3-sand w3-hover-white w3-container w3-padding-4" >
<p class="mt-ptext">{{post.text}} <i>-{{post.author}}, {{post.timestamp | date:'short'}}</i></p>
</div>
<div *ngIf="isFirstPost" class="w3-card-4 w3-margin mt-very-light-blue w3-hover-white w3-container w3-padding-4" >
<p class="mt-ptext">{{post.text}} <i>-{{post.author}}, {{post.timestamp | date:'short'}}</i></p>
</div>
</div>
<div *ngIf="!loggedInUsername" class="w3-center w3-text-red">Log in to post new content</div>
<div *ngIf="loggedInUsername" class="w3-container w3-margin-top w3-margin-bottom mt-margin-left-48 w3-padding-4 mt-card-4">
<span> <textarea class="mt-textarea-col" [(ngModel)]="newPostText" placeholder="type here..."></textarea> </span>
<span><label class="mt-label-small w3-layout-top">-{{this.loggedInUsername}}</label></span>
<span> <button class="w3-layout-top" (click)="postNewComment()">Post</button></span>
</div>
</div>
`,
styleUrls: ['../w3-styles.css']
})
export class ThreadViewComponent implements OnInit{
thread: Thread;
posts: Post[];<|fim▁hole|> loggedInUsername: string = "";
ngOnInit(): void {
this.route.params
.switchMap((params: Params) => this.queryService.getThread(+params['id']))
.subscribe(thread => this.thread = thread);
this.route.params
.switchMap((params: Params) => this.queryService.getThreadPosts(+params['id']))
.subscribe(posts => this.posts = posts);
this.loggedInUsername = this.credentialsService.retrieveParams();
}
constructor(
private queryService: QueryService,
private credentialsService: CredentialsService,
private route: ActivatedRoute,
private location: Location) {
this.subscription = credentialsService.credentialsChange.subscribe((username) => {
this.loggedInUsername = username;
});
}
postNewComment() {
var newPost: Post = {text: this.newPostText, author:this.loggedInUsername, threadId: this.thread.id, timestamp: null };
this.queryService.postNewPost(newPost, this.thread.id)
.then((confirmedPost: Post) => this.successfulPost(confirmedPost));
}
successfulPost(newPost: Post) {
this.newPostText = "";
this.posts.push(newPost);
}
}<|fim▁end|> | newPostText: string;
subscription: Subscription; |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.