prompt | completion
---|---
large_string (lengths 70 to 991k) | large_string (lengths 0 to 1.02k)
<|file_name|>example.py<|end_file_name|><|fim▁begin|>#
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from gafferpy import gaffer as g
from gafferpy import gaffer_connector
def run(host, verbose=False):
return run_with_connector(create_connector(host, verbose))
def run_with_connector(gc):
print()
print('Running operations')
print('--------------------------')
print()
get_schema(gc)
get_filter_functions(gc)
get_class_filter_functions(gc)
get_element_generators(gc)
get_object_generators(gc)
get_operations(gc)
get_serialised_fields(gc)
get_store_traits(gc)
is_operation_supported(gc)
add_elements(gc)
get_elements(gc)
get_adj_seeds(gc)
get_all_elements(gc)
get_walks(gc)
generate_elements(gc)
generate_domain_objs(gc)
generate_domain_objects_chain(gc)
get_element_group_counts(gc)
get_sub_graph(gc)
export_to_gaffer_result_cache(gc)
get_job_details(gc)
get_all_job_details(gc)
add_named_operation(gc)
get_all_named_operations(gc)
named_operation(gc)
delete_named_operation(gc)
add_named_view_summarise(gc)
add_named_view_date_range(gc)
get_all_named_views(gc)
named_view_summarise(gc)
named_view_date_range(gc)
named_views(gc)
delete_named_views(gc)
sort_elements(gc)
max_element(gc)
min_element(gc)
to_vertices_to_entity_seeds(gc)
complex_op_chain(gc)
op_chain_in_json(gc)
def create_connector(host, verbose=False):
return gaffer_connector.GafferConnector(host, verbose)
def get_schema(gc):
# Get Schema
result = gc.execute_get(
g.GetSchema()
)
print('Schema:')
print(result)
print()
def get_filter_functions(gc):
# Get filter functions
result = gc.execute_get(
g.GetFilterFunctions()
)
print('Filter Functions:')
print(result)
print()
def get_class_filter_functions(gc):
# Get class filter functions
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetClassFilterFunctions(class_name=class_name)
)
print('Class Filter Functions (IsMoreThan):')
print(result)
print()
def get_element_generators(gc):
# Get Element generators
result = gc.execute_get(
g.GetElementGenerators()
)
print('Element generators:')
print(result)
print()
def get_object_generators(gc):
# Get Object generators
result = gc.execute_get(
g.GetObjectGenerators()
)
print('Object generators:')
print(result)
print()
def get_operations(gc):
# Get operations
result = gc.execute_get(
g.GetOperations()
)
print('Operations:')
print(result)
print()
def get_serialised_fields(gc):
# Get serialised fields
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetSerialisedFields(class_name=class_name)
)
print('Serialised Fields (IsMoreThan):')
print(result)
print()
def get_store_traits(gc):
# Get Store Traits
result = gc.execute_get(
g.GetStoreTraits()
)
print('Store Traits:')
print(result)
print()
def is_operation_supported(gc):
# Is operation supported
operation = 'uk.gov.gchq.gaffer.operation.impl.add.AddElements'
result = gc.is_operation_supported(
g.IsOperationSupported(operation=operation)
)
print(
'\nOperation supported ("uk.gov.gchq.gaffer.operation.impl.add.AddElements"):')
print(result)
print()
def add_elements(gc):
# Add Elements
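# Property values use typed wrappers: g.freq_map, g.long, and g.date (epoch milliseconds)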
gc.execute_operation(
g.AddElements(
input=[
g.Entity(
group='JunctionUse',
vertex='M1:1',
properties={
'countByVehicleType': g.freq_map({
'BUS': 10,
'CAR': 50
}),
'endDate': g.date(1034319600000),
'count': g.long(60),
'startDate': g.date(1034316000000)
}
),
g.Edge(
group='RoadHasJunction',
source='M1',
destination='M1:1',
directed=True,
properties={}
)
]
)
)
print('Elements have been added')
print()
def get_elements(gc):
# Get Elements
input = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed('M5:10'),
# Edge input can be provided as follows
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.EITHER),
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.DIRECTED),
# Or you can use True or False for the direction
g.EdgeSeed('M5:10', 'M5:11', True)
],
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[],
transient_properties=[
g.Property('description', 'java.lang.String')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['count'],
predicate=g.IsMoreThan(
value=g.long(1)
)
)
],
transform_functions=[
g.FunctionContext(
selection=['SOURCE', 'DESTINATION', 'count'],
function=g.Function(
class_name='uk.gov.gchq.gaffer.traffic.transform.DescriptionTransform'
),
projection=['description']
)
]
)
]
),
directed_type=g.DirectedType.EITHER
)
)
print('Related input')
print(input)
print()
def get_adj_seeds(gc):
# Adjacent Elements - chain 2 adjacent entities together
adj_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[
g.EntitySeed(
vertex='M5'
)
],
view=g.View(
edges=[
g.ElementDefinition(
'RoadHasJunction',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
'RoadUse',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
)
]
)
print('Adjacent entities - 2 hop')
print(adj_seeds)
print()
def get_all_elements(gc):
# Get all input, but limit the total results to 3
all_elements = gc.execute_operations(
operations=[
g.GetAllElements(),
g.Limit(result_limit=3)
]
)
print('All input (Limited to first 3)')
print(all_elements)
print()
def get_walks(gc):
# Get walks from M32 traversing down RoadHasJunction then JunctionLocatedAt
walks = gc.execute_operation(
g.GetWalks(
input=[
g.EntitySeed('M32'),
],
operations=[
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction'
)
]
)
),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='JunctionLocatedAt'
)
]
)
)
]
)
)
print(
'Walks from M32 traversing down RoadHasJunction then JunctionLocatedAt')
print(walks)
print()
def generate_elements(gc):
# Generate Elements
input = gc.execute_operation(
g.GenerateElements(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.traffic.generator.RoadTrafficStringElementGenerator'
),
input=[
'"South West","E06000054","Wiltshire","6016","389200","179080","M4","LA Boundary","381800","180030","17","391646","179560","TM","E","2000","2000-05-03 00:00:00","7","0","9","2243","15","426","127","21","20","37","106","56","367","3060"'
]
)
)
print('Generated input from provided domain input')
print(input)
print()
def generate_domain_objs(gc):
# Generate Domain Objects - single provided element
input = gc.execute_operation(
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
),
input=[
g.Entity('entity', '1'),
g.Edge('edge', '1', '2', True)
]
)
)
print('Generated input from provided input')
print(input)
print()
def generate_domain_objects_chain(gc):
# Generate Domain Objects - chain of get input then generate input
input = gc.execute_operations(
[
g.GetElements(
input=[g.EntitySeed(vertex='M5')],
seed_matching_type=g.SeedMatchingType.RELATED,
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
)
)
]
)
print('Generated input from get input by seed')
print(input)
print()
def get_element_group_counts(gc):
# Get Elements
group_counts = gc.execute_operations([
g.GetElements(
input=[g.EntitySeed('M5')]
),
g.CountGroups(limit=1000)
])
print('Groups counts (limited to 1000 input)')
print(group_counts)
print()
def get_sub_graph(gc):
# Export and Get to/from an in memory set
entity_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToSet(),
g.GetAdjacentIds(include_incoming_out_going=g.InOutType.OUT),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport()
]
)
print('Export and Get to/from an in memory set')
print(entity_seeds)
print()
def export_to_gaffer_result_cache(gc):
# Export to Gaffer Result Cache and Get from Gaffer Result Cache
job_details = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
print('Export to Gaffer Result Cache. Job Details:')
print(job_details)
print()
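# use the returned job id to fetch the results back from the Gaffer result cache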
job_id = job_details['jobId']
entity_seeds = gc.execute_operation(
g.GetGafferResultCacheExport(job_id=job_id),
)
print('Get Gaffer Result Cache Export.')
print(entity_seeds)
print()
def get_job_details(gc):
# Get all job details
job_details_initial = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('1')],
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
job_id = job_details_initial['jobId']
job_details = gc.execute_operation(
g.GetJobDetails(job_id=job_id),
)
print('Get job details')
print(job_details)
print()
def get_all_job_details(gc):
# Get all job details
all_job_details = gc.execute_operation(
g.GetAllJobDetails(),
)
print('Get all job details (just prints the first 3 results)')
print(all_job_details[:3])
print()
def delete_named_operation(gc):
gc.execute_operation(
g.DeleteNamedOperation('2-hop-with-limit')
)
print('Deleted named operation: 2-hop-with-limit')
print()
def add_named_operation(gc):
gc.execute_operation(
g.AddNamedOperation(
operation_chain={
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit": "${param1}"
}]
},
operation_name='2-hop-with-limit',
description='2 hop query with limit',
overwrite_flag=True,
read_access_roles=["read-user"],
write_access_roles=["write-user"],
parameters=[
g.NamedOperationParameter(
name="param1",
description="Limit param",
default_value=1,
value_class="java.lang.Long",
required=False
)
]
)
)
print('Added named operation: 2-hop-with-limit')
print()
def get_all_named_operations(gc):
namedOperations = gc.execute_operation(
g.GetAllNamedOperations()
)
print('Named operations')
print(namedOperations)
print()
def named_operation(gc):
result = gc.execute_operation(
g.NamedOperation(
operation_name='2-hop-with-limit',
parameters={
'param1': 2
},
input=[
g.EntitySeed('M5')
]
)
)
print('Execute named operation')
print(result)
print()
def delete_named_views(gc):
gc.execute_operation(
g.DeleteNamedView(name='summarise')
)
print('Deleted named view: summarise')
gc.execute_operation(
g.DeleteNamedView(name='dateRange')
)
print('Deleted named view: dateRange')
print()
def add_named_view_summarise(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=[
g.GlobalElementDefinition(group_by=[])
]
),
name='summarise',
description='Summarises all results (overrides the groupBy to an empty array).',
overwrite_flag=True
)
)
print('Added named view: summarise')
print()
def add_named_view_date_range(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=g.GlobalElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='${start}',
end='${end}'
)
)
]
)
),
name='dateRange',
description='Filters results to a provided date range.',
overwrite_flag=True,
parameters=[
g.NamedViewParameter(
name="start",
description="A date string for the start of date range.",
value_class="java.lang.String",
required=False
),
g.NamedViewParameter(
name="end",
description="A date string for the end of the date range.",
value_class="java.lang.String",
required=False
)
]
)
)
print('Added named view: dateRange')
print()
def get_all_named_views(gc):
namedViews = gc.execute_operation(
g.GetAllNamedViews()
)
print('Named views')
print(namedViews)
print()
def named_view_summarise(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="summarise"
)
)
)
print('Execute get elements with summarised named view')
print(result)
print()
def named_view_date_range(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
)
)
print('Execute get elements with date range named view')
print(result)
print()
def named_views(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=[
g.NamedView(
name="summarise"
),
g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
]
)
)
print('Execute get elements with summarised and date range named views')
print(result)
print()
def sort_elements(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Sort(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
],
result_limit=5
)
])
print('Sorted input')
print(input)
print()
def max_element(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Max(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Max element')
print(input)
print()
def min_element(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Min(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Min element')
print(input)
print()
def to_vertices_to_entity_seeds(gc):
# Get sorted Elements
input = gc.execute_operations([
g.GetElements(
input=[
g.EntitySeed(
vertex='South West'
)
],<|fim▁hole|> group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToVertices(
edge_vertices=g.EdgeVertices.DESTINATION,
use_matched_vertex=g.UseMatchedVertex.OPPOSITE
),
g.ToEntitySeeds(),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
'LocationContainsRoad',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.Limit(5)
])
print('ToVertices then ToEntitySeeds')
print(input)
print()
def complex_op_chain(gc):
# All road junctions in the South West that were heavily used by buses in year 2000.
junctions = gc.execute_operations(
operations=[
g.GetAdjacentIds(
input=[g.EntitySeed(vertex='South West')],
view=g.View(
edges=[
g.ElementDefinition(
group='RegionContainsLocation',
group_by=[]
)
]
)
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='LocationContainsRoad',
group_by=[]
)
]
)
),
g.ToSet(),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GetElements(
view=g.View(
entities=[
g.ElementDefinition(
group='JunctionUse',
group_by=[],
transient_properties=[
g.Property('busCount', 'java.lang.Long')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='2000/01/01',
end='2001/01/01'
)
)
],
post_aggregation_filter_functions=[
g.PredicateContext(
selection=['countByVehicleType'],
predicate=g.PredicateMap(
predicate=g.IsMoreThan(
value={'java.lang.Long': 1000},
or_equal_to=False
),
key='BUS'
)
)
],
transform_functions=[
g.FunctionContext(
selection=['countByVehicleType'],
function=g.FreqMapExtractor(key='BUS'),
projection=['busCount']
)
]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToCsv(
element_generator=g.CsvGenerator(
fields={
'VERTEX': 'Junction',
'busCount': 'Bus Count'
},
quoted=False
),
include_header=True
)
]
)
print(
'All road junctions in the South West that were heavily used by buses in year 2000.')
print(junctions)
print()
def op_chain_in_json(gc):
# Operation chain defined in json
result = gc.execute_operation_chain(
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.CountGroups"
}]
}
)
print('Operation chain defined in json')
print(result)
print()
if __name__ == "__main__":
run('http://localhost:8080/rest/latest', False)<|fim▁end|> | view=g.View(
edges=[
g.ElementDefinition(
'RegionContainsLocation', |
<|file_name|>input_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training.input."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import itertools
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class MatchFilenamesOnceTest(tf.test.TestCase):
def test(self):
temp_dir = self.get_temp_dir()
filenames = [os.path.join(temp_dir, n) for n in os.listdir(temp_dir)]
additional = [os.path.join(self.get_temp_dir(), "match_filenames.%d" % i)
for i in range(3)]
for name in additional:
open(name, "w").write("Some contents")
filenames = list(set(filenames + additional))
with self.test_session():
star = tf.train.match_filenames_once(
os.path.join(self.get_temp_dir(), "*"))
question = tf.train.match_filenames_once(
os.path.join(self.get_temp_dir(), "match_filenames.?"))
one = tf.train.match_filenames_once(additional[1])
tf.initialize_all_variables().run()
self.assertItemsEqual(map(tf.compat.as_bytes, filenames), star.eval())
self.assertItemsEqual(map(tf.compat.as_bytes, additional),
question.eval())
self.assertItemsEqual([tf.compat.as_bytes(additional[1])], one.eval())
class LimitEpochsTest(tf.test.TestCase):
def testNoLimit(self):
with self.test_session():
seven = tf.constant(7)
seven_forever = tf.train.limit_epochs(seven)
tf.initialize_all_variables().run()
for i in range(100):
self.assertEqual(7, seven_forever.eval())
def testLimit(self):
with self.test_session():
love_me = tf.constant("Love Me")
love_me_two_times = tf.train.limit_epochs(love_me, num_epochs=2)
tf.initialize_all_variables().run()
self.assertEqual(b"Love Me", love_me_two_times.eval())
self.assertEqual(b"Love Me", love_me_two_times.eval())
with self.assertRaises(tf.errors.OutOfRangeError):
love_me_two_times.eval()
class StringInputProducerTest(tf.test.TestCase):
def testNoShuffle(self):
with self.test_session():
strings = [b"to", b"be", b"or", b"not", b"to", b"be"]
num_epochs = 3
queue = tf.train.string_input_producer(
strings, num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(len(strings) * num_epochs)
dequeue = queue.dequeue()
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# No randomness, so just see repeated copies of the input.
output = dequeue_many.eval()
self.assertAllEqual(strings * num_epochs, output)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
dequeue.eval()
for thread in threads:
thread.join()
def testShuffle(self):
with self.test_session():
strings = [b"a", b"b", b"c"]
num_epochs = 600
queue = tf.train.string_input_producer(
strings, num_epochs=num_epochs, shuffle=True, seed=271828)
dequeue_many = queue.dequeue_many(len(strings))
dequeue = queue.dequeue()
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# Validate that we only shuffle the strings within an epoch and
# count how often each possible order appears.
expected = [b"abc", b"acb", b"bac", b"bca", b"cab", b"cba"]
frequency = {}
for e in expected:
frequency[e] = 0
for _ in range(num_epochs):
output = dequeue_many.eval()
key = b"".join(output)
self.assertIn(key, expected)
frequency[key] += 1
# Expect an approximately even distribution over all possible orders.
expected_frequency = num_epochs / len(expected)
margin = expected_frequency * 0.4
tf.logging.info("Observed counts: %s", frequency)
for key in expected:
value = frequency[key]
self.assertGreater(value, expected_frequency - margin)
self.assertLess(value, expected_frequency + margin)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
dequeue.eval()
for thread in threads:
thread.join()
def testNullStringPython(self):
# Graph-construction time check for empty string list:
with self.test_session():
with self.assertRaises(ValueError):
_ = tf.train.string_input_producer([])
def testNullString(self):
# Runtime check for empty string list. This is slightly oblique:
# The queue runner should die with an assertion error on the null
# input tensor, causing the dequeue to fail with an OutOfRangeError.
with self.test_session():
coord = tf.train.Coordinator()
queue = tf.train.string_input_producer(tf.constant([], dtype=tf.string))
dequeue = queue.dequeue()
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners(coord=coord)
with self.assertRaises(tf.errors.OutOfRangeError):
dequeue.eval()
coord.request_stop()
for thread in threads:
thread.join()
class RangeInputProducerTest(tf.test.TestCase):
def testNoShuffle(self):
with self.test_session():
num_epochs = 3
range_size = 5
queue = tf.train.range_input_producer(
range_size, num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(range_size * num_epochs)
dequeue = queue.dequeue()
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# No randomness, so just see repeated copies of the input.
output = dequeue_many.eval()
self.assertAllEqual(list(xrange(range_size)) * num_epochs, output)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
dequeue.eval()
for thread in threads:
thread.join()
def testShuffle(self):
with self.test_session():
num_epochs = 200
range_size = 2
queue = tf.train.range_input_producer(
range_size, num_epochs=num_epochs, shuffle=True, seed=314159)
dequeue_many = queue.dequeue_many(range_size)
dequeue = queue.dequeue()
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# Validate that we only shuffle the integers within an epoch and
# count how often each possible order appears.
expected = [12, 21]
frequency = {}
for e in expected:
frequency[e] = 0
for _ in range(num_epochs):
output = dequeue_many.eval()
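# Encode the dequeued pair as a two-digit key, e.g. output [0, 1] -> 12.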
key = 10 * (output[0] + 1) + (output[1] + 1)
self.assertIn(key, expected)
frequency[key] += 1
# Expect an approximately even distribution over all possible orders.
expected_frequency = num_epochs / len(expected)
margin = expected_frequency * 0.4
tf.logging.info("Observed counts: %s", frequency)
for key in expected:
value = frequency[key]
self.assertGreater(value, expected_frequency - margin)
self.assertLess(value, expected_frequency + margin)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
dequeue.eval()
for thread in threads:
thread.join()
class SliceInputProducerTest(tf.test.TestCase):
def testNoShuffle(self):
with self.test_session() as sess:
num_epochs = 3
source_strings = [b"Alpha", b"Beta", b"Delta", b"Gamma"]
source_ints = [2, 3, 5, 7]
slices = tf.train.slice_input_producer(
[source_strings, source_ints], num_epochs=num_epochs, shuffle=False)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# No randomness, so just see repeated copies of the input.
num_items = len(source_strings) * num_epochs
output = [sess.run(slices) for _ in range(num_items)]
out_strings, out_ints = zip(*output)
self.assertAllEqual(source_strings * num_epochs, out_strings)
self.assertAllEqual(source_ints * num_epochs, out_ints)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(slices)
for thread in threads:
thread.join()
def testShuffle(self):
with self.test_session() as sess:
num_epochs = 1200
source_strings = ["A", "B", "D", "G"]
source_ints = [7, 3, 5, 2]
slices = tf.train.slice_input_producer(
[source_strings, source_ints], num_epochs=num_epochs, shuffle=True,
seed=161803)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# Validate that we only shuffle the integers within an epoch and
# count how often each possible order appears.
expected = [b",".join(x) for x in
itertools.permutations([b"A7", b"B3", b"D5", b"G2"])]
frequency = {}
for e in expected:
frequency[e] = 0
for _ in range(num_epochs):
output = [sess.run(slices) for _ in range(len(source_strings))]
key = b",".join([s + tf.compat.as_bytes(str(i)) for s, i in output])
self.assertIn(key, expected)
frequency[key] += 1
# Expect an approximately even distribution over all possible orders.
expected_frequency = num_epochs / len(expected)
margin = expected_frequency * 0.4
tf.logging.info("Observed counts: %s", frequency)
for key in expected:
value = frequency[key]
self.assertGreater(value, expected_frequency - margin)
self.assertLess(value, expected_frequency + margin)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(slices)
for thread in threads:
thread.join()
class BatchTest(tf.test.TestCase):
def testOneThread(self):
with self.test_session() as sess:
batch_size = 10
num_batches = 3
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_batches * batch_size)
batched = tf.train.batch([counter, "string"], batch_size=batch_size)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
for i in range(num_batches):
results = sess.run(batched)
self.assertAllEqual(results[0], np.arange(i * batch_size,
(i + 1) * batch_size))
self.assertAllEqual(results[1], [b"string"] * batch_size)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
def testOneThreadEnqueueMany(self):
with self.test_session() as sess:
batch_size = 10
num_batches = 3
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_batches * batch_size)
pre_batched = tf.train.batch([counter, "string"], batch_size=2)
batched = tf.train.batch(pre_batched, enqueue_many=True,
batch_size=batch_size)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
for i in range(num_batches):
results = sess.run(batched)
self.assertAllEqual(results[0], np.arange(i * batch_size,
(i + 1) * batch_size))
self.assertAllEqual(results[1], [b"string"] * batch_size)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
def testManyThreads(self):
with self.test_session() as sess:
batch_size = 10
num_batches = 3
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_batches * batch_size)
batched = tf.train.batch([counter, "string"], batch_size=batch_size,
num_threads=4)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
all_counts = []
for i in range(num_batches):
results = sess.run(batched)
tf.logging.info("Batch %d: %s", i, results[0])
self.assertEqual(len(results[0]), batch_size)
all_counts.extend(results[0])
self.assertAllEqual(results[1], [b"string"] * batch_size)
self.assertItemsEqual(all_counts, range(num_batches * batch_size))
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
class BatchJoinTest(tf.test.TestCase):
def testTwoThreads(self):
with self.test_session() as sess:
# Two threads, the first generates (0..69, "a").
num_a = 70
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_a)
# The second generates (99, "b") 90 times and then stops.
num_b = 90
ninety_nine = tf.train.limit_epochs(
tf.constant(99, dtype=tf.int64), num_b)
# These get joined together and grouped into batches of 5.
batch_size = 5
batched = tf.train.batch_join([[counter, "a"], [ninety_nine, "b"]],
batch_size=batch_size)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# Should see the "a" and "b" threads mixed together.
all_a = []
seen_b = 0
saw_both = 0
num_batches = (num_a + num_b) // batch_size
for i in range(num_batches):
results = sess.run(batched)
tf.logging.info("Batch %d: %s", i, results[0])
self.assertEqual(len(results[0]), batch_size)
self.assertEqual(len(results[1]), batch_size)
which_a = [i for i, s in enumerate(results[1]) if s == b"a"]
which_b = [i for i, s in enumerate(results[1]) if s == b"b"]
self.assertEqual(len(which_a) + len(which_b), batch_size)
if len(which_a) > 0 and len(which_b) > 0: saw_both += 1
all_a.extend([results[0][i] for i in which_a])
seen_b += len(which_b)
self.assertAllEqual([99] * len(which_b),
[results[0][i] for i in which_b])
# Some minimum level of mixing of the results of both threads.
self.assertGreater(saw_both, 1)
# Verify the order of results from "a" were preserved.
self.assertAllEqual(all_a, np.arange(num_a))
self.assertEqual(seen_b, num_b)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
class ShuffleBatchTest(tf.test.TestCase):
def testOneThread(self):
with self.test_session() as sess:
batch_size = 10
num_batches = 3
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_batches * batch_size)
batched = tf.train.shuffle_batch(
[counter, "string"], batch_size=batch_size, capacity=32,
min_after_dequeue=16, seed=141421)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
all_counts = []
for i in range(num_batches):
results = sess.run(batched)
self.assertEqual(len(results[0]), batch_size)
all_counts.extend(results[0])
self.assertAllEqual(results[1], [b"string"] * batch_size)
# Results scrambled, but include all the expected numbers.
deltas = [all_counts[i + 1] - all_counts[i]
for i in range(len(all_counts) - 1)]
self.assertFalse(all(d == deltas[0] for d in deltas))
self.assertItemsEqual(all_counts, range(num_batches * batch_size))
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
def testManyThreads(self):
with self.test_session() as sess:
batch_size = 10
num_batches = 3
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_batches * batch_size)
batched = tf.train.shuffle_batch(
[counter, "string"], batch_size=batch_size, capacity=32,
min_after_dequeue=16, seed=173205, num_threads=4)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
all_counts = []
for i in range(num_batches):
results = sess.run(batched)
tf.logging.info("Batch %d: %s", i, results[0])
self.assertEqual(len(results[0]), batch_size)
all_counts.extend(results[0])
self.assertAllEqual(results[1], [b"string"] * batch_size)
# Results scrambled, but include all the expected numbers.
deltas = [all_counts[i + 1] - all_counts[i]
for i in range(len(all_counts) - 1)]
self.assertFalse(all(d == deltas[0] for d in deltas))
self.assertItemsEqual(all_counts, range(num_batches * batch_size))
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
class ShuffleBatchJoinTest(tf.test.TestCase):
def testTwoThreads(self):
with self.test_session() as sess:
# Two threads, the first generates (0..24, "a").
num_a = 25
zero64 = tf.constant(0, dtype=tf.int64)
examples = tf.Variable(zero64)
counter = examples.count_up_to(num_a)
# The second generates (99, "b") 35 times and then stops.
num_b = 35
ninety_nine = tf.train.limit_epochs(
tf.constant(99, dtype=tf.int64), num_b)
# These get joined together and grouped into batches of 5.
batch_size = 5
batched = tf.train.shuffle_batch_join(
[[counter, "a"], [ninety_nine, "b"]], batch_size=batch_size,
capacity=32, min_after_dequeue=16, seed=223607)
tf.initialize_all_variables().run()
threads = tf.train.start_queue_runners()
# Should see the "a" and "b" threads mixed together.
all_a = []
seen_b = 0
saw_both = 0
num_batches = (num_a + num_b) // batch_size
for i in range(num_batches):
results = sess.run(batched)
tf.logging.info("Batch %d: %s", i, results[0])
self.assertEqual(len(results[0]), batch_size)
self.assertEqual(len(results[1]), batch_size)
which_a = [i for i, s in enumerate(results[1]) if s == b"a"]
which_b = [i for i, s in enumerate(results[1]) if s == b"b"]
self.assertEqual(len(which_a) + len(which_b), batch_size)
if len(which_a) > 0 and len(which_b) > 0: saw_both += 1
all_a.extend([results[0][i] for i in which_a])<|fim▁hole|> self.assertAllEqual([99] * len(which_b),
[results[0][i] for i in which_b])
# Some minimum level of mixing of the results of both threads.
self.assertGreater(saw_both, 1)
# Saw all the items from "a", but scrambled.
self.assertItemsEqual(all_a, range(num_a))
deltas = [all_a[i + 1] - all_a[i]
for i in range(len(all_a) - 1)]
self.assertFalse(all(d == deltas[0] for d in deltas))
self.assertEqual(seen_b, num_b)
# Reached the limit.
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(batched)
for thread in threads:
thread.join()
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | seen_b += len(which_b) |
<|file_name|>qcframe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2002-2014 The ProteinDF project
# see also AUTHORS and README.
#
# This file is part of ProteinDF.
#
# ProteinDF is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ProteinDF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ProteinDF. If not, see <http://www.gnu.org/licenses/>.
import shutil
from collections import OrderedDict
import math
import os
import copy
import proteindf_tools as pdf
import proteindf_bridge as bridge
from .qcfragment import QcFragment
import logging
logger = logging.getLogger(__name__)
class QcFrame(object):
_pdfparam_filename = "pdfparam.mpac"
_db_filename = "pdfresults.h5"
TOO_SMALL = 1.0e-5
# ------------------------------------------------------------------
def __init__(self, name, *args, **kwargs):
"""
create QcFrame object.
name: name of the frame molecule
"""
# mandatory parameter
self._name = name
self._fragments = OrderedDict()
self._charge = 0
self._state = {} # state storage
self._cmds = self._get_default_cmds() # external commands
self._initialize()
self._prepare_work_dir()
self._load()
# cache data
self._cache = {}
if (len(args) > 0) and isinstance(args[0], QcFrame):
self._copy_constructor(args[0])
# copy constructor
def _copy_constructor(self, rhs):
self._name = rhs._name
self._fragments = copy.deepcopy(rhs._fragments)
self._charge = rhs._charge
self._state = copy.deepcopy(rhs._state)
self._cmds = copy.copy(rhs._cmds)
# def __del__(self):
# self._save()
def _initialize(self, *args, **kwargs):
pass
def _get_default_cmds(self):
cmds = {}
cmds["lo"] = "lo"
cmds["mat-extend"] = "mat-extend"
cmds["mat-mul"] = "mat-mul"
cmds["mat-select"] = "mat-select"
cmds["mat-symmetrize"] = "mat-symmetrize"
cmds["mat-transpose"] = "mat-transpose"
cmds["mat-diagonal"] = "mat-diagonal"
cmds["archive"] = "archive-h5"
return cmds
# save & load ------------------------------------------------------
def _load(self):
path = os.path.join(self.work_dir, "qcframe.mpac")
if os.path.exists(path):
logger.info("load the fragment state: {}".format(path))
state_dat = bridge.load_msgpack(path)
self.set_by_raw_data(state_dat)
else:
logger.debug("not found the state file")
def save(self):
path = os.path.join(self.work_dir, "qcframe.mpac")
# logger.info('save the fragment state: {}'.format(path))
state_dat = self.get_raw_data()
bridge.save_msgpack(state_dat, path)
def get_raw_data(self):
return self.__getstate__()
def set_by_raw_data(self, raw_data):
self.__setstate__(raw_data)
def __getstate__(self):
state = {}
state["name"] = self.name
tmp_frgs = []
for k, frg in self.fragments():
tmp_frgs.append((k, frg.get_raw_data()))
state["fragments"] = tmp_frgs
state["charge"] = self.charge
state["state"] = self._state
state["cmds"] = self._cmds
return state
def __setstate__(self, state):
assert isinstance(state, dict)
self._name = state.get("name")
self._fragments = OrderedDict()
if "fragments" in state:
for (k, frg) in state.get("fragments"):
self._fragments[k] = QcFragment(frg, parent=self)
self.charge = state.get("charge", 0)
self._state = state.get("state", {})
self._cmds.update(state.get("cmds", self._get_default_cmds()))
# pdfparam ---------------------------------------------------------
def _get_pdfparam(self):
"""
Return the pdfparam object.
"""
pdfparam_path = os.path.abspath(
os.path.join(self.work_dir, self._pdfparam_filename)
)
if "pdfparam" not in self._cache:
if os.path.exists(pdfparam_path):
mpac_data = bridge.load_msgpack(pdfparam_path)
logger.debug("pdfparam({}) is loaded.".format(pdfparam_path))
self._cache["pdfparam"] = pdf.PdfParam(mpac_data)
else:
pdfsim = pdf.PdfSim()
self._cache["pdfparam"] = pdf.get_default_pdfparam()
logger.debug("use default pdfparam.")
else:
logger.debug("pdfparam is cached.")
return self._cache["pdfparam"]
pdfparam = property(_get_pdfparam)
# DB ---------------------------------------------------------------
def set_db_filename(self, filename):
assert filename is not None
self._db_filename = str(filename)
logger.debug("set_db_filename: {}".format(self._db_filename))
def _get_db_path(self):
db_path = os.path.abspath(os.path.join(self.work_dir, self._db_filename))
logger.debug("db_filename: {}".format(self._db_filename))
return db_path
db_path = property(_get_db_path)
def get_pdfarchive(self):
"""
Return the PdfArchive object.
"""
logger.debug("get_pdfarchive db_path={}".format(self.db_path))
pdfarc = None
if self._cmds.get("archive", None) == "archive":
pdfarc = pdf.PdfArchive(self.db_path)
else:
pdfarc = pdf.PdfParam_H5(self.db_path)
return pdfarc
# ==================================================================
# PROPERTIES
# ==================================================================
# command alias ----------------------------------------------------
def set_command_alias(self, cmd_alias_dict):
for k, v in cmd_alias_dict.items():
logger.debug("command update: {} -> {}".format(k, v))
self._cmds[k] = v
# work_dir ---------------------------------------------------------
def _get_work_dir(self):
return self._work_dir
work_dir = property(_get_work_dir)
# name -------------------------------------------------------------
def _get_name(self):
return self._name
name = property(_get_name)
# basisset ---------------------------------------------------------
def _set_basisset(self, pdfparam):
for fragment_name, fragment in self.fragments():
fragment.set_basisset(pdfparam)
# frame_molecule ---------------------------------------------------
def _get_frame_molecule(self):
"""
Return the modeled molecular structure as an AtomGroup object.
"""
if "frame_molecule" not in self._cache:
logger.info("create frame molecule coordinates.")
frame_molecule = bridge.AtomGroup()
for frg_name, frg in self._fragments.items():
logger.info(
"fragment name={name}: atoms={atoms}, elec={elec}, charge={charge}".format(
name=frg_name,
atoms=frg.get_number_of_all_atoms(),
elec=frg.sum_of_atomic_number(),
charge=frg.get_AtomGroup().charge,
)
)
frame_molecule[frg_name] = frg.get_AtomGroup()
self._cache["frame_molecule"] = frame_molecule
logger.info("")
return self._cache["frame_molecule"]
frame_molecule = property(_get_frame_molecule)
# fragment_atom_ids ------------------------------------------------
def _get_fragments_atom_ids(self):
fragments_atom_ids = []
for fragment_name, fragment in self.fragments():
fragment_atomgroup = fragment.get_AtomGroup()
fragment_atomgroup *= pdf.ANG2AU # angstrom -> a.u.
fragment_atom_list = fragment_atomgroup.get_atom_list()
atom_id_list = []
for atom in fragment_atom_list:
atom_id = int(self.pdfparam.find_atom_index(atom))
if atom_id == -1:
logger.critical("not found atom index: {}".format(str(atom)))
atom_id_list.append(atom_id)
fragments_atom_ids.append(atom_id_list)
return fragments_atom_ids
fragments_atom_ids = property(_get_fragments_atom_ids)
# work dir ---------------------------------------------------------
def _prepare_work_dir(self):
"""
Create a working directory, named after this frame, under the current directory.
"""
# assert(len(self.name) > 0)
if len(self.name) == 0:
logger.critical("frame name is not defined.")
raise RuntimeError("frame name is not defined.")
self._work_dir = os.path.abspath(os.path.join(os.curdir, self.name))
if not os.path.exists(self.work_dir):
logger.info(
"{header} make work dir: {path}".format(
header=self.header, path=self.work_dir
)
)
os.mkdir(self.work_dir)
else:
logger.debug(
"{header} already exist: {path}".format(
header=self.header, path=self.work_dir
)
)
def cd_work_dir(self, job_name=""):
"""
Change the current directory to this object's work_dir.
"""
logger.info("=" * 20)
logger.info(
"{header} > {job_name}@{frame_name}".format(
header=self.header, job_name=job_name, frame_name=self.name
)
)
logger.debug(
"{header} work dir: {work_dir}".format(
header=self.header, work_dir=self.work_dir
)
)
logger.info("=" * 20)
self._prev_dir = os.path.abspath(os.curdir)
os.chdir(self.work_dir)
def restore_cwd(self):
"""
Return to the directory that was current before self.cd_work_dir().
"""
os.chdir(self._prev_dir)
logger.debug(
"{header} < (prev_dir: {path})".format(
header=self.header, path=self._prev_dir
)
)
def _check_path(self, path):
if not os.path.exists(path):
logger.warn(
"{header} NOT FOUND: {path}".format(header=self.header, path=path)
)
# charge -----------------------------------------------------------
def _get_charge(self):
return int(self._charge)
def _set_charge(self, charge):
self._charge = int(charge)
charge = property(_get_charge, _set_charge)
# num_of_AOs -------------------------------------------------------
def get_number_of_AOs(self):
"""
return the number of atomic orbitals.
"""
num_of_AOs = 0
for frg_name, frg in self.fragments():
num_of_AOs += frg.get_number_of_AOs()
return num_of_AOs
# ==================================================================
# STATE
# ==================================================================
# guess_density ----------------------------------------------------
def _get_state_finished_guess_density(self):
self._state.setdefault("is_finished_guess_density", False)
return self._state["is_finished_guess_density"]
def _set_state_finished_guess_density(self, yn):
self._state["is_finished_guess_density"] = bool(yn)
is_finished_guess_density = property(
_get_state_finished_guess_density, _set_state_finished_guess_density
)
# guess_QCLO -------------------------------------------------------
def _get_state_finished_guess_QCLO(self):
self._state.setdefault("is_finished_guess_QCLO", False)
return self._state["is_finished_guess_QCLO"]
def _set_state_finished_guess_QCLO(self, yn):
self._state["is_finished_guess_QCLO"] = bool(yn)
is_finished_guess_QCLO = property(
_get_state_finished_guess_QCLO, _set_state_finished_guess_QCLO
)
# pre-SCF ----------------------------------------------------------
def _get_state_finished_prescf(self):
self._state.setdefault("is_finished_prescf", False)
return self._state["is_finished_prescf"]
def _set_state_finished_prescf(self, yn):
self._state["is_finished_prescf"] = bool(yn)
is_finished_prescf = property(
_get_state_finished_prescf, _set_state_finished_prescf
)
# SCF --------------------------------------------------------------
def _get_state_finished_scf(self):
self._state.setdefault("is_finished_scf", False)
return self._state["is_finished_scf"]
def _set_state_finished_scf(self, yn):
self._state["is_finished_scf"] = bool(yn)
is_finished_scf = property(_get_state_finished_scf, _set_state_finished_scf)
# Force ------------------------------------------------------------
def _get_state_finished_force(self):
self._state.setdefault("is_finished_force", False)
return self._state["is_finished_force"]
def _set_state_finished_force(self, yn):
self._state["is_finished_force"] = bool(yn)
is_finished_force = property(_get_state_finished_force, _set_state_finished_force)
# pick density matrix ---------------------------------------------
def _get_state_finished_pickup_density_matrix(self):
self._state.setdefault("is_finished_pickup_density_matrix", False)
return self._state["is_finished_pickup_density_matrix"]
def _set_state_finished_pickup_density_matrix(self, yn):
self._state["is_finished_pickup_density_matrix"] = bool(yn)
is_finished_pickup_density_matrix = property(
_get_state_finished_pickup_density_matrix,
_set_state_finished_pickup_density_matrix,
)
# LO ---------------------------------------------------------------
def _get_state_finished_LO(self):
self._state.setdefault("is_finished_LO", False)
return self._state["is_finished_LO"]
def _set_state_finished_LO(self, yn):
self._state["is_finished_LO"] = bool(yn)
is_finished_LO = property(_get_state_finished_LO, _set_state_finished_LO)
# pickup LO --------------------------------------------------------
def _get_state_finished_pickup_LO(self):
self._state.setdefault("is_finished_pickup_LO", False)
return self._state["is_finished_pickup_LO"]
def _set_state_finished_pickup_LO(self, yn):
self._state["is_finished_pickup_LO"] = bool(yn)
is_finished_pickup_LO = property(
_get_state_finished_pickup_LO, _set_state_finished_pickup_LO
)
# ==================================================================
# GUESS
# ==================================================================
# guess density ----------------------------------------------------
def guess_density(self, run_type="rks", force=False):
if (self.is_finished_guess_density == True) and (force == False):
logger.info("guess_density has been calced.")
return
self.cd_work_dir("guess_density")
guess_density_matrix_path = "guess.density.{}.mat".format(run_type)
# remove any existing data
if os.path.exists(guess_density_matrix_path):
os.remove(guess_density_matrix_path)
pdfsim = pdf.PdfSim()
pdfsim.setup()
for frg_name, frg in self.fragments():
logger.info(
"fragment name={}: {} atoms".format(
frg_name, frg.get_number_of_all_atoms()
)
)
if frg.parent == None:
logger.warn(
"guess_density(): parent == None. frg_name={}".format(frg_name)
)
frg.set_command_alias(self._cmds)
frg_guess_density_matrix_path = frg.prepare_guess_density_matrix(run_type)
logger.debug(
"guess_density() [{}@{}] ext: {} from {}".format(
frg_name,
frg.parent.name,
guess_density_matrix_path,
frg_guess_density_matrix_path,
)
)
if os.path.exists(frg_guess_density_matrix_path):
pdf.run_pdf(
[
self._cmds["mat-extend"],
"-d",
guess_density_matrix_path,
frg_guess_density_matrix_path,
guess_density_matrix_path,
]
)
else:
logger.warn(
"not found: frg.guess.dens.mat={}".format(
frg_guess_density_matrix_path
)
)
self.pdfparam.guess = "density_matrix"
logger.info(
"initial guess (density matrix) created at {}".format(
guess_density_matrix_path
)
)
# check
self._check_path(guess_density_matrix_path)
self.is_finished_guess_density = True
self.save()
self.restore_cwd()
def guess_QCLO(self, run_type="rks", force=False, isCalcOrthogonalize=False):
"""create guess by using QCLO method"""
if (self.is_finished_guess_QCLO == True) and (force == False):
logger.info("guess_density has been calced.")
return
self.cd_work_dir("guess_QCLO")
guess_QCLO_matrix_path = "guess.QCLO.{}.mat".format(run_type)
if os.path.exists(guess_QCLO_matrix_path):
os.remove(guess_QCLO_matrix_path)
num_of_AOs = 0
for frg_name, frg in self.fragments():
logger.info(
"guess QCLO: frg_name={}, parent={}".format(frg_name, frg.parent.name)
)
frg.set_command_alias(self._cmds)
frg_QCLO_matrix_path = frg.prepare_guess_QCLO_matrix(
run_type, self, force=force
)
if os.path.exists(frg_QCLO_matrix_path):
pdf.run_pdf(
[
self._cmds["mat-extend"],
"-c",
guess_QCLO_matrix_path,
frg_QCLO_matrix_path,
guess_QCLO_matrix_path,
]
)
else:
logger.warn(
"The QCLO of the subgroup, {}, was not created.".format(frg_name)
)
# orthogonalize
guess_path = "guess.lcao.{}.mat".format(run_type)
if isCalcOrthogonalize:
if self.is_finished_prescf != True:
self.calc_preSCF()
logger.info("orthogonalize")
Xinv_path = self.pdfparam.get_Xinv_mat_path()
self._check_path(guess_QCLO_matrix_path)
pdf.run_pdf(
[
self._cmds["mat-mul"],
"-v",
Xinv_path,
guess_QCLO_matrix_path,
guess_path,
]
)
else:
shutil.copy(guess_QCLO_matrix_path, guess_path)
self.pdfparam.guess = "lcao"
logger.info("guess LCAO matrix created: {}".format(guess_path))
# check
self._check_path(guess_QCLO_matrix_path)
self.is_finished_guess_QCLO = True
self.save()
self.restore_cwd()
# create occ file
self._create_occupation_file(run_type)
def _create_occupation_file(self, run_type="rks"):
self.cd_work_dir("create occ")
self._setup_pdf()
occ_level = -1
electrons_per_orb = 0.0
run_type = run_type.upper()
if run_type == "RKS":
occ_level = int((self.pdfparam.num_of_electrons / 2.0))
electrons_per_orb = 2.0
else:
logger.critical(
"{header} NOT supported. run_type={run_type}".format(
header=self.header, run_type=run_type
)
)
# num_of_MOs = self.pdfparam.num_of_MOs
# occ_vtr = pdf.Vector(num_of_MOs)
occ_vtr = pdf.Vector(occ_level)
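# one entry per occupied orbital; RKS places 2.0 electrons in each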
for i in range(occ_level):
occ_vtr.set(i, electrons_per_orb)
occ_vtr_path = "guess.occ.{}.vtr".format(run_type.lower())
occ_vtr.save(occ_vtr_path)
self._check_path(occ_vtr_path)
self.save()
self.restore_cwd()
# ==================================================================
# CALC
# ==================================================================
def _setup_pdf(self):
logger.info("{header} setup ProteinDF condition".format(header=self.header))
for frg_name, frg in self.fragments():
frg.set_basisset(self.pdfparam)
self.pdfparam.molecule = self.frame_molecule
# num_of_electrons
# calc from the molecule data
num_of_electrons = self.frame_molecule.sum_of_atomic_number()
logger.info(
"{header} the number of electrons = {elec}".format(
header=self.header, elec=num_of_electrons
)
)
if self.charge != 0:
logger.info("specify the charge => {}".format(self.charge))
num_of_electrons -= self.charge # note: the electron count (-) and the charge (+) have opposite signs
self.pdfparam.num_of_electrons = num_of_electrons
logger.info(
"{header} update the number of electrons => {elec}".format(
header=self.header, elec=self.pdfparam.num_of_electrons
)
)
if self.pdfparam.num_of_electrons % 2 != 0:
logger.warning(
"{header} the number of electrons is not even.".format(
header=self.header
)
)
# ------------------------------------------------------------------
def calc_preSCF(self, dry_run=False):
""" """
if self.is_finished_prescf:
logger.info("preSCF has been calced.")
return
self.cd_work_dir("calc preSCF")
self.check_bump_of_atoms()
self._setup_pdf()
self.pdfparam.step_control = "integral"
self.save()
pdfsim = pdf.PdfSim()
pdfsim.sp(
self.pdfparam,
workdir=self.work_dir,
db_path=self.db_path,
dry_run=dry_run,
cmd_archive=self._cmds["archive"],
)
self._cache.pop("pdfparam")
self.is_finished_prescf = True
self.save()
self.restore_cwd()
# sp ---------------------------------------------------------------
def calc_sp(self, dry_run=False):
"""
calculate single point energy
"""
if self.is_finished_scf:
logger.info("SP has been calced.")
self._grouping_fragments()
self._switch_fragments()
return
if self.is_finished_prescf != True:
self.calc_preSCF(dry_run)
self.cd_work_dir("calc SP")
self.check_bump_of_atoms()
self._setup_pdf()
# self.output_xyz("{}/model.xyz".format(self.name))
self.pdfparam.step_control = "guess scf"
self.save()
pdfsim = pdf.PdfSim()
pdfsim.sp(
self.pdfparam,
workdir=self.work_dir,
db_path=self.db_path,
dry_run=dry_run,
cmd_archive=self._cmds["archive"],
)
self._cache.pop("pdfparam")
self.is_finished_scf = True
self._grouping_fragments()
self._switch_fragments()
self.save()
self.restore_cwd()
# gradient ----------------------------------------------------------------
def calc_force(self, dry_run=False):
"""
calculate force (energy gradient)
absolute: force -> gradient
"""
if self.is_finished_force:
logger.info("force has been calced.")
return
if self.is_finished_scf != True:
self.calc_sp(dry_run)
self.cd_work_dir("calc force")
self._setup_pdf()
self.pdfparam.step_control = "force"
self.save()
pdfsim = pdf.PdfSim()
# for frg_name, frg in self.fragments():
# frg.set_basisset(self.pdfparam)
# self.pdfparam.molecule = self.frame_molecule
#
# # num_of_electrons
# num_of_electrons = self.pdfparam.num_of_electrons # calc from the molecule data
# logger.info('the number of electrons = {}'.format(num_of_electrons))
# if self.charge != 0:
# logger.info('specify the charge => {}'.format(self.charge))
# num_of_electrons -= self.charge # note: the electron count (-) and the charge (+) have opposite signs
# self.pdfparam.num_of_electrons = num_of_electrons
# logger.info('update the number of electrons => {}'.format(self.pdfparam.num_of_electrons))
pdfsim.sp(
self.pdfparam,
workdir=self.work_dir,
db_path=self.db_path,
dry_run=dry_run,
cmd_archive=self._cmds["archive"],
)
self._cache.pop("pdfparam")
self.is_finished_force = True
self.save()
self.restore_cwd()
# summary ------------------------------------------------------------------
def summary(self, dry_run=False, format_str=None, filepath=None):
"""
Format:
{NUM_OF_ATOMS}: number of atoms
{NUM_OF_AO}: number of AOs
{NUM_OF_MO}: number of MOs
{METHOD}: method
{IS_CONVERGED}: Whether the SCF is converged or not
{ITERATION}: iteration
{TOTAL_ENERGY}: total energy
{GRADIENT_RMS}: gradient RMS
"""
if self.is_finished_scf != True:
self.calc_sp(dry_run)
self.cd_work_dir("summary")
values = {}
pdfarc = self.get_pdfarchive()
values["NUM_OF_ATOMS"] = pdfarc.num_of_atoms
values["NUM_OF_AO"] = pdfarc.num_of_AOs
values["NUM_OF_MO"] = pdfarc.num_of_MOs
values["METHOD"] = pdfarc.method
values["IS_CONVERGED"] = pdfarc.scf_converged
itr = pdfarc.iterations
values["ITERATION"] = itr
values["TOTAL_ENERGY"] = pdfarc.get_total_energy(itr)
values["GRADIENT_RMS"] = pdfarc.get_gradient_rms()
if format_str == None:
format_str = "total energy: {TOTAL_ENERGY} at {ITERATION}"
output = format_str.format(**values)
if output[-1] != "\n":
output += "\n"
logger.info(output)
if filepath != None:
with open(filepath, "a") as f:
f.write(output)
self.restore_cwd()
return output
def get_gradient(self):
""" """
self.cd_work_dir("get_gradient")
pdfarc = self.get_pdfarchive()
num_of_atoms = pdfarc.num_of_atoms
grad = [None] * num_of_atoms
for atom_index in range(num_of_atoms):
grad[atom_index] = pdfarc.get_force(atom_index)
self.restore_cwd()
return grad
# pop --------------------------------------------------------------
def pop(self, dry_run=False, iteration=-1):
""" """
if self.is_finished_scf != True:
self.calc_sp(dry_run)
if iteration == -1:
iteration = self.pdfparam.iterations
self._calc_pop(iteration=iteration)
pop_vtr = self.get_pop(iteration)
self.save()
self.restore_cwd()
return pop_vtr
def _calc_pop(self, iteration=-1, dry_run=False):
""" """
if iteration == -1:
iteration = self.pdfparam.iterations
self.cd_work_dir("calc pop: iteration={}".format(iteration))
pdfsim = pdf.PdfSim()
pdfsim.pop(iteration=iteration, dry_run=dry_run)
self.restore_cwd()
def get_pop(self, iteration=-1):
""" """
if iteration == -1:
iteration = self.pdfparam.iterations
self.cd_work_dir("get pop: iteration={}".format(iteration))
run_type = "rks"
pop_path = self.pdfparam.get_pop_mulliken_path(run_type, iteration=iteration)
pop_vtr = pdf.Vector()
pop_vtr.load(pop_path)
self.restore_cwd()
return pop_vtr
# ==================================================================
# PICKUP
# ==================================================================
# pickup density matrix --------------------------------------------
def pickup_density_matrix(self, runtype="rks"):
"""
Assign the corresponding block of the density matrix to each fragment.
"""
if self.is_finished_pickup_density_matrix:
logger.info(
"{header} pickup density matrix has done.".format(header=self.header)
)
return
self.cd_work_dir("pickup density matrix")
# post-SCF
self._grouping_fragments()
self._switch_fragments()
dens_mat_path = self.pdfparam.get_density_matrix_path(runtype=runtype)
logger.info(
"{header} reference density matrix: {path}".format(
header=self.header, path=dens_mat_path
)
)
global_dim = 0
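# Fragments are assumed to occupy consecutive AO blocks, so the block
# [global_dim, global_dim + dim - 1] of the full density matrix belongs
# to the current fragment.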
for frg_name, frg in self.fragments():
dim = frg.get_number_of_AOs()
if dim > 0:
frg_dens_mat_path = "Ppq.{}.{}.mat".format(runtype, frg_name)
logger.info(
"{header} select [{start}:{end}] for {fragment}".format(
header=self.header,
fragment=frg_name,
start=global_dim,
end=global_dim + dim - 1,
)
)
# cut out the block corresponding to this fragment
pdf.run_pdf(
[
self._cmds["mat-select"],
"-t",
global_dim,
"-l",
global_dim,
"-b",
global_dim + dim - 1,
"-r",
global_dim + dim - 1,
dens_mat_path,
frg_dens_mat_path,
]
)
# convert the selected matrix into a symmetric matrix
pdf.run_pdf(
[self._cmds["mat-symmetrize"], frg_dens_mat_path, frg_dens_mat_path]
)
logger.debug(
"{header} density matrix for {fragment} was saved as {path}".format(
header=self.header, fragment=frg_name, path=frg_dens_mat_path
)
)
is_loadable = pdf.SymmetricMatrix.is_loadable(frg_dens_mat_path)
assert is_loadable
(row, col) = pdf.SymmetricMatrix.get_size(frg_dens_mat_path)
assert row == dim
assert row == col
# register the symmetric matrix path with the fragment
frg.set_density_matrix(frg_dens_mat_path)
global_dim += dim
self.is_finished_pickup_density_matrix = True
self.save()
self.restore_cwd()
# ------------------------------------------------------------------
def calc_lo(self, run_type, force=False, dry_run=False):
if self.is_finished_LO and not force:
logger.info("LO has done.")
return
if not self.is_finished_scf:
self.calc_sp(dry_run=dry_run)
self.cd_work_dir("calc lo")
# make atom groups for LO
fragments_atom_ids_path = "fragments_atom_id.mpac"
fragments_atom_ids = self.fragments_atom_ids
logger.info("save fragment atom ids as {}".format(fragments_atom_ids_path))
bridge.save_msgpack(fragments_atom_ids, fragments_atom_ids_path)
logger.info("start lo calculation.")
pdf.run_pdf(self._cmds["lo"])
self.is_finished_LO = True
self.save()
self.restore_cwd()
# ------------------------------------------------------------------
def pickup_QCLO_matrix(self, run_type="rks", force=False):
if self.is_finished_pickup_LO and not force:
logger.info("pickup LO has been finished.")
return
self.calc_lo(run_type, force)
self.cd_work_dir("pickup lo")
# post-SCF
self._grouping_fragments()
self._switch_fragments()
# debug
logger.debug("pickup_QCLO_matrix frame: {}".format(self._name))
pdfarc = self.get_pdfarchive()
num_of_AOs = pdfarc.num_of_AOs
num_of_MOs = pdfarc.num_of_MOs
HOMO_level = pdfarc.get_HOMO_level("rks") # option base 0
logger.info("num of AOs: {}".format(num_of_AOs))
logger.info("num of MOs: {}".format(num_of_MOs))
logger.info("HOMO level: {}".format(HOMO_level + 1))
logger.info("fragment information:")
for frg_name, frg in self.fragments():
frg_AOs = frg.get_number_of_AOs()
logger.info("fragment name:[{}] AOs={}".format(frg_name, frg_AOs))
logger.info("")
# calc S*C
if "pdfparam" in self._cache:
self._cache.pop("pdfparam")
lo_satisfied = self.pdfparam.lo_satisfied
if not lo_satisfied:
logger.warning("lo_satisfied: {}".format(lo_satisfied))
lo_iterations = self.pdfparam.lo_num_of_iterations
logger.info("lo iterations: {}".format(lo_iterations))
logger.info("calc S*C")
CSC_path = "CSC.mat"
Clo_path = self.pdfparam.get_clo_mat_path()
pdf.run_pdf(["component", "-v", "-S", "CSC.mat", "-c", Clo_path])
# load CSC
CSC = pdf.Matrix()
CSC.load(CSC_path)
logger.info("{header} make AO v.s. fragment table".format(header=self.header))
AO_frg_tbl = self._get_AO_fragment_table(num_of_AOs)
# pickup
logger.info(
"{header} assign fragment: start: HOMO={homo}".format(
header=self.header, homo=HOMO_level
)
)
MO_fragment_assigned = {}
for mo in range(HOMO_level + 1):
frg_name = self._define_lo_fragment(mo, num_of_AOs, AO_frg_tbl, CSC)
logger.info(
"{header} #{mo} MO -> fragment: '{frg_name}'".format(
header=self.header, mo=mo, frg_name=frg_name
)
)
MO_fragment_assigned.setdefault(frg_name, [])
MO_fragment_assigned[frg_name].append(mo)
logger.info("{header} assign fragment: end".format(header=self.header))
# assign report
logger.info("==== assign report ====")
for k, MOs in MO_fragment_assigned.items():
logger.info(
"{header} fragment '{frag_name}' has {mo} MO(s)".format(
header=self.header, frag_name=k, mo=len(MOs)
)
)
# build the C_LO matrix of each fragment
logger.info("{header} create C_LO: start".format(header=self.header))
Clo = pdf.Matrix()
Clo.load(Clo_path)
assert num_of_AOs == Clo.rows
for frg_name, frg in self.fragments():
frg_cols = len(MO_fragment_assigned.get(frg_name, []))
logger.info(
"{header} fragment '{frg_name}': col={col}".format(
header=self.header, frg_name=frg_name, col=frg_cols
)
)
if frg_cols == 0:
logger.warning(
"{header} fragment '{frg_name}' has no colomns.".format(
header=self.header, frg_name=frg_name
)
)
# continue
Clo_frg = pdf.Matrix(num_of_AOs, frg_cols)
if frg_name in MO_fragment_assigned:
for col, ref_col in enumerate(MO_fragment_assigned[frg_name]):
for row in range(num_of_AOs):
v = Clo.get(row, ref_col)
Clo_frg.set(row, col, v)
Clo_path = "Clo_{}.mat".format(frg_name)
logger.debug(
"{header} fragment C_LO save: {path}".format(
header=self.header, path=Clo_path
)
)
Clo_frg.save(Clo_path)
frg.set_LO_matrix(Clo_path, run_type)
logger.info("{header} create C_LO: end".format(header=self.header))
# trans C_LO to QCLO
self._trans_LO2QCLO()
# finish
self.is_finished_pickup_LO = True
self.save()
self.restore_cwd()
def _get_AO_fragment_table(self, num_of_AOs):
"""
AO v.s. fragment_name の辞書を返す
"""
frg_table = [None for x in range(num_of_AOs)]
AO_index = 0
for frg_name, frg in self.fragments():
frg_num_of_AOs = frg.get_number_of_AOs()
for i in range(AO_index, AO_index + frg_num_of_AOs):
frg_table[i] = frg_name
AO_index += frg_num_of_AOs
<|fim▁hole|> def _define_lo_fragment(self, mo, num_of_AOs, AO_frg_tbl, CSC):
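"""
Score each fragment by its accumulated |CSC(ao, mo)| weight for MO #mo
and return the name of the highest-scoring fragment.
"""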
judge = {}
total = 0.0
for ao in range(num_of_AOs):
frg_name = AO_frg_tbl[ao]
v = math.fabs(CSC.get(ao, mo))
total += v
judge.setdefault(frg_name, 0.0)
judge[frg_name] += v
for frg_name in judge.keys():
judge[frg_name] /= total
ranked_judge = sorted(judge.items(), key=lambda x: x[1], reverse=True)
for rank, (k, v) in enumerate(ranked_judge):
logger.info(
"{header} [{rank}] name:{name}, score:{score:.3f}".format(
header=self.header, rank=rank + 1, name=k, score=v
)
)
high_score = ranked_judge[0][1]
if high_score < 0.5:
logger.warning(
"{header} 1st score is too small: {score}".format(
header=self.header, score=high_score
)
)
return ranked_judge[0][0]
def _trans_LO2QCLO(self):
logger.info("trans LO at {}".format(os.getcwd()))
run_type = "rks"
F_path = self.pdfparam.get_f_mat_path(run_type)
logger.info("F matrix: {}".format(F_path))
for frg_name, frg in self.fragments():
C_QCLO_path = "C_QCLO.{}.mat".format(frg_name) # output for each fragment
frg_AO = frg.get_number_of_AOs()
logger.info(
"{header} fragment '{name}' has {ao} AO(s)".format(
header=self.header, name=frg_name, ao=frg_AO
)
)
if frg.get_number_of_AOs() != 0:
Clo_path = frg.get_LO_matrix_path(run_type)
assert Clo_path is not None
# calc (C_LO)dagger * F * C_LO => F'
F_Clo_path = "F_Clo.{}.mat".format(frg_name)
pdf.run_pdf([self._cmds["mat-mul"], "-v", F_path, Clo_path, F_Clo_path])
Clo_dagger_path = "Clo_dagger.{}.mat".format(frg_name)
pdf.run_pdf(
[self._cmds["mat-transpose"], "-v", Clo_path, Clo_dagger_path]
)
F_prime_path = "Fprime.{}.mat".format(frg_name)
pdf.run_pdf(
[
self._cmds["mat-mul"],
"-v",
Clo_dagger_path,
F_Clo_path,
F_prime_path,
]
)
pdf.run_pdf([self._cmds["mat-symmetrize"], F_prime_path, F_prime_path])
# diagonalize F'
eigval_path = "QCLO_eigval.{}.vtr".format(frg_name)
Cprime_path = "Cprime.{}.mat".format(frg_name)
logger.info("diagonal F'")
pdf.run_pdf(
[
self._cmds["mat-diagonal"],
"-v",
"-l",
eigval_path,
"-x",
Cprime_path,
F_prime_path,
]
)
# transform back to the AO basis
pdf.run_pdf(
[self._cmds["mat-mul"], "-v", Clo_path, Cprime_path, C_QCLO_path]
)
else:
logger.info(
"{header} create empty QCLO matrix.".format(header=self.header)
)
empty_mat = pdf.Matrix()
empty_mat.save(C_QCLO_path)
frg.set_QCLO_matrix(C_QCLO_path)
logger.info("C_QCLO saved: {}".format(C_QCLO_path))
# =================================================================
# for fragments
# =================================================================
def fragments(self):
"""
フラグメントの名前とオブジェクトを返すイテレータ
"""
for k in self._fragments.keys():
yield (k, self._fragments[k])
def has_fragment(self, fragment_name):
"""
フラグメントを持っていればTrueを返す
"""
fragment_name = bridge.StrUtils.to_unicode(fragment_name)
return fragment_name in self._fragments.keys()
# operator[] -------------------------------------------------------
def __getitem__(self, fragment_name):
"""
出力用[]演算子
"""
fragment_name = bridge.StrUtils.to_unicode(fragment_name)
return self._fragments.get(fragment_name, None)
def __setitem__(self, fragment_name, obj):
"""
入力用[]演算子
計算前であれば代入可能(つまりモデリング中)であるが、
計算後は代入できない
"""
if self.is_finished_scf:
logger.debug("rearrangement of fragments is prohibited after calculation.")
return
if "frame_molecule" in self._cache:
self._cache.pop("frame_molecule")
fragment_name = bridge.StrUtils.to_unicode(fragment_name)
if isinstance(obj, QcFragment):
fragment = QcFragment(obj)
fragment.parent = self
fragment.name = fragment_name
logger.debug(
"[{my_name}] add fragment: name={fragment_name}".format(
my_name=self.name, fragment_name=fragment_name
)
)
self._fragments[fragment_name] = fragment
elif isinstance(obj, QcFrame):
logger.info(
"begin to register frame molecule: for {}".format(fragment_name)
)
fragment = QcFragment()
fragment.parent = self
fragment.name = fragment_name
for k, f in obj.fragments():
if not f.margin:
logger.warning(
"add fragment: fragment={} for {}".format(k, fragment_name)
)
fragment.set_group(k, f)
else:
logger.warn("pass fragment: fragment={} is margin".format(k))
self._fragments[fragment_name] = fragment
logger.info(
"end of registration frame molecule: for {}".format(fragment_name)
)
else:
raise TypeError("expected QcFragment or QcFrame, got {}".format(type(obj)))
# rearrangement -----------------------------------------------------------
def _switch_fragments(self):
"""
fragmentsを入力用から出力用に切り替える
処理内容:
- 各fragmentの親を自分(self)にする
"""
logger.info("{header} switch fragment".format(header=self.header))
output_fragments = OrderedDict()
for frg_name, frg in self.fragments():
logger.info(
"{header} fragment_name: {name}".format(
header=self.header, name=frg_name
)
)
new_frg = QcFragment(frg, parent=self)
assert new_frg.parent.name == self.name
output_fragments[frg_name] = new_frg
self._fragments = output_fragments
# logger.info('merge subgroups')
# for key, frg in self.fragments():
# frg.merge_subgroups()
logger.info("{header} ---> switch".format(header=self.header))
for frg_name, frg in self.fragments():
logger.info(
"{header} {frg_name}: parent={parent_name}".format(
header=self.header, frg_name=frg_name, parent_name=frg.parent.name
)
)
logger.info("{header} <---".format(header=self.header))
def _grouping_fragments(self):
logger.info("{header} grouping fragments".format(header=self.header))
for frg_name, frg in self.fragments():
frg.grouping_subfragments()
# ==================================================================
# coordinates
# ==================================================================
# output XYZ -------------------------------------------------------
def output_xyz(self, file_path):
xyz = bridge.Xyz(self.frame_molecule)
xyz.save(file_path)
def check_bump_of_atoms(self):
logger.info("{header} check bump of atoms".format(header=self.header))
atom_list = self.frame_molecule.get_atom_list()
num_of_atoms = len(atom_list)
for i in range(num_of_atoms):
xyz1 = atom_list[i].xyz
for j in range(i):
d = xyz1.distance_from(atom_list[j].xyz)
if d < self.TOO_SMALL:
logger.warning(
"{header} atom[{i}][{atom_i}]({atom_i_path}) is near by atom[{j}][{atom_j}]({atom_j_path})".format(
header=self.header,
i=i,
atom_i=str(atom_list[i]),
atom_i_path=atom_list[i].path,
j=j,
atom_j=str(atom_list[j]),
atom_j_path=atom_list[j].path,
)
)
logger.debug("{header} check_bump of atoms: end".format(header=self.header))
# ==================================================================
# orbital table
# ==================================================================
def get_orbital_info(self):
"""
AOに対するQcOrbitalDataリストを返す
"""
orbinfo = []
for k, frg in self.fragments():
orbinfo.extend(frg.get_orbital_info())
return orbinfo
# ==================================================================
# operators
# ==================================================================
# operator == ------------------------------------------------------
def __eq__(self, rhs):
if rhs is None:
return False
return self.name == rhs.name
def __ne__(self, rhs):
return not self.__eq__(rhs)
# operator str -----------------------------------------------------
def __str__(self):
answer = ""
answer = "frame name={}\n".format(self.name)
for key, fragment in self.fragments():
answer += ">> fragment: {}\n".format(key)
answer += str(fragment)
answer += "\n"
return answer
# ==================================================================
# debug
# ==================================================================
def _get_logger_header(self):
"""return header string for logger"""
header = "{name}>".format(name=self.name)
return header
header = property(_get_logger_header)<|fim▁end|> | return frg_table
|
<|file_name|>Routes.ts<|end_file_name|><|fim▁begin|>import express = require('express');
import poiRouter = require('./poiRouters');
import UserRouter from "./../../modules/user/routing";<|fim▁hole|>var router = express.Router();
//noinspection TypeScriptValidateTypes
router.use("/poi", poiRouter.Routers);
//noinspection TypeScriptValidateTypes
router.use("/user",UserRouter.Routers);
export = router;<|fim▁end|> | |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*-----------------------------------------------------------------------------------
/*
/* Main JS
/*
-----------------------------------------------------------------------------------*/
(function($) {
/*---------------------------------------------------- */
/* Preloader
------------------------------------------------------ */
$(window).load(function() {
// will first fade out the loading animation
$("#status").fadeOut("slow");
// will fade out the whole DIV that covers the website.
$("#preloader").delay(500).fadeOut("slow").remove();
$('.js #hero .hero-image img').addClass("animated fadeInUpBig");
$('.js #hero .buttons a.trial').addClass("animated shake");
})
/*---------------------------------------------------- */
/* Mobile Menu
------------------------------------------------------ */
var toggle_button = $("<a>", {
id: "toggle-btn",
html : "Menu",
title: "Menu",
href : "#" }
);
var nav_wrap = $('nav#nav-wrap')
var nav = $("ul#nav");
/* if JS is enabled, remove the two a.mobile-btns
and dynamically prepend a.toggle-btn to #nav-wrap */
nav_wrap.find('a.mobile-btn').remove();
nav_wrap.prepend(toggle_button);
toggle_button.on("click", function(e) {
e.preventDefault();
nav.slideToggle("fast");
});
if (toggle_button.is(':visible')) nav.addClass('mobile');
$(window).resize(function(){
if (toggle_button.is(':visible')) nav.addClass('mobile');
else nav.removeClass('mobile');
});
$('ul#nav li a').on("click", function(){
if (nav.hasClass('mobile')) nav.fadeOut('fast');
});
/*----------------------------------------------------*/
/* FitText Settings
------------------------------------------------------ */
setTimeout(function() {
$('h1.responsive-headline').fitText(1.2, { minFontSize: '25px', maxFontSize: '40px' });
}, 100);
/*----------------------------------------------------*/
/* Smooth Scrolling
------------------------------------------------------ */
$('.smoothscroll').on('click', function (e) {
e.preventDefault();
var target = this.hash,
$target = $(target);
$('html, body').stop().animate({
'scrollTop': $target.offset().top
}, 800, 'swing', function () {
window.location.hash = target;
});
});
/*----------------------------------------------------*/
/* Highlight the current section in the navigation bar
------------------------------------------------------*/
var sections = $("section"),
navigation_links = $("#nav-wrap a");
sections.waypoint( {
handler: function(event, direction) {
var active_section;
active_section = $(this);
if (direction === "up") active_section = active_section.prev();
var active_link = $('#nav-wrap a[href="#' + active_section.attr("id") + '"]');
navigation_links.parent().removeClass("current");
active_link.parent().addClass("current");
},
offset: '35%'
});
/*----------------------------------------------------*/
/* FitVids
/*----------------------------------------------------*/
$(".fluid-video-wrapper").fitVids();
/*----------------------------------------------------*/
/* Waypoints Animations
------------------------------------------------------ */
$('.js .design').waypoint(function() {
$('.js .design .feature-media').addClass( 'animated pulse' );
}, { offset: 'bottom-in-view' });
$('.js .responsive').waypoint(function() {
$('.js .responsive .feature-media').addClass( 'animated pulse' );
}, { offset: 'bottom-in-view' });
$('.js .cross-browser').waypoint(function() {
$('.js .cross-browser .feature-media').addClass( 'animated pulse' );
}, { offset: 'bottom-in-view' });
$('.js .video').waypoint(function() {
$('.js .video .feature-media').addClass( 'animated pulse' );
}, { offset: 'bottom-in-view' });
$('.js #subscribe').waypoint(function() {
$('.js #subscribe input[type="email"]').addClass( 'animated fadeInLeftBig show' );
$('.js #subscribe input[type="submit"]').addClass( 'animated fadeInRightBig show' );
}, { offset: 'bottom-in-view' });
/*----------------------------------------------------*/
/* Flexslider
/*----------------------------------------------------*/
$('.flexslider').flexslider({
namespace: "flex-",
controlsContainer: ".flex-container",
animation: 'slide',
controlNav: true,
directionNav: false,
smoothHeight: true,
slideshowSpeed: 7000,
animationSpeed: 600,
randomize: false,
});
/*----------------------------------------------------*/
/* ImageLightbox
/*----------------------------------------------------*/
if($("html").hasClass('cssanimations')) {
var activityIndicatorOn = function()
{
$( '<div id="imagelightbox-loading"><div></div></div>' ).appendTo( 'body' );
},
activityIndicatorOff = function()
{
$( '#imagelightbox-loading' ).remove();
},
overlayOn = function()
{
$( '<div id="imagelightbox-overlay"></div>' ).appendTo( 'body' );
},
overlayOff = function()
{
$( '#imagelightbox-overlay' ).remove();
},
closeButtonOn = function( instance )
{
$( '<a href="#" id="imagelightbox-close" title="close"><i class="fa fa fa-times"></i></a>' ).appendTo( 'body' ).on( 'click touchend', function(){ $( this ).remove(); instance.quitImageLightbox(); return false; });
},
closeButtonOff = function()
{
$( '#imagelightbox-close' ).remove();
},
captionOn = function()
{
var description = $( 'a[href="' + $( '#imagelightbox' ).attr( 'src' ) + '"] img' ).attr( 'alt' );
if( description.length > 0 )
$( '<div id="imagelightbox-caption">' + description + '</div>' ).appendTo( 'body' ); <|fim▁hole|> };
var instanceA = $( 'a[data-imagelightbox="a"]' ).imageLightbox(
{
onStart: function() { overlayOn(); closeButtonOn( instanceA ); },
onEnd: function() { overlayOff(); captionOff(); closeButtonOff(); activityIndicatorOff(); },
onLoadStart: function() { captionOff(); activityIndicatorOn(); },
onLoadEnd: function() { captionOn(); activityIndicatorOff(); }
});
}
else {
/*----------------------------------------------------*/
/* prettyPhoto for old IE
/*----------------------------------------------------*/
$("#screenshots").find(".item-wrap a").attr("rel","prettyPhoto[pp_gal]");
$("a[rel^='prettyPhoto']").prettyPhoto( {
animation_speed: 'fast', /* fast/slow/normal */
slideshow: false, /* false OR interval time in ms */
autoplay_slideshow: false, /* true/false */
opacity: 0.80, /* Value between 0 and 1 */
show_title: true, /* true/false */
allow_resize: true, /* Resize the photos bigger than viewport. true/false */
default_width: 500,
default_height: 344,
counter_separator_label: '/', /* The separator for the gallery counter 1 "of" 2 */
theme: 'pp_default', /* light_rounded / dark_rounded / light_square / dark_square / facebook */
hideflash: false, /* Hides all the flash object on a page, set to TRUE if flash appears over prettyPhoto */
wmode: 'opaque', /* Set the flash wmode attribute */
autoplay: true, /* Automatically start videos: True/False */
modal: false, /* If set to true, only the close button will close the window */
overlay_gallery: false, /* If set to true, a gallery will overlay the fullscreen image on mouse over */
keyboard_shortcuts: true, /* Set to false if you open forms inside prettyPhoto */
deeplinking: false,
social_tools: false
});
}
})(jQuery);<|fim▁end|> | },
captionOff = function()
{
$( '#imagelightbox-caption' ).remove(); |
<|file_name|>AODeterminization.cpp<|end_file_name|><|fim▁begin|>#include "../../include/domains/AODeterminization.h"
#include "../../include/domains/DummyAction.h"
AllOutcomesDeterminization::
AllOutcomesDeterminization(mlcore::Problem* problem) {
originalProblem_ = problem;
problem->generateAll();
int s_idx = 0;
for (mlcore::State* s : problem->states()) {
states_.insert(s);
if (s == problem->initialState())
this->s0 = s;
stateIndexMap_[s] = s_idx;
transitionGraph_.push_back(std::unordered_map<int, int>());
allStates_.push_back(s);
s_idx++;
}
int action_idx = 0;
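// Every stochastic outcome of each (state, action) pair of the original
// problem becomes its own deterministic dummy action below, so each
// outcome stays reachable in the determinized model.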
for (mlcore::State* s : problem->states()) {
int s_idx = stateIndexMap_[s];
for (mlcore::Action* a : problem->actions()) {
if (!problem->applicable(s, a))
continue;
for (auto& successor : problem->transition(s, a)) {
int s_prime_idx = stateIndexMap_[successor.su_state];
transitionGraph_[s_idx][action_idx] = s_prime_idx;
actionCosts_.push_back(problem->cost(s, a));
actions_.push_back(new DummyAction(action_idx));
actionsVector_.push_back(actions_.back());
action_idx++;
}
}
}
}
std::list<mlcore::Action*>
AllOutcomesDeterminization::actions(mlcore::State* s) const {
int s_idx = stateIndexMap_.at(s);
std::list<mlcore::Action*> stateActions;<|fim▁hole|> }
return stateActions;
}
bool AllOutcomesDeterminization::goal(mlcore::State* s) const {
return originalProblem_->goal(s);
}
std::list<mlcore::Successor>
AllOutcomesDeterminization::transition(mlcore::State* s, mlcore::Action* a) {
int s_idx = stateIndexMap_[s];
DummyAction* dummya = static_cast<DummyAction*>(a);
int s_prime_idx = transitionGraph_[s_idx][dummya->id()];
std::list<mlcore::Successor> successors;
successors.push_back(mlcore::Successor(allStates_[s_prime_idx], 1.0));
return successors;
}
double
AllOutcomesDeterminization::cost(mlcore::State* s, mlcore::Action* a) const {
DummyAction* dummya = static_cast<DummyAction*>(a);
return actionCosts_[dummya->id()];
}
bool AllOutcomesDeterminization::
applicable(mlcore::State* s, mlcore::Action* a) const {
int s_idx = stateIndexMap_.at(s);
DummyAction* dummya = static_cast<DummyAction*>(a);
return transitionGraph_.at(s_idx).count(dummya->id()) > 0;
}<|fim▁end|> | for (auto& entry : transitionGraph_.at(s_idx)) {
stateActions.push_back(actionsVector_[entry.first]); |
<|file_name|>test_json.py<|end_file_name|><|fim▁begin|>import datetime
import uuid
from decimal import Decimal
from django.core import checks, exceptions, serializers
from django.core.serializers.json import DjangoJSONEncoder
from django.forms import CharField, Form, widgets
from django.test.utils import isolate_apps
from django.utils.html import escape
from . import PostgreSQLTestCase
from .models import JSONModel, PostgreSQLModel
try:
from django.contrib.postgres import forms
from django.contrib.postgres.fields import JSONField
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
def test_null(self):
instance = JSONModel()
instance.save()
loaded = JSONModel.objects.get()
self.assertIsNone(loaded.field)
def test_empty_object(self):
instance = JSONModel(field={})
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, {})
def test_empty_list(self):
instance = JSONModel(field=[])
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, [])
def test_boolean(self):
instance = JSONModel(field=True)
instance.save()
loaded = JSONModel.objects.get()
self.assertIs(loaded.field, True)
def test_string(self):
instance = JSONModel(field='why?')
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, 'why?')
def test_number(self):
instance = JSONModel(field=1)
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, 1)
def test_realistic_object(self):
obj = {
'a': 'b',
'c': 1,
'd': ['e', {'f': 'g'}],
'h': True,
'i': False,
'j': None,
}
instance = JSONModel(field=obj)
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, obj)
def test_custom_encoding(self):
"""
JSONModel.field_custom has a custom DjangoJSONEncoder.
"""
some_uuid = uuid.uuid4()
obj_before = {
'date': datetime.date(2016, 8, 12),
'datetime': datetime.datetime(2016, 8, 12, 13, 44, 47, 575981),
'decimal': Decimal('10.54'),
'uuid': some_uuid,
}
obj_after = {
'date': '2016-08-12',
'datetime': '2016-08-12T13:44:47.575',
'decimal': '10.54',
'uuid': str(some_uuid),
}
JSONModel.objects.create(field_custom=obj_before)
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field_custom, obj_after)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = [
JSONModel.objects.create(field=None),
JSONModel.objects.create(field=True),
JSONModel.objects.create(field=False),
JSONModel.objects.create(field='yes'),
JSONModel.objects.create(field=7),
JSONModel.objects.create(field=[]),
JSONModel.objects.create(field={}),
JSONModel.objects.create(field={
'a': 'b',
'c': 1,
}),
JSONModel.objects.create(field={
'a': 'b',
'c': 1,
'd': ['e', {'f': 'g'}],
'h': True,
'i': False,
'j': None,
'k': {'l': 'm'},
}),
JSONModel.objects.create(field=[1, [2]]),
JSONModel.objects.create(field={
'k': True,
'l': False,
}),
JSONModel.objects.create(field={'foo': 'bar'}),
]
def test_exact(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__exact={}),
[self.objs[6]]
)
def test_exact_complex(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__exact={'a': 'b', 'c': 1}),
[self.objs[7]]
)
def test_isnull(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__isnull=True),
[self.objs[0]]
)
def test_isnull_key(self):
# key__isnull works the same as has_key='key'.
self.assertSequenceEqual(
JSONModel.objects.filter(field__a__isnull=True),
self.objs[:7] + self.objs[9:]
)
self.assertSequenceEqual(
JSONModel.objects.filter(field__a__isnull=False),
[self.objs[7], self.objs[8]]
)
def test_contains(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__contains={'a': 'b'}),
[self.objs[7], self.objs[8]]
)
def test_contained_by(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__contained_by={'a': 'b', 'c': 1, 'h': True}),
[self.objs[6], self.objs[7]]
)
def test_has_key(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_key='a'),
[self.objs[7], self.objs[8]]
)
def test_has_keys(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_keys=['a', 'c', 'h']),
[self.objs[8]]
)
def test_has_any_keys(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_any_keys=['c', 'l']),
[self.objs[7], self.objs[8], self.objs[10]]
)
def test_shallow_list_lookup(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__0=1),
[self.objs[9]]
)
def test_shallow_obj_lookup(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__a='b'),
[self.objs[7], self.objs[8]]
)
def test_deep_lookup_objs(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__k__l='m'),
[self.objs[8]]
)
def test_shallow_lookup_obj_target(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__k={'l': 'm'}),
[self.objs[8]]
)
def test_deep_lookup_array(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__1__0=2),
[self.objs[9]]
)
def test_deep_lookup_mixed(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__d__1__f='g'),
[self.objs[8]]
)
def test_deep_lookup_transform(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__c__gt=1),
[]
)
self.assertSequenceEqual(
JSONModel.objects.filter(field__c__lt=5),
[self.objs[7], self.objs[8]]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
JSONModel.objects.filter(id__in=JSONModel.objects.filter(field__c=1)),
self.objs[7:9]
)
def test_iexact(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iexact='BaR').exists())
self.assertFalse(JSONModel.objects.filter(field__foo__iexact='"BaR"').exists())
def test_icontains(self):
self.assertFalse(JSONModel.objects.filter(field__foo__icontains='"bar"').exists())
def test_startswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__startswith='b').exists())
def test_istartswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__istartswith='B').exists())
def test_endswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__endswith='r').exists())
def test_iendswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iendswith='R').exists())
def test_regex(self):
self.assertTrue(JSONModel.objects.filter(field__foo__regex=r'^bar$').exists())
def test_iregex(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iregex=r'^bAr$').exists())
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLTestCase):
def test_invalid_default(self):<|fim▁hole|> model = MyModel()
self.assertEqual(model.check(), [
checks.Warning(
msg=(
"JSONField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint='Use a callable instead, e.g., use `dict` instead of `{}`.',
obj=MyModel._meta.get_field('field'),
id='postgres.E003',
)
])
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = JSONField(default=dict)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = JSONField(default=None)
model = MyModel()
self.assertEqual(model.check(), [])
class TestSerialization(PostgreSQLTestCase):
test_data = (
'[{"fields": {"field": {"a": "b", "c": null}, "field_custom": null}, '
'"model": "postgres_tests.jsonmodel", "pk": null}]'
)
def test_dumping(self):
instance = JSONModel(field={'a': 'b', 'c': None})
data = serializers.serialize('json', [instance])
self.assertJSONEqual(data, self.test_data)
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, {'a': 'b', 'c': None})
class TestValidation(PostgreSQLTestCase):
def test_not_serializable(self):
field = JSONField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(datetime.timedelta(days=1), None)
self.assertEqual(cm.exception.code, 'invalid')
self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.")
def test_custom_encoder(self):
with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."):
field = JSONField(encoder=DjangoJSONEncoder())
field = JSONField(encoder=DjangoJSONEncoder)
self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1))
class TestFormField(PostgreSQLTestCase):
def test_valid(self):
field = forms.JSONField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {'a': 'b'})
def test_valid_empty(self):
field = forms.JSONField(required=False)
value = field.clean('')
self.assertIsNone(value)
def test_invalid(self):
field = forms.JSONField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('{some badly formed: json}')
self.assertEqual(cm.exception.messages[0], "'{some badly formed: json}' value must be valid JSON.")
def test_formfield(self):
model_field = JSONField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.JSONField)
def test_formfield_disabled(self):
class JsonForm(Form):
name = CharField()
jfield = forms.JSONField(disabled=True)
form = JsonForm({'name': 'xyz', 'jfield': '["bar"]'}, initial={'jfield': ['foo']})
self.assertIn('["foo"]</textarea>', form.as_p())
def test_prepare_value(self):
field = forms.JSONField()
self.assertEqual(field.prepare_value({'a': 'b'}), '{"a": "b"}')
self.assertEqual(field.prepare_value(None), 'null')
self.assertEqual(field.prepare_value('foo'), '"foo"')
def test_redisplay_wrong_input(self):
"""
When displaying a bound form (typically due to invalid input), the form
should not overquote JSONField inputs.
"""
class JsonForm(Form):
name = CharField(max_length=2)
jfield = forms.JSONField()
# JSONField input is fine, name is too long
form = JsonForm({'name': 'xyz', 'jfield': '["foo"]'})
self.assertIn('["foo"]</textarea>', form.as_p())
# This time, the JSONField input is wrong
form = JsonForm({'name': 'xy', 'jfield': '{"foo"}'})
# Appears once in the textarea and once in the error message
self.assertEqual(form.as_p().count(escape('{"foo"}')), 2)
def test_widget(self):
"""The default widget of a JSONField is a Textarea."""
field = forms.JSONField()
self.assertIsInstance(field.widget, widgets.Textarea)
def test_custom_widget_kwarg(self):
"""The widget can be overridden with a kwarg."""
field = forms.JSONField(widget=widgets.Input)
self.assertIsInstance(field.widget, widgets.Input)
def test_custom_widget_attribute(self):
"""The widget can be overridden with an attribute."""
class CustomJSONField(forms.JSONField):
widget = widgets.Input
field = CustomJSONField()
self.assertIsInstance(field.widget, widgets.Input)
def test_already_converted_value(self):
field = forms.JSONField(required=False)
tests = [
'["a", "b", "c"]', '{"a": 1, "b": 2}', '1', '1.5', '"foo"',
'true', 'false', 'null',
]
for json_string in tests:
val = field.clean(json_string)
self.assertEqual(field.clean(val), val)
def test_has_changed(self):
field = forms.JSONField()
self.assertIs(field.has_changed({'a': True}, '{"a": 1}'), True)
self.assertIs(field.has_changed({'a': 1, 'b': 2}, '{"b": 2, "a": 1}'), False)<|fim▁end|> | class MyModel(PostgreSQLModel):
field = JSONField(default={})
|
<|file_name|>AccountRoleDAO.java<|end_file_name|><|fim▁begin|>package org.zanata.dao;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.AutoCreate;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.zanata.model.HAccount;
import org.zanata.model.HAccountRole;
import org.zanata.model.HProject;
@Name("accountRoleDAO")
@AutoCreate
@Scope(ScopeType.STATELESS)
public class AccountRoleDAO extends AbstractDAOImpl<HAccountRole, Integer> {
public AccountRoleDAO() {
super(HAccountRole.class);
}
public AccountRoleDAO(Session session) {
super(HAccountRole.class, session);
}
public boolean roleExists(String role) {
return findByName(role) != null;
}
public HAccountRole findByName(String roleName) {
Criteria cr = getSession().createCriteria(HAccountRole.class);
cr.add(Restrictions.eq("name", roleName));
cr.setCacheable(true).setComment("AccountRoleDAO.findByName");
return (HAccountRole) cr.uniqueResult();
}
public HAccountRole create(String roleName, HAccountRole.RoleType type,
String... includesRoles) {
HAccountRole role = new HAccountRole();
role.setName(roleName);
role.setRoleType(type);
for (String includeRole : includesRoles) {
Set<HAccountRole> groups = role.getGroups();
if (groups == null) {
groups = new HashSet<HAccountRole>();
role.setGroups(groups);<|fim▁hole|> return role;
}
public HAccountRole updateIncludeRoles(String roleName,
String... includesRoles) {
HAccountRole role = findByName(roleName);
for (String includeRole : includesRoles) {
Set<HAccountRole> groups = role.getGroups();
if (groups == null) {
groups = new HashSet<HAccountRole>();
role.setGroups(groups);
}
groups.add(findByName(includeRole));
}
makePersistent(role);
return role;
}
public List<HAccount> listMembers(String roleName) {
HAccountRole role = findByName(roleName);
return listMembers(role);
}
@SuppressWarnings("unchecked")
public List<HAccount> listMembers(HAccountRole role) {
Query query =
getSession()
.createQuery(
"from HAccount account where :role member of account.roles");
query.setParameter("role", role);
query.setComment("AccountRoleDAO.listMembers");
return query.list();
}
public Collection<HAccountRole> getByProject(HProject project) {
return getSession()
.createQuery(
"select p.allowedRoles from HProject p where p = :project")
.setParameter("project", project)
.setComment("AccountRoleDAO.getByProject").list();
}
}<|fim▁end|> | }
groups.add(findByName(includeRole));
}
makePersistent(role); |
<|file_name|>new.go<|end_file_name|><|fim▁begin|>package groups
import (
"errors"
"fmt"
"io"
"github.com/spf13/cobra"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
kprinters "k8s.io/kubernetes/pkg/printers"
"github.com/openshift/origin/pkg/client"
"github.com/openshift/origin/pkg/cmd/templates"
"github.com/openshift/origin/pkg/cmd/util/clientcmd"
userapi "github.com/openshift/origin/pkg/user/apis/user"
)
const NewGroupRecommendedName = "new"
var (
newLong = templates.LongDesc(`
Create a new group.
This command will create a new group with an optional list of users.`)
newExample = templates.Examples(`
# Add a group with no users
%[1]s my-group
# Add a group with two users
%[1]s my-group user1 user2
# Add a group with one user and shorter output
%[1]s my-group user1 -o name`)
)
type NewGroupOptions struct {
GroupClient client.GroupInterface
Group string
Users []string
Out io.Writer
Printer kprinters.ResourcePrinterFunc
}
func NewCmdNewGroup(name, fullName string, f *clientcmd.Factory, out io.Writer) *cobra.Command {
options := &NewGroupOptions{Out: out}
cmd := &cobra.Command{
Use: name + " GROUP [USER ...]",
Short: "Create a new group",
Long: newLong,
Example: fmt.Sprintf(newExample, fullName),
Run: func(cmd *cobra.Command, args []string) {
if err := options.Complete(f, cmd, args); err != nil {
kcmdutil.CheckErr(kcmdutil.UsageError(cmd, err.Error()))
}
kcmdutil.CheckErr(options.Validate())
kcmdutil.CheckErr(options.AddGroup())
},
}
kcmdutil.AddPrinterFlags(cmd)
return cmd
}
func (o *NewGroupOptions) Complete(f *clientcmd.Factory, cmd *cobra.Command, args []string) error {
if len(args) < 1 {
return errors.New("You must specify at least one argument: GROUP [USER ...]")
}
o.Group = args[0]
if len(args) > 1 {
o.Users = append(o.Users, args[1:]...)
}
osClient, _, err := f.Clients()
if err != nil {
return err
}
o.GroupClient = osClient.Groups()
printer, err := f.PrinterForCommand(cmd, true, nil, kprinters.PrintOptions{})
if err != nil {
return err
}
if printer != nil {
o.Printer = printer.PrintObj
} else {
o.Printer = func(obj runtime.Object, out io.Writer) error {
mapper, _ := f.Object()
return f.PrintObject(cmd, true, mapper, obj, out)
}
}
return nil
}
func (o *NewGroupOptions) Validate() error {
if len(o.Group) == 0 {<|fim▁hole|> }
if o.GroupClient == nil {
return fmt.Errorf("GroupClient is required")
}
if o.Out == nil {
return fmt.Errorf("Out is required")
}
if o.Printer == nil {
return fmt.Errorf("Printer is required")
}
return nil
}
func (o *NewGroupOptions) AddGroup() error {
group := &userapi.Group{}
group.Name = o.Group
usedNames := sets.String{}
for _, user := range o.Users {
if usedNames.Has(user) {
continue
}
usedNames.Insert(user)
group.Users = append(group.Users, user)
}
actualGroup, err := o.GroupClient.Create(group)
if err != nil {
return err
}
return o.Printer(actualGroup, o.Out)
}<|fim▁end|> | return fmt.Errorf("Group is required") |
<|file_name|>wiredep.js<|end_file_name|><|fim▁begin|><|fim▁hole|> return {
server: {
src: ['<%= tmp %>/index.html'],
ignorePath: /\.\.\//
}
}
};<|fim▁end|> | module.exports = function() { |
<|file_name|>webpack.config.development.js<|end_file_name|><|fim▁begin|>/* eslint max-len: 0 */
import webpack from 'webpack';
import merge from 'webpack-merge';
import baseConfig from './webpack.config.base';
const port = process.env.PORT || 3000;
export default merge(baseConfig, {
debug: true,
devtool: 'cheap-module-eval-source-map',
entry: [
`webpack-hot-middleware/client?path=http://localhost:${port}/__webpack_hmr`,
'./app/index'
],
output: {
publicPath: `http://localhost:${port}/dist/`
},
module: {
loaders: [
{
test: /\.global\.css$/,
loaders: [
'style-loader',
'css-loader?sourceMap'
]
},
{
test: /^((?!\.global).)*\.css$/,
loaders: [
'style-loader',
'css-loader?modules&sourceMap&importLoaders=1&localIdentName=[name]__[local]___[hash:base64:5]'
]
}
]
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify('development')
})
],
<|fim▁hole|><|fim▁end|> | target: 'electron-renderer'
}); |
<|file_name|>Navigation.js<|end_file_name|><|fim▁begin|>import React from 'react';
import withStyles from 'isomorphic-style-loader/lib/withStyles';
import s from './Navigation.css';
import Link from '../Link';
class Navigation extends React.Component {
render() {
return (<|fim▁hole|> <Link className={s.link} to="/catalog">Каталог продукції</Link>
<Link className={s.link} to="/about">Про нас</Link>
<Link className={s.link} to="/catalog">Наші роботи</Link>
</div>
);
}
}
export default withStyles(s)(Navigation);<|fim▁end|> | <div className={s.root} role="navigation"> |
<|file_name|>bug107.go<|end_file_name|><|fim▁begin|>// errchk $G $D/$F.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import os "os"
type _ os.FileInfo
func f() (os int) {
// In the next line "os" should refer to the result variable, not
// to the package.
v := os.Open("", 0, 0); // ERROR "undefined"
return 0<|fim▁hole|><|fim▁end|> | } |
<|file_name|>alerts.ts<|end_file_name|><|fim▁begin|>'use strict';
import {Injectable, Inject} from '@angular/core';
import { Platform, Events} from 'ionic-angular';
import {GeofenceProvider} from './geofence';
import { Storage } from '@ionic/storage';
@Injectable()
export class AlertsProvider {
private geofenceProvider: GeofenceProvider;
private ALERTS_DATA: string;
constructor(
@Inject(Events) public events: Events,
//@Inject(Platform) public platform: Platform,
geofenceProvider: GeofenceProvider,
public storage: Storage<|fim▁hole|> this.events = events;
this.geofenceProvider = geofenceProvider;
//this.platform = platform;
this.ALERTS_DATA = 'alertdate';
this.listenToGeofenceEvents();
}
listenToGeofenceEvents() {
this.events.subscribe('geofence:vehicleout', () => {
this.geofenceProvider.getGeofenceData().then((geofence) => {
geofence = JSON.parse(geofence);
if (geofence.length > 0) {
this.setAlertsData(geofence);
}
});
});
}
setAlertsData(data) {
/*var alerts = [
{sensorName: 'Sensor 1', temperature: '100', pressure: '80'},
{sensorName: 'Sensor 2', temperature: '101', pressure: '82'},
{sensorName: 'Sensor 3', temperature: '102', pressure: '83'}
];
*/
/*
if (alerts.length > 0) {
if(this.ALERTS_DATA == '' || this.ALERTS_DATA == 'alertsdata') {
this.ALERTS_DATA = JSON.stringify(alerts);
this.storage.set(this.ALERTS_DATA, JSON.stringify(alerts));
} else {
let alerts_data = JSON.parse(this.alerts);
this.ALERTS_DATA = JSON.stringify(alerts_data.concat(alerts));
this.storage.set(this.ALERTS_DATA, JSON.stringify(alerts_data.concat(alerts)));
}
}
*/
this.events.publish('alerts:refresh');
}
getAlertsData() {
return this.storage.get(this.ALERTS_DATA).then((value) => {
return value;
});
}
}<|fim▁end|> | ) { |
<|file_name|>compiler_facade_interface.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* A set of interfaces which are shared between `@angular/core` and `@angular/compiler` to allow
* for late binding of `@angular/compiler` for JIT purposes.
*
* This file has two copies. Please ensure that they are in sync:
* - packages/compiler/src/compiler_facade_interface.ts (master)
* - packages/core/src/render3/jit/compiler_facade_interface.ts (copy)
*
* Please ensure that the two files are in sync using this command:
* ```
* cp packages/compiler/src/compiler_facade_interface.ts \
* packages/core/src/render3/jit/compiler_facade_interface.ts
* ```
*/
export interface ExportedCompilerFacade { ɵcompilerFacade: CompilerFacade; }
export interface CompilerFacade {
compilePipe(angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3PipeMetadataFacade):
any;
compileInjectable(
angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3InjectableMetadataFacade): any;
compileInjector(
angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3InjectorMetadataFacade): any;
compileNgModule(
angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3NgModuleMetadataFacade): any;
compileDirective(
angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3DirectiveMetadataFacade): any;
compileComponent(
angularCoreEnv: CoreEnvironment, sourceMapUrl: string, meta: R3ComponentMetadataFacade): any;
R3ResolvedDependencyType: typeof R3ResolvedDependencyType;
}
export interface CoreEnvironment { [name: string]: Function; }
export type StringMap = {
[key: string]: string;
};
export type StringMapWithRename = {
[key: string]: string | [string, string];
};
export type Provider = any;
export enum R3ResolvedDependencyType {
Token = 0,
Attribute = 1,
}
export interface R3DependencyMetadataFacade {
token: any;
resolved: R3ResolvedDependencyType;
host: boolean;
optional: boolean;
self: boolean;
skipSelf: boolean;
}
export interface R3PipeMetadataFacade {
name: string;
type: any;
pipeName: string;
deps: R3DependencyMetadataFacade[]|null;
pure: boolean;
}
export interface R3InjectableMetadataFacade {
name: string;
type: any;
typeArgumentCount: number;
ctorDeps: R3DependencyMetadataFacade[]|null;
providedIn: any;
useClass?: any;
useFactory?: any;
useExisting?: any;
useValue?: any;
userDeps?: R3DependencyMetadataFacade[];
}
export interface R3NgModuleMetadataFacade {
type: any;
bootstrap: Function[];
declarations: Function[];
imports: Function[];
exports: Function[];
emitInline: boolean;
}
export interface R3InjectorMetadataFacade {
name: string;
type: any;
deps: R3DependencyMetadataFacade[]|null;
providers: any;
imports: any;
}
export interface R3DirectiveMetadataFacade {
name: string;
type: any;
typeArgumentCount: number;
typeSourceSpan: null;
deps: R3DependencyMetadataFacade[]|null;
selector: string|null;
queries: R3QueryMetadataFacade[];
host: {[key: string]: string};
propMetadata: {[key: string]: any[]};
lifecycle: {usesOnChanges: boolean;};
inputs: string[];
outputs: string[];
usesInheritance: boolean;
exportAs: string[]|null;
providers: Provider[]|null;
}
export interface R3ComponentMetadataFacade extends R3DirectiveMetadataFacade {
template: string;
preserveWhitespaces: boolean;
animations: any[]|undefined;
viewQueries: R3QueryMetadataFacade[];
pipes: Map<string, any>;
directives: {selector: string, expression: any}[];
styles: string[];
encapsulation: ViewEncapsulation;
viewProviders: Provider[]|null;
interpolation?: [string, string];
changeDetection?: ChangeDetectionStrategy;
}
<|fim▁hole|>
export interface R3QueryMetadataFacade {
propertyName: string;
first: boolean;
predicate: any|string[];
descendants: boolean;
read: any|null;
}<|fim▁end|> | export type ViewEncapsulation = number;
export type ChangeDetectionStrategy = number; |
<|file_name|>type_sizes_piechart.py<|end_file_name|><|fim▁begin|>from collections import Counter<|fim▁hole|>import matplotlib.pyplot as plt
from arcapix.fs.gpfs import ListProcessingRule, ManagementPolicy
def type_sizes(file_list):
c = Counter()
for f in file_list:
c.update({splitext(f.name)[1]: f.filesize})  # key totals by file extension
return c
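# Register type_sizes as a list-processing rule, run the policy over the
# 'mmfs1' filesystem, and chart the per-extension totals named 'types'.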
p = ManagementPolicy()
r = p.rules.new(ListProcessingRule, 'types', type_sizes)
result = p.run('mmfs1')['types']
plt.pie(list(result.values()), labels=list(result.keys()), autopct='%1.1f%%')
plt.axis('equal')
plt.show()<|fim▁end|> | from os.path import splitext
|
<|file_name|>urls.js<|end_file_name|><|fim▁begin|>module.exports.twitter = function twitter(username) {
// Creates the canonical twitter URL without the '@'
return 'https://twitter.com/' + username.replace(/^@/, '');
};
module.exports.facebook = function facebook(username) {
// Handles a starting slash, this shouldn't happen, but just in case<|fim▁hole|><|fim▁end|> | return 'https://www.facebook.com/' + username.replace(/^\//, '');
}; |
<|file_name|>test-gamestats.js<|end_file_name|><|fim▁begin|>var assert = require('should');
var GameStats = require('../static/js/gamestats.js');
describe('gamestats', function(){
describe('#accuracy', function(){
it('should be updated after keypress', function () {
var gm = new GameStats();
gm.keypress();
gm.keypress();
gm.accuracy.should.equal(1);
gm.accuracy.should.not.equal(0);
});
it('accuracy should be updated after keybackspacepress', function () {
var gm = new GameStats();
gm.keypress();
gm.backspacepress();
gm.accuracy.should.equal(0.5);
});
it('accuracy calculations', function () {
var gm = new GameStats();<|fim▁hole|> });
});
});<|fim▁end|> | gm.keypress();
gm.backspacepress();
gm.backspacepress();
gm.accuracy.should.equal(1/3); |
<|file_name|>Utils.py<|end_file_name|><|fim▁begin|>#
# Utility functions
#
import sys
from functools import partial
from uuid import UUID
from hashlib import sha1
from os import path, listdir
from zipfile import ZipFile
from subprocess import Popen, TimeoutExpired
import nacl.utils
import nacl.secret
def isValidUUID(uid):
"""
Validate UUID
@param uid: UUID value to be verified, can be bytes or str
@return: True if UUID valid, else False
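Illustrative example (hypothetical value):
>>> isValidUUID('f47ac10b-58cc-4372-a567-0e02b2c3d479')
True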
"""
try:
# attempt conversion from bytes to str
uid = uid.decode('ascii')
except AttributeError:
# already a str object, nothing to decode
pass
except UnicodeDecodeError:
# uid contains non-ascii characters, invalid UUID
return False
try:
out = UUID(uid, version=4)
except ValueError:
return False
# check that the canonical form equals the original value, since the UUID class is lenient about its input
return str(out) == uid
def encrypt(safe, *args):
"""
Encrypt all provided data
@param safe: encryption class
@param args: data to be encrypted
@return: encryption output iterable
"""
return (safe.encrypt(a, nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)) for a in args)
def sha1sum(filePath, blocksize=1024):
"""
Calculate SHA1 hash of file
@param filePath: Path to hashable file
@param blocksize: Amount of bytes to read into memory before hashing
@return: SHA1 hash value (bytes)
"""
with open(filePath, mode='rb') as f:
out = sha1()
for buf in iter(partial(f.read, blocksize), b''):
out.update(buf)
return bytes(out.hexdigest(), encoding='ascii')
def checkCerts():
"""
Checks to see if required TLS certificates exist in Resources directory. Attempts to generate certificates if not found
@returns: Boolean value based on success
"""
resDir = absolutePath('Resources')
command = None
success = False
# check to see if required certificates exist
if not all(True if path.isfile(path.join(resDir, cert)) else False for cert in ('server.crt', 'server.key.orig')):
############
# Check OS
############
if sys.platform in ('linux', 'darwin'):
# bash script run
command = 'sh {}'.format('create_certs_linux.sh')
elif sys.platform == 'win32':
hasOpenSSL = False
# check for openssl requirement (downloaded during installer run)
files = sorted((path.isdir(path.join(resDir, f)), f) for f in listdir(resDir) if f.lower().startswith('openssl-'))
# check for expanded directory and executable
for isDir, ofile in files:
if isDir and path.isfile(path.join(resDir, ofile, 'openssl.exe')):
hasOpenSSL = True
newDir = ofile
break
if not hasOpenSSL and files:
# sorted filenames to list newest version first
for ofile in sorted(f for isDir, f in files if not isDir and path.splitext(f)[1] == '.zip'):
# extract archive
with ZipFile(path.join(resDir, ofile), 'r') as ozip:
newDir = path.join(resDir, path.splitext(ofile)[0])
ozip.extractall(path=newDir)
# verify openssl.exe exists in directory
if path.isfile(path.join(newDir, 'openssl.exe')):
hasOpenSSL = True
break
if hasOpenSSL:
# write openssl directory to config file
with open(path.join(resDir, 'openssl.cfg'), 'w') as config:
config.writelines([newDir])
# windows bat command file
command = r'cmd /c {}'.format('create_certs_windows.bat')
if command:
proc = Popen([command], cwd=resDir, shell=True)
try:
proc.wait(180)
except TimeoutExpired:
proc.kill()
# check command has generated correct files
if all(True if path.isfile(path.join(resDir, cert)) else False for cert in ('server.crt', 'server.key.orig')):
success = True
else:
success = True
return success
def absolutePath(pathname):
"""
Return the absolute path of the given file or directory
@return: absolute path
"""
if getattr(sys, 'frozen', False):
# Frozen application denotes packaged application, modules are moved into a zip
datadir = path.dirname(sys.executable)
else:
# Source based installation, use parent directory of this module's directory
datadir = path.join(path.dirname(__file__), path.pardir)
<|fim▁hole|><|fim▁end|> | return path.abspath(path.join(datadir, pathname)) |
<|file_name|>source_trick_reciever.py<|end_file_name|><|fim▁begin|>import sys
import os
import pysos
import signal
# these two variables should be changed depending on the test driver's PID
# and the type of message it will be sending. If you are using generic_test.c,
# then these two values can likely stay the same
TEST_MODULE = 0x81
MSG_TEST_DATA= 33
ALARM_LEN = 60
START_DATA = 100
FINAL_DATA = 200
TEST_FAIL = 155
TEST_PASS = 255
# variables holding new and old sensor values<|fim▁hole|>
# this can be replaced with whatever you want since this is specific to
# what the test driver expects for data
oldstate = {}
state = {}
# a signal handler that will go off for an alarm
# it is highly suggested that you use this since it is the easiest way to test if your
# node has entered panic mode via the script
def panic_handler(signum, frame):
print "it is highly likely that your node has entered panic mode"
print "please reset the node"
sys.exit(1)
# message handler for messages of type MSG_TEST_DATA
def generic_test(msg):
""" Small example of test driver usage. It simulates a virtual
dice and shows which side of the dice is up.
"""
global oldstate
global state
print "message recieved"
signal.alarm(ALARM_LEN)
# unpack the values we are expecting, in this case a node id, the node state,
# and a value from the sensor
(node_id, node_state, data) = pysos.unpack("<BBB", msg['data'])
if node_id not in state.keys():
state[node_id] = 0
oldstate[node_id] = 0
# these are some simple calculations to test the sensor value we have gotten
# this is the part which you need to fill in in order to verify that the function is working
if (node_state == START_DATA):
print "initialization began correctly"
if (node_state == 0):
state[node_id] = data
if (node_state == TEST_FAIL):
print >> sys.stderr, "the test for item %d has failed" %data
if (node_state == TEST_PASS):
print "the test for item %d has passed" %data
if (node_state == 1 and state[node_id] != data):
print >> sys.stderr, " a message was lost somewhere on node %d before count %d" %(node_id,data)
if (node_state == FINAL_DATA):
print "finalization worked correctly"
if __name__ == "__main__":
# here we set up a connection to sossrv using the pysos module
# and begin listening for messages
# we also register our function above with the server so that it is called
# when the appropriate message type is received
srv = pysos.sossrv()
srv.register_trigger(generic_test, sid=TEST_MODULE, type=MSG_TEST_DATA)
# register the signal handler and begin an alarm that will wait for 60 seconds before going off
# other times for the alarm might be good, use your own judgement based on your test
signal.signal(signal.SIGALRM, panic_handler)
signal.alarm(ALARM_LEN)
# we do this since the test_suite application has information regarding the amount of time
# each test should be run. after the amount of time specified in test.lst, test_suite will
# end this script and move to another test
while(1):
continue<|fim▁end|> | |
<|file_name|>version.py<|end_file_name|><|fim▁begin|>"""
The latest version of this package is available at:
<http://github.com/jantman/webhook2lambda2sqs>
################################################################################
Copyright 2016 Jason Antman <[email protected]> <http://www.jasonantman.com>
This file is part of webhook2lambda2sqs, also known as webhook2lambda2sqs.
webhook2lambda2sqs is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
webhook2lambda2sqs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.<|fim▁hole|>
You should have received a copy of the GNU Affero General Public License
along with webhook2lambda2sqs. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
################################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/webhook2lambda2sqs> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
################################################################################
AUTHORS:
Jason Antman <[email protected]> <http://www.jasonantman.com>
################################################################################
"""
VERSION = '0.2.0'
PROJECT_URL = 'https://github.com/jantman/webhook2lambda2sqs'<|fim▁end|> | |
<|file_name|>0004_emailmarketingconfiguration_welcome_email_send_delay.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models<|fim▁hole|>
dependencies = [
('email_marketing', '0003_auto_20160715_1145'),
]
operations = [
migrations.AddField(
model_name='emailmarketingconfiguration',
name='welcome_email_send_delay',
field=models.IntegerField(default=600, help_text='Number of seconds to delay the sending of User Welcome email after user has been activated'),
),
]<|fim▁end|> |
class Migration(migrations.Migration): |
<|file_name|>macros.go<|end_file_name|><|fim▁begin|>package main
import (
"encoding/xml"
"errors"
"os"
"os/exec"
"strings"
)
type KmItem struct {
Keys []string `xml:"key"`
Values []string `xml:",any"`
}
type KmCategory struct {
Keys []string `xml:"key"`
Values []string `xml:"string"`
Items []KmItem `xml:"array>dict"`
}
type KmCategories struct {
Categories []KmCategory `xml:"array>dict"`
}
type KmMacro struct {
UID string
Name string
Category string
Hotkey string
}
func getKmMacros() (map[string]KmMacro, error) {
	// Allow the command for fetching macros to be overridden, so the function can be unit-tested
getAllMacrosCommand := os.Getenv("GET_ALL_KM_MACROS_COMMAND")
if getAllMacrosCommand == "" {
getAllMacrosCommand = "osascript ./get_all_km_macros.scpt"
}
categoriesWithAllMacros, err := getKmCategories(getAllMacrosCommand)
if err != nil {
return nil, err
}
getHotkeyMacrosCommand := os.Getenv("GET_HOTKEY_KM_MACROS_COMMAND")
if getHotkeyMacrosCommand == "" {
getHotkeyMacrosCommand = "osascript ./get_hotkey_km_macros.scpt"
}
categoriesWithHotKeyMacros, err := getKmCategories(getHotkeyMacrosCommand)
if err != nil {
return nil, err
}
macros := make(map[string]KmMacro)
var uid string
for _, category := range categoriesWithAllMacros.Categories {
for _, item := range category.Items {
uid = item.getValueByKey("uid")
macros[uid] = KmMacro{
UID: uid,<|fim▁hole|> Hotkey: "",
}
}
}
for _, category := range categoriesWithHotKeyMacros.Categories {
for _, item := range category.Items {
uid = item.getValueByKey("uid")
macro, isExists := macros[uid]
			if isExists {
macro.Hotkey = item.getValueByKey("key")
// TODO Use pointer instead?
macros[uid] = macro
}
}
}
return macros, nil
}
func getKmCategories(command string) (KmCategories, error) {
out, err := exec.Command("sh", "-c", command).Output()
var categories KmCategories
if err != nil {
return categories, errors.New("Unable to get macros from Keyboard Maestro")
}
if !strings.Contains(string(out), "<?xml") {
return categories, errors.New(string(out))
}
err = xml.Unmarshal(out, &categories)
if err != nil {
return categories, errors.New("Unable to get macros from Keyboard Maestro")
}
return categories, nil
}
func (item KmItem) getValueByKey(requestedKey string) string {
for i, key := range item.Keys {
if key == requestedKey {
return item.Values[i]
}
}
return ""
}
// TODO Find out how to use the same func for both KmItem and KmCategory
func (item KmCategory) getValueByKey(requestedKey string) string {
for i, key := range item.Keys {
if key == requestedKey {
return item.Values[i]
}
}
return ""
}<|fim▁end|> | Name: item.getValueByKey("name"),
Category: category.getValueByKey("name"), |
<|file_name|>notebook.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from ..core.templates import DOC_NB_JS
from ..core.json_encoder import serialize_json
from ..model import Model
from ..util.string import encode_utf8
from .elements import div_for_render_item
from .util import FromCurdoc, OutputDocumentFor, standalone_docs_json_and_render_items
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
    'notebook_content',
)
<|fim▁hole|>#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
def notebook_content(model, notebook_comms_target=None, theme=FromCurdoc):
''' Return script and div that will display a Bokeh plot in a Jupyter
Notebook.
The data for the plot is stored directly in the returned HTML.
Args:
model (Model) : Bokeh object to render
notebook_comms_target (str, optional) :
A target name for a Jupyter Comms object that can update
the document that is rendered to this notebook div
theme (Theme, optional) :
Defaults to the ``Theme`` instance in the current document.
Setting this to ``None`` uses the default theme or the theme
already specified in the document. Any other value must be an
instance of the ``Theme`` class.
Returns:
script, div, Document
.. note::
Assumes :func:`~bokeh.io.notebook.load_notebook` or the equivalent
has already been executed.
'''
if not isinstance(model, Model):
raise ValueError("notebook_content expects a single Model instance")
# Comms handling relies on the fact that the new_doc returned here
# has models with the same IDs as they were started with
with OutputDocumentFor([model], apply_theme=theme, always_new=True) as new_doc:
(docs_json, [render_item]) = standalone_docs_json_and_render_items([model])
div = div_for_render_item(render_item)
render_item = render_item.to_json()
if notebook_comms_target:
render_item["notebook_comms_target"] = notebook_comms_target
script = DOC_NB_JS.render(
docs_json=serialize_json(docs_json),
render_items=serialize_json([render_item]),
)
return encode_utf8(script), encode_utf8(div), new_doc
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------<|fim▁end|> | |
<|file_name|>rotate_point.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate noise;
use noise::{Cylinders, RotatePoint};<|fim▁hole|> let cylinders = Cylinders::new();
let rotate_point = RotatePoint::new(cylinders).set_x_angle(60.0);
debug::render_noise_module3("rotate_point.png", &rotate_point, 1024, 1024, 50);
}<|fim▁end|> |
mod debug;
fn main() { |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" High-level objects for fields. """
from collections import OrderedDict
from datetime import date, datetime
from functools import partial
from operator import attrgetter
from types import NoneType
import logging
import pytz
import xmlrpclib
from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
EMPTY_DICT = frozendict()
_logger = logging.getLogger(__name__)
class SpecialValue(object):
""" Encapsulates a value in the cache in place of a normal value. """
def __init__(self, value):
self.value = value
def get(self):
return self.value
class FailedValue(SpecialValue):
""" Special value that encapsulates an exception instead of a value. """
def __init__(self, exception):
self.exception = exception
def get(self):
raise self.exception
def _check_value(value):
""" Return ``value``, or call its getter if ``value`` is a :class:`SpecialValue`. """
return value.get() if isinstance(value, SpecialValue) else value
def resolve_all_mro(cls, name, reverse=False):
""" Return the (successively overridden) values of attribute ``name`` in ``cls``
in mro order, or inverse mro order if ``reverse`` is true.
"""
klasses = reversed(cls.__mro__) if reverse else cls.__mro__
for klass in klasses:
if name in klass.__dict__:
yield klass.__dict__[name]
class MetaField(type):
""" Metaclass for field classes. """
by_type = {}
def __new__(meta, name, bases, attrs):
""" Combine the ``_slots`` dict from parent classes, and determine
``__slots__`` for them on the new class.
"""
base_slots = {}
for base in reversed(bases):
base_slots.update(getattr(base, '_slots', ()))
slots = dict(base_slots)
slots.update(attrs.get('_slots', ()))
attrs['__slots__'] = set(slots) - set(base_slots)
attrs['_slots'] = slots
return type.__new__(meta, name, bases, attrs)
def __init__(cls, name, bases, attrs):
super(MetaField, cls).__init__(name, bases, attrs)
if cls.type and cls.type not in MetaField.by_type:
MetaField.by_type[cls.type] = cls
# compute class attributes to avoid calling dir() on fields
cls.column_attrs = []
cls.related_attrs = []
cls.description_attrs = []
for attr in dir(cls):
if attr.startswith('_column_'):
cls.column_attrs.append((attr[8:], attr))
elif attr.startswith('_related_'):
cls.related_attrs.append((attr[9:], attr))
elif attr.startswith('_description_'):
cls.description_attrs.append((attr[13:], attr))
class Field(object):
""" The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
attributes may be provided when instanciating a field:
:param string: the label of the field seen by users (string); if not
set, the ORM takes the field name in the class (capitalized).
:param help: the tooltip of the field seen by users (string)
:param readonly: whether the field is readonly (boolean, by default ``False``)
:param required: whether the value of the field is required (boolean, by
default ``False``)
:param index: whether the field is indexed in database (boolean, by
default ``False``)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value
:param states: a dictionary mapping state values to lists of UI attribute-value
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
Note: Any state-based condition requires the ``state`` field value to be
available on the client-side UI. This is typically done by including it in
the relevant views, possibly made invisible if not relevant for the
end-user.
:param groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
        :param string oldname: the previous name of this field, so that the ORM can
            rename it automatically during migration
.. _field-computed:
.. rubric:: Computed fields
One can define a field whose value is computed instead of simply being
read from the database. The attributes that are specific to computed
fields are given below. To define such a field, simply provide a value
for the attribute ``compute``.
:param compute: name of a method that computes the field
:param inverse: name of a method that inverses the field (optional)
:param search: name of a method that implement search on the field (optional)
:param store: whether the field is stored in database (boolean, by
default ``False`` on computed fields)
:param compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (boolean, by default ``False``)
The methods given for ``compute``, ``inverse`` and ``search`` are model
methods. Their signature is shown in the following example::
upper = fields.Char(compute='_compute_upper',
inverse='_inverse_upper',
search='_search_upper')
@api.depends('name')
def _compute_upper(self):
for rec in self:
rec.upper = rec.name.upper() if rec.name else False
def _inverse_upper(self):
for rec in self:
rec.name = rec.upper.lower() if rec.upper else False
def _search_upper(self, operator, value):
if operator == 'like':
operator = 'ilike'
return [('name', operator, value)]
The compute method has to assign the field on all records of the invoked
recordset. The decorator :meth:`openerp.api.depends` must be applied on
the compute method to specify the field dependencies; those dependencies
are used to determine when to recompute the field; recomputation is
automatic and guarantees cache/database consistency. Note that the same
method can be used for several fields, you simply have to assign all the
given fields in the method; the method will be invoked once for all
those fields.
By default, a computed field is not stored to the database, and is
computed on-the-fly. Adding the attribute ``store=True`` will store the
field's values in the database. The advantage of a stored field is that
searching on that field is done by the database itself. The disadvantage
is that it requires database updates when the field must be recomputed.
The inverse method, as its name says, does the inverse of the compute
method: the invoked records have a value for the field, and you must
apply the necessary changes on the field dependencies such that the
computation gives the expected value. Note that a computed field without
an inverse method is readonly by default.
The search method is invoked when processing domains before doing an
actual search on the model. It must return a domain equivalent to the
condition: ``field operator value``.
.. _field-related:
.. rubric:: Related fields
The value of a related field is given by following a sequence of
relational fields and reading a field on the reached model. The complete
sequence of fields to traverse is specified by the attribute
:param related: sequence of field names
Some field attributes are automatically copied from the source field if
they are not redefined: ``string``, ``help``, ``readonly``, ``required`` (only
if all fields in the sequence are required), ``groups``, ``digits``, ``size``,
``translate``, ``sanitize``, ``selection``, ``comodel_name``, ``domain``,
``context``. All semantic-free attributes are copied from the source
field.
By default, the values of related fields are not stored to the database.
Add the attribute ``store=True`` to make it stored, just like computed
fields. Related fields are automatically recomputed when their
dependencies are modified.
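
        As an illustrative sketch (the model and field names here are
        hypothetical), a related field might be declared as::

            partner_id = fields.Many2one('res.partner')
            partner_name = fields.Char(related='partner_id.name', store=True)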
.. _field-company-dependent:
.. rubric:: Company-dependent fields
Formerly known as 'property' fields, the value of those fields depends
on the company. In other words, users that belong to different companies
may see different values for the field on a given record.
:param company_dependent: whether the field is company-dependent (boolean)
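
        As an illustrative sketch (the field name is hypothetical)::

            standard_price = fields.Float(company_dependent=True)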
.. _field-incremental-definition:
.. rubric:: Incremental definition
A field is defined as class attribute on a model class. If the model
is extended (see :class:`~openerp.models.Model`), one can also extend
the field definition by redefining a field with the same name and same
type on the subclass. In that case, the attributes of the field are
taken from the parent class and overridden by the ones given in
subclasses.
For instance, the second class below only adds a tooltip on the field
``state``::
class First(models.Model):
_name = 'foo'
state = fields.Selection([...], required=True)
class Second(models.Model):
_inherit = 'foo'
state = fields.Selection(help="Blah blah blah")
"""
__metaclass__ = MetaField
type = None # type of the field (string)
relational = False # whether the field is a relational one
_slots = {
'_attrs': EMPTY_DICT, # dictionary of field attributes; it contains:
# - all attributes after __init__()
# - free attributes only after set_class_name()
'automatic': False, # whether the field is automatically created ("magic" field)
'inherited': False, # whether the field is inherited (_inherits)
'column': None, # the column corresponding to the field
'setup_done': False, # whether the field has been set up
'name': None, # name of the field
'model_name': None, # name of the model of this field
'comodel_name': None, # name of the model of values (if relational)
'store': True, # whether the field is stored in database
'index': False, # whether the field is indexed in database
'manual': False, # whether the field is a custom field
'copy': True, # whether the field is copied over by BaseModel.copy()
'depends': (), # collection of field dependencies
'recursive': False, # whether self depends on itself
'compute': None, # compute(recs) computes field on recs
'compute_sudo': False, # whether field should be recomputed as admin
'inverse': None, # inverse(recs) inverses field on recs
'search': None, # search(recs, operator, value) searches on self
'related': None, # sequence of field names, for related fields
'related_sudo': True, # whether related fields should be read as admin
'company_dependent': False, # whether ``self`` is company-dependent (property field)
'default': None, # default(recs) returns the default value
'string': None, # field label
'help': None, # field tooltip
'readonly': False, # whether the field is readonly
'required': False, # whether the field is required
'states': None, # set readonly and required depending on state
'groups': None, # csv list of group xml ids
'change_default': False, # whether the field may trigger a "user-onchange"
'deprecated': None, # whether the field is deprecated
'inverse_fields': (), # collection of inverse fields (objects)
'computed_fields': (), # fields computed with the same method as self
'related_field': None, # corresponding related field
'_triggers': (), # invalidation and recomputation triggers
}
def __init__(self, string=None, **kwargs):
kwargs['string'] = string
attrs = {key: val for key, val in kwargs.iteritems() if val is not None}
self._attrs = attrs or EMPTY_DICT
def __getattr__(self, name):
""" Access non-slot field attribute. """
try:
return self._attrs[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
""" Set slot or non-slot field attribute. """
try:
object.__setattr__(self, name, value)
except AttributeError:
if self._attrs:
self._attrs[name] = value
else:
self._attrs = {name: value} # replace EMPTY_DICT
def __delattr__(self, name):
""" Remove non-slot field attribute. """
try:
del self._attrs[name]
except KeyError:
raise AttributeError(name)
def new(self, **kwargs):
""" Return a field of the same type as ``self``, with its own parameters. """
return type(self)(**kwargs)
def set_class_name(self, cls, name):
""" Assign the model class and field name of ``self``. """
self_attrs = self._attrs
for attr, value in self._slots.iteritems():
setattr(self, attr, value)
self.model_name = cls._name
self.name = name
# determine all inherited field attributes
attrs = {}
for field in resolve_all_mro(cls, name, reverse=True):
if isinstance(field, type(self)):
attrs.update(field._attrs)
else:
attrs.clear()
attrs.update(self_attrs) # necessary in case self is not in cls
# initialize ``self`` with ``attrs``
if attrs.get('compute'):
# by default, computed fields are not stored, not copied and readonly
attrs['store'] = attrs.get('store', False)
attrs['copy'] = attrs.get('copy', False)
attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
if attrs.get('related'):
# by default, related fields are not stored and not copied
attrs['store'] = attrs.get('store', False)
attrs['copy'] = attrs.get('copy', False)
# fix for function fields overridden by regular columns
if not isinstance(attrs.get('column'), (NoneType, fields.function)):
attrs.pop('store', None)
for attr, value in attrs.iteritems():
setattr(self, attr, value)
if not self.string and not self.related:
# related fields get their string from their parent field
self.string = name.replace('_', ' ').capitalize()
# determine self.default and cls._defaults in a consistent way
self._determine_default(cls, name)
def _determine_default(self, cls, name):
""" Retrieve the default value for ``self`` in the hierarchy of ``cls``, and
determine ``self.default`` and ``cls._defaults`` accordingly.
"""
self.default = None
# traverse the class hierarchy upwards, and take the first field
# definition with a default or _defaults for self
for klass in cls.__mro__:
if name in klass.__dict__:
field = klass.__dict__[name]
if not isinstance(field, type(self)):
# klass contains another value overridden by self
return
if 'default' in field._attrs:
# take the default in field, and adapt it for cls._defaults
value = field._attrs['default']
if callable(value):
from openerp import api
self.default = value
cls._defaults[name] = api.model(
lambda recs: self.convert_to_write(value(recs))
)
else:
self.default = lambda recs: value
cls._defaults[name] = value
return
defaults = klass.__dict__.get('_defaults') or {}
if name in defaults:
# take the value from _defaults, and adapt it for self.default
value = defaults[name]
if callable(value):
func = lambda recs: value(recs._model, recs._cr, recs._uid, recs._context)
else:
func = lambda recs: value
self.default = lambda recs: self.convert_to_cache(
func(recs), recs, validate=False,
)
cls._defaults[name] = value
return
def __str__(self):
return "%s.%s" % (self.model_name, self.name)
def __repr__(self):
return "%s.%s" % (self.model_name, self.name)
############################################################################
#
# Field setup
#
def setup(self, env):
""" Make sure that ``self`` is set up, except for recomputation triggers. """
if not self.setup_done:
if self.related:
self._setup_related(env)
else:
self._setup_regular(env)
self.setup_done = True
#
# Setup of non-related fields
#
def _setup_regular(self, env):
""" Setup the attributes of a non-related field. """
recs = env[self.model_name]
def make_depends(deps):
return tuple(deps(recs) if callable(deps) else deps)
# convert compute into a callable and determine depends
if isinstance(self.compute, basestring):
# if the compute method has been overridden, concatenate all their _depends
self.depends = ()
for method in resolve_all_mro(type(recs), self.compute, reverse=True):
self.depends += make_depends(getattr(method, '_depends', ()))
self.compute = getattr(type(recs), self.compute)
else:
self.depends = make_depends(getattr(self.compute, '_depends', ()))
# convert inverse and search into callables
if isinstance(self.inverse, basestring):
self.inverse = getattr(type(recs), self.inverse)
if isinstance(self.search, basestring):
self.search = getattr(type(recs), self.search)
#
# Setup of related fields
#
def _setup_related(self, env):
""" Setup the attributes of a related field. """
# fix the type of self.related if necessary
if isinstance(self.related, basestring):
self.related = tuple(self.related.split('.'))
# determine the chain of fields, and make sure they are all set up
recs = env[self.model_name]
fields = []
for name in self.related:
field = recs._fields[name]
field.setup(env)
recs = recs[name]
fields.append(field)
self.related_field = field
# check type consistency
if self.type != field.type:
raise Warning("Type of related field %s is inconsistent with %s" % (self, field))
# determine dependencies, compute, inverse, and search
self.depends = ('.'.join(self.related),)
self.compute = self._compute_related
if not (self.readonly or field.readonly):
self.inverse = self._inverse_related
if field._description_searchable:
# allow searching on self only if the related field is searchable
self.search = self._search_related
# copy attributes from field to self (string, help, etc.)
for attr, prop in self.related_attrs:
if not getattr(self, attr):
setattr(self, attr, getattr(field, prop))
for attr, value in field._attrs.iteritems():
if attr not in self._attrs:
setattr(self, attr, value)
# special case for states: copy it only for inherited fields
if not self.states and self.inherited:
self.states = field.states
# special case for required: check if all fields are required
if not self.store and not self.required:
self.required = all(field.required for field in fields)
def _compute_related(self, records):
""" Compute the related field ``self`` on ``records``. """
# when related_sudo, bypass access rights checks when reading values
others = records.sudo() if self.related_sudo else records
for record, other in zip(records, others):
if not record.id:
# draft record, do not switch to another environment
other = record
# traverse the intermediate fields; follow the first record at each step
for name in self.related[:-1]:
other = other[name][:1]
record[self.name] = other[self.related[-1]]
def _inverse_related(self, records):
""" Inverse the related field ``self`` on ``records``. """
# store record values, otherwise they may be lost by cache invalidation!
record_value = {record: record[self.name] for record in records}
for record in records:
other = record
# traverse the intermediate fields, and keep at most one record
for name in self.related[:-1]:
other = other[name][:1]
if other:
other[self.related[-1]] = record_value[record]
def _search_related(self, records, operator, value):
""" Determine the domain to search on field ``self``. """
return [('.'.join(self.related), operator, value)]
# properties used by _setup_related() to copy values from related field
_related_comodel_name = property(attrgetter('comodel_name'))
_related_string = property(attrgetter('string'))
_related_help = property(attrgetter('help'))
_related_readonly = property(attrgetter('readonly'))
_related_groups = property(attrgetter('groups'))
@property
def base_field(self):
""" Return the base field of an inherited field, or ``self``. """
return self.related_field.base_field if self.inherited else self
#
# Setup of field triggers
#
# The triggers is a collection of pairs (field, path) of computed fields
# that depend on ``self``. When ``self`` is modified, it invalidates the cache
# of each ``field``, and registers the records to recompute based on ``path``.
# See method ``modified`` below for details.
#
def add_trigger(self, trigger):
""" Add a recomputation trigger on ``self``. """
if trigger not in self._triggers:
self._triggers += (trigger,)
def setup_triggers(self, env):
""" Add the necessary triggers to invalidate/recompute ``self``. """
model = env[self.model_name]
for path in self.depends:
self._setup_dependency([], model, path.split('.'))
def _setup_dependency(self, path0, model, path1):
""" Make ``self`` depend on ``model``; `path0 + path1` is a dependency of
``self``, and ``path0`` is the sequence of field names from ``self.model``
to ``model``.
"""
env = model.env
head, tail = path1[0], path1[1:]
if head == '*':
# special case: add triggers on all fields of model (except self)
fields = set(model._fields.itervalues()) - set([self])
else:
fields = [model._fields[head]]
for field in fields:
if field == self:
_logger.debug("Field %s is recursively defined", self)
self.recursive = True
continue
#_logger.debug("Add trigger on %s to recompute %s", field, self)
field.add_trigger((self, '.'.join(path0 or ['id'])))
# add trigger on inverse fields, too
for invf in field.inverse_fields:
#_logger.debug("Add trigger on %s to recompute %s", invf, self)
invf.add_trigger((self, '.'.join(path0 + [head])))
# recursively traverse the dependency
if tail:
comodel = env[field.comodel_name]
self._setup_dependency(path0 + [head], comodel, tail)
@property
def dependents(self):
""" Return the computed fields that depend on ``self``. """
return (field for field, path in self._triggers)
############################################################################
#
# Field description
#
def get_description(self, env):
""" Return a dictionary that describes the field ``self``. """
desc = {'type': self.type}
for attr, prop in self.description_attrs:
value = getattr(self, prop)
if callable(value):
value = value(env)
if value is not None:
desc[attr] = value
return desc
# properties used by get_description()
_description_store = property(attrgetter('store'))
_description_manual = property(attrgetter('manual'))
_description_depends = property(attrgetter('depends'))
_description_related = property(attrgetter('related'))
_description_company_dependent = property(attrgetter('company_dependent'))
_description_readonly = property(attrgetter('readonly'))
_description_required = property(attrgetter('required'))
_description_states = property(attrgetter('states'))
_description_groups = property(attrgetter('groups'))
_description_change_default = property(attrgetter('change_default'))
_description_deprecated = property(attrgetter('deprecated'))
@property
def _description_searchable(self):
return bool(self.store or self.search or (self.column and self.column._fnct_search))
@property
def _description_sortable(self):
return self.store or (self.inherited and self.related_field._description_sortable)
def _description_string(self, env):
if self.string and env.lang:
field = self.base_field
name = "%s,%s" % (field.model_name, field.name)
trans = env['ir.translation']._get_source(name, 'field', env.lang)
return trans or self.string
return self.string
def _description_help(self, env):
if self.help and env.lang:
name = "%s,%s" % (self.model_name, self.name)
trans = env['ir.translation']._get_source(name, 'help', env.lang)
return trans or self.help
return self.help
############################################################################
#
# Conversion to column instance
#
def to_column(self):
""" Return a column object corresponding to ``self``, or ``None``. """
if not self.store and self.compute:
# non-stored computed fields do not have a corresponding column
self.column = None
return None
# determine column parameters
#_logger.debug("Create fields._column for Field %s", self)
args = {}
for attr, prop in self.column_attrs:
args[attr] = getattr(self, prop)
for attr, value in self._attrs.iteritems():
args[attr] = value
if self.company_dependent:
# company-dependent fields are mapped to former property fields
args['type'] = self.type
args['relation'] = self.comodel_name
self.column = fields.property(**args)
elif self.column:
# let the column provide a valid column for the given parameters
self.column = self.column.new(_computed_field=bool(self.compute), **args)
else:
# create a fresh new column of the right type
self.column = getattr(fields, self.type)(**args)
return self.column
# properties used by to_column() to create a column instance
_column_copy = property(attrgetter('copy'))
_column_select = property(attrgetter('index'))
_column_manual = property(attrgetter('manual'))
_column_string = property(attrgetter('string'))
_column_help = property(attrgetter('help'))
_column_readonly = property(attrgetter('readonly'))
_column_required = property(attrgetter('required'))
_column_states = property(attrgetter('states'))
_column_groups = property(attrgetter('groups'))
_column_change_default = property(attrgetter('change_default'))
_column_deprecated = property(attrgetter('deprecated'))
############################################################################
#
# Conversion of values
#
def null(self, env):
""" return the null value for this field in the given environment """
return False
def convert_to_cache(self, value, record, validate=True):
""" convert ``value`` to the cache level in ``env``; ``value`` may come from
an assignment, or have the format of methods :meth:`BaseModel.read`
or :meth:`BaseModel.write`
:param record: the target record for the assignment, or an empty recordset
:param bool validate: when True, field-specific validation of
``value`` will be performed
"""
return value
def convert_to_read(self, value, use_name_get=True):
""" convert ``value`` from the cache to a value as returned by method
:meth:`BaseModel.read`
        :param bool use_name_get: when True, the value's display name will
be computed using :meth:`BaseModel.name_get`, if relevant
for the field
"""
return False if value is None else value
def convert_to_write(self, value, target=None, fnames=None):
""" convert ``value`` from the cache to a valid value for method
:meth:`BaseModel.write`.
:param target: optional, the record to be modified with this value
:param fnames: for relational fields only, an optional collection of
field names to convert
"""
return self.convert_to_read(value)
def convert_to_onchange(self, value):
""" convert ``value`` from the cache to a valid value for an onchange
method v7.
"""
return self.convert_to_write(value)
def convert_to_export(self, value, env):
""" convert ``value`` from the cache to a valid value for export. The
parameter ``env`` is given for managing translations.
"""
if not value:
return ''
return value if env.context.get('export_raw_data') else ustr(value)
def convert_to_display_name(self, value, record=None):
""" convert ``value`` from the cache to a suitable display name. """
return ustr(value)
############################################################################
#
# Descriptor methods
#
def __get__(self, record, owner):
""" return the value of field ``self`` on ``record`` """
if record is None:
return self # the field is accessed through the owner class
if not record:
# null record -> return the null value for this field
return self.null(record.env)
# only a single record may be accessed
record.ensure_one()
try:
return record._cache[self]
except KeyError:
pass
# cache miss, retrieve value
if record.id:
# normal record -> read or compute value for this field
self.determine_value(record)
else:
# draft record -> compute the value or let it be null
self.determine_draft_value(record)
# the result should be in cache now
return record._cache[self]
def __set__(self, record, value):
""" set the value of field ``self`` on ``record`` """
env = record.env
# only a single record may be updated
record.ensure_one()
# adapt value to the cache level
value = self.convert_to_cache(value, record)
if env.in_draft or not record.id:
# determine dependent fields
spec = self.modified_draft(record)
# set value in cache, inverse field, and mark record as dirty
record._cache[self] = value
if env.in_onchange:
for invf in self.inverse_fields:
invf._update(value, record)
record._set_dirty(self.name)
# determine more dependent fields, and invalidate them
if self.relational:
spec += self.modified_draft(record)
env.invalidate(spec)
else:
# simply write to the database, and update cache
record.write({self.name: self.convert_to_write(value)})
record._cache[self] = value
############################################################################
#
# Computation of field values
#
def _compute_value(self, records):
""" Invoke the compute method on ``records``. """
# initialize the fields to their corresponding null value in cache
for field in self.computed_fields:
records._cache[field] = field.null(records.env)
records.env.computed[field].update(records._ids)
self.compute(records)
for field in self.computed_fields:
records.env.computed[field].difference_update(records._ids)
def compute_value(self, records):
""" Invoke the compute method on ``records``; the results are in cache. """
with records.env.do_in_draft():
try:
self._compute_value(records)
except (AccessError, MissingError):
# some record is forbidden or missing, retry record by record
for record in records:
try:
self._compute_value(record)
except Exception as exc:
record._cache[self.name] = FailedValue(exc)
def determine_value(self, record):
""" Determine the value of ``self`` for ``record``. """
env = record.env
if self.column and not (self.depends and env.in_draft):
# this is a stored field or an old-style function field
if self.depends:
# this is a stored computed field, check for recomputation
recs = record._recompute_check(self)
if recs:
# recompute the value (only in cache)
self.compute_value(recs)
# HACK: if result is in the wrong cache, copy values
if recs.env != env:
for source, target in zip(recs, recs.with_env(env)):
try:
values = target._convert_to_cache({
f.name: source[f.name] for f in self.computed_fields
}, validate=False)
except MissingError as e:
values = FailedValue(e)
target._cache.update(values)
# the result is saved to database by BaseModel.recompute()
return
# read the field from database
record._prefetch_field(self)
elif self.compute:
# this is either a non-stored computed field, or a stored computed
# field in draft mode
if self.recursive:
self.compute_value(record)
else:
recs = record._in_cache_without(self)
self.compute_value(recs)
else:
# this is a non-stored non-computed field
record._cache[self] = self.null(env)
def determine_draft_value(self, record):
""" Determine the value of ``self`` for the given draft ``record``. """
if self.compute:
self._compute_value(record)
else:
record._cache[self] = SpecialValue(self.null(record.env))
def determine_inverse(self, records):
""" Given the value of ``self`` on ``records``, inverse the computation. """
if self.inverse:
self.inverse(records)
def determine_domain(self, records, operator, value):
""" Return a domain representing a condition on ``self``. """
if self.search:
return self.search(records, operator, value)
else:
return [(self.name, operator, value)]
############################################################################
#
# Notification when fields are modified
#
def modified(self, records):
""" Notify that field ``self`` has been modified on ``records``: prepare the
fields/records to recompute, and return a spec indicating what to
invalidate.
"""
# invalidate the fields that depend on self, and prepare recomputation
spec = [(self, records._ids)]
for field, path in self._triggers:
if path and field.store:
# don't move this line to function top, see log
env = records.env(user=SUPERUSER_ID, context={'active_test': False})
target = env[field.model_name].search([(path, 'in', records.ids)])
if target:
spec.append((field, target._ids))
# recompute field on target in the environment of records,
# and as user admin if required
if field.compute_sudo:
target = target.with_env(records.env(user=SUPERUSER_ID))
else:
target = target.with_env(records.env)
target._recompute_todo(field)
else:
spec.append((field, None))
return spec
def modified_draft(self, records):
""" Same as :meth:`modified`, but in draft mode. """
env = records.env
# invalidate the fields on the records in cache that depend on
# ``records``, except fields currently being computed
spec = []
for field, path in self._triggers:
target = env[field.model_name]
computed = target.browse(env.computed[field])
if path == 'id':
target = records - computed
elif path:
target = (target.browse(env.cache[field]) - computed).filtered(
lambda rec: rec._mapped_cache(path) & records
)
else:
target = target.browse(env.cache[field]) - computed
if target:
spec.append((field, target._ids))
return spec
class Boolean(Field):
type = 'boolean'
def convert_to_cache(self, value, record, validate=True):
return bool(value)
def convert_to_export(self, value, env):
if env.context.get('export_raw_data'):
return value
return ustr(value)
class Integer(Field):
type = 'integer'
_slots = {
'group_operator': None, # operator for aggregating values
        'group_expression': None,      # advanced expression for aggregating values
}
_related_group_operator = property(attrgetter('group_operator'))
_column_group_operator = property(attrgetter('group_operator'))
_related_group_expression = property(attrgetter('group_expression'))
_column_group_expression = property(attrgetter('group_expression'))
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, dict):
# special case, when an integer field is used as inverse for a one2many
return value.get('id', False)
return int(value or 0)
def convert_to_read(self, value, use_name_get=True):
# Integer values greater than 2^31-1 are not supported in pure XMLRPC,
# so we have to pass them as floats :-(
if value and value > xmlrpclib.MAXINT:
return float(value)
return value
def _update(self, records, value):
# special case, when an integer field is used as inverse for a one2many
records._cache[self] = value.id or 0
def convert_to_export(self, value, env):
if value or value == 0:
return value if env.context.get('export_raw_data') else ustr(value)
return ''
class Float(Field):
""" The precision digits are given by the attribute
:param digits: a pair (total, decimal), or a function taking a database
cursor and returning a pair (total, decimal)
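
        As an illustrative sketch (the field name is hypothetical)::

            amount = fields.Float(digits=(16, 2))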
"""
type = 'float'
_slots = {
'_digits': None, # digits argument passed to class initializer
'group_operator': None, # operator for aggregating values
        'group_expression': None,      # advanced expression for aggregating values
}
def __init__(self, string=None, digits=None, **kwargs):
super(Float, self).__init__(string=string, _digits=digits, **kwargs)
@property
def digits(self):
if callable(self._digits):
with fields._get_cursor() as cr:
return self._digits(cr)
else:
return self._digits
def _setup_digits(self, env):
""" Setup the digits for ``self`` and its corresponding column """
pass
def _setup_regular(self, env):
super(Float, self)._setup_regular(env)
self._setup_digits(env)
_related__digits = property(attrgetter('_digits'))
_related_group_operator = property(attrgetter('group_operator'))
_related_group_expression = property(attrgetter('group_expression'))
_description_digits = property(attrgetter('digits'))
_column_digits = property(lambda self: not callable(self._digits) and self._digits)
_column_digits_compute = property(lambda self: callable(self._digits) and self._digits)
_column_group_operator = property(attrgetter('group_operator'))
_column_group_expression = property(attrgetter('group_expression'))
def convert_to_cache(self, value, record, validate=True):
# apply rounding here, otherwise value in cache may be wrong!
value = float(value or 0.0)
digits = self.digits
return float_round(value, precision_digits=digits[1]) if digits else value
def convert_to_export(self, value, env):
if value or value == 0.0:
return value if env.context.get('export_raw_data') else ustr(value)
return ''
class _String(Field):
""" Abstract class for string fields. """
_slots = {
'translate': False, # whether the field is translated
}
_column_translate = property(attrgetter('translate'))
_related_translate = property(attrgetter('translate'))
_description_translate = property(attrgetter('translate'))
class Char(_String):
""" Basic string field, can be length-limited, usually displayed as a
single-line string in clients
:param int size: the maximum size of values stored for that field
:param bool translate: whether the values of this field can be translated
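
        As an illustrative sketch (the field name is hypothetical)::

            name = fields.Char(string='Name', size=64, translate=True)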
"""
type = 'char'
_slots = {
'size': None, # maximum size of values (deprecated)
}
_column_size = property(attrgetter('size'))
_related_size = property(attrgetter('size'))
_description_size = property(attrgetter('size'))
def _setup_regular(self, env):
super(Char, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \
"Char field %s with non-integer size %r" % (self, self.size)
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
return ustr(value)[:self.size]
class Text(_String):
""" Very similar to :class:`~.Char` but used for longer contents, does not
have a size and usually displayed as a multiline text box.
:param translate: whether the value of this field can be translated
"""
type = 'text'
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
return ustr(value)
class Html(_String):
type = 'html'
_slots = {
'sanitize': True, # whether value must be sanitized
'strip_style': False, # whether to strip style attributes
}
_column_sanitize = property(attrgetter('sanitize'))
_related_sanitize = property(attrgetter('sanitize'))
_description_sanitize = property(attrgetter('sanitize'))
_column_strip_style = property(attrgetter('strip_style'))
_related_strip_style = property(attrgetter('strip_style'))
_description_strip_style = property(attrgetter('strip_style'))
def convert_to_cache(self, value, record, validate=True):
if value is None or value is False:
return False
if validate and self.sanitize:
return html_sanitize(value, strip_style=self.strip_style)
return value
class Date(Field):
type = 'date'
@staticmethod
def today(*args):
""" Return the current day in the format expected by the ORM.
This function may be used to compute default values.
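
            For example (an illustrative sketch, the field name is
            hypothetical)::

                start_date = fields.Date(default=fields.Date.today)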
"""
return date.today().strftime(DATE_FORMAT)
@staticmethod
def context_today(record, timestamp=None):
""" Return the current date as seen in the client's timezone in a format
fit for date fields. This method may be used to compute default
values.
:param datetime timestamp: optional datetime value to use instead of
the current date and time (must be a datetime, regular dates
can't be converted between timezones.)
:rtype: str
"""
today = timestamp or datetime.now()
context_today = None
tz_name = record._context.get('tz') or record.env.user.tz
if tz_name:
try:
today_utc = pytz.timezone('UTC').localize(today, is_dst=False) # UTC = no DST
context_today = today_utc.astimezone(pytz.timezone(tz_name))
except Exception:
_logger.debug("failed to compute context/client-specific today date, using UTC value for `today`",
exc_info=True)
return (context_today or today).strftime(DATE_FORMAT)
@staticmethod
def from_string(value):
""" Convert an ORM ``value`` into a :class:`date` value. """
if not value:
return None
value = value[:DATE_LENGTH]
return datetime.strptime(value, DATE_FORMAT).date()
@staticmethod
def to_string(value):
""" Convert a :class:`date` value into the format expected by the ORM. """
return value.strftime(DATE_FORMAT) if value else False
def convert_to_cache(self, value, record, validate=True):
if not value:
return False
if isinstance(value, basestring):
if validate:
# force parsing for validation
self.from_string(value)
return value[:DATE_LENGTH]
return self.to_string(value)
def convert_to_export(self, value, env):
if not value:
return ''
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
class Datetime(Field):
type = 'datetime'
@staticmethod
def now(*args):
""" Return the current day and time in the format expected by the ORM.
This function may be used to compute default values.
"""
return datetime.now().strftime(DATETIME_FORMAT)
@staticmethod
def context_timestamp(record, timestamp):
"""Returns the given timestamp converted to the client's timezone.
This method is *not* meant for use as a _defaults initializer,
because datetime fields are automatically converted upon
        display on client side. For _defaults, :meth:`fields.datetime.now`
        should be used instead.
:param datetime timestamp: naive datetime value (expressed in UTC)
to be converted to the client timezone
:rtype: datetime
:return: timestamp converted to timezone-aware datetime in context
timezone
"""
assert isinstance(timestamp, datetime), 'Datetime instance expected'
tz_name = record._context.get('tz') or record.env.user.tz
utc_timestamp = pytz.utc.localize(timestamp, is_dst=False) # UTC = no DST
if tz_name:
try:
context_tz = pytz.timezone(tz_name)
return utc_timestamp.astimezone(context_tz)
except Exception:
_logger.debug("failed to compute context/client-specific timestamp, "
"using the UTC value",
exc_info=True)
return utc_timestamp
@staticmethod
def from_string(value):
""" Convert an ORM ``value`` into a :class:`datetime` value. """
if not value:
return None
value = value[:DATETIME_LENGTH]
if len(value) == DATE_LENGTH:
value += " 00:00:00"
return datetime.strptime(value, DATETIME_FORMAT)
@staticmethod
def to_string(value):
""" Convert a :class:`datetime` value into the format expected by the ORM. """
return value.strftime(DATETIME_FORMAT) if value else False
def convert_to_cache(self, value, record, validate=True):
if not value:
return False
if isinstance(value, basestring):
if validate:
# force parsing for validation
self.from_string(value)
value = value[:DATETIME_LENGTH]
if len(value) == DATE_LENGTH:
value += " 00:00:00"
return value
return self.to_string(value)
def convert_to_export(self, value, env):
if not value:
return ''
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
def convert_to_display_name(self, value, record=None):
assert record, 'Record expected'
return Datetime.to_string(Datetime.context_timestamp(record, Datetime.from_string(value)))
class Binary(Field):
type = 'binary'
class Selection(Field):
"""
:param selection: specifies the possible values for this field.
It is given as either a list of pairs (``value``, ``string``), or a
model method, or a method name.
:param selection_add: provides an extension of the selection in the case
of an overridden field. It is a list of pairs (``value``, ``string``).
The attribute ``selection`` is mandatory except in the case of
:ref:`related fields <field-related>` or :ref:`field extensions
<field-incremental-definition>`.
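
        As an illustrative sketch (the field name and values are
        hypothetical)::

            state = fields.Selection([('draft', 'Draft'), ('done', 'Done')],
                                     default='draft')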
"""
type = 'selection'
_slots = {
'selection': None, # [(value, string), ...], function or method name
}
def __init__(self, selection=None, string=None, **kwargs):
if callable(selection):
from openerp import api
selection = api.expected(api.model, selection)
super(Selection, self).__init__(selection=selection, string=string, **kwargs)
def _setup_regular(self, env):
super(Selection, self)._setup_regular(env)
assert self.selection is not None, "Field %s without selection" % self
def _setup_related(self, env):
super(Selection, self)._setup_related(env)
# selection must be computed on related field
field = self.related_field
self.selection = lambda model: field._description_selection(model.env)
def set_class_name(self, cls, name):
super(Selection, self).set_class_name(cls, name)
# determine selection (applying 'selection_add' extensions)
for field in resolve_all_mro(cls, name, reverse=True):
if isinstance(field, type(self)):
# We cannot use field.selection or field.selection_add here
# because those attributes are overridden by ``set_class_name``.
if 'selection' in field._attrs:
self.selection = field._attrs['selection']
if 'selection_add' in field._attrs:
# use an OrderedDict to update existing values
selection_add = field._attrs['selection_add']
self.selection = OrderedDict(self.selection + selection_add).items()
else:
self.selection = None
def _description_selection(self, env):
""" return the selection list (pairs (value, label)); labels are
translated according to context language
"""
selection = self.selection
if isinstance(selection, basestring):
return getattr(env[self.model_name], selection)()
if callable(selection):
return selection(env[self.model_name])
# translate selection labels
if env.lang:
name = "%s,%s" % (self.model_name, self.name)
translate = partial(
env['ir.translation']._get_source, name, 'selection', env.lang)
return [(value, translate(label) if label else label) for value, label in selection]
else:
return selection
@property
def _column_selection(self):
if isinstance(self.selection, basestring):
method = self.selection
return lambda self, *a, **kw: getattr(self, method)(*a, **kw)
else:
return self.selection
def get_values(self, env):
""" return a list of the possible values """
selection = self.selection
if isinstance(selection, basestring):
selection = getattr(env[self.model_name], selection)()
elif callable(selection):
selection = selection(env[self.model_name])
return [value for value, _ in selection]
def convert_to_cache(self, value, record, validate=True):
if not validate:
return value or False
if value in self.get_values(record.env):
return value
elif not value:
return False
raise ValueError("Wrong value for %s: %r" % (self, value))
def convert_to_export(self, value, env):
if not isinstance(self.selection, list):
# FIXME: this reproduces an existing buggy behavior!
return value if value else ''
for item in self._description_selection(env):
if item[0] == value:
return item[1]
return False
class Reference(Selection):
type = 'reference'
_slots = {
'size': None, # maximum size of values (deprecated)
}
_related_size = property(attrgetter('size'))
_column_size = property(attrgetter('size'))
def _setup_regular(self, env):
super(Reference, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \
"Reference field %s with non-integer size %r" % (self, self.size)
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, BaseModel):
if ((not validate or value._name in self.get_values(record.env))
and len(value) <= 1):
return value.with_env(record.env) or False
elif isinstance(value, basestring):
res_model, res_id = value.split(',')
return record.env[res_model].browse(int(res_id))
elif not value:
return False
raise ValueError("Wrong value for %s: %r" % (self, value))
def convert_to_read(self, value, use_name_get=True):
return "%s,%s" % (value._name, value.id) if value else False
def convert_to_export(self, value, env):
return value.name_get()[0][1] if value else ''
def convert_to_display_name(self, value, record=None):
return ustr(value and value.display_name)
class _Relational(Field):
""" Abstract class for relational fields. """
relational = True
_slots = {
'domain': [], # domain for searching values
'context': {}, # context for searching values
}
def _setup_regular(self, env):
super(_Relational, self)._setup_regular(env)
if self.comodel_name not in env.registry:
_logger.warning("Field %s with unknown comodel_name %r"
% (self, self.comodel_name))
self.comodel_name = '_unknown'
@property
def _related_domain(self):
if callable(self.domain):
# will be called with another model than self's
return lambda recs: self.domain(recs.env[self.model_name])
else:
# maybe not correct if domain is a string...
return self.domain
_related_context = property(attrgetter('context'))
_description_relation = property(attrgetter('comodel_name'))
_description_context = property(attrgetter('context'))
def _description_domain(self, env):
return self.domain(env[self.model_name]) if callable(self.domain) else self.domain
_column_obj = property(attrgetter('comodel_name'))
_column_domain = property(attrgetter('domain'))
_column_context = property(attrgetter('context'))
def null(self, env):
return env[self.comodel_name]
def modified(self, records):
# Invalidate cache for self.inverse_fields, too. Note that recomputation
# of fields that depend on self.inverse_fields is already covered by the
# triggers (see above).
spec = super(_Relational, self).modified(records)
for invf in self.inverse_fields:
spec.append((invf, None))
return spec
class Many2one(_Relational):
""" The value of such a field is a recordset of size 0 (no
record) or 1 (a single record).
:param comodel_name: name of the target model (string)
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param ondelete: what to do when the referred record is deleted;
possible values are: ``'set null'``, ``'restrict'``, ``'cascade'``
:param auto_join: whether JOINs are generated upon search through that
field (boolean, by default ``False``)
:param delegate: set it to ``True`` to make fields of the target model
accessible from the current model (corresponds to ``_inherits``)
The attribute ``comodel_name`` is mandatory except in the case of related
fields or field extensions.
"""
type = 'many2one'
_slots = {
'ondelete': 'set null', # what to do when value is deleted
'auto_join': False, # whether joins are generated upon search
'delegate': False, # whether self implements delegation
}
def __init__(self, comodel_name=None, string=None, **kwargs):
super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs)
def set_class_name(self, cls, name):
super(Many2one, self).set_class_name(cls, name)
# determine self.delegate
if not self.delegate:
self.delegate = name in cls._inherits.values()
_column_ondelete = property(attrgetter('ondelete'))
_column_auto_join = property(attrgetter('auto_join'))
def _update(self, records, value):
""" Update the cached value of ``self`` for ``records`` with ``value``. """
records._cache[self] = value
def convert_to_cache(self, value, record, validate=True):
if isinstance(value, (NoneType, int, long)):
return record.env[self.comodel_name].browse(value)
if isinstance(value, BaseModel):
if value._name == self.comodel_name and len(value) <= 1:
return value.with_env(record.env)
raise ValueError("Wrong value for %s: %r" % (self, value))
elif isinstance(value, tuple):
return record.env[self.comodel_name].browse(value[0])
elif isinstance(value, dict):
return record.env[self.comodel_name].new(value)
else:
return self.null(record.env)
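# Hedged examples of the inputs accepted above: 7 or None (an id), a
# recordset of at most one record, a (7, 'Display Name') name_get pair,
# or a dict like {'name': 'New partner'} creating an unsaved record.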
def convert_to_read(self, value, use_name_get=True):
if use_name_get and value:
# evaluate name_get() as superuser, because the visibility of a
# many2one field value (id and name) depends on the current record's
# access rights, and not the value's access rights.
try:
value_sudo = value.sudo()
# performance trick: make sure that all records of the same
# model as value in value.env will be prefetched in value_sudo.env
value_sudo.env.prefetch[value._name].update(value.env.prefetch[value._name])
return value_sudo.name_get()[0]
except MissingError:
# Should not happen, unless the foreign key is missing.
return False
else:
return value.id
def convert_to_write(self, value, target=None, fnames=None):
return value.id
def convert_to_onchange(self, value):
return value.id
def convert_to_export(self, value, env):
return value.name_get()[0][1] if value else ''
def convert_to_display_name(self, value, record=None):
return ustr(value.display_name)
class UnionUpdate(SpecialValue):
""" Placeholder for a value update; when this value is taken from the cache,
it returns ``record[field.name] | value`` and stores it in the cache.
"""
def __init__(self, field, record, value):
self.args = (field, record, value)
def get(self):
field, record, value = self.args
# in order to read the current field's value, remove self from cache
del record._cache[field]
# read the current field's value, and update it in cache only
record._cache[field] = new_value = record[field.name] | value
return new_value
class _RelationalMulti(_Relational):
""" Abstract class for relational fields *2many. """
def _update(self, records, value):
""" Update the cached value of ``self`` for ``records`` with ``value``. """
for record in records:
if self in record._cache:
record._cache[self] = record[self.name] | value
else:<|fim▁hole|> def convert_to_cache(self, value, record, validate=True):
if isinstance(value, BaseModel):
if value._name == self.comodel_name:
return value.with_env(record.env)
elif isinstance(value, list):
# value is a list of record ids or commands
comodel = record.env[self.comodel_name]
ids = OrderedSet(record[self.name].ids)
# modify ids with the commands
for command in value:
if isinstance(command, (tuple, list)):
if command[0] == 0:
ids.add(comodel.new(command[2]).id)
elif command[0] == 1:
comodel.browse(command[1]).update(command[2])
ids.add(command[1])
elif command[0] == 2:
# note: the record will be deleted by write()
ids.discard(command[1])
elif command[0] == 3:
ids.discard(command[1])
elif command[0] == 4:
ids.add(command[1])
elif command[0] == 5:
ids.clear()
elif command[0] == 6:
ids = OrderedSet(command[2])
elif isinstance(command, dict):
ids.add(comodel.new(command).id)
else:
ids.add(command)
# return result as a recordset
return comodel.browse(list(ids))
elif not value:
return self.null(record.env)
raise ValueError("Wrong value for %s: %s" % (self, value))
def convert_to_read(self, value, use_name_get=True):
return value.ids
def convert_to_write(self, value, target=None, fnames=None):
# remove/delete former records
if target is None:
set_ids = []
result = [(6, 0, set_ids)]
add_existing = lambda id: set_ids.append(id)
else:
tag = 2 if self.type == 'one2many' else 3
result = [(tag, record.id) for record in target[self.name] - value]
add_existing = lambda id: result.append((4, id))
if fnames is None:
# take all fields in cache, except the inverses of self
fnames = set(value._fields) - set(MAGIC_COLUMNS)
for invf in self.inverse_fields:
fnames.discard(invf.name)
# add new and existing records
for record in value:
if not record.id:
values = {k: v for k, v in record._cache.iteritems() if k in fnames}
values = record._convert_to_write(values)
result.append((0, 0, values))
elif record._is_dirty():
values = {k: record._cache[k] for k in record._get_dirty() if k in fnames}
values = record._convert_to_write(values)
result.append((1, record.id, values))
else:
add_existing(record.id)
return result
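# Hedged sketch of the output built above: with target=None the result is a
# single [(6, 0, ids)] collecting existing ids, plus (0, 0, vals) for new
# records and (1, id, vals) for dirty ones; with a target, removals become
# (2, id) for one2many or (3, id) for many2many, and kept records (4, id).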
def convert_to_export(self, value, env):
return ','.join(name for id, name in value.name_get()) if value else ''
def convert_to_display_name(self, value, record=None):
raise NotImplementedError()
def _compute_related(self, records):
""" Compute the related field ``self`` on ``records``. """
for record in records:
value = record
# traverse the intermediate fields, and keep at most one record
for name in self.related[:-1]:
value = value[name][:1]
record[self.name] = value[self.related[-1]]
class One2many(_RelationalMulti):
""" One2many field; the value of such a field is the recordset of all the
records in ``comodel_name`` such that the field ``inverse_name`` is equal to
the current record.
:param comodel_name: name of the target model (string)
:param inverse_name: name of the inverse ``Many2one`` field in
``comodel_name`` (string)
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param auto_join: whether JOINs are generated upon search through that
field (boolean, by default ``False``)
:param limit: optional limit to use upon read (integer)
The attributes ``comodel_name`` and ``inverse_name`` are mandatory except in
the case of related fields or field extensions.
"""
type = 'one2many'
_slots = {
'inverse_name': None, # name of the inverse field
'auto_join': False, # whether joins are generated upon search
'limit': None, # optional limit to use upon read
'copy': False, # o2m are not copied by default
}
def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs):
super(One2many, self).__init__(
comodel_name=comodel_name,
inverse_name=inverse_name,
string=string,
**kwargs
)
def _setup_regular(self, env):
super(One2many, self)._setup_regular(env)
if self.inverse_name:
# link self to its inverse field and vice-versa
comodel = env[self.comodel_name]
invf = comodel._fields[self.inverse_name]
# In some rare cases, a ``One2many`` field can link to ``Int`` field
# (res_model/res_id pattern). Only inverse the field if this is
# a ``Many2one`` field.
if isinstance(invf, Many2one):
self.inverse_fields += (invf,)
invf.inverse_fields += (self,)
_description_relation_field = property(attrgetter('inverse_name'))
_column_fields_id = property(attrgetter('inverse_name'))
_column_auto_join = property(attrgetter('auto_join'))
_column_limit = property(attrgetter('limit'))
class Many2many(_RelationalMulti):
""" Many2many field; the value of such a field is the recordset.
:param comodel_name: name of the target model (string)
The attribute ``comodel_name`` is mandatory except in the case of related
fields or field extensions.
:param relation: optional name of the table that stores the relation in
the database (string)
:param column1: optional name of the column referring to "these" records
in the table ``relation`` (string)
:param column2: optional name of the column referring to "those" records
in the table ``relation`` (string)
The attributes ``relation``, ``column1`` and ``column2`` are optional. If not
given, names are automatically generated from model names, provided
``model_name`` and ``comodel_name`` are different!
:param domain: an optional domain to set on candidate values on the
client side (domain or string)
:param context: an optional context to use on the client side when
handling that field (dictionary)
:param limit: optional limit to use upon read (integer)
"""
type = 'many2many'
_slots = {
'relation': None, # name of table
'column1': None, # column of table referring to model
'column2': None, # column of table referring to comodel
'limit': None, # optional limit to use upon read
}
def __init__(self, comodel_name=None, relation=None, column1=None, column2=None,
string=None, **kwargs):
super(Many2many, self).__init__(
comodel_name=comodel_name,
relation=relation,
column1=column1,
column2=column2,
string=string,
**kwargs
)
def _setup_regular(self, env):
super(Many2many, self)._setup_regular(env)
if not self.relation and self.store:
# retrieve self.relation from the corresponding column
column = self.to_column()
if isinstance(column, fields.many2many):
self.relation, self.column1, self.column2 = \
column._sql_names(env[self.model_name])
if self.relation:
m2m = env.registry._m2m
# if inverse field has already been setup, it is present in m2m
invf = m2m.get((self.relation, self.column2, self.column1))
if invf:
self.inverse_fields += (invf,)
invf.inverse_fields += (self,)
else:
# add self in m2m, so that its inverse field can find it
m2m[(self.relation, self.column1, self.column2)] = self
_column_rel = property(attrgetter('relation'))
_column_id1 = property(attrgetter('column1'))
_column_id2 = property(attrgetter('column2'))
_column_limit = property(attrgetter('limit'))
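# Illustrative declarations of the relational fields defined above (hedged;
# the model and field names are made up):
#   partner_id = Many2one('res.partner', string='Customer', ondelete='restrict')
#   line_ids = One2many('sale.order.line', 'order_id', string='Order Lines')
#   tag_ids = Many2many('crm.tag', 'sale_tag_rel', 'sale_id', 'tag_id')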
class Serialized(Field):
""" Minimal support for existing sparse and serialized fields. """
type = 'serialized'
def convert_to_cache(self, value, record, validate=True):
return value or {}
class Id(Field):
""" Special case for field 'id'. """
type = 'integer'
_slots = {
'string': 'ID',
'store': True,
'readonly': True,
}
def to_column(self):
self.column = fields.integer(self.string)
return self.column
def __get__(self, record, owner):
if record is None:
return self # the field is accessed through the class owner
if not record:
return False
return record.ensure_one()._ids[0]
def __set__(self, record, value):
raise TypeError("field 'id' cannot be assigned")
# imported here to avoid dependency cycle issues
from openerp import SUPERUSER_ID, registry
from .exceptions import Warning, AccessError, MissingError
from .models import BaseModel, MAGIC_COLUMNS
from .osv import fields<|fim▁end|> | record._cache[self] = UnionUpdate(self, record, value)
|
<|file_name|>niche-prefer-zero.rs<|end_file_name|><|fim▁begin|>// Check that niche selection prefers zero and that jumps are optimized away.
// See https://github.com/rust-lang/rust/pull/87794
// assembly-output: emit-asm
// only-x86
// compile-flags: -Copt-level=3
#![crate_type = "lib"]
#[repr(u8)]
pub enum Size {
One = 1,
Two = 2,
Three = 3,
}
#[no_mangle]
pub fn handle(x: Option<Size>) -> u8 {
match x {
None => 0,
Some(size) => size as u8,
}<|fim▁hole|>// There should be no jumps in output
// CHECK-NOT: j<|fim▁end|> | }
|
<|file_name|>sale_order.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# Avanzosc - Avanced Open Source Consulting
# Copyright (C) 2011 - 2014 Avanzosc <http://www.avanzosc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|># GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp.osv import orm
import time
class SaleOrder(orm.Model):
_inherit = 'sale.order'
def action_wait(self, cr, uid, ids, context=None):
product_obj = self.pool['product.product']
res = super(SaleOrder, self).action_wait(cr, uid, ids, context)
for o in self.browse(cr, uid, ids, context):
for line in o.order_line:
if line.product_id:
vals = {'last_sale_date':
time.strftime('%Y-%m-%d %H:%M:%S'),
'last_customer_id': line.order_id.partner_id.id,
}
product_obj.write(cr, uid, [line.product_id.id], vals,
context)
return res<|fim▁end|> | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>AssetsScreenTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.<|fim▁hole|>
package org.kie.workbench.common.screens.library.client.screens.assets;
import elemental2.dom.HTMLElement;
import org.guvnor.common.services.project.model.WorkspaceProject;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.screens.library.api.LibraryService;
import org.kie.workbench.common.screens.library.client.util.LibraryPlaces;
import org.kie.workbench.common.services.shared.project.KieModule;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.ext.widgets.common.client.common.BusyIndicatorView;
import org.uberfire.mocks.CallerMock;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class AssetsScreenTest {
private AssetsScreen assetsScreen;
@Mock
private AssetsScreen.View view;
@Mock
private LibraryPlaces libraryPlaces;
@Mock
private EmptyAssetsScreen emptyAssetsScreen;
@Mock
private PopulatedAssetsScreen populatedAssetsScreen;
@Mock
private InvalidProjectScreen invalidProjectScreen;
@Mock
private TranslationService ts;
@Mock
private BusyIndicatorView busyIndicatorView;
@Mock
private LibraryService libraryService;
private WorkspaceProject workspaceProject;
@Before
public void setUp() {
workspaceProject = mock(WorkspaceProject.class);
doReturn(mock(KieModule.class)).when(workspaceProject).getMainModule();
when(libraryPlaces.getActiveWorkspaceContext()).thenReturn(workspaceProject);
EmptyAssetsView emptyView = mock(EmptyAssetsView.class);
PopulatedAssetsView populatedView = mock(PopulatedAssetsView.class);
InvalidProjectView invalidProjectView = mock(InvalidProjectView.class);
HTMLElement emptyElement = mock(HTMLElement.class);
HTMLElement populatedElement = mock(HTMLElement.class);
HTMLElement invalidProjectElement = mock(HTMLElement.class);
when(emptyAssetsScreen.getView()).thenReturn(emptyView);
when(emptyView.getElement()).thenReturn(emptyElement);
when(populatedAssetsScreen.getView()).thenReturn(populatedView);
when(populatedView.getElement()).thenReturn(populatedElement);
when(invalidProjectScreen.getView()).thenReturn(invalidProjectView);
when(invalidProjectView.getElement()).thenReturn(invalidProjectElement);
this.assetsScreen = new AssetsScreen(view,
libraryPlaces,
emptyAssetsScreen,
populatedAssetsScreen,
invalidProjectScreen,
ts,
busyIndicatorView,
new CallerMock<>(libraryService));
}
@Test
public void testShowEmptyScreenAssets() {
when(libraryService.hasAssets(any(WorkspaceProject.class))).thenReturn(false);
this.assetsScreen.init();
verify(emptyAssetsScreen,
times(1)).getView();
verify(populatedAssetsScreen,
never()).getView();
verify(view).setContent(emptyAssetsScreen.getView().getElement());
}
@Test
public void testShowPopulatedScreenAssets() {
when(libraryService.hasAssets(any(WorkspaceProject.class))).thenReturn(true);
this.assetsScreen.init();
verify(emptyAssetsScreen,
never()).getView();
verify(populatedAssetsScreen,
times(1)).getView();
verify(view).setContent(populatedAssetsScreen.getView().getElement());
}
@Test
public void testSetContentNotCalledWhenAlreadyDisplayed() throws Exception {
try {
testShowEmptyScreenAssets();
} catch (AssertionError ae) {
throw new AssertionError("Precondition failed. Could not set empty asset screen.", ae);
}
HTMLElement emptyElement = emptyAssetsScreen.getView().getElement();
emptyElement.parentNode = mock(HTMLElement.class);
reset(view);
assetsScreen.init();
verify(view, never()).setContent(any());
}
@Test
public void testInvalidProject() throws Exception {
reset(workspaceProject);
doReturn(null).when(workspaceProject).getMainModule();
assetsScreen.init();
verify(view).setContent(invalidProjectScreen.getView().getElement());
verify(libraryService, never()).hasAssets(any(WorkspaceProject.class));
}
}<|fim▁end|> | *
*/ |
<|file_name|>graph_theory.py<|end_file_name|><|fim▁begin|>"""This module provides graph theory functionality."""
from heapq import heapify, heappop, heappush
def dijkstra(nodes, edges, startNode, directed):
"""Finds the length between each node in the graph and the startNode.
Arguments:
nodes - the set of nodes in the graph.
edges - the set of edges in the graph. Each edge should be a 3-tuple
containing the source and destination nodes, as well as the<|fim▁hole|> startNode - the starting node for the search.
directed - boolean, should the graph be treated as directed,
or instead undirected?
Returns: a dict whose keys are nodes and whose values are the smallest cost
to get from startNode to that particular node.
E.g.
# example: from wikipedia
nodes = ["1", "2", "3", "4", "5", "6"]
edges = [("1", "2", 7), ("1", "3", 9), ("1", "6", 14), ("2", "3", 10),
("2", "4", 15), ("3", "4", 11), ("3", "6", 2), ("4", "5", 6),
("5", "6", 9)]
d = dijkstra(set(nodes), set(edges), "1", True)
print d
"""
#construct a dict holding each nodes' neighbours and the cost to them
neighbours = dict([(node, []) for node in nodes])
for edge in edges:
neighbours[edge[0]].append((edge[1], edge[2]))
if not directed:
neighbours[edge[1]].append((edge[0], edge[2]))
#to every node assign a distance (starting with +inf and zero for startNode)
distance = dict([(node, float("inf")) for node in nodes])
distance[startNode] = 0
#mark every node as unvisited
visited = dict([(node, False) for node in nodes])
#main part of algorithm
unvisitedQ = [(0, startNode)]
while len(unvisitedQ) != 0:
currentNode = heappop(unvisitedQ)[1]
if visited[currentNode]:
continue #skip stale queue entries for nodes already finalised
for (node, edgeWeight) in neighbours[currentNode]:
if not visited[node]:
if distance[currentNode] + edgeWeight < distance[node]:
distance[node] = distance[currentNode] + edgeWeight
#push a fresh entry with the improved distance; the old, stale
#entry is skipped via the visited check when it is popped later
heappush(unvisitedQ, (distance[node], node))
visited[currentNode] = True
return distance
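#Quick sanity check (hedged, using the directed Wikipedia example from the
#docstring above): d["6"] == 11 via the path 1 -> 3 -> 6 (9 + 2), which
#beats the direct 1 -> 6 edge of weight 14.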
def prim(adj_list, start_v):
"""Finds a minimal spanning tree given a graph's adjacency list. The list
should be a dictionary whose keys are vertices. The values should be lists
of 2-tuples. Each 2-tuple should contain a 'to vertex' and a weight.
Returned is a list of edges in the minimal spanning tree, each a 3-tuple
containing the 'to vertex', 'from vertex', and weight.
E.g. from wikipedia:
a = {'A': [('B', 7), ('D', 5)],
'B': [('A', 7), ('C', 8), ('D', 9), ('E', 7)],
'C': [('B', 8), ('E', 5)],
'D': [('A', 5), ('B', 9), ('E', 15), ('F', 6)],
'E': [('B', 7), ('C', 5), ('D', 15), ('F', 8), ('G', 9)],
'F': [('D', 6), ('E', 8), ('G', 11)],
'G': [('E', 9), ('F', 11)]}
for from_v, to_v, weight in prim(a, 'A'):
print from_v, to_v, weight
"""
n = len(adj_list)
v_new = [start_v]
e_new = []
q = [(weight, start_v, to_v) for to_v, weight in adj_list[start_v]]
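#q holds candidate edges as (weight, from_vertex, to_vertex) tuples, so the
#heap always yields the lightest edge leaving the tree built so far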
heapify(q)
while len(v_new) != n:
while True:
weight, from_v, to_v = heappop(q)
if from_v not in v_new or to_v not in v_new:
break
v_new.append(to_v)
e_new.append((from_v, to_v, weight))
for next_to_v, weight in adj_list[to_v]:
if next_to_v not in v_new:
heappush(q, (weight, to_v, next_to_v))
return e_new<|fim▁end|> | (non-negative) weight of that edge (in that order). |
<|file_name|>01_RedditlistCrawler.py<|end_file_name|><|fim▁begin|>import requests
from bs4 import BeautifulSoup
import urllib2 # requires Python 2 (imported in the original script but unused below)
"""
1. Get all subreddit names from redditlist.com
using the requests and BeautifulSoup libraries
"""
def get_subreddit_list(max_pages):
"""
Get all of the top ~4000 subreddits
from http://www.redditlist.com
"""
page = 1
subs = []
print("Getting subreddits...")
while page <= max_pages:
print("Crawling Page "+ str(page))
if page == 1 :
url = "http://www.redditlist.com"
else:
url = "http://www.redditlist.com?page="+str(page)
source_code = requests.get(url)
plain_text = source_code.text
soup = BeautifulSoup(plain_text, "html.parser")
for link in soup.findAll("a",{"class":"sfw"}):
href = link.get("href")
subs.append(href)
title = link.string
# finished scraping this page; move on to the next one
page += 1
# collect the unique links, then keep only those that point at subreddits
result = []
subreddits = list(set(subs))
subreddits_count = 0
for subreddit in subreddits:
subreddit_url = "http://reddit.com/r/"
if subreddit_url in subreddit:
print subreddit[20:]
#subreddit_list.append(subreddit[20:])
with open("./Resources/subreddit_list.txt", "a") as myfile:
# append ("a" mode) so subreddit names accumulate across pages and runs
myfile.write("{} \n".format(subreddit[20:]))<|fim▁hole|> subreddits_count += 1
print("Collect "+str(subreddits_count)+" subreddits")
# Query on 33 PAGES of http://www.redditlist.com
get_subreddit_list(33)<|fim▁end|> | |
<|file_name|>extHostDocumentSaveParticipant.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Event } from 'vs/base/common/event';
import { URI, UriComponents } from 'vs/base/common/uri';
import { sequence, always } from 'vs/base/common/async';
import { illegalState } from 'vs/base/common/errors';
import { ExtHostDocumentSaveParticipantShape, MainThreadTextEditorsShape, ResourceTextEditDto } from 'vs/workbench/api/node/extHost.protocol';
import { TextEdit } from 'vs/workbench/api/node/extHostTypes';
import { Range, TextDocumentSaveReason, EndOfLine } from 'vs/workbench/api/node/extHostTypeConverters';
import { ExtHostDocuments } from 'vs/workbench/api/node/extHostDocuments';
import { SaveReason } from 'vs/workbench/services/textfile/common/textfiles';
import * as vscode from 'vscode';
import { LinkedList } from 'vs/base/common/linkedList';
import { IExtensionDescription } from 'vs/workbench/services/extensions/common/extensions';
import { ILogService } from 'vs/platform/log/common/log';
type Listener = [Function, any, IExtensionDescription];
export class ExtHostDocumentSaveParticipant implements ExtHostDocumentSaveParticipantShape {
private readonly _callbacks = new LinkedList<Listener>();
private readonly _badListeners = new WeakMap<Function, number>();
constructor(
private readonly _logService: ILogService,
private readonly _documents: ExtHostDocuments,
private readonly _mainThreadEditors: MainThreadTextEditorsShape,
private readonly _thresholds: { timeout: number; errors: number; } = { timeout: 1500, errors: 3 }
) {
//
}
dispose(): void {
this._callbacks.clear();
}
getOnWillSaveTextDocumentEvent(extension: IExtensionDescription): Event<vscode.TextDocumentWillSaveEvent> {<|fim▁hole|> if (Array.isArray(disposables)) {
disposables.push(result);
}
return result;
};
}
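// Hedged sketch of the extension-side usage of the event built above:
//
//   vscode.workspace.onWillSaveTextDocument(e => {
//     e.waitUntil(Promise.resolve(
//       [vscode.TextEdit.insert(new vscode.Position(0, 0), '// saved\n')]));
//   });
//
// Listeners that repeatedly throw or exceed the 1.5s timeout (see
// _thresholds above) are eventually ignored by
// _deliverEventAsyncAndBlameBadListeners below.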
$participateInSave(data: UriComponents, reason: SaveReason): Promise<boolean[]> {
const resource = URI.revive(data);
const entries = this._callbacks.toArray();
let didTimeout = false;
let didTimeoutHandle = setTimeout(() => didTimeout = true, this._thresholds.timeout);
const promise = sequence(entries.map(listener => {
return () => {
if (didTimeout) {
// timeout - no more listeners
return undefined;
}
const document = this._documents.getDocumentData(resource).document;
return this._deliverEventAsyncAndBlameBadListeners(listener, <any>{ document, reason: TextDocumentSaveReason.to(reason) });
};
}));
return always(promise, () => clearTimeout(didTimeoutHandle));
}
private _deliverEventAsyncAndBlameBadListeners([listener, thisArg, extension]: Listener, stubEvent: vscode.TextDocumentWillSaveEvent): Promise<any> {
const errors = this._badListeners.get(listener);
if (errors > this._thresholds.errors) {
// bad listener - ignore
return Promise.resolve(false);
}
return this._deliverEventAsync(extension, listener, thisArg, stubEvent).then(() => {
// don't send result across the wire
return true;
}, err => {
this._logService.error(`onWillSaveTextDocument-listener from extension '${extension.id}' threw ERROR`);
this._logService.error(err);
if (!(err instanceof Error) || (<Error>err).message !== 'concurrent_edits') {
const errors = this._badListeners.get(listener);
this._badListeners.set(listener, !errors ? 1 : errors + 1);
if (errors > this._thresholds.errors) {
this._logService.info(`onWillSaveTextDocument-listener from extension '${extension.id}' will now be IGNORED because of timeouts and/or errors`);
}
}
return false;
});
}
private _deliverEventAsync(extension: IExtensionDescription, listener: Function, thisArg: any, stubEvent: vscode.TextDocumentWillSaveEvent): Promise<any> {
const promises: Promise<vscode.TextEdit[]>[] = [];
const t1 = Date.now();
const { document, reason } = stubEvent;
const { version } = document;
const event = Object.freeze(<vscode.TextDocumentWillSaveEvent>{
document,
reason,
waitUntil(p: Promise<any | vscode.TextEdit[]>) {
if (Object.isFrozen(promises)) {
throw illegalState('waitUntil can not be called async');
}
promises.push(Promise.resolve(p));
}
});
try {
// fire event
listener.apply(thisArg, [event]);
} catch (err) {
return Promise.reject(err);
}
// freeze promises after event call
Object.freeze(promises);
return new Promise<vscode.TextEdit[][]>((resolve, reject) => {
// join on all listener promises, reject after timeout
const handle = setTimeout(() => reject(new Error('timeout')), this._thresholds.timeout);
return Promise.all(promises).then(edits => {
this._logService.debug(`onWillSaveTextDocument-listener from extension '${extension.id}' finished after ${(Date.now() - t1)}ms`);
clearTimeout(handle);
resolve(edits);
}).catch(err => {
clearTimeout(handle);
reject(err);
});
}).then(values => {
const resourceEdit: ResourceTextEditDto = {
resource: document.uri,
edits: []
};
for (const value of values) {
if (Array.isArray(value) && (<vscode.TextEdit[]>value).every(e => e instanceof TextEdit)) {
for (const { newText, newEol, range } of value) {
resourceEdit.edits.push({
range: range && Range.from(range),
text: newText,
eol: EndOfLine.from(newEol)
});
}
}
}
// apply edits if any and if document
// didn't change somehow in the meantime
if (resourceEdit.edits.length === 0) {
return undefined;
}
if (version === document.version) {
return this._mainThreadEditors.$tryApplyWorkspaceEdit({ edits: [resourceEdit] });
}
// TODO@joh bubble this to listener?
return Promise.reject(new Error('concurrent_edits'));
});
}
}<|fim▁end|> | return (listener, thisArg, disposables) => {
const remove = this._callbacks.push([listener, thisArg, extension]);
const result = { dispose: remove }; |
<|file_name|>gal.js<|end_file_name|><|fim▁begin|>showWord(["n. ","Gratèl, lagratèl, maladi po ki atrapan; maladi po ki bay pi. Tretman gal fasil. 2. Enfeksyon nan plant."<|fim▁hole|><|fim▁end|> | ]) |
<|file_name|>estimate.rs<|end_file_name|><|fim▁begin|>use std::error::Error;
use std::fs::File;
use std::io;
use bio::stats::{LogProb, Prob};
use clap;
use csv;
use serde_json;
use libprosic;
use libprosic::estimation;
use libprosic::model;
use libprosic::model::AlleleFreq;
use call;
pub fn effective_mutation_rate(matches: &clap::ArgMatches) -> Result<(), Box<Error>> {
let min_af = value_t!(matches, "min-af", f64).unwrap_or(0.12);
let max_af = value_t!(matches, "max-af", f64).unwrap_or(0.25);
let mut reader = csv::Reader::from_reader(io::stdin());
let freqs = try!(reader.deserialize().collect::<Result<Vec<f64>, _>>());
let estimate = estimation::effective_mutation_rate::estimate(
freqs
.into_iter()
.filter(|&f| f >= min_af && f <= max_af)
.map(|f| AlleleFreq(f)),
);
// print estimated mutation rate to stdout
println!("{}", estimate.effective_mutation_rate());
// if --fit is given, print data visualizing model fit
if let Some(path) = matches.value_of("fit") {
let mut f = try!(File::create(path));
serde_json::to_writer(&mut f, &estimate)?;
}
Ok(())
}
<|fim▁hole|> pub name: String,
}
impl libprosic::Event for DummyEvent {
fn name(&self) -> &str {
&self.name
}
}
/// Parse `VariantType` from command line arguments.
pub fn parse_vartype(
vartype: &str,
min_len: Option<u32>,
max_len: Option<u32>,
) -> Result<model::VariantType, Box<Error>> {
Ok(match (vartype, min_len, max_len) {
("SNV", _, _) => model::VariantType::SNV,
("INS", Some(min_len), Some(max_len)) => {
model::VariantType::Insertion(Some(min_len..max_len))
}
("DEL", Some(min_len), Some(max_len)) => {
model::VariantType::Deletion(Some(min_len..max_len))
}
("INS", _, _) => model::VariantType::Insertion(None),
("DEL", _, _) => model::VariantType::Deletion(None),
_ => {
return Err(Box::new(clap::Error {
message: "unsupported variant type (supported: SNV, INS, DEL)".to_owned(),
kind: clap::ErrorKind::InvalidValue,
info: None,
}));
}
})
}
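// Illustrative (hedged) call: parse_vartype("INS", Some(1), Some(30))
// yields Ok(model::VariantType::Insertion(Some(1..30))).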
pub fn fdr(matches: &clap::ArgMatches) -> Result<(), Box<Error>> {
let call_bcf = matches.value_of("calls").unwrap();
let alpha = value_t!(matches, "alpha", f64).unwrap();
let events_list = matches.values_of("events").unwrap();
let vartype = matches.value_of("vartype").unwrap();
let min_len = value_t!(matches, "min-len", u32).ok();
let max_len = value_t!(matches, "max-len", u32).ok();
let vartype = parse_vartype(vartype, min_len, max_len)?;
let events: Vec<DummyEvent> = events_list
.map(|ev| DummyEvent {
name: ev.to_owned(),
}).collect();
let alpha = LogProb::from(Prob::checked(alpha)?);
let out = call::path_or_pipe(matches.value_of("output"));
estimation::fdr::ev::control_fdr::<_, _, &str>(call_bcf, out, &events, &vartype, alpha)?;
Ok(())
}<|fim▁end|> | struct DummyEvent { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def load_tests(loader, tests, ignore):
tests.addTests(doctest.DocTestSuite(config))
return tests<|fim▁end|> | import doctest
from maiden import config
|
<|file_name|>test_power.py<|end_file_name|><|fim▁begin|>from __future__ import division
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from acoustics.power import lw_iso3746
@pytest.mark.parametrize("background_noise, expected", [
(79, 91.153934187),
(83, 90.187405234),
(88, 88.153934187),
])
def test_lw_iso3746(background_noise, expected):
LpAi = np.array([90, 90, 90, 90])
LpAiB = background_noise * np.ones(4)
S = 10
alpha = np.array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1])
surfaces = np.array([10, 10, 10, 10, 10, 10])<|fim▁hole|> calculated = lw_iso3746(LpAi, LpAiB, S, alpha, surfaces)
assert_almost_equal(calculated, expected)<|fim▁end|> | |
<|file_name|>test-multiple-statements-load-data-infile.js<|end_file_name|><|fim▁begin|>var assert = require('assert');
var common = require('../../common');
var path = common.fixtures + '/data.csv';
var table = 'multi_load_data_test';
var newline = common.detectNewline(path);
common.getTestConnection({multipleStatements: true}, function (err, connection) {
assert.ifError(err);<|fim▁hole|> 'CREATE TEMPORARY TABLE ?? (',
'`id` int(11) unsigned NOT NULL AUTO_INCREMENT,',
'`title` varchar(400),',
'PRIMARY KEY (`id`)',
') ENGINE=InnoDB DEFAULT CHARSET=utf8'
].join('\n'), [table], assert.ifError);
var stmt =
'LOAD DATA LOCAL INFILE ? INTO TABLE ?? CHARACTER SET utf8 ' +
'FIELDS TERMINATED BY ? ' +
'LINES TERMINATED BY ? ' +
'(id, title)';
var sql =
connection.format(stmt, [path, table, ',', newline]) + ';' +
connection.format(stmt, [path, table, ',', newline]) + ';';
connection.query(sql, function (err, results) {
assert.ifError(err);
assert.equal(results.length, 2);
assert.equal(results[0].affectedRows, 5);
assert.equal(results[1].affectedRows, 0);
});
connection.query('SELECT * FROM ??', [table], function (err, rows) {
assert.ifError(err);
assert.equal(rows.length, 5);
assert.equal(rows[0].id, 1);
assert.equal(rows[0].title, 'Hello World');
assert.equal(rows[3].id, 4);
assert.equal(rows[3].title, '中文内容');
assert.equal(rows[4].id, 5);
assert.equal(rows[4].title.length, 321);
assert.equal(rows[4].title, 'this is a long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long long string');
});
connection.end(assert.ifError);
});<|fim▁end|> |
common.useTestDb(connection);
connection.query([ |
<|file_name|>TopologyUpdateWaitHandler.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.io.network.group.impl.driver;
import org.apache.reef.io.network.group.api.driver.TaskNode;
import org.apache.reef.io.network.group.impl.GroupCommunicationMessage;
import org.apache.reef.io.network.group.impl.utils.Utils;
import org.apache.reef.io.network.proto.ReefNetworkGroupCommProtos;
import org.apache.reef.tang.annotations.Name;
import org.apache.reef.wake.EStage;
import org.apache.reef.wake.EventHandler;
import java.util.List;
import java.util.logging.Logger;
/**
*
*/
public class TopologyUpdateWaitHandler implements EventHandler<List<TaskNode>> {
private static final Logger LOG = Logger.getLogger(TopologyUpdateWaitHandler.class.getName());
private final EStage<GroupCommunicationMessage> senderStage;
private final Class<? extends Name<String>> groupName;
private final Class<? extends Name<String>> operName;
private final String driverId;
private final int driverVersion;
private final String dstId;
private final int dstVersion;
private final String qualifiedName;
/**
* The handler waits for all nodes to acquire the topology lock
* and send a TopologySetup msg. Then it sends a TopologyUpdated
* msg. However, any local topology changes are not in effect
* till the driver sends TopologySetup once the statusMap is emptied.
* The operations in the tasks that have topology changes will
* wait for this. However, other tasks that do not have any changes
* will continue their regular operation.
*/
public TopologyUpdateWaitHandler(final EStage<GroupCommunicationMessage> senderStage,
final Class<? extends Name<String>> groupName,
final Class<? extends Name<String>> operName,
final String driverId, final int driverVersion,
final String dstId, final int dstVersion,
final String qualifiedName) {
super();
this.senderStage = senderStage;
this.groupName = groupName;
this.operName = operName;
this.driverId = driverId;
this.driverVersion = driverVersion;
this.dstId = dstId;
this.dstVersion = dstVersion;
this.qualifiedName = qualifiedName;
}
@Override
public void onNext(final List<TaskNode> nodes) {
LOG.entering("TopologyUpdateWaitHandler", "onNext", new Object[]{qualifiedName, nodes});
for (final TaskNode node : nodes) {
LOG.fine(qualifiedName + "Waiting for " + node + " to enter TopologyUdate phase");
node.waitForTopologySetupOrFailure();
if (node.isRunning()) {
LOG.fine(qualifiedName + node + " is in TopologyUpdate phase");
} else {<|fim▁hole|> }
}
LOG.finest(qualifiedName + "NodeTopologyUpdateWaitStage All to be updated nodes " + "have received TopologySetup");
LOG.fine(qualifiedName + "All affected parts of the topology are in TopologyUpdate phase. Will send a note to ("
+ dstId + "," + dstVersion + ")");
senderStage.onNext(Utils.bldVersionedGCM(groupName, operName,
ReefNetworkGroupCommProtos.GroupCommMessage.Type.TopologyUpdated, driverId, driverVersion, dstId,
dstVersion, Utils.EMPTY_BYTE_ARR));
LOG.exiting("TopologyUpdateWaitHandler", "onNext", qualifiedName);
}
}<|fim▁end|> | LOG.fine(qualifiedName + node + " has failed"); |
<|file_name|>queue-promote-coverage.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:7998c9520ed14ac4fc2dcf6956c1dcbd36b02d6dfe63b0e4ec15a1635b951e08<|fim▁hole|><|fim▁end|> | size 4403 |
<|file_name|>david2expr.py<|end_file_name|><|fim▁begin|>'''The script reads a list of genes from a DAVID output file and reads in
expression data from a text file containing the corresponding genes, then
reports the expression level of each gene in the DAVID list to standard output.
Both gene expression and DAVID files should be in a comma-delimited format.
'''
import sys
david_file = sys.argv[1]
expr_file = sys.argv[2]
genes = {}
print >> sys.stderr, 'Reading %s...' % expr_file
for line in open(expr_file):
cols = line.strip().split(',')
geneid = cols[0]<|fim▁hole|> genes[geneid] = exprs
else:
raise KeyError('duplicated gene ID')
print >> sys.stderr, 'Reading %s...' % david_file
for line in open(david_file):
geneid = line.strip().split(',')[0]
if geneid in genes:
print '%s\t%s' % (geneid, genes[geneid])<|fim▁end|> | exprs = '\t'.join(cols[1:5])
if geneid not in genes: |
<|file_name|>a.rs<|end_file_name|><|fim▁begin|>use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::error::Error;
static KEYPAD_MAP: [[u8; 3]; 3] = [
[1, 2, 3],
[4, 5, 6],
[7, 8, 9]];
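// Worked example: starting at (x, y) = (1, 1), i.e. KEYPAD_MAP[1][1] = 5,
// the moves "ULL" go up to 2, left to 1, then clamp at the left edge, so
// the pressed button is 1.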
struct Turtle {
x: i8,
y: i8,
code: String,
}
impl Turtle {
fn new() -> Turtle {
Turtle {
x: 1,
y: 1,
code: String::new(),
}
}
fn set_x(&mut self, x: i8) {
if self.x + x <= 2 && self.x + x >= 0 {
self.x = self.x + x;
}
}
fn set_y(&mut self, y: i8) {
if self.y + y <= 2 && self.y + y >= 0{
self.y = self.y + y;
}
}
fn move_turtle(&mut self, direction: &char) {
match direction.to_lowercase().collect::<String>().as_str() {
"r" => self.set_x(1),
"l" => self.set_x(-1),
"u" => self.set_y(-1),
"d" => self.set_y(1),
_ => panic!("not a valid direction!"),
};
}
fn press_button(&mut self) {<|fim▁hole|> }
}
fn main() {
let mut turtle = Turtle::new();
let f = match File::open("input.txt") {
Err(e) => panic!("open failed: {}", e.description()),
Ok(file) => file,
};
let reader = BufReader::new(f);
for line in reader.lines() {
let line: String = line.unwrap();
if !line.is_empty() {
let digit: Vec<char> = line.trim().chars().collect(); // the keypad moves for this line, kept in input order
for step in &digit {
turtle.move_turtle(step);
}
turtle.press_button();
}
}
println!("code: {}", turtle.code);
}<|fim▁end|> | self.code.push_str(KEYPAD_MAP[self.y as usize][self.x as usize].to_string().as_str()); |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 nVentiveUX
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
<|fim▁hole|>"""Application configuration"""
from django.apps import AppConfig
class ShowcaseConfig(AppConfig):
name = 'mystartupmanager.showcase'<|fim▁end|> | |
<|file_name|>MessageTranslator.java<|end_file_name|><|fim▁begin|>/*
* GNU LESSER GENERAL PUBLIC LICENSE
* Version 3, 29 June 2007
*
* Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
* Everyone is permitted to copy and distribute verbatim copies
* of this license document, but changing it is not allowed.
*<|fim▁hole|>package org.dragonet.proxy.network.translator;
import java.util.Iterator;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.spacehq.mc.protocol.data.message.Message;
public final class MessageTranslator {
public static String translate(Message message) {
String ret = message.getFullText();
/*
* Is it a JSON message?
*/
try {
/*
* Do not ask me why, but JSON strings have colors.
* Changing this allows colors in plain texts! yay!
*/
JSONObject jObject = null;
if (message.getFullText().startsWith("{") && message.getFullText().endsWith("}")) {
jObject = new JSONObject(message.getFullText());
} else {
jObject = new JSONObject(message.toJsonString());
}
/*
* Let's iterate!
*/
ret = handleKeyObject(jObject);
} catch (JSONException e) {
/*
* If any exception happens, then:
* * The JSON message is buggy, or
* * It isn't a JSON message
*
* So, if any exception happens, we send the original message
*/
}
return ret;
}
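/*
* Hedged example: a component like {"text":"Hello","color":"red"} translates
* to "§cHello" when the color key is visited before the text key (JSON key
* iteration order is not guaranteed).
*/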
public static String handleKeyObject(JSONObject jObject) throws JSONException {
String chatMessage = "";
Iterator<String> iter = jObject.keys();
while (iter.hasNext()) {
String key = iter.next();
try {
if (key.equals("color")) {
String color = jObject.getString(key);
if (color.equals("light_purple")) {
chatMessage = chatMessage + "§d";
}
if (color.equals("blue")) {
chatMessage = chatMessage + "§9";
}
if (color.equals("aqua")) {
chatMessage = chatMessage + "§b";
}
if (color.equals("gold")) {
chatMessage = chatMessage + "§6";
}
if (color.equals("green")) {
chatMessage = chatMessage + "§a";
}
if (color.equals("white")) {
chatMessage = chatMessage + "§f";
}
if (color.equals("yellow")) {
chatMessage = chatMessage + "§e";
}
if (color.equals("gray")) {
chatMessage = chatMessage + "§7";
}
if (color.equals("red")) {
chatMessage = chatMessage + "§c";
}
if (color.equals("black")) {
chatMessage = chatMessage + "§0";
}
if (color.equals("dark_green")) {
chatMessage = chatMessage + "§2";
}
if (color.equals("dark_gray")) {
chatMessage = chatMessage + "§8";
}
if (color.equals("dark_red")) {
chatMessage = chatMessage + "§4";
}
if (color.equals("dark_blue")) {
chatMessage = chatMessage + "§1";
}
if (color.equals("dark_aqua")) {
chatMessage = chatMessage + "§3";
}
if (color.equals("dark_purple")) {
chatMessage = chatMessage + "§5";
}
}
if (key.equals("bold")) {
String bold = jObject.getString(key);
if (bold.equals("true")) {
chatMessage = chatMessage + "§l";
}
}
if (key.equals("italic")) {
String bold = jObject.getString(key);
if (bold.equals("true")) {
chatMessage = chatMessage + "§o";
}
}
if (key.equals("underlined")) {
String bold = jObject.getString(key);
if (bold.equals("true")) {
chatMessage = chatMessage + "§n";
}
}
if (key.equals("strikethrough")) {
String bold = jObject.getString(key);
if (bold.equals("true")) {
chatMessage = chatMessage + "§m";
}
}
if (key.equals("obfuscated")) {
String bold = jObject.getString(key);
if (bold.equals("true")) {
chatMessage = chatMessage + "§k";
}
}
if (key.equals("text")) {
/*
* We only need the text message from the JSON.
*/
String jsonMessage = jObject.getString(key);
chatMessage = chatMessage + jsonMessage;
continue;
}
if (jObject.get(key) instanceof JSONArray) {
chatMessage += handleKeyArray(jObject.getJSONArray(key));
}
if (jObject.get(key) instanceof JSONObject) {
chatMessage += handleKeyObject(jObject.getJSONObject(key));
}
} catch (JSONException e) {
}
}
return chatMessage;
}
public static String handleKeyArray(JSONArray jObject) throws JSONException {
String chatMessage = "";
JSONObject jsonObject = jObject.toJSONObject(jObject);
Iterator<String> iter = jsonObject.keys();
while (iter.hasNext()) {
String key = iter.next();
try {
/*
* We only need the text message from the JSON.
*/
if (key.equals("text")) {
String jsonMessage = jsonObject.getString(key);
chatMessage = chatMessage + jsonMessage;
continue;
}
if (jsonObject.get(key) instanceof JSONArray) {
chatMessage += handleKeyArray(jsonObject.getJSONArray(key));
}
if (jsonObject.get(key) instanceof JSONObject) {
chatMessage += handleKeyObject(jsonObject.getJSONObject(key));
}
} catch (JSONException e) {
}
}
return chatMessage;
}
}<|fim▁end|> | * You can view LICENCE file for details.
*
* @author The Dragonet Team
*/ |
<|file_name|>stdafx.cpp<|end_file_name|><|fim▁begin|>// stdafx.cpp : source file that includes just the standard includes
// importkeyboard.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
#include "stdafx.h"
// TODO: reference any additional headers you need in STDAFX.H<|fim▁hole|><|fim▁end|> | // and not in this file |
<|file_name|>start_gym_battle_message_pb2.py<|end_file_name|><|fim▁begin|># Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/networking/requests/messages/start_gym_battle_message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pogoprotos/networking/requests/messages/start_gym_battle_message.proto',
package='pogoprotos.networking.requests.messages',
syntax='proto3',
serialized_pb=_b('\nFpogoprotos/networking/requests/messages/start_gym_battle_message.proto\x12\'pogoprotos.networking.requests.messages\"\x97\x01\n\x15StartGymBattleMessage\x12\x0e\n\x06gym_id\x18\x01 \x01(\t\x12\x1d\n\x15\x61ttacking_pokemon_ids\x18\x02 \x03(\x06\x12\x1c\n\x14\x64\x65\x66\x65nding_pokemon_id\x18\x03 \x01(\x06\x12\x17\n\x0fplayer_latitude\x18\x04 \x01(\x01\x12\x18\n\x10player_longitude\x18\x05 \x01(\x01\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_STARTGYMBATTLEMESSAGE = _descriptor.Descriptor(
name='StartGymBattleMessage',
full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='gym_id', full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage.gym_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='attacking_pokemon_ids', full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage.attacking_pokemon_ids', index=1,
number=2, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='defending_pokemon_id', full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage.defending_pokemon_id', index=2,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='player_latitude', full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage.player_latitude', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='player_longitude', full_name='pogoprotos.networking.requests.messages.StartGymBattleMessage.player_longitude', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=116,
serialized_end=267,
)
DESCRIPTOR.message_types_by_name['StartGymBattleMessage'] = _STARTGYMBATTLEMESSAGE
StartGymBattleMessage = _reflection.GeneratedProtocolMessageType('StartGymBattleMessage', (_message.Message,), dict(
DESCRIPTOR = _STARTGYMBATTLEMESSAGE,
__module__ = 'pogoprotos.networking.requests.messages.start_gym_battle_message_pb2'
# @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.StartGymBattleMessage)<|fim▁hole|>_sym_db.RegisterMessage(StartGymBattleMessage)
# @@protoc_insertion_point(module_scope)<|fim▁end|> | )) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import httpbenchmark<|fim▁hole|>from unuk.benchmarks.base import runtests<|fim▁end|> | |
<|file_name|>char_class.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Strahinja Val Markovic
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use base::unicode::{bytesFollowing, readCodepoint};
use super::{Expression, ParseState, ParseResult};
macro_rules! class( ( $ex:expr ) => (
&base::CharClass::new( $ex.as_bytes() ) ) );
fn toU32Vector( input: &[u8] ) -> Vec<u32> {
let mut i = 0;
let mut out_vec : Vec<u32> = vec!();
loop {
match input.get( i ) {
Some( byte ) => match bytesFollowing( *byte ) {
Some( num_following ) => {
if num_following > 0 {
match readCodepoint( &input[ i.. ] ) {
Some( ch ) => {
out_vec.push( ch as u32 );
i += num_following + 1
}
_ => { out_vec.push( *byte as u32 ); i += 1 }
};
} else { out_vec.push( *byte as u32 ); i += 1 }
}
_ => { out_vec.push( *byte as u32 ); i += 1 }
},
_ => return out_vec
}
}
}
pub struct CharClass {
// All the single chars in the char class.
// May be unicode codepoints or binary octets stored as codepoints.
single_chars: Vec<u32>,
// Sequence of [from, to] (inclusive bounds) char ranges.
// May be unicode codepoints or binary octets stored as codepoints.
ranges: Vec<( u32, u32 )>
}
impl CharClass {
// Takes the inner content of square brackets, so for [a-z], send "a-z".
pub fn new( contents: &[u8] ) -> CharClass {
fn rangeAtIndex( index: usize, chars: &[u32] ) -> Option<( u32, u32 )> {
match ( chars.get( index ),
chars.get( index + 1 ),
chars.get( index + 2 ) ) {
( Some( char1 ), Some( char2 ), Some( char3 ) )
if *char2 == '-' as u32 => Some( ( *char1, *char3 ) ),
_ => None
}
}
let chars = toU32Vector( &contents );
let mut char_class = CharClass { single_chars: Vec::new(),
ranges: Vec::new() };
let mut index = 0;
loop {
match rangeAtIndex( index, &chars ) {
Some( range ) => {
char_class.ranges.push( range );
index += 3;
}
_ => {
if index >= chars.len() {
break
}
char_class.single_chars.push( chars[ index ] );
index += 1;
}
};
}
char_class
}
fn matches( &self, character: u32 ) -> bool {
return self.single_chars.contains( &character ) ||
self.ranges.iter().any(
| &(from, to) | character >= from && character <= to );
}
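// Hedged example: CharClass::new( b"a-z0" ) stores the range (a, z) plus the
// single char '0', so matches( 'c' as u32 ) and matches( '0' as u32 ) hold.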
fn applyToUtf8<'a>( &self, parse_state: &ParseState<'a> ) ->
Option< ParseResult<'a> > {
match readCodepoint( parse_state.input ) {
Some( ch ) if self.matches( ch as u32 ) => {
let num_following = bytesFollowing( parse_state.input[ 0 ] ).unwrap();
parse_state.offsetToResult( parse_state.offset + num_following + 1 )
}
_ => None
}
}
fn applyToBytes<'a>( &self, parse_state: &ParseState<'a> ) ->
Option< ParseResult<'a> > {
match parse_state.input.get( 0 ) {
Some( byte ) if self.matches( *byte as u32 ) => {
parse_state.offsetToResult( parse_state.offset + 1 )
}
_ => None
}
}
}
impl Expression for CharClass {
fn apply<'a>( &self, parse_state: &ParseState<'a> ) ->
Option< ParseResult<'a> > {
self.applyToUtf8( parse_state ).or( self.applyToBytes( parse_state ) )
}
}
#[cfg(test)]
mod tests {
use base;
use base::{Node, Data, ParseResult, Expression, ParseState};
use base::test_utils::ToParseState;
use base::unicode::bytesFollowing;
use super::{CharClass};
fn charClassMatch( char_class: &Expression, input: &[u8] ) -> bool {
fn bytesRead( input: &[u8] ) -> usize {
bytesFollowing( input[ 0 ] ).map_or( 1, |num| num + 1 )
}
match char_class.apply( &ToParseState( input ) ) {
Some( ParseResult { nodes, parse_state } ) => {
let bytes_read = bytesRead( input );
assert_eq!( nodes[ 0 ],
Node::withoutName( 0, bytes_read, Data( input ) ) );
assert_eq!( parse_state, ParseState{ input: &[], offset: bytes_read } );
true
}
_ => false
}
}
#[test]
fn CharClass_Match() {
assert!( charClassMatch( class!( "a" ), b"a" ) );
assert!( charClassMatch( class!( "abcdef" ), b"e" ) );
assert!( charClassMatch( class!( "a-z" ), b"a" ) );
assert!( charClassMatch( class!( "a-z" ), b"c" ) );
assert!( charClassMatch( class!( "a-z" ), b"z" ) );
assert!( charClassMatch( class!( "0-9" ), b"2" ) );
assert!( charClassMatch( class!( "α-ω" ), "η".as_bytes() ) );
assert!( charClassMatch( class!( "-" ), b"-" ) );
assert!( charClassMatch( class!( "a-" ), b"-" ) );
assert!( charClassMatch( class!( "-a" ), b"-" ) );
assert!( charClassMatch( class!( "a-zA-Z-" ), b"-" ) );
assert!( charClassMatch( class!( "aa-zA-Z-a" ), b"-" ) );
assert!( charClassMatch( class!( "a-zA-Z-" ), b"z" ) );
assert!( charClassMatch( class!( "aa-zA-Z-0" ), b"0" ) );
assert!( charClassMatch( class!( "a-cdefgh-k" ), b"e" ) );
assert!( charClassMatch( class!( "---" ), b"-" ) );
assert!( charClassMatch( class!( "a-a" ), b"a" ) );
}
#[test]
fn CharClass_Match_NonUnicode() {
assert!( charClassMatch( &CharClass::new( &[255] ), &[255] ) );
}
#[test]
fn CharClass_NoMatch() {
assert!( !charClassMatch( class!( "a" ), b"b" ) );
assert!( !charClassMatch( class!( "-" ), b"a" ) );
assert!( !charClassMatch( class!( "z-a" ), b"a" ) );
assert!( !charClassMatch( class!( "z-a" ), b"b" ) );
assert!( !charClassMatch( class!( "a-z" ), b"0" ) );
assert!( !charClassMatch( class!( "a-z" ), b"A" ) );
}
<|fim▁hole|><|fim▁end|> | // TODO: tests for escaped chars in class
} |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2008-2013 AvanzOSC S.L. All Rights Reserved
# Date: 01/07/2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "AvanzOSC - tree_grid extension",
"version": "1.0",
"depends": ["tree_grid","sale","purchase","stock","account","avanzosc_calculate_coeficient_udv_automatically"],
"author": "AvanzOSC S.L.",
"category": "Generic",
"description": """
    This module adds editable sale unit and sale quantity columns to the tree
    views of purchase order lines, sale order lines, invoice lines, and stock
    picking lines.
""",
"init_xml": [],
'update_xml': ['sale_order_view_ext.xml',
'purchase_order_view_ext.xml',
'stock_picking_view_ext.xml',
'account_invoice_view_ext.xml',
'product_product_view_ext.xml'
],
'demo_xml': [],
'installable': True,
'active': False,
# 'certificate': 'certificate',<|fim▁hole|><|fim▁end|> | } |
<|file_name|>NativeEventEmitter.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow
*/
'use strict';
const EventEmitter = require('../../vendor/emitter/EventEmitter');
const RCTDeviceEventEmitter = require('../RCTDeviceEventEmitter');
/**
* Mock the NativeEventEmitter as a normal JS EventEmitter.<|fim▁hole|> super(RCTDeviceEventEmitter.sharedSubscriber);
}
}
module.exports = NativeEventEmitter;<|fim▁end|> | */
class NativeEventEmitter extends EventEmitter {
constructor() { |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.<|fim▁hole|>
setuptools.setup(
setup_requires=['pbr'],
pbr=True)<|fim▁end|> |
import setuptools |
<|file_name|>equal_tests.py<|end_file_name|><|fim▁begin|>from python.equal import Equal
def count_steps_test():
equal_instance = Equal()
array_a = [2, 2, 3, 7]
array_b = [53, 361, 188, 665, 786, 898, 447, 562, 272, 123, 229, 629, 670,
848, 994, 54, 822, 46, 208, 17, 449, 302, 466, 832, 931, 778,
156, 39, 31, 777, 749, 436, 138, 289, 453, 276, 539, 901, 839,<|fim▁hole|> 964, 278, 465, 247, 408, 622, 638, 440, 751, 739, 876, 889, 380,
330, 517, 919, 583, 356, 83, 959, 129, 875, 5, 750, 662, 106,
193, 494, 120, 653, 128, 84, 283, 593, 683, 44, 567, 321, 484,
318, 412, 712, 559, 792, 394, 77, 711, 977, 785, 146, 936, 914,
22, 942, 664, 36, 400, 857]
array_c = [520, 862, 10, 956, 498, 956, 991, 542, 523, 664, 378, 194, 76,
90, 753, 868, 837, 830, 932, 814, 616, 78, 103, 882, 452, 397,
899, 488, 149, 108, 723, 22, 323, 733, 330, 821, 41, 322, 715,
917, 986, 93, 111, 63, 535, 864, 931, 372, 47, 215, 539, 15, 294,
642, 897, 98, 391, 796, 939, 540, 257, 662, 562, 580, 747, 893,
401, 789, 215, 468, 58, 553, 561, 169, 616, 448, 385, 900, 173,
432, 115, 712]
array_d = [761, 706, 697, 212, 97, 845, 151, 637, 102, 165, 200, 34, 912,
445, 435, 53, 12, 255, 111, 565, 816, 632, 534, 617, 18, 786,
790, 802, 253, 502, 602, 15, 208, 651, 227, 305, 848, 730, 294,
303, 895, 846, 337, 159, 291, 125, 565, 655, 380, 28, 221, 549,
13, 107, 166, 31, 245, 308, 185, 498, 810, 139, 865, 370, 790,
444, 27, 639, 174, 321, 294, 421, 168, 631, 933, 811, 756, 498,
467, 137, 878, 40, 686, 891, 499, 204, 274, 744, 512, 460, 242,
674, 599, 108, 396, 742, 552, 423, 733, 79, 96, 27, 852, 264,
658, 785, 76, 415, 635, 895, 904, 514, 935, 942, 757, 434, 498,
32, 178, 10, 844, 772, 36, 795, 880, 432, 537, 785, 855, 270,
864, 951, 649, 716, 568, 308, 854, 996, 75, 489, 891, 331, 355,
178, 273, 113, 612, 771, 497, 142, 133, 341, 914, 521, 488, 147,
953, 26, 284, 160, 648, 500, 463, 298, 568, 31, 958, 422, 379,
385, 264, 622, 716, 619, 800, 341, 732, 764, 464, 581, 258, 949,
922, 173, 470, 411, 672, 423, 789, 956, 583, 789, 808, 46, 439,
376, 430, 749, 151]
array_e = [134, 415, 784, 202, 34, 584, 543, 119, 701, 7, 700, 959, 956,
975, 484, 426, 738, 508, 201, 527, 816, 136, 668, 624, 535, 108,
1, 965, 857, 152, 478, 344, 567, 262, 546, 953, 199, 90, 72, 900,
449, 773, 211, 758, 100, 696, 536, 838, 204, 738, 717, 21, 874,
385, 997, 761, 845, 998, 78, 703, 502, 557, 47, 421, 819, 945,
375, 370, 35, 799, 622, 837, 924, 834, 595, 24, 882, 483, 862,
438, 221, 931, 811, 448, 317, 809, 561, 162, 159, 640, 217, 662,
197, 616, 435, 368, 562, 162, 739, 949, 962, 713, 786, 238, 899,
733, 263, 781, 217, 477, 220, 790, 409, 383, 590, 726, 192, 152,
240, 352, 792, 458, 366, 341, 74, 801, 709, 988, 964, 800, 938,
278, 514, 76, 516, 413, 810, 131, 547, 379, 609, 119, 169, 370,
502, 112, 448, 695, 264, 688, 399, 408, 498, 765, 749, 925, 918,
458, 913, 234, 611]
array_f = [512, 125, 928, 381, 890, 90, 512, 789, 469, 473, 908, 990, 195,
763, 102, 643, 458, 366, 684, 857, 126, 534, 974, 875, 459, 892,
686, 373, 127, 297, 576, 991, 774, 856, 372, 664, 946, 237, 806,
767, 62, 714, 758, 258, 477, 860, 253, 287, 579, 289, 496]
assert equal_instance.count_mim_steps(array_a) == 2
assert equal_instance.count_mim_steps(array_b) == 10605
assert equal_instance.count_mim_steps(array_c) == 8198
assert equal_instance.count_mim_steps(array_d) == 18762
assert equal_instance.count_mim_steps(array_e) == 16931
assert equal_instance.count_mim_steps(array_f) == 5104<|fim▁end|> | 811, 24, 420, 440, 46, 269, 786, 101, 443, 832, 661, 460, 281, |
<|file_name|>glpk_solver_java.py<|end_file_name|><|fim▁begin|>##cobra.solvers.glpk_solver
#This script provides wrappers for libglpk-java 1.0.22 and pyglpk 0.3
from warnings import warn
from copy import deepcopy
###solver specific parameters
from .parameters import status_dict, variable_kind_dict, \
sense_dict, parameter_mappings, parameter_defaults, \
objective_senses, default_objective_sense
from ..core.Solution import Solution
from time import time
solver_name = 'glpk'
sense_dict = eval(sense_dict[solver_name])
#Functions that are different for java implementation of a solver
from os import name as __name
if __name != "java":
raise Exception("jython only")
warn("cobra.solvers.glpk_solver isn't mature. consider using gurobi or cplex")
from org.gnu.glpk import GLPK, GLPKConstants, glp_smcp, glp_iocp
variable_kind_dict = eval(variable_kind_dict['%s_%s'%(solver_name,
__name)])
status_dict = eval(status_dict['%s_%s'%(solver_name,
__name)])
objective_senses = objective_senses['%s_%s'%(solver_name,
__name)]
parameter_mappings = parameter_mappings['%s_%s'%(solver_name,
__name)]
parameter_defaults = parameter_defaults['%s_%s'%(solver_name,
__name)]
class Problem():
"""Create a more pythonesqe class to wrap the key
features of the libglpk-java functions.
"""
def __init__(self):
"""the attributes g, lp, mip should be made private
"""
self._g = GLPK
self._lp= GLPK.glp_create_prob()
self._simplex_parameters = glp_smcp()
self._mip_parameters = None
self._g.glp_init_smcp(self._simplex_parameters)
self.status = self.objective_value = None
self._mip = False
def set_name(self, name=''):
self._g.glp_set_prob_name(self._lp, name)
def solve(self):
try:
self._g.glp_simplex(self._lp,
self._simplex_parameters)
if self._mip:
#perform the MIP
setattr(self._mip_parameters, 'msg_lev',
self._simplex_parameters.msg_lev)
self._g.glp_intopt(self._lp, self._mip_parameters)
self.status = self.get_status()
self.objective_value = self.get_objective_value()
except:
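            # glp_simplex/glp_intopt raise Java exceptions on solver errors;
            # record any failure uniformly instead of propagating it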
self.status = 'failed'
return self.status
def get_status(self):
if self._mip:
status = self._g.glp_mip_status(self._lp)
else:
status = self._g.glp_get_status(self._lp)
return status_dict[status]
def set_objective_sense(self, parameter_value='maximize'):
self._g.glp_set_obj_dir(self._lp,
eval(objective_senses[parameter_value]))
def set_parameter(self, parameter_name, parameter_value, warning=False):
if parameter_name == 'objective_sense':
self.set_objective_sense(parameter_value)
else:
if parameter_name == 'meth' and parameter_value not in [1,2,3]:
parameter_value = 1
try:
setattr(self._simplex_parameters, parameter_name,
parameter_value)
except Exception, e1:
try:
setattr(self._mip_parameters, parameter_name,
parameter_value)
except Exception, e2:
if warning:
print "Could not set simplex parameter " +\
"%s: %s"%(parameter_name, repr(e1))
if self._mip_parameters is not None:
print "Could not set mip parameter " +\
"%s: %s"%(parameter_name, repr(e2))
def get_objective_value(self):
if self._mip:
tmp_value = self._g.glp_mip_obj_val(self._lp)
else:
tmp_value = self._g.glp_get_obj_val(self._lp)
return tmp_value
def create_problem(self, cobra_model):
g = self._g
lp = self._lp
number_of_reactions = len(cobra_model.reactions)
number_of_metabolites = len(cobra_model.metabolites)
g.glp_add_cols(lp, number_of_reactions)
reaction_to_index = {}
objective_dict = {}
#Add in the variables
tmp_kinds = []
for i, the_reaction in enumerate(cobra_model.reactions):
i_offset = i + 1
reaction_to_index[the_reaction] = i_offset
if the_reaction.objective_coefficient != 0:
objective_dict[i_offset] = the_reaction.objective_coefficient
g.glp_set_col_name(lp, i_offset, the_reaction.id)
tmp_kinds.append(the_reaction.variable_kind)
the_kind = variable_kind_dict[the_reaction.variable_kind]
lower_bound = the_reaction.lower_bound
upper_bound = the_reaction.upper_bound
#Note. It is possible to have unbounded or one-bound variables
if lower_bound == upper_bound:
bound_kind = GLPKConstants.GLP_FX
else:
bound_kind = GLPKConstants.GLP_DB
g.glp_set_col_kind(lp, i_offset, the_kind)
g.glp_set_col_bnds(lp, i_offset,
bound_kind, the_reaction.lower_bound,
the_reaction.upper_bound)
tmp_kinds = set(tmp_kinds)
if 'integer' in tmp_kinds or 'binary' in tmp_kinds:
self._mip = True
self._mip_parameters = glp_iocp()
g.glp_init_iocp(self._mip_parameters)
#create constraints
g.glp_add_rows(lp, number_of_metabolites)
row_indices = []
column_indices = []
constraint_values = []
for i, the_metabolite in enumerate(cobra_model.metabolites):
i_offset = i + 1
g.glp_set_row_name(lp, i_offset, the_metabolite.id)
lower_bound = upper_bound = the_metabolite._bound
constraint_sense = sense_dict[the_metabolite._constraint_sense]
if constraint_sense == 'E':
bound_type = GLPKConstants.GLP_FX
elif constraint_sense == 'L':
bound_type = GLPKConstants.GLP_UP
elif constraint_sense == 'G':
bound_type = GLPKConstants.GLP_LO
elif constraint_sense == 'U':
bound_type = GLPKConstants.GLP_FR
elif hasattr(lower_bound, '__iter__'):
lower_bound, upper_bound = lower_bound[:2]
bound_type = GLPKConstants.GLP_DB
g.glp_set_row_bnds(lp, i_offset, bound_type,
lower_bound, upper_bound)
[(row_indices.append(i_offset),
column_indices.append(reaction_to_index[k]),
constraint_values.append(k._metabolites[the_metabolite]))
for k in the_metabolite._reaction]
#Load the constraints into the lp. Need to use
#typed arrays.
number_of_constraints = len(row_indices)
i_array = g.new_intArray(number_of_constraints)
j_array = g.new_intArray(number_of_constraints)
v_array = g.new_doubleArray(number_of_constraints)
for a, (i, j, v) in enumerate(zip(row_indices,
column_indices,
constraint_values)):
g.intArray_setitem(i_array, a+1, i)
g.intArray_setitem(j_array, a+1, j)
g.doubleArray_setitem(v_array, a+1, v)
g.glp_load_matrix(lp, number_of_constraints, i_array,
j_array, v_array)
# the following lines often cause memory crashes
g.delete_intArray(i_array)
g.delete_intArray(j_array)
g.delete_doubleArray(v_array)
g.glp_set_obj_name(lp, "z")
[g.glp_set_obj_coef(lp, k, v)
for k, v in objective_dict.iteritems()]
__solver_class = Problem
def set_parameter(lp, parameter_name, parameter_value):
lp.set_parameter(parameter_name, parameter_value)
def get_status(lp):
return lp.get_status()
def format_solution(lp, cobra_model, **kwargs):
"""
"""
status = get_status(lp)
if not lp._mip:
try:
x = [lp._g.glp_get_col_prim(lp._lp, i + 1)
for i in range(len(cobra_model.reactions))]
x_dict = dict(zip(cobra_model.reactions, x))
y = [lp._g.glp_get_row_dual(lp._lp, i + 1)
for i in range(len(cobra_model.metabolites))]
y_dict = dict(zip(cobra_model.metabolites, y))
objective_value = lp.objective_value
except Exception, e:
print repr(e)
y = y_dict = x = x_dict = objective_value = None
#print status
else:
try:
x = [lp._g.glp_mip_col_val(lp._lp, i + 1)
for i in range(len(cobra_model.reactions))]
x_dict = dict(zip(cobra_model.reactions, x))
y = y_dict = None
objective_value = lp.objective_value
except:
y = y_dict = x = x_dict = objective_value = None
return(Solution(objective_value, x=x, x_dict=x_dict, y=y,
y_dict=y_dict, status=status))
def create_problem(cobra_model, **kwargs):
"""Solver-specific method for constructing a solver problem from
a cobra.Model. This can be tuned for performance using kwargs
"""
the_parameters = parameter_defaults
if kwargs:
the_parameters = deepcopy(parameter_defaults)
the_parameters.update(kwargs)
quadratic_component = the_parameters['quadratic_component']
new_objective = the_parameters['new_objective']
if quadratic_component is not None:
raise Exception('%s cannot solve QPs, try a different solver'%solver_name)
lp = Problem() # Create empty problem instance
lp.create_problem(cobra_model)
[set_parameter(lp, parameter_mappings[k], v)
for k, v in the_parameters.iteritems() if k in parameter_mappings]
return(lp)
def update_problem(lp, cobra_model, **kwargs):
"""
Assumes that neither Metabolites nor Reaction have been
added or removed.
Currently only deals with reaction bounds and objective
coefficients.
"""
g = lp._g
l = lp._lp
for i, the_reaction in enumerate(cobra_model.reactions):
lower_bound = float(the_reaction.lower_bound)
upper_bound = float(the_reaction.upper_bound)
objective_coefficient = float(the_reaction.objective_coefficient)
if lower_bound == upper_bound:
bound_type = GLPKConstants.GLP_FX
else:
bound_type = GLPKConstants.GLP_DB
g.glp_set_col_bnds(l, i + 1, bound_type, lower_bound, upper_bound)
g.glp_set_obj_coef(l, i + 1, objective_coefficient)
def solve_problem(lp, **kwargs):
"""A performance tunable method for updating a model problem file
"""
#Update parameter settings if provided
if kwargs:
[set_parameter(lp, parameter_mappings[k], v)
for k, v in kwargs.iteritems() if k in parameter_mappings]
try:
print_solver_time = kwargs['print_solver_time']
start_time = time()
except:
print_solver_time = False
lp_method = lp._simplex_parameters.meth
lp.solve()
status = get_status(lp)
if print_solver_time:
print 'optimize time: %f'%(time() - start_time)
return status
def solve(cobra_model, **kwargs):
"""Smart interface to optimization solver functions that will convert
the cobra_model to a solver object, set the parameters, and try multiple
methods to get an optimal solution before returning the solver object and
a cobra.Solution (which is attached to cobra_model.solution)
cobra_model: a cobra.Model
returns a dict: {'the_problem': solver specific object, 'the_solution':
cobra.Solution for the optimization problem'}
"""
#Start out with default parameters and then modify if
    #new ones are provided
the_parameters = deepcopy(parameter_defaults)
if kwargs:
the_parameters.update(kwargs)
#Update objectives if they are new.
error_reporting = the_parameters['error_reporting']
if 'new_objective' in the_parameters and \
the_parameters['new_objective'] not in ['update problem', None]:
from ..flux_analysis.objective import update_objective
update_objective(cobra_model, the_parameters['new_objective'])
if 'the_problem' in the_parameters:
the_problem = the_parameters['the_problem']
else:
the_problem = None<|fim▁hole|> lp = the_problem
update_problem(lp, cobra_model, **the_parameters)
else:
#Create a new problem
lp = create_problem(cobra_model, **the_parameters)
#Deprecated way for returning a solver problem created from a cobra_model
#without performing optimization
if the_problem == 'setup':
return lp
###Try to solve the problem using other methods if the first method doesn't work
lp_method = the_parameters['lp_method']
the_methods = [1, 2, 3]
if lp_method in the_methods:
the_methods.remove(lp_method)
#Start with the user specified method
the_methods.insert(0, lp_method)
for the_method in the_methods:
the_parameters['lp_method'] = the_method
try:
status = solve_problem(lp, **the_parameters)
except:
status = 'failed'
if status == 'optimal':
break
the_solution = format_solution(lp, cobra_model)
if status != 'optimal' and error_reporting:
print '%s failed: %s'%(solver_name, status)
cobra_model.solution = the_solution
solution = {'the_problem': lp, 'the_solution': the_solution}
return solution<|fim▁end|> | if isinstance(the_problem, __solver_class):
#Update the problem with the current cobra_model |
<|file_name|>A_in.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2000, 2003 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Common Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/cpl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.codeassist.impl;
import java.util.Iterator;
import java.util.Map;
import org.eclipse.jdt.core.compiler.CharOperation;
public class AssistOptions {
/**
* Option IDs
*/
public static final String OPTION_PerformVisibilityCheck =
"org.eclipse.jdt.core.codeComplete.visibilityCheck"; //$NON-NLS-1$
public static final String OPTION_ForceImplicitQualification =
"org.eclipse.jdt.core.codeComplete.forceImplicitQualification"; //$NON-NLS-1$
public static final String OPTION_FieldPrefixes =
"org.eclipse.jdt.core.codeComplete.fieldPrefixes"; //$NON-NLS-1$
public static final String OPTION_StaticFieldPrefixes =
"org.eclipse.jdt.core.codeComplete.staticFieldPrefixes"; //$NON-NLS-1$
public static final String OPTION_LocalPrefixes =
"org.eclipse.jdt.core.codeComplete.localPrefixes"; //$NON-NLS-1$
public static final String OPTION_ArgumentPrefixes =
"org.eclipse.jdt.core.codeComplete.argumentPrefixes"; //$NON-NLS-1$
public static final String OPTION_FieldSuffixes =
"org.eclipse.jdt.core.codeComplete.fieldSuffixes"; //$NON-NLS-1$
public static final String OPTION_StaticFieldSuffixes =
"org.eclipse.jdt.core.codeComplete.staticFieldSuffixes"; //$NON-NLS-1$
public static final String OPTION_LocalSuffixes =
"org.eclipse.jdt.core.codeComplete.localSuffixes"; //$NON-NLS-1$
public static final String OPTION_ArgumentSuffixes =
"org.eclipse.jdt.core.codeComplete.argumentSuffixes"; //$NON-NLS-1$
public static final String ENABLED = "enabled"; //$NON-NLS-1$
public static final String DISABLED = "disabled"; //$NON-NLS-1$
public boolean checkVisibility = false;
public boolean forceImplicitQualification = false;
public char[][] fieldPrefixes = null;
public char[][] staticFieldPrefixes = null;
public char[][] localPrefixes = null;
public char[][] argumentPrefixes = null;
public char[][] fieldSuffixes = null;
public char[][] staticFieldSuffixes = null;
public char[][] localSuffixes = null;
public char[][] argumentSuffixes = null;
/**
* Initializing the assist options with default settings
*/
public AssistOptions() {
// Initializing the assist options with default settings
}
/**
* Initializing the assist options with external settings
*/
public AssistOptions(Map settings) {
if (settings == null)
return;
// filter options which are related to the assist component
Iterator entries = settings.entrySet().iterator();
while (entries.hasNext()) {
Map.Entry entry = (Map.Entry)entries.next();
if (!(entry.getKey() instanceof String))
continue;
if (!(entry.getValue() instanceof String))
continue;
String optionID = (String) entry.getKey();
String optionValue = (String) entry.getValue();
if (optionID.equals(OPTION_PerformVisibilityCheck)) {
if (optionValue.equals(ENABLED)) {
this.checkVisibility = true;
} else
if (optionValue.equals(DISABLED)) {
this.checkVisibility = false;
}
continue;
} else if (optionID.equals(OPTION_ForceImplicitQualification)) {
if (optionValue.equals(ENABLED)) {
this.forceImplicitQualification = true;
} else
if (optionValue.equals(DISABLED)) {
this.forceImplicitQualification = false;
}
continue;
} else if(optionID.equals(OPTION_FieldPrefixes)){
if (optionValue.length() == 0) {
this.fieldPrefixes = null;
} else {
this.fieldPrefixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_StaticFieldPrefixes)){
if (optionValue.length() == 0) {
this.staticFieldPrefixes = null;
} else {
this.staticFieldPrefixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_LocalPrefixes)){
if (optionValue.length() == 0) {
this.localPrefixes = null;
} else {
this.localPrefixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_ArgumentPrefixes)){
if (optionValue.length() == 0) {
this.argumentPrefixes = null;
} else {
this.argumentPrefixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_FieldSuffixes)){
if (optionValue.length() == 0) {
this.fieldSuffixes = null;
} else {
this.fieldSuffixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_StaticFieldSuffixes)){
if (optionValue.length() == 0) {
this.staticFieldSuffixes = null;
} else {
this.staticFieldSuffixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_LocalSuffixes)){
if (optionValue.length() == 0) {
this.localSuffixes = null;
} else {
this.localSuffixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
} else if(optionID.equals(OPTION_ArgumentSuffixes)){<|fim▁hole|> if (optionValue.length() == 0) {
this.argumentSuffixes = null;
} else {
this.argumentSuffixes = CharOperation.splitAndTrimOn(',', optionValue.toCharArray());
}
continue;
}
}
}
}<|fim▁end|> | |
<|file_name|>tool_move.cpp<|end_file_name|><|fim▁begin|>#include "tool_move.hpp"
#include "document/idocument_board.hpp"
#include "board/board.hpp"
#include "document/idocument_package.hpp"
#include "pool/package.hpp"
#include "document/idocument_padstack.hpp"
#include "pool/padstack.hpp"
#include "document/idocument_schematic.hpp"
#include "schematic/schematic.hpp"
#include "document/idocument_symbol.hpp"
#include "pool/symbol.hpp"
#include "imp/imp_interface.hpp"
#include "util/accumulator.hpp"
#include "util/util.hpp"
#include <iostream>
#include "core/tool_id.hpp"
namespace horizon {
ToolMove::ToolMove(IDocument *c, ToolID tid) : ToolBase(c, tid), ToolHelperMove(c, tid), ToolHelperMerge(c, tid)
{
}
void ToolMove::expand_selection()
{
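    // Grow the selection so that dependent objects move along with it:
    // junctions at the ends of lines/arcs/tracks, attached texts, polygon
    // vertices, and so on. Fixed packages are reported and dropped below.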
std::set<SelectableRef> pkgs_fixed;
std::set<SelectableRef> new_sel;
for (const auto &it : selection) {
switch (it.type) {
case ObjectType::LINE: {
Line *line = doc.r->get_line(it.uuid);
new_sel.emplace(line->from.uuid, ObjectType::JUNCTION);
new_sel.emplace(line->to.uuid, ObjectType::JUNCTION);
} break;
case ObjectType::POLYGON_EDGE: {
Polygon *poly = doc.r->get_polygon(it.uuid);
auto vs = poly->get_vertices_for_edge(it.vertex);
new_sel.emplace(poly->uuid, ObjectType::POLYGON_VERTEX, vs.first);
new_sel.emplace(poly->uuid, ObjectType::POLYGON_VERTEX, vs.second);
} break;
case ObjectType::NET_LABEL: {
auto &la = doc.c->get_sheet()->net_labels.at(it.uuid);
new_sel.emplace(la.junction->uuid, ObjectType::JUNCTION);
} break;
case ObjectType::BUS_LABEL: {
auto &la = doc.c->get_sheet()->bus_labels.at(it.uuid);
new_sel.emplace(la.junction->uuid, ObjectType::JUNCTION);
} break;
case ObjectType::POWER_SYMBOL: {
auto &ps = doc.c->get_sheet()->power_symbols.at(it.uuid);
new_sel.emplace(ps.junction->uuid, ObjectType::JUNCTION);
} break;
case ObjectType::BUS_RIPPER: {
auto &rip = doc.c->get_sheet()->bus_rippers.at(it.uuid);
new_sel.emplace(rip.junction->uuid, ObjectType::JUNCTION);
} break;
case ObjectType::LINE_NET: {
auto line = &doc.c->get_sheet()->net_lines.at(it.uuid);
for (auto &it_ft : {line->from, line->to}) {
if (it_ft.is_junc()) {
new_sel.emplace(it_ft.junc.uuid, ObjectType::JUNCTION);
}
}
} break;
case ObjectType::TRACK: {
auto track = &doc.b->get_board()->tracks.at(it.uuid);
for (auto &it_ft : {track->from, track->to}) {
if (it_ft.is_junc()) {
new_sel.emplace(it_ft.junc.uuid, ObjectType::JUNCTION);
}
}
} break;
case ObjectType::VIA: {
auto via = &doc.b->get_board()->vias.at(it.uuid);
new_sel.emplace(via->junction->uuid, ObjectType::JUNCTION);
} break;
case ObjectType::POLYGON: {
auto poly = doc.r->get_polygon(it.uuid);
int i = 0;
for (const auto &itv : poly->vertices) {
                (void)sizeof itv; // reference itv only to avoid an unused-variable warning
new_sel.emplace(poly->uuid, ObjectType::POLYGON_VERTEX, i);
i++;
}
} break;
case ObjectType::ARC: {
Arc *arc = doc.r->get_arc(it.uuid);
new_sel.emplace(arc->from.uuid, ObjectType::JUNCTION);
new_sel.emplace(arc->to.uuid, ObjectType::JUNCTION);
new_sel.emplace(arc->center.uuid, ObjectType::JUNCTION);
} break;
case ObjectType::SCHEMATIC_SYMBOL: {
auto sym = doc.c->get_schematic_symbol(it.uuid);
for (const auto &itt : sym->texts) {
new_sel.emplace(itt->uuid, ObjectType::TEXT);
}
} break;
case ObjectType::BOARD_PACKAGE: {
BoardPackage *pkg = &doc.b->get_board()->packages.at(it.uuid);
if (pkg->fixed) {
pkgs_fixed.insert(it);
}
else {
for (const auto &itt : pkg->texts) {
new_sel.emplace(itt->uuid, ObjectType::TEXT);
}
}
} break;
default:;
}
}
selection.insert(new_sel.begin(), new_sel.end());
if (pkgs_fixed.size() && imp)
imp->tool_bar_flash("can't move fixed package");
for (auto it = selection.begin(); it != selection.end();) {
if (pkgs_fixed.count(*it))
it = selection.erase(it);
else
++it;
}
}
Coordi ToolMove::get_selection_center()
{
Accumulator<Coordi> accu;
std::set<SelectableRef> items_ignore;
for (const auto &it : selection) {
if (it.type == ObjectType::BOARD_PACKAGE) {
const auto &pkg = doc.b->get_board()->packages.at(it.uuid);
for (auto &it_txt : pkg.texts) {
items_ignore.emplace(it_txt->uuid, ObjectType::TEXT);
}
}
else if (it.type == ObjectType::SCHEMATIC_SYMBOL) {
const auto &sym = doc.c->get_sheet()->symbols.at(it.uuid);
for (auto &it_txt : sym.texts) {
items_ignore.emplace(it_txt->uuid, ObjectType::TEXT);
}
}
}
for (const auto &it : selection) {
if (items_ignore.count(it))
continue;
switch (it.type) {
case ObjectType::JUNCTION:
accu.accumulate(doc.r->get_junction(it.uuid)->position);
break;
case ObjectType::HOLE:
accu.accumulate(doc.r->get_hole(it.uuid)->placement.shift);
break;
case ObjectType::BOARD_HOLE:
accu.accumulate(doc.b->get_board()->holes.at(it.uuid).placement.shift);
break;
case ObjectType::SYMBOL_PIN:
accu.accumulate(doc.y->get_symbol_pin(it.uuid).position);
break;
case ObjectType::SCHEMATIC_SYMBOL:
accu.accumulate(doc.c->get_schematic_symbol(it.uuid)->placement.shift);
break;
case ObjectType::BOARD_PACKAGE:
accu.accumulate(doc.b->get_board()->packages.at(it.uuid).placement.shift);
break;
case ObjectType::PAD:
accu.accumulate(doc.k->get_package().pads.at(it.uuid).placement.shift);
break;
case ObjectType::TEXT:
accu.accumulate(doc.r->get_text(it.uuid)->placement.shift);
break;
case ObjectType::POLYGON_VERTEX:
accu.accumulate(doc.r->get_polygon(it.uuid)->vertices.at(it.vertex).position);
break;
case ObjectType::DIMENSION:
if (it.vertex < 2) {
auto dim = doc.r->get_dimension(it.uuid);
accu.accumulate(it.vertex == 0 ? dim->p0 : dim->p1);
}
break;
case ObjectType::POLYGON_ARC_CENTER:
accu.accumulate(doc.r->get_polygon(it.uuid)->vertices.at(it.vertex).arc_center);
break;
case ObjectType::SHAPE:
accu.accumulate(doc.a->get_padstack().shapes.at(it.uuid).placement.shift);
break;
case ObjectType::BOARD_PANEL:
accu.accumulate(doc.b->get_board()->board_panels.at(it.uuid).placement.shift);
break;
case ObjectType::PICTURE:
accu.accumulate(doc.r->get_picture(it.uuid)->placement.shift);
break;
case ObjectType::BOARD_DECAL:
accu.accumulate(doc.b->get_board()->decals.at(it.uuid).placement.shift);
break;
default:;
}
}
if (doc.c || doc.y)
return (accu.get() / 1.25_mm) * 1.25_mm;
else
return accu.get();
}
ToolResponse ToolMove::begin(const ToolArgs &args)
{
std::cout << "tool move\n";
move_init(args.coords);
Coordi selection_center;
if (tool_id == ToolID::ROTATE_CURSOR || tool_id == ToolID::MIRROR_CURSOR)
selection_center = args.coords;
else
selection_center = get_selection_center();
collect_nets();
if (tool_id == ToolID::ROTATE || tool_id == ToolID::MIRROR_X || tool_id == ToolID::MIRROR_Y
|| tool_id == ToolID::ROTATE_CURSOR || tool_id == ToolID::MIRROR_CURSOR) {
move_mirror_or_rotate(selection_center, tool_id == ToolID::ROTATE || tool_id == ToolID::ROTATE_CURSOR);
if (tool_id == ToolID::MIRROR_Y) {
move_mirror_or_rotate(selection_center, true);
move_mirror_or_rotate(selection_center, true);
}
finish();
return ToolResponse::commit();
}
if (tool_id == ToolID::MOVE_EXACTLY) {
if (auto r = imp->dialogs.ask_datum_coord("Move exactly")) {
move_do(*r);
finish();
return ToolResponse::commit();
}
else {
return ToolResponse::end();
}
}
imp->tool_bar_set_actions({
{InToolActionID::LMB},
{InToolActionID::RMB},
{InToolActionID::ROTATE, InToolActionID::MIRROR, "rotate/mirror"},
{InToolActionID::ROTATE_CURSOR, InToolActionID::MIRROR_CURSOR, "rotate/mirror around cursor"},
{InToolActionID::RESTRICT},
});
update_tip();
for (const auto &it : selection) {
if (it.type == ObjectType::POLYGON_VERTEX || it.type == ObjectType::POLYGON_EDGE) {
auto poly = doc.r->get_polygon(it.uuid);
if (auto plane = dynamic_cast<Plane *>(poly->usage.ptr)) {
planes.insert(plane);
}
}
}
for (auto plane : planes) {
plane->fragments.clear();
plane->revision++;
}
InToolActionID action = InToolActionID::NONE;
switch (tool_id) {
case ToolID::MOVE_KEY_FINE_UP:
action = InToolActionID::MOVE_UP_FINE;
break;
case ToolID::MOVE_KEY_UP:
action = InToolActionID::MOVE_UP;
break;
case ToolID::MOVE_KEY_FINE_DOWN:
action = InToolActionID::MOVE_DOWN_FINE;
break;
case ToolID::MOVE_KEY_DOWN:
action = InToolActionID::MOVE_DOWN;
break;
case ToolID::MOVE_KEY_FINE_LEFT:
action = InToolActionID::MOVE_LEFT_FINE;<|fim▁hole|>
case ToolID::MOVE_KEY_LEFT:
action = InToolActionID::MOVE_LEFT;
break;
case ToolID::MOVE_KEY_FINE_RIGHT:
action = InToolActionID::MOVE_RIGHT_FINE;
break;
case ToolID::MOVE_KEY_RIGHT:
action = InToolActionID::MOVE_RIGHT;
break;
default:;
}
if (action != InToolActionID::NONE) {
is_key = true;
ToolArgs args2;
args2.type = ToolEventType::ACTION;
args2.action = action;
update(args2);
}
if (tool_id == ToolID::MOVE_KEY)
is_key = true;
imp->tool_bar_set_tool_name("Move");
return ToolResponse();
}
void ToolMove::collect_nets()
{
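    // Remember every net touched by the moved objects so only their airwires
    // need to be recomputed while dragging.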
for (const auto &it : selection) {
switch (it.type) {
case ObjectType::BOARD_PACKAGE: {
BoardPackage *pkg = &doc.b->get_board()->packages.at(it.uuid);
for (const auto &itt : pkg->package.pads) {
if (itt.second.net)
nets.insert(itt.second.net->uuid);
}
} break;
case ObjectType::JUNCTION: {
auto ju = doc.r->get_junction(it.uuid);
if (ju->net)
nets.insert(ju->net->uuid);
} break;
default:;
}
}
}
bool ToolMove::can_begin()
{
expand_selection();
return selection.size() > 0;
}
void ToolMove::update_tip()
{
auto delta = get_delta();
std::string s = coord_to_string(delta + key_delta, true) + " ";
if (!is_key) {
s += restrict_mode_to_string();
}
imp->tool_bar_set_tip(s);
}
void ToolMove::do_move(const Coordi &d)
{
move_do_cursor(d);
if (doc.b && update_airwires && nets.size()) {
doc.b->get_board()->update_airwires(true, nets);
}
update_tip();
}
void ToolMove::finish()
{
for (const auto &it : selection) {
if (it.type == ObjectType::SCHEMATIC_SYMBOL) {
auto sym = doc.c->get_schematic_symbol(it.uuid);
doc.c->get_schematic()->autoconnect_symbol(doc.c->get_sheet(), sym);
if (sym->component->connections.size() == 0) {
doc.c->get_schematic()->place_bipole_on_line(doc.c->get_sheet(), sym);
}
}
}
if (doc.c) {
merge_selected_junctions();
}
if (doc.b) {
auto brd = doc.b->get_board();
brd->expand_flags = static_cast<Board::ExpandFlags>(Board::EXPAND_AIRWIRES);
brd->airwires_expand = nets;
for (auto plane : planes) {
brd->update_plane(plane);
}
}
}
ToolResponse ToolMove::update(const ToolArgs &args)
{
if (args.type == ToolEventType::MOVE) {
if (!is_key)
do_move(args.coords);
return ToolResponse();
}
else if (args.type == ToolEventType::ACTION) {
if (any_of(args.action, {InToolActionID::LMB, InToolActionID::COMMIT})
|| (is_transient && args.action == InToolActionID::LMB_RELEASE)) {
finish();
return ToolResponse::commit();
}
else if (any_of(args.action, {InToolActionID::RMB, InToolActionID::CANCEL})) {
return ToolResponse::revert();
}
else if (args.action == InToolActionID::RESTRICT) {
cycle_restrict_mode();
do_move(args.coords);
}
else if (any_of(args.action, {InToolActionID::ROTATE, InToolActionID::MIRROR})) {
bool rotate = args.action == InToolActionID::ROTATE;
const auto selection_center = get_selection_center();
move_mirror_or_rotate(selection_center, rotate);
}
else if (any_of(args.action, {InToolActionID::ROTATE_CURSOR, InToolActionID::MIRROR_CURSOR})) {
bool rotate = args.action == InToolActionID::ROTATE_CURSOR;
move_mirror_or_rotate(args.coords, rotate);
}
else {
const auto [dir, fine] = dir_from_action(args.action);
if (dir.x || dir.y) {
auto sp = imp->get_grid_spacing();
if (fine)
sp = sp / 10;
key_delta += dir * sp;
move_do(dir * sp);
update_tip();
}
}
}
return ToolResponse();
}
} // namespace horizon<|fim▁end|> | break; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#![deny(warnings)]
#![deny(rust_2018_idioms)]
// #![deny(clippy::all)]
#![deny(clippy::clone_on_ref_ptr)]
mod build;
mod errors;
mod ir;
mod program;
mod signatures;
mod transform;<|fim▁hole|>mod visitor;
pub use crate::errors::{ValidationError, ValidationMessage};
pub use build::build_ir as build;
pub use ir::*;
pub use program::Program;
pub use transform::{Transformed, TransformedValue, Transformer};
pub use validator::Validator;
pub use visitor::Visitor;<|fim▁end|> | mod validator; |
<|file_name|>brocade_ip_access_list.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_ip_access_list(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""<|fim▁hole|> extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
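# The builders that follow target *standard* ACLs, which filter on the
# source address only; their "hide-ip-acl-std" container mirrors the
# "hide-ip-acl-ext" structure used by the extended-ACL methods above.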
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
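# A second pass over the extended-ACL leaves begins here. The generated
# module repeats these definitions verbatim; because Python binds the last
# definition of a name in a class body, the later copies shadow the earlier
# ones without changing behaviour.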
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
    """Auto Generated Code.

    Build and dispatch the XML config for the ``log`` leaf, enabling logging
    on the extended IP ACL sequence entry selected by ``name`` and
    ``seq_id``.
    """
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)<|fim▁end|> | config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list") |
<|file_name|>stdin.rs<|end_file_name|><|fim▁begin|>#![feature(libc)]
extern crate libc;
extern crate termios;
use termios::prelude::*;
fn main() {<|fim▁hole|><|fim▁end|> | println!("{:?}", Termios::fetch(libc::STDIN_FILENO).unwrap());
} |
<|file_name|>vulnerability.pb.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The Grafeas Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc v3.13.0
// source: vulnerability.proto
package vulnerability_go_proto
import (
timestamp "github.com/golang/protobuf/ptypes/timestamp"
common_go_proto "github.com/grafeas/grafeas/proto/v1beta1/common_go_proto"
cvss_go_proto "github.com/grafeas/grafeas/proto/v1beta1/cvss_go_proto"
package_go_proto "github.com/grafeas/grafeas/proto/v1beta1/package_go_proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Note provider-assigned severity/impact ranking.
type Severity int32
const (
// Unknown.
Severity_SEVERITY_UNSPECIFIED Severity = 0
// Minimal severity.
Severity_MINIMAL Severity = 1
// Low severity.
Severity_LOW Severity = 2
// Medium severity.
Severity_MEDIUM Severity = 3
// High severity.
Severity_HIGH Severity = 4
// Critical severity.
Severity_CRITICAL Severity = 5
)
// Enum value maps for Severity.
var (
Severity_name = map[int32]string{
0: "SEVERITY_UNSPECIFIED",
1: "MINIMAL",
2: "LOW",
3: "MEDIUM",
4: "HIGH",
5: "CRITICAL",
}
Severity_value = map[string]int32{
"SEVERITY_UNSPECIFIED": 0,
"MINIMAL": 1,
"LOW": 2,
"MEDIUM": 3,
"HIGH": 4,
"CRITICAL": 5,
}
)
func (x Severity) Enum() *Severity {
p := new(Severity)
*p = x
return p
}
func (x Severity) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (Severity) Descriptor() protoreflect.EnumDescriptor {
return file_vulnerability_proto_enumTypes[0].Descriptor()
}
func (Severity) Type() protoreflect.EnumType {
return &file_vulnerability_proto_enumTypes[0]
}
func (x Severity) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use Severity.Descriptor instead.
func (Severity) EnumDescriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{0}
}
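// Illustrative helper -- not part of the generated file: map a raw string
// such as "HIGH" onto Severity via the generated Severity_value map above,
// falling back to SEVERITY_UNSPECIFIED for unknown inputs.
func parseSeverity(s string) Severity {
	if v, ok := Severity_value[s]; ok {
		return Severity(v)
	}
	return Severity_SEVERITY_UNSPECIFIED
}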
// Vulnerability provides metadata about a security vulnerability in a Note.
type Vulnerability struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The CVSS score for this vulnerability.
CvssScore float32 `protobuf:"fixed32,1,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"`
// Note provider assigned impact of the vulnerability.
Severity Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"severity,omitempty"`
// All information about the package to specifically identify this
// vulnerability. One entry per (version range and cpe_uri) the package
// vulnerability has manifested in.
Details []*Vulnerability_Detail `protobuf:"bytes,3,rep,name=details,proto3" json:"details,omitempty"`
// The full description of the CVSS for version 3.
CvssV3 *cvss_go_proto.CVSS `protobuf:"bytes,4,opt,name=cvss_v3,json=cvssV3,proto3" json:"cvss_v3,omitempty"`
// Windows details get their own format because the information format and
// model don't match a normal detail. Specifically Windows updates are done as
// patches, thus Windows vulnerabilities really are a missing package, rather
// than a package being at an incorrect version.
WindowsDetails []*Vulnerability_WindowsDetail `protobuf:"bytes,5,rep,name=windows_details,json=windowsDetails,proto3" json:"windows_details,omitempty"`
// The time this information was last changed at the source. This is an
// upstream timestamp from the underlying information source - e.g. Ubuntu
// security tracker.
SourceUpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=source_update_time,json=sourceUpdateTime,proto3" json:"source_update_time,omitempty"`
// The full description of the CVSS for version 2.
CvssV2 *cvss_go_proto.CVSS `protobuf:"bytes,7,opt,name=cvss_v2,json=cvssV2,proto3" json:"cvss_v2,omitempty"`
// A list of CWE for this vulnerability.
// For details, see: https://cwe.mitre.org/index.html
Cwe []string `protobuf:"bytes,8,rep,name=cwe,proto3" json:"cwe,omitempty"`
}
func (x *Vulnerability) Reset() {
*x = Vulnerability{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Vulnerability) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Vulnerability) ProtoMessage() {}
func (x *Vulnerability) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Vulnerability.ProtoReflect.Descriptor instead.
func (*Vulnerability) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{0}
}
func (x *Vulnerability) GetCvssScore() float32 {
if x != nil {
return x.CvssScore
}
return 0
}
func (x *Vulnerability) GetSeverity() Severity {
if x != nil {
return x.Severity
}
return Severity_SEVERITY_UNSPECIFIED
}
func (x *Vulnerability) GetDetails() []*Vulnerability_Detail {
if x != nil {
return x.Details
}
return nil
}
func (x *Vulnerability) GetCvssV3() *cvss_go_proto.CVSS {
if x != nil {
return x.CvssV3
}
return nil
}
func (x *Vulnerability) GetWindowsDetails() []*Vulnerability_WindowsDetail {
if x != nil {
return x.WindowsDetails
}
return nil
}
func (x *Vulnerability) GetSourceUpdateTime() *timestamp.Timestamp {
if x != nil {
return x.SourceUpdateTime
}
return nil
}
func (x *Vulnerability) GetCvssV2() *cvss_go_proto.CVSS {
if x != nil {
return x.CvssV2
}
return nil
}
func (x *Vulnerability) GetCwe() []string {
if x != nil {
return x.Cwe
}
return nil
}
// Details of a vulnerability Occurrence.
type Details struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The type of package, whether native or non-native (Ruby gems, Node.js
// packages, etc.)
Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
// Output only. The note provider assigned Severity of the vulnerability.
Severity Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"severity,omitempty"`
// Output only. The CVSS score of this vulnerability. CVSS score is on a
// scale of 0-10 where 0 indicates low severity and 10 indicates high
// severity.
CvssScore float32 `protobuf:"fixed32,3,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"`
// Required. The set of affected locations and their fixes (if available)
// within the associated resource.
PackageIssue []*PackageIssue `protobuf:"bytes,4,rep,name=package_issue,json=packageIssue,proto3" json:"package_issue,omitempty"`
// Output only. A one sentence description of this vulnerability.
ShortDescription string `protobuf:"bytes,5,opt,name=short_description,json=shortDescription,proto3" json:"short_description,omitempty"`
// Output only. A detailed description of this vulnerability.
LongDescription string `protobuf:"bytes,6,opt,name=long_description,json=longDescription,proto3" json:"long_description,omitempty"`
// Output only. URLs related to this vulnerability.
RelatedUrls []*common_go_proto.RelatedUrl `protobuf:"bytes,7,rep,name=related_urls,json=relatedUrls,proto3" json:"related_urls,omitempty"`
// The distro assigned severity for this vulnerability when it is
// available, and note provider assigned severity when distro has not yet
// assigned a severity for this vulnerability.
//
// When there are multiple PackageIssues for this vulnerability, they can have
// different effective severities because some might be provided by the distro
// while others are provided by the language ecosystem for a language pack.
// For this reason, it is advised to use the effective severity on the
// PackageIssue level. In the case where multiple PackageIssues have differing
// effective severities, this field should be the highest severity for any of
// the PackageIssues.
EffectiveSeverity Severity `protobuf:"varint,8,opt,name=effective_severity,json=effectiveSeverity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"effective_severity,omitempty"`
}
func (x *Details) Reset() {
*x = Details{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Details) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Details) ProtoMessage() {}
func (x *Details) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Details.ProtoReflect.Descriptor instead.
func (*Details) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{1}
}
func (x *Details) GetType() string {
if x != nil {
return x.Type
}
return ""
}
func (x *Details) GetSeverity() Severity {
if x != nil {
return x.Severity
}
return Severity_SEVERITY_UNSPECIFIED
}
func (x *Details) GetCvssScore() float32 {
if x != nil {
return x.CvssScore
}
return 0
}
func (x *Details) GetPackageIssue() []*PackageIssue {
if x != nil {
return x.PackageIssue
}
return nil
}
func (x *Details) GetShortDescription() string {
if x != nil {
return x.ShortDescription
}
return ""
}
func (x *Details) GetLongDescription() string {
if x != nil {
return x.LongDescription
}
return ""
}
func (x *Details) GetRelatedUrls() []*common_go_proto.RelatedUrl {
if x != nil {
return x.RelatedUrls
}
return nil
}
func (x *Details) GetEffectiveSeverity() Severity {
if x != nil {
return x.EffectiveSeverity
}
return Severity_SEVERITY_UNSPECIFIED
}
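// Illustrative sketch -- not part of the generated file. Per the
// effective_severity field comment above, the Details-level value should be
// the highest effective severity across its PackageIssues; this relies on
// the Severity enum being ordered from MINIMAL (1) up to CRITICAL (5).
func highestEffectiveSeverity(issues []*PackageIssue) Severity {
	max := Severity_SEVERITY_UNSPECIFIED
	for _, pi := range issues {
		if sev := pi.GetEffectiveSeverity(); sev > max {
			max = sev
		}
	}
	return max
}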
// This message wraps a location affected by a vulnerability and its
// associated fix (if one is available).
type PackageIssue struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The location of the vulnerability.
AffectedLocation *VulnerabilityLocation `protobuf:"bytes,1,opt,name=affected_location,json=affectedLocation,proto3" json:"affected_location,omitempty"`
// The location of the available fix for vulnerability.
FixedLocation *VulnerabilityLocation `protobuf:"bytes,2,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"`
// Deprecated, use Details.effective_severity instead
// The severity (e.g., distro assigned severity) for this vulnerability.
SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"`
// The type of package (e.g. OS, MAVEN, GO).
PackageType string `protobuf:"bytes,4,opt,name=package_type,json=packageType,proto3" json:"package_type,omitempty"`
// The distro or language system assigned severity for this vulnerability
// when that is available and note provider assigned severity when it is not
// available.
EffectiveSeverity Severity `protobuf:"varint,5,opt,name=effective_severity,json=effectiveSeverity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"effective_severity,omitempty"`
}
func (x *PackageIssue) Reset() {
*x = PackageIssue{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *PackageIssue) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*PackageIssue) ProtoMessage() {}
func (x *PackageIssue) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use PackageIssue.ProtoReflect.Descriptor instead.
func (*PackageIssue) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{2}
}
func (x *PackageIssue) GetAffectedLocation() *VulnerabilityLocation {
if x != nil {
return x.AffectedLocation
}
return nil
}
func (x *PackageIssue) GetFixedLocation() *VulnerabilityLocation {
if x != nil {
return x.FixedLocation
}
return nil
}
func (x *PackageIssue) GetSeverityName() string {
if x != nil {
return x.SeverityName
}
return ""
}
func (x *PackageIssue) GetPackageType() string {
if x != nil {
return x.PackageType
}
return ""
}
func (x *PackageIssue) GetEffectiveSeverity() Severity {
if x != nil {
return x.EffectiveSeverity
}
return Severity_SEVERITY_UNSPECIFIED
}
// The location of the vulnerability.
type VulnerabilityLocation struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The CPE URI in [cpe format](https://cpe.mitre.org/specification/).
// Examples include distro or storage location for vulnerable jar.
CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"`
// Required. The package being described.
Package string `protobuf:"bytes,2,opt,name=package,proto3" json:"package,omitempty"`
// Required. The version of the package being described.
Version *package_go_proto.Version `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"`
}
func (x *VulnerabilityLocation) Reset() {
*x = VulnerabilityLocation{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *VulnerabilityLocation) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*VulnerabilityLocation) ProtoMessage() {}
func (x *VulnerabilityLocation) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use VulnerabilityLocation.ProtoReflect.Descriptor instead.
func (*VulnerabilityLocation) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{3}
}
func (x *VulnerabilityLocation) GetCpeUri() string {
if x != nil {
return x.CpeUri
}
return ""
}
func (x *VulnerabilityLocation) GetPackage() string {
if x != nil {
return x.Package
}
return ""
}
func (x *VulnerabilityLocation) GetVersion() *package_go_proto.Version {
if x != nil {
return x.Version
}
return nil
}
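// Illustrative value -- not part of the generated file: a
// VulnerabilityLocation for a hypothetical glibc fix in a Debian 8 image
// (all field values, including the Version name, are examples only).
var exampleFixedLocation = &VulnerabilityLocation{
	CpeUri:  "cpe:/o:debian:debian_linux:8",
	Package: "glibc",
	Version: &package_go_proto.Version{Name: "2.2"},
}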
// Identifies all appearances of this vulnerability in the package for a
// specific distro/location. For example: glibc in
// cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2
type Vulnerability_Detail struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The CPE URI in
// [cpe format](https://cpe.mitre.org/specification/) in which the
// vulnerability manifests. Examples include distro or storage location for
// vulnerable jar.
CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"`
// Required. The name of the package where the vulnerability was found.
Package string `protobuf:"bytes,2,opt,name=package,proto3" json:"package,omitempty"`
// The min version of the package in which the vulnerability exists.
MinAffectedVersion *package_go_proto.Version `protobuf:"bytes,3,opt,name=min_affected_version,json=minAffectedVersion,proto3" json:"min_affected_version,omitempty"`
// The max version of the package in which the vulnerability exists.
MaxAffectedVersion *package_go_proto.Version `protobuf:"bytes,4,opt,name=max_affected_version,json=maxAffectedVersion,proto3" json:"max_affected_version,omitempty"`
// The severity (e.g., distro-assigned severity) for this vulnerability.
SeverityName string `protobuf:"bytes,5,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"`
// A vendor-specific description of this note.
Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"`
// The fix for this specific package version.
FixedLocation *VulnerabilityLocation `protobuf:"bytes,7,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"`
// The type of package, whether native or non-native (Ruby gems, Node.js
// packages, etc.).
PackageType string `protobuf:"bytes,8,opt,name=package_type,json=packageType,proto3" json:"package_type,omitempty"`
// Whether this detail is obsolete. Occurrences are expected not to point to
// obsolete details.
IsObsolete bool `protobuf:"varint,9,opt,name=is_obsolete,json=isObsolete,proto3" json:"is_obsolete,omitempty"`
// The time this information was last changed at the source. This is an
// upstream timestamp from the underlying information source - e.g. Ubuntu
// security tracker.
SourceUpdateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=source_update_time,json=sourceUpdateTime,proto3" json:"source_update_time,omitempty"`
// The source from which the information in this Detail was obtained.
Source string `protobuf:"bytes,11,opt,name=source,proto3" json:"source,omitempty"`
// The name of the vendor of the product.
Vendor string `protobuf:"bytes,12,opt,name=vendor,proto3" json:"vendor,omitempty"`
}
func (x *Vulnerability_Detail) Reset() {
*x = Vulnerability_Detail{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Vulnerability_Detail) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Vulnerability_Detail) ProtoMessage() {}
func (x *Vulnerability_Detail) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Vulnerability_Detail.ProtoReflect.Descriptor instead.
func (*Vulnerability_Detail) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{0, 0}
}
func (x *Vulnerability_Detail) GetCpeUri() string {
if x != nil {
return x.CpeUri
}
return ""
}
func (x *Vulnerability_Detail) GetPackage() string {
if x != nil {
return x.Package
}
return ""
}
func (x *Vulnerability_Detail) GetMinAffectedVersion() *package_go_proto.Version {
if x != nil {
return x.MinAffectedVersion
}
return nil
}
func (x *Vulnerability_Detail) GetMaxAffectedVersion() *package_go_proto.Version {
if x != nil {
return x.MaxAffectedVersion
}
return nil
}
func (x *Vulnerability_Detail) GetSeverityName() string {
if x != nil {
return x.SeverityName
}
return ""
}
func (x *Vulnerability_Detail) GetDescription() string {
if x != nil {
return x.Description
}
return ""
}
func (x *Vulnerability_Detail) GetFixedLocation() *VulnerabilityLocation {
if x != nil {
return x.FixedLocation
}
return nil
}
func (x *Vulnerability_Detail) GetPackageType() string {
if x != nil {
return x.PackageType
}
return ""
}
func (x *Vulnerability_Detail) GetIsObsolete() bool {
if x != nil {
return x.IsObsolete
}
return false
}
func (x *Vulnerability_Detail) GetSourceUpdateTime() *timestamp.Timestamp {
if x != nil {
return x.SourceUpdateTime
}
return nil
}
func (x *Vulnerability_Detail) GetSource() string {
if x != nil {
return x.Source
}
return ""
}
func (x *Vulnerability_Detail) GetVendor() string {
if x != nil {
return x.Vendor
}
return ""
}
type Vulnerability_WindowsDetail struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The CPE URI in
// [cpe format](https://cpe.mitre.org/specification/) in which the
// vulnerability manifests. Examples include distro or storage location for
// vulnerable jar.
CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"`
// Required. The name of the vulnerability.
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
// The description of the vulnerability.
Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
// Required. The names of the KBs which have hotfixes to mitigate this
// vulnerability. Note that there may be multiple hotfixes (and thus
// multiple KBs) that mitigate a given vulnerability. Currently the
// presence of any listed KB is considered a fix.
FixingKbs []*Vulnerability_WindowsDetail_KnowledgeBase `protobuf:"bytes,4,rep,name=fixing_kbs,json=fixingKbs,proto3" json:"fixing_kbs,omitempty"`
}
func (x *Vulnerability_WindowsDetail) Reset() {
*x = Vulnerability_WindowsDetail{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Vulnerability_WindowsDetail) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Vulnerability_WindowsDetail) ProtoMessage() {}
func (x *Vulnerability_WindowsDetail) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Vulnerability_WindowsDetail.ProtoReflect.Descriptor instead.
func (*Vulnerability_WindowsDetail) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{0, 1}
}
func (x *Vulnerability_WindowsDetail) GetCpeUri() string {
if x != nil {
return x.CpeUri
}
return ""
}
func (x *Vulnerability_WindowsDetail) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Vulnerability_WindowsDetail) GetDescription() string {
if x != nil {
return x.Description
}
return ""
}
func (x *Vulnerability_WindowsDetail) GetFixingKbs() []*Vulnerability_WindowsDetail_KnowledgeBase {
if x != nil {
return x.FixingKbs
}
return nil
}
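// Illustrative sketch -- not part of the generated file. Per the fixing_kbs
// comment above, the presence of any one listed KB counts as a fix;
// `installed` is an assumed set of KB names (e.g. "KB123456") reported by
// the host being scanned.
func windowsDetailFixed(d *Vulnerability_WindowsDetail, installed map[string]bool) bool {
	for _, kb := range d.GetFixingKbs() {
		if installed[kb.GetName()] {
			return true
		}
	}
	return false
}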
type Vulnerability_WindowsDetail_KnowledgeBase struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The KB name (generally of the form KB[0-9]+, e.g. KB123456).
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// A link to the KB in the Windows update catalog -
// https://www.catalog.update.microsoft.com/
Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"`
}
func (x *Vulnerability_WindowsDetail_KnowledgeBase) Reset() {
*x = Vulnerability_WindowsDetail_KnowledgeBase{}
if protoimpl.UnsafeEnabled {
mi := &file_vulnerability_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
<|fim▁hole|>
func (*Vulnerability_WindowsDetail_KnowledgeBase) ProtoMessage() {}
func (x *Vulnerability_WindowsDetail_KnowledgeBase) ProtoReflect() protoreflect.Message {
mi := &file_vulnerability_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Vulnerability_WindowsDetail_KnowledgeBase.ProtoReflect.Descriptor instead.
func (*Vulnerability_WindowsDetail_KnowledgeBase) Descriptor() ([]byte, []int) {
return file_vulnerability_proto_rawDescGZIP(), []int{0, 1, 0}
}
func (x *Vulnerability_WindowsDetail_KnowledgeBase) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Vulnerability_WindowsDetail_KnowledgeBase) GetUrl() string {
if x != nil {
return x.Url
}
return ""
}
var File_vulnerability_proto protoreflect.FileDescriptor
var file_vulnerability_proto_rawDesc = []byte{
0x0a, 0x13, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76,
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69,
0x6c, 0x69, 0x74, 0x79, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69,
0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1a, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x1a, 0x18, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2f, 0x63, 0x76, 0x73, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x70, 0x61, 0x63, 0x6b,
0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc8, 0x0a, 0x0a, 0x0d, 0x56, 0x75,
0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x63,
0x76, 0x73, 0x73, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x02, 0x52,
0x09, 0x63, 0x76, 0x73, 0x73, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x43, 0x0a, 0x08, 0x73, 0x65,
0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x67,
0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76,
0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x53, 0x65, 0x76,
0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12,
0x4d, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b,
0x32, 0x33, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74,
0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79,
0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x44,
0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x3c,
0x0a, 0x07, 0x63, 0x76, 0x73, 0x73, 0x5f, 0x76, 0x33, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x23, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e,
0x43, 0x56, 0x53, 0x53, 0x52, 0x06, 0x63, 0x76, 0x73, 0x73, 0x56, 0x33, 0x12, 0x63, 0x0a, 0x0f,
0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x73, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18,
0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62,
0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c,
0x69, 0x74, 0x79, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x73, 0x44, 0x65, 0x74, 0x61, 0x69,
0x6c, 0x52, 0x0e, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x73, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c,
0x73, 0x12, 0x48, 0x0a, 0x12, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x75, 0x70, 0x64, 0x61,
0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e,
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x07, 0x63,
0x76, 0x73, 0x73, 0x5f, 0x76, 0x32, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x67,
0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76,
0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x43, 0x56, 0x53,
0x53, 0x52, 0x06, 0x63, 0x76, 0x73, 0x73, 0x56, 0x32, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x77, 0x65,
0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x63, 0x77, 0x65, 0x1a, 0xc5, 0x04, 0x0a, 0x06,
0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x17, 0x0a, 0x07, 0x63, 0x70, 0x65, 0x5f, 0x75, 0x72,
0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x70, 0x65, 0x55, 0x72, 0x69, 0x12,
0x18, 0x0a, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x52, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x52, 0x0a, 0x14, 0x6d, 0x69, 0x6e,
0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f,
0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61,
0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67,
0x65, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x6d, 0x69, 0x6e, 0x41, 0x66,
0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x52, 0x0a,
0x14, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x76, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x67, 0x72,
0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x70, 0x61,
0x63, 0x6b, 0x61, 0x67, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x6d,
0x61, 0x78, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f,
0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x6e, 0x61,
0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69,
0x74, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69,
0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73,
0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5b, 0x0a, 0x0e, 0x66, 0x69, 0x78, 0x65,
0x64, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x34, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74,
0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79,
0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x4c, 0x6f,
0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x66, 0x69, 0x78, 0x65, 0x64, 0x4c, 0x6f, 0x63,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65,
0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, 0x63,
0x6b, 0x61, 0x67, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x73, 0x5f, 0x6f,
0x62, 0x73, 0x6f, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x69,
0x73, 0x4f, 0x62, 0x73, 0x6f, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x48, 0x0a, 0x12, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18,
0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d,
0x70, 0x52, 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x54,
0x69, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x0b, 0x20,
0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76,
0x65, 0x6e, 0x64, 0x6f, 0x72, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x76, 0x65, 0x6e,
0x64, 0x6f, 0x72, 0x1a, 0xfe, 0x01, 0x0a, 0x0d, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x73, 0x44,
0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x17, 0x0a, 0x07, 0x63, 0x70, 0x65, 0x5f, 0x75, 0x72, 0x69,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x70, 0x65, 0x55, 0x72, 0x69, 0x12, 0x12,
0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f,
0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70,
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x67, 0x0a, 0x0a, 0x66, 0x69, 0x78, 0x69, 0x6e, 0x67, 0x5f, 0x6b,
0x62, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65,
0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65,
0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61,
0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x73, 0x44, 0x65,
0x74, 0x61, 0x69, 0x6c, 0x2e, 0x4b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x42, 0x61,
0x73, 0x65, 0x52, 0x09, 0x66, 0x69, 0x78, 0x69, 0x6e, 0x67, 0x4b, 0x62, 0x73, 0x1a, 0x35, 0x0a,
0x0d, 0x4b, 0x6e, 0x6f, 0x77, 0x6c, 0x65, 0x64, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x12,
0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x03, 0x75, 0x72, 0x6c, 0x22, 0xc3, 0x03, 0x0a, 0x07, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73,
0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x74, 0x79, 0x70, 0x65, 0x12, 0x43, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73,
0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61,
0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52,
0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x76, 0x73,
0x73, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x09, 0x63,
0x76, 0x73, 0x73, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x12, 0x50, 0x0a, 0x0d, 0x70, 0x61, 0x63, 0x6b,
0x61, 0x67, 0x65, 0x5f, 0x69, 0x73, 0x73, 0x75, 0x65, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32,
0x2b, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e,
0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x61,
0x63, 0x6b, 0x61, 0x67, 0x65, 0x49, 0x73, 0x73, 0x75, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x73, 0x68,
0x6f, 0x72, 0x74, 0x5f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18,
0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x44, 0x65, 0x73, 0x63,
0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x6c, 0x6f, 0x6e, 0x67, 0x5f,
0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28,
0x09, 0x52, 0x0f, 0x6c, 0x6f, 0x6e, 0x67, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69,
0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x0c, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x75, 0x72,
0x6c, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65,
0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x52, 0x65, 0x6c, 0x61, 0x74,
0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x0b, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x55, 0x72,
0x6c, 0x73, 0x12, 0x56, 0x0a, 0x12, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f,
0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27,
0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x53,
0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x11, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69,
0x76, 0x65, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x22, 0xf3, 0x02, 0x0a, 0x0c, 0x50,
0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x49, 0x73, 0x73, 0x75, 0x65, 0x12, 0x61, 0x0a, 0x11, 0x61,
0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73,
0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61,
0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69,
0x6c, 0x69, 0x74, 0x79, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x10, 0x61, 0x66,
0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5b,
0x0a, 0x0e, 0x66, 0x69, 0x78, 0x65, 0x64, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73,
0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61,
0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69,
0x6c, 0x69, 0x74, 0x79, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x66, 0x69,
0x78, 0x65, 0x64, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x73,
0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01,
0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x4e, 0x61, 0x6d, 0x65,
0x12, 0x21, 0x0a, 0x0c, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65,
0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x54,
0x79, 0x70, 0x65, 0x12, 0x5b, 0x0a, 0x12, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65,
0x5f, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x27, 0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2e, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x2e,
0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x11, 0x65,
0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79,
0x22, 0x86, 0x01, 0x0a, 0x15, 0x56, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69,
0x74, 0x79, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x07, 0x63, 0x70,
0x65, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x70, 0x65,
0x55, 0x72, 0x69, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x3a, 0x0a,
0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20,
0x2e, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2e, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x2a, 0x5e, 0x0a, 0x08, 0x53, 0x65, 0x76,
0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x45, 0x56, 0x45, 0x52, 0x49, 0x54,
0x59, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12,
0x0b, 0x0a, 0x07, 0x4d, 0x49, 0x4e, 0x49, 0x4d, 0x41, 0x4c, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03,
0x4c, 0x4f, 0x57, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10,
0x03, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x49, 0x47, 0x48, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x43,
0x52, 0x49, 0x54, 0x49, 0x43, 0x41, 0x4c, 0x10, 0x05, 0x42, 0x6b, 0x0a, 0x20, 0x69, 0x6f, 0x2e,
0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2e,
0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x50, 0x01, 0x5a,
0x3f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x72, 0x61, 0x66,
0x65, 0x61, 0x73, 0x2f, 0x67, 0x72, 0x61, 0x66, 0x65, 0x61, 0x73, 0x2f, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x76, 0x75, 0x6c, 0x6e, 0x65, 0x72,
0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x67, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0xa2, 0x02, 0x03, 0x47, 0x52, 0x41, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_vulnerability_proto_rawDescOnce sync.Once
file_vulnerability_proto_rawDescData = file_vulnerability_proto_rawDesc
)
func file_vulnerability_proto_rawDescGZIP() []byte {
file_vulnerability_proto_rawDescOnce.Do(func() {
file_vulnerability_proto_rawDescData = protoimpl.X.CompressGZIP(file_vulnerability_proto_rawDescData)
})
return file_vulnerability_proto_rawDescData
}
var file_vulnerability_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_vulnerability_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_vulnerability_proto_goTypes = []interface{}{
(Severity)(0), // 0: grafeas.v1beta1.vulnerability.Severity
(*Vulnerability)(nil), // 1: grafeas.v1beta1.vulnerability.Vulnerability
(*Details)(nil), // 2: grafeas.v1beta1.vulnerability.Details
(*PackageIssue)(nil), // 3: grafeas.v1beta1.vulnerability.PackageIssue
(*VulnerabilityLocation)(nil), // 4: grafeas.v1beta1.vulnerability.VulnerabilityLocation
(*Vulnerability_Detail)(nil), // 5: grafeas.v1beta1.vulnerability.Vulnerability.Detail
(*Vulnerability_WindowsDetail)(nil), // 6: grafeas.v1beta1.vulnerability.Vulnerability.WindowsDetail
(*Vulnerability_WindowsDetail_KnowledgeBase)(nil), // 7: grafeas.v1beta1.vulnerability.Vulnerability.WindowsDetail.KnowledgeBase
(*cvss_go_proto.CVSS)(nil), // 8: grafeas.v1beta1.vulnerability.CVSS
(*timestamp.Timestamp)(nil), // 9: google.protobuf.Timestamp
(*common_go_proto.RelatedUrl)(nil), // 10: grafeas.v1beta1.RelatedUrl
(*package_go_proto.Version)(nil), // 11: grafeas.v1beta1.package.Version
}
var file_vulnerability_proto_depIdxs = []int32{
0, // 0: grafeas.v1beta1.vulnerability.Vulnerability.severity:type_name -> grafeas.v1beta1.vulnerability.Severity
5, // 1: grafeas.v1beta1.vulnerability.Vulnerability.details:type_name -> grafeas.v1beta1.vulnerability.Vulnerability.Detail
8, // 2: grafeas.v1beta1.vulnerability.Vulnerability.cvss_v3:type_name -> grafeas.v1beta1.vulnerability.CVSS
6, // 3: grafeas.v1beta1.vulnerability.Vulnerability.windows_details:type_name -> grafeas.v1beta1.vulnerability.Vulnerability.WindowsDetail
9, // 4: grafeas.v1beta1.vulnerability.Vulnerability.source_update_time:type_name -> google.protobuf.Timestamp
8, // 5: grafeas.v1beta1.vulnerability.Vulnerability.cvss_v2:type_name -> grafeas.v1beta1.vulnerability.CVSS
0, // 6: grafeas.v1beta1.vulnerability.Details.severity:type_name -> grafeas.v1beta1.vulnerability.Severity
3, // 7: grafeas.v1beta1.vulnerability.Details.package_issue:type_name -> grafeas.v1beta1.vulnerability.PackageIssue
10, // 8: grafeas.v1beta1.vulnerability.Details.related_urls:type_name -> grafeas.v1beta1.RelatedUrl
0, // 9: grafeas.v1beta1.vulnerability.Details.effective_severity:type_name -> grafeas.v1beta1.vulnerability.Severity
4, // 10: grafeas.v1beta1.vulnerability.PackageIssue.affected_location:type_name -> grafeas.v1beta1.vulnerability.VulnerabilityLocation
4, // 11: grafeas.v1beta1.vulnerability.PackageIssue.fixed_location:type_name -> grafeas.v1beta1.vulnerability.VulnerabilityLocation
0, // 12: grafeas.v1beta1.vulnerability.PackageIssue.effective_severity:type_name -> grafeas.v1beta1.vulnerability.Severity
11, // 13: grafeas.v1beta1.vulnerability.VulnerabilityLocation.version:type_name -> grafeas.v1beta1.package.Version
11, // 14: grafeas.v1beta1.vulnerability.Vulnerability.Detail.min_affected_version:type_name -> grafeas.v1beta1.package.Version
11, // 15: grafeas.v1beta1.vulnerability.Vulnerability.Detail.max_affected_version:type_name -> grafeas.v1beta1.package.Version
4, // 16: grafeas.v1beta1.vulnerability.Vulnerability.Detail.fixed_location:type_name -> grafeas.v1beta1.vulnerability.VulnerabilityLocation
9, // 17: grafeas.v1beta1.vulnerability.Vulnerability.Detail.source_update_time:type_name -> google.protobuf.Timestamp
7, // 18: grafeas.v1beta1.vulnerability.Vulnerability.WindowsDetail.fixing_kbs:type_name -> grafeas.v1beta1.vulnerability.Vulnerability.WindowsDetail.KnowledgeBase
19, // [19:19] is the sub-list for method output_type
19, // [19:19] is the sub-list for method input_type
19, // [19:19] is the sub-list for extension type_name
19, // [19:19] is the sub-list for extension extendee
0, // [0:19] is the sub-list for field type_name
}
func init() { file_vulnerability_proto_init() }
func file_vulnerability_proto_init() {
if File_vulnerability_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_vulnerability_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Vulnerability); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Details); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PackageIssue); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*VulnerabilityLocation); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Vulnerability_Detail); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Vulnerability_WindowsDetail); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_vulnerability_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Vulnerability_WindowsDetail_KnowledgeBase); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_vulnerability_proto_rawDesc,
NumEnums: 1,
NumMessages: 7,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_vulnerability_proto_goTypes,
DependencyIndexes: file_vulnerability_proto_depIdxs,
EnumInfos: file_vulnerability_proto_enumTypes,
MessageInfos: file_vulnerability_proto_msgTypes,
}.Build()
File_vulnerability_proto = out.File
file_vulnerability_proto_rawDesc = nil
file_vulnerability_proto_goTypes = nil
file_vulnerability_proto_depIdxs = nil
}<|fim▁end|> | func (x *Vulnerability_WindowsDetail_KnowledgeBase) String() string {
return protoimpl.X.MessageStringOf(x)
} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class Institution(models.Model):
name = models.CharField(max_length=50)
@property
def teams(self):
return Team.objects.filter(institution=self)
@property
def judges(self):
return Judge.objects.filter(institution=self)<|fim▁hole|> def __str__(self):
return self.name
class Team(models.Model):
name = models.CharField(max_length=50)
institution = models.ForeignKey(Institution)
speaker1 = models.CharField(max_length=50)
speaker2 = models.CharField(max_length=50)
@property
def total_team_points(self):
from results.controllers.PointsController import PointsController
controller = PointsController()
from draw.models import Tournament
return controller.total_points_for_team(self, Tournament.instance().round_with_results)
@property
def total_speaker_sum(self):
from results.controllers.PointsController import PointsController
controller = PointsController()
from draw.models import Tournament
return sum(controller.speaker_points_for_team(self, Tournament.instance().round_with_results))
@property
def speakers(self):
return [self.speaker1, self.speaker2]
def __str__(self):
return self.institution.__str__() + ' ' + self.name
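# Illustrative usage sketch -- not part of the original module; all names are
# examples only and assume a populated database:
#
#     mit = Institution.objects.create(name='MIT')
#     team = Team.objects.create(name='A', institution=mit,
#                                speaker1='Alice', speaker2='Bob')
#     team.speakers      # ['Alice', 'Bob']
#     list(mit.teams)    # [<Team: MIT A>]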
class Judge(models.Model):
name = models.CharField(max_length=80)
institution = models.ForeignKey(Institution)
def __str__(self):
return self.name + ' <' + self.institution.__str__() + '>'
class Venue(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name<|fim▁end|> | |
<|file_name|>test_us_equity_pricing.py<|end_file_name|><|fim▁begin|>#
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import maxsize
from nose_parameterized import parameterized
from numpy import (
arange,
datetime64,
)
from numpy.testing import (
assert_array_equal,
)
from pandas import (
DataFrame,
Timestamp,
)
from pandas.util.testing import assert_index_equal
from zipline.data.us_equity_pricing import (
BcolzDailyBarReader,
NoDataBeforeDate,
NoDataAfterDate,
)
from zipline.pipeline.loaders.synthetic import (
OHLCV,
asset_start,
asset_end,
expected_bar_value,
expected_bar_values_2d,
make_bar_data,
)
from zipline.testing import seconds_to_timestamp
from zipline.testing.fixtures import (
WithBcolzEquityDailyBarReader,
ZiplineTestCase,
)
from zipline.utils.calendars import get_calendar
TEST_CALENDAR_START = Timestamp('2015-06-01', tz='UTC')
TEST_CALENDAR_STOP = Timestamp('2015-06-30', tz='UTC')
TEST_QUERY_START = Timestamp('2015-06-10', tz='UTC')
TEST_QUERY_STOP = Timestamp('2015-06-19', tz='UTC')
# One asset for each of the cases enumerated in load_raw_arrays_from_bcolz.
EQUITY_INFO = DataFrame(
[
# 1) The equity's trades start and end before the query.
{'start_date': '2015-06-01', 'end_date': '2015-06-05'},
# 2) The equity's trades start and end after the query.
{'start_date': '2015-06-22', 'end_date': '2015-06-30'},
# 3) The equity's data covers all dates in range.
{'start_date': '2015-06-02', 'end_date': '2015-06-30'},
# 4) The equity's trades start before the query start, but stop
# before the query end.
{'start_date': '2015-06-01', 'end_date': '2015-06-15'},
# 5) The equity's trades start and end during the query.
{'start_date': '2015-06-12', 'end_date': '2015-06-18'},
# 6) The equity's trades start during the query and extend beyond the
# end of the query.
{'start_date': '2015-06-15', 'end_date': '2015-06-25'},
],
index=arange(1, 7),
columns=['start_date', 'end_date'],
).astype(datetime64)
EQUITY_INFO['symbol'] = [chr(ord('A') + n) for n in range(len(EQUITY_INFO))]
TEST_QUERY_ASSETS = EQUITY_INFO.index
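# The six rows above are deliberately positioned around the
# TEST_QUERY_START..TEST_QUERY_STOP window so that the raw-array loader is
# exercised for every start/end overlap case (see comments 1-6 above).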
class BcolzDailyBarTestCase(WithBcolzEquityDailyBarReader, ZiplineTestCase):
EQUITY_DAILY_BAR_START_DATE = TEST_CALENDAR_START
EQUITY_DAILY_BAR_END_DATE = TEST_CALENDAR_STOP
@classmethod
def make_equity_info(cls):
return EQUITY_INFO
@classmethod
def make_equity_daily_bar_data(cls):
return make_bar_data(
EQUITY_INFO,
cls.equity_daily_bar_days,
)
@classmethod
def init_class_fixtures(cls):
super(BcolzDailyBarTestCase, cls).init_class_fixtures()
cls.sessions = cls.trading_calendar.sessions_in_range(
cls.trading_calendar.minute_to_session_label(TEST_CALENDAR_START),
cls.trading_calendar.minute_to_session_label(TEST_CALENDAR_STOP)
)
@property
def assets(self):
return EQUITY_INFO.index
def trading_days_between(self, start, end):
return self.sessions[self.sessions.slice_indexer(start, end)]
def asset_start(self, asset_id):
return asset_start(EQUITY_INFO, asset_id)
def asset_end(self, asset_id):
return asset_end(EQUITY_INFO, asset_id)
def dates_for_asset(self, asset_id):
start, end = self.asset_start(asset_id), self.asset_end(asset_id)
return self.trading_days_between(start, end)
def test_write_ohlcv_content(self):
result = self.bcolz_daily_bar_ctable
for column in OHLCV:
idx = 0
data = result[column][:]
multiplier = 1 if column == 'volume' else 1000
for asset_id in self.assets:
for date in self.dates_for_asset(asset_id):
self.assertEqual(
expected_bar_value(
asset_id,
date,
column
) * multiplier,
data[idx],
)
idx += 1
self.assertEqual(idx, len(data))
def test_write_day_and_id(self):
result = self.bcolz_daily_bar_ctable
idx = 0
ids = result['id']
days = result['day']
for asset_id in self.assets:
for date in self.dates_for_asset(asset_id):
self.assertEqual(ids[idx], asset_id)
self.assertEqual(date, seconds_to_timestamp(days[idx]))
idx += 1
def test_write_attrs(self):
result = self.bcolz_daily_bar_ctable
expected_first_row = {
'1': 0,
'2': 5, # Asset 1 has 5 trading days.
'3': 12, # Asset 2 has 7 trading days.
'4': 33, # Asset 3 has 21 trading days.
'5': 44, # Asset 4 has 11 trading days.
'6': 49, # Asset 5 has 5 trading days.
}
expected_last_row = {
'1': 4,
'2': 11,
'3': 32,
'4': 43,
'5': 48,
'6': 57, # Asset 6 has 9 trading days.
}
expected_calendar_offset = {
'1': 0, # Starts on 6-01, 1st trading day of month.
'2': 15, # Starts on 6-22, 16th trading day of month.
'3': 1, # Starts on 6-02, 2nd trading day of month.
'4': 0, # Starts on 6-01, 1st trading day of month.
'5': 9, # Starts on 6-12, 10th trading day of month.
'6': 10, # Starts on 6-15, 11th trading day of month.
}
self.assertEqual(result.attrs['first_row'], expected_first_row)
self.assertEqual(result.attrs['last_row'], expected_last_row)
self.assertEqual(
result.attrs['calendar_offset'],
expected_calendar_offset,
)
cal = get_calendar(result.attrs['calendar_name'])
first_session = Timestamp(result.attrs['start_session_ns'], tz='UTC')
end_session = Timestamp(result.attrs['end_session_ns'], tz='UTC')
sessions = cal.sessions_in_range(first_session, end_session)
assert_index_equal(
self.sessions,
sessions
)
def test_read_first_trading_day(self):
self.assertEqual(
self.bcolz_equity_daily_bar_reader.first_trading_day,
self.sessions[0],
)
def _check_read_results(self, columns, assets, start_date, end_date):
results = self.bcolz_equity_daily_bar_reader.load_raw_arrays(
columns,
start_date,
end_date,
assets,
)
dates = self.trading_days_between(start_date, end_date)
for column, result in zip(columns, results):
assert_array_equal(
result,
expected_bar_values_2d(
dates,
EQUITY_INFO,
column,
)
)
@parameterized.expand([
(['open'],),
(['close', 'volume'],),
(['volume', 'high', 'low'],),
(['open', 'high', 'low', 'close', 'volume'],),
])
def test_read(self, columns):
self._check_read_results(
columns,
self.assets,
TEST_QUERY_START,
TEST_QUERY_STOP,
)
def test_start_on_asset_start(self):
"""
        Test loading with queries that start on the first day of each asset's
lifetime.
"""
columns = ['high', 'volume']
for asset in self.assets:
self._check_read_results(
columns,
self.assets,
start_date=self.asset_start(asset),
end_date=self.sessions[-1],
)
def test_start_on_asset_end(self):
"""
Test loading with queries that start on the last day of each asset's
lifetime.
"""
columns = ['close', 'volume']
for asset in self.assets:
self._check_read_results(<|fim▁hole|> self.assets,
start_date=self.asset_end(asset),
end_date=self.sessions[-1],
)
def test_end_on_asset_start(self):
"""
Test loading with queries that end on the first day of each asset's
lifetime.
"""
columns = ['close', 'volume']
for asset in self.assets:
self._check_read_results(
columns,
self.assets,
start_date=self.sessions[0],
end_date=self.asset_start(asset),
)
def test_end_on_asset_end(self):
"""
Test loading with queries that end on the last day of each asset's
lifetime.
"""
columns = ['close', 'volume']
for asset in self.assets:
self._check_read_results(
columns,
self.assets,
start_date=self.sessions[0],
end_date=self.asset_end(asset),
)
def test_unadjusted_get_value(self):
reader = self.bcolz_equity_daily_bar_reader
# At beginning
price = reader.get_value(1, Timestamp('2015-06-01', tz='UTC'),
'close')
        # The synthetic writer encodes the date in each price.
self.assertEqual(108630.0, price)
# Middle
price = reader.get_value(1, Timestamp('2015-06-02', tz='UTC'),
'close')
self.assertEqual(108631.0, price)
# End
price = reader.get_value(1, Timestamp('2015-06-05', tz='UTC'),
'close')
self.assertEqual(108634.0, price)
# Another sid at beginning.
price = reader.get_value(2, Timestamp('2015-06-22', tz='UTC'),
'close')
self.assertEqual(208651.0, price)
# Ensure that volume does not have float adjustment applied.
volume = reader.get_value(1, Timestamp('2015-06-02', tz='UTC'),
'volume')
self.assertEqual(109631, volume)
def test_unadjusted_get_value_no_data(self):
table = self.bcolz_daily_bar_ctable
reader = BcolzDailyBarReader(table)
# before
with self.assertRaises(NoDataBeforeDate):
reader.get_value(2, Timestamp('2015-06-08', tz='UTC'), 'close')
# after
with self.assertRaises(NoDataAfterDate):
reader.get_value(4, Timestamp('2015-06-16', tz='UTC'), 'close')
def test_unadjusted_get_value_empty_value(self):
reader = self.bcolz_equity_daily_bar_reader
# A sid, day and corresponding index into which to overwrite a zero.
zero_sid = 1
zero_day = Timestamp('2015-06-02', tz='UTC')
zero_ix = reader.sid_day_index(zero_sid, zero_day)
old = reader._spot_col('close')[zero_ix]
try:
# Write a zero into the synthetic pricing data at the day and sid,
# so that a read should now return -1.
            # This is a little hacky, in lieu of changing the synthetic data set.
reader._spot_col('close')[zero_ix] = 0
close = reader.get_value(zero_sid, zero_day, 'close')
self.assertEqual(-1, close)
finally:
reader._spot_col('close')[zero_ix] = old
class BcolzDailyBarAlwaysReadAllTestCase(BcolzDailyBarTestCase):
"""
Force tests defined in BcolzDailyBarTestCase to always read the entire
column into memory before selecting desired asset data, when invoking
    `load_raw_arrays`.
"""
BCOLZ_DAILY_BAR_READ_ALL_THRESHOLD = 0
class BcolzDailyBarNeverReadAllTestCase(BcolzDailyBarTestCase):
"""
Force tests defined in BcolzDailyBarTestCase to never read the entire
column into memory before selecting desired asset data, when invoking
    `load_raw_arrays`.
"""
BCOLZ_DAILY_BAR_READ_ALL_THRESHOLD = maxsize<|fim▁end|> | columns, |
<|file_name|>HTTPOut.py<|end_file_name|><|fim▁begin|>import mt, os, mimetypes
from time import gmtime, strftime
class HTTPOut():
class mtEntry():
def __init__(self):
self.html = False
self.css = False
self.js = False
self.data = ""
self.target = ""
def __init__(self, session = None):
self.session = session
self.http_version = ""
self.status = ""
self.cookies = {}
self.headers = {}
        self.mt_entrys = []
        self.text_entry = ""
        self.binary_entry = ""
        self.binary_start = 0
        self.binary_end = 0
# CSS
def cssFile(self, filename): self.css(self._getFileContents(filename))
def css(self, data):
newEntry = HTTPOut.mtEntry()
newEntry.data = data
newEntry.css = True
self.mt_entrys.append(newEntry)
# Javascript
def jsFunction(self, funcName, *args):
processed_args = []
for arg in args:
if ( isinstance(arg, basestring) ): processed_args.append("\"" + arg.replace("\"", "\\\"") + "\"")
elif ( isinstance(arg, list) or isinstance(arg, dict) ): processed_args.append(str(arg))
else: processed_args.append(str(arg))
self.js(funcName + "(" + ", ".join(processed_args) + ");")
def jsFile(self, filename):
self.js(self._getFileContents(filename))
def js(self, data):
newEntry = HTTPOut.mtEntry()
newEntry.data = data
newEntry.js = True
self.mt_entrys.append(newEntry)
# HTML
def htmlFile(self, filename, target="", append=False): self.html(self._getFileContents(filename), target, append)
def html(self, data, target="", append = False):
newEntry = HTTPOut.mtEntry()
newEntry.data = data
if ( append ): newEntry.target = "+" + target
else: newEntry.target = target
newEntry.html = True
self.mt_entrys.append(newEntry)
def file(self, filepath):
self.binary_entry = filepath
def text(self, data):
self.text_entry += data
def _getFileContents(self, filepath):
if ( os.path.isfile(filepath) ):
f = open(filepath, "rb")
data = f.read()
f.close()
return str(data)
mt.log.error("404 Not Found: " + filepath)
self.status = "404 Not Found"
return None
def append(self, targ):
if ( targ == None ): return
self.cookies.update(targ.cookies)
self.headers.update(targ.headers)
self.mt_entrys.extend(targ.mt_entrys)
self.text_entry += targ.text_entry
if ( targ.http_version != "" ): self.http_version = targ.http_version
if ( targ.status != "" ): self.status = targ.status
if ( targ.binary_entry != "" ): self.binary_entry = targ.binary_entry
def send(self, socket, header_only = False):
self.headers["Date"] = strftime('%a, %d %b %Y %H:%M:%S GMT')
self.headers["Server"] = "metaTower/0.5"
content = ""
socket.settimeout(None)
if ( self.binary_entry != "" ):
if ( os.path.isfile(self.binary_entry) ):
self.status = "200 OK"
binary_size = os.path.getsize(self.binary_entry)
if ( self.binary_end == 0 ): self.binary_end = binary_size - 1
if ( self.binary_start != 0 ) or ( self.binary_end != binary_size - 1 ):
self.status = "206 Partial Content"
self.headers["Content-Range"] = "bytes " + str(self.binary_start) + "-" + str(self.binary_end) + "/" + str(binary_size)
self.headers["Accept-Ranges"] = "bytes"
self.headers["Content-Type"] = mimetypes.guess_type(self.binary_entry)[0]
self.headers["Content-Length"] = str(self.binary_end - self.binary_start + 1)
else:
mt.log.error("404 Not Found: " + self.binary_entry)
self.binary_entry = ""
self.status = "404 Not Found"
content = "404 Not Found."
self.headers["Content-Type"] = "text/plain"
self.headers["Content-Length"] = len(content)
elif ( len(self.mt_entrys) > 0 ):
self.headers["Cache-Control"] = "no-store"
locations = ""
data = ""
for entry in self.mt_entrys:
if ( entry.html ):
locations += "html:" + str(len(data)) + "," + str(len(entry.data)) + "," + entry.target + ";"
data += entry.data
if ( entry.js ):
locations += "js:" + str(len(data)) + "," + str(len(entry.data)) + ";"
data += entry.data
if ( entry.css ):
locations += "css:" + str(len(data)) + "," + str(len(entry.data)) + ";"
data += entry.data
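            # Illustrative summary of the framing assembled below (not in the
            # original): the body is "!mt:<len(locations)>;<locations><data>",
            # where each locations entry is "<kind>:<offset>,<length>[,<target>];".
            # For example, "!mt:14;html:0,5,main;Hello" delivers "Hello" to the
            # element targeted by "main".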
content = "!mt:" + str(len(locations)) + ";" + locations + data
self.headers["Content-Type"] = "text/plain"
self.headers["Content-Length"] = len(content)
elif ( self.text_entry != "" ):
if ( not self.headers.has_key("Content-Type") ):
self.headers["Content-Type"] = "text/plain"
if ( not self.headers.has_key("Content-Length") ):
self.headers["Content-Length"] = len(self.text_entry)
content = self.text_entry
else:
if ( not self.headers.has_key("Content-Length") ):
self.headers["Content-Length"] = 0
# Generate and send the headers.
if ( self.http_version == "" ): self.http_version = "HTTP/1.1"
if ( self.status == "" ): self.status = "200 OK"
headers = self.http_version + " " + self.status + "\r\n"
for key in self.headers.keys():
headers += key + ": " + str(self.headers[key]) + "\r\n"<|fim▁hole|>
if ( len(self.cookies) > 0 ):
for key, value in self.cookies.items():
headers += "Set-Cookie: " + key + "=" + value + "\r\n"
headers += "\r\n"
socket.send(headers)
if ( header_only ): return
# send the content.
if ( self.binary_entry != "" ):
f = None
try:
f = open(self.binary_entry, "rb")
f.seek(self.binary_start)
while (self.binary_start <= self.binary_end):
chunk_size = 4096
if ( (self.binary_start+chunk_size) > (self.binary_end) ): chunk_size = (self.binary_end-self.binary_start)+1
chunk = f.read(chunk_size)
if not chunk: break
socket.send(chunk)
self.binary_start += len(chunk)
f.close()
f = None
except Exception as inst:
mt.log.error("Error reading file:" + str(inst))
finally:
if ( f != None ): f.close()
else:
socket.send(content)<|fim▁end|> | |
<|file_name|>not_const_clusure_in_const.rs<|end_file_name|><|fim▁begin|>// run-pass
const _FOO: fn() -> String = || "foo".into();<|fim▁hole|>
pub fn bar() -> fn() -> String {
|| "bar".into()
}
fn main(){}<|fim▁end|> | |
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { Blog32 as default } from "../../"; |
<|file_name|>abstract_rotation.rs<|end_file_name|><|fim▁begin|>use crate::allocator::Allocator;
use crate::geometry::{Rotation, UnitComplex, UnitQuaternion};
use crate::{DefaultAllocator, DimName, Point, Scalar, SimdRealField, VectorN, U2, U3};
use simba::scalar::ClosedMul;
/// Trait implemented by rotations that can be used inside of an `Isometry` or `Similarity`.
pub trait AbstractRotation<N: Scalar, D: DimName>: PartialEq + ClosedMul + Clone {
/// The rotation identity.
fn identity() -> Self;
/// The rotation inverse.
fn inverse(&self) -> Self;
/// Change `self` to its inverse.
fn inverse_mut(&mut self);
/// Apply the rotation to the given vector.
fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
where
DefaultAllocator: Allocator<N, D>;
/// Apply the rotation to the given point.
fn transform_point(&self, p: &Point<N, D>) -> Point<N, D>
where
DefaultAllocator: Allocator<N, D>;
/// Apply the inverse rotation to the given vector.
fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
where
DefaultAllocator: Allocator<N, D>;
/// Apply the inverse rotation to the given point.
fn inverse_transform_point(&self, p: &Point<N, D>) -> Point<N, D>
where
DefaultAllocator: Allocator<N, D>;
}
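// Illustrative sketch (not part of the original file): the impls below let
// rotation types be used interchangeably behind a generic bound, e.g. for
// D = U3:
//
//     fn rotate_twice<R: AbstractRotation<f64, U3>>(r: &R, p: &Point<f64, U3>) -> Point<f64, U3> {
//         r.transform_point(&r.transform_point(p))
//     }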
impl<N: SimdRealField, D: DimName> AbstractRotation<N, D> for Rotation<N, D>
where
N::Element: SimdRealField,
DefaultAllocator: Allocator<N, D, D>,
{
#[inline]
fn identity() -> Self {
Self::identity()
}
#[inline]
fn inverse(&self) -> Self {
self.inverse()
}
#[inline]
fn inverse_mut(&mut self) {
self.inverse_mut()
}
#[inline]
fn transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
where
DefaultAllocator: Allocator<N, D>,
{
self * v
}
#[inline]
fn transform_point(&self, p: &Point<N, D>) -> Point<N, D>
where
DefaultAllocator: Allocator<N, D>,
{
self * p
}
#[inline]
fn inverse_transform_vector(&self, v: &VectorN<N, D>) -> VectorN<N, D>
where
DefaultAllocator: Allocator<N, D>,
{
self.inverse_transform_vector(v)
}
#[inline]
fn inverse_transform_point(&self, p: &Point<N, D>) -> Point<N, D>
where
DefaultAllocator: Allocator<N, D>,
{
self.inverse_transform_point(p)
}
}
impl<N: SimdRealField> AbstractRotation<N, U3> for UnitQuaternion<N>
where
N::Element: SimdRealField,
{
#[inline]
fn identity() -> Self {
Self::identity()
}
#[inline]
fn inverse(&self) -> Self {
self.inverse()
}
#[inline]
fn inverse_mut(&mut self) {
self.inverse_mut()
}<|fim▁hole|> #[inline]
fn transform_vector(&self, v: &VectorN<N, U3>) -> VectorN<N, U3> {
self * v
}
#[inline]
fn transform_point(&self, p: &Point<N, U3>) -> Point<N, U3> {
self * p
}
#[inline]
fn inverse_transform_vector(&self, v: &VectorN<N, U3>) -> VectorN<N, U3> {
self.inverse_transform_vector(v)
}
#[inline]
fn inverse_transform_point(&self, p: &Point<N, U3>) -> Point<N, U3> {
self.inverse_transform_point(p)
}
}
impl<N: SimdRealField> AbstractRotation<N, U2> for UnitComplex<N>
where
N::Element: SimdRealField,
{
#[inline]
fn identity() -> Self {
Self::identity()
}
#[inline]
fn inverse(&self) -> Self {
self.inverse()
}
#[inline]
fn inverse_mut(&mut self) {
self.inverse_mut()
}
#[inline]
fn transform_vector(&self, v: &VectorN<N, U2>) -> VectorN<N, U2> {
self * v
}
#[inline]
fn transform_point(&self, p: &Point<N, U2>) -> Point<N, U2> {
self * p
}
#[inline]
fn inverse_transform_vector(&self, v: &VectorN<N, U2>) -> VectorN<N, U2> {
self.inverse_transform_vector(v)
}
#[inline]
fn inverse_transform_point(&self, p: &Point<N, U2>) -> Point<N, U2> {
self.inverse_transform_point(p)
}
}<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# urls.py ---
#
# Created: Wed Dec 14 23:02:53 2011 (+0200)
# Author: Janne Kuuskeri
#
<|fim▁hole|>dictresource = resources.MyDictResource()
textresource = resources.MyTextResource()
respresource = resources.MyRespResource()
authresource = resources.MyAuthResource()
anonresource = resources.MyAnonResource()
permresource = resources.MyPermResource()
noneresource = resources.MyNoneResource()
echoresource = resources.MyEchoResource()
personresource = resources.PersonResource()
mapperresource = resources.MyMapperResource()
decimalresource = resources.MyDecimalResource()
scandicresource = resources.MyScandicResource()
validationresource = resources.MyValidationResource()
scandicjsonresource = resources.MyScandicJsonResource()
defaulttxtmapperresource = resources.MyDefaultMapperResource_1()
defaultobjmapperresource = resources.MyDefaultMapperResource_2()
factoryresource = resources.FactoryResource()
acl_resources = (
dictresource,
textresource,
respresource,
authresource,
anonresource,
permresource,
)
urlpatterns = patterns('',
url(r'^perm', permresource),
url(r'^auth$', authresource),
url(r'^person', personresource),
url(r'^auth/anon', anonresource),
url(r'^valid', validationresource, name='validation'),
url(r'^factory', factoryresource),
url(r'^mapper/dict', dictresource),
url(r'^mapper/text', textresource),
url(r'^mapper/resp', respresource),
url(r'^mapper/none', noneresource),
url(r'^mapper/echo', echoresource),
url(r'^mapper/reverse', mapperresource),
url(r'^mapper/decimal', decimalresource),
url(r'^mapper/scandic$', scandicresource),
url(r'^mapper/scandic/json', scandicjsonresource),
url(r'^mapper/default/txt$', defaulttxtmapperresource),
url(r'^mapper/default/obj$', defaultobjmapperresource),
)
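# Illustrative usage (not in the original): the named pattern above can be
# resolved in tests with, e.g.,
#
#   from django.core.urlresolvers import reverse
#   reverse('validation')  # -> '/valid'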
#
# urls.py ends here<|fim▁end|> | from django.conf.urls.defaults import patterns, url
import resources
|
<|file_name|>_x.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class XValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="x", parent_name="choropleth.colorbar", **kwargs):
super(XValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
max=kwargs.pop("max", 3),<|fim▁hole|><|fim▁end|> | min=kwargs.pop("min", -2),
**kwargs
) |
<|file_name|>htmltextareaelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrValue};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::refcounted::Trusted;
use dom::document::Document;
use dom::element::{AttributeMutation, Element};
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::eventtarget::EventTarget;
use dom::htmlelement::HTMLElement;
use dom::htmlfieldsetelement::HTMLFieldSetElement;
use dom::htmlformelement::{FormControl, HTMLFormElement};
use dom::keyboardevent::KeyboardEvent;
use dom::node::{ChildrenMutation, Node, NodeDamage};
use dom::node::{document_from_node, window_from_node};
use dom::nodelist::NodeList;
use dom::virtualmethods::VirtualMethods;
use msg::constellation_msg::ConstellationChan;
use msg::constellation_msg::ScriptMsg as ConstellationMsg;
use script_task::ScriptTaskEventCategory::InputEvent;
use script_task::{CommonScriptMsg, Runnable};
use selectors::states::*;
use std::cell::Cell;
use string_cache::Atom;
use textinput::{KeyReaction, Lines, TextInput};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLTextAreaElement {
htmlelement: HTMLElement,
#[ignore_heap_size_of = "#7193"]
textinput: DOMRefCell<TextInput<ConstellationChan<ConstellationMsg>>>,
cols: Cell<u32>,
rows: Cell<u32>,
// https://html.spec.whatwg.org/multipage/#concept-textarea-dirty
value_changed: Cell<bool>,
}
pub trait LayoutHTMLTextAreaElementHelpers {
#[allow(unsafe_code)]
unsafe fn get_value_for_layout(self) -> String;
#[allow(unsafe_code)]
unsafe fn get_absolute_insertion_point_for_layout(self) -> usize;
}
pub trait RawLayoutHTMLTextAreaElementHelpers {
#[allow(unsafe_code)]
unsafe fn get_cols_for_layout(self) -> u32;
#[allow(unsafe_code)]
unsafe fn get_rows_for_layout(self) -> u32;
}
impl LayoutHTMLTextAreaElementHelpers for LayoutJS<HTMLTextAreaElement> {
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn get_value_for_layout(self) -> String {
String::from((*self.unsafe_get()).textinput.borrow_for_layout().get_content())
}
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn get_absolute_insertion_point_for_layout(self) -> usize {
(*self.unsafe_get()).textinput.borrow_for_layout().get_absolute_insertion_point()
}
}
impl<'a> RawLayoutHTMLTextAreaElementHelpers for &'a HTMLTextAreaElement {
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn get_cols_for_layout(self) -> u32 {
self.cols.get()
}
#[allow(unrooted_must_root)]
#[allow(unsafe_code)]
unsafe fn get_rows_for_layout(self) -> u32 {
self.rows.get()
}
}
static DEFAULT_COLS: u32 = 20;
static DEFAULT_ROWS: u32 = 2;
impl HTMLTextAreaElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLTextAreaElement {
let chan = document.window().constellation_chan();
HTMLTextAreaElement {
htmlelement:
HTMLElement::new_inherited_with_state(IN_ENABLED_STATE,
localName, prefix, document),
textinput: DOMRefCell::new(TextInput::new(Lines::Multiple, DOMString::new(), chan)),
cols: Cell::new(DEFAULT_COLS),
rows: Cell::new(DEFAULT_ROWS),
value_changed: Cell::new(false),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTextAreaElement> {
let element = HTMLTextAreaElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLTextAreaElementBinding::Wrap)
}
}
impl HTMLTextAreaElementMethods for HTMLTextAreaElement {
// TODO A few of these attributes have default values and additional
// constraints
// https://html.spec.whatwg.org/multipage/#dom-textarea-cols
make_uint_getter!(Cols, "cols", DEFAULT_COLS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-cols
make_limited_uint_setter!(SetCols, "cols", DEFAULT_COLS);
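    // Illustrative note (not part of the original source): the make_* macros
    // used in this impl expand to ordinary IDL getters/setters, e.g.
    // `make_uint_getter!` yields roughly `fn Cols(&self) -> u32`, reading the
    // reflected "cols" content attribute and falling back to DEFAULT_COLS
    // when it is absent or unparseable.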
// https://html.spec.whatwg.org/multipage/#dom-fe-disabled
make_bool_getter!(Disabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled");
// https://html.spec.whatwg.org/multipage/#dom-fae-form
fn GetForm(&self) -> Option<Root<HTMLFormElement>> {
self.form_owner()
}
// https://html.spec.whatwg.org/multipage/#attr-fe-name
make_getter!(Name, "name");
// https://html.spec.whatwg.org/multipage/#attr-fe-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-textarea-placeholder
make_getter!(Placeholder, "placeholder");
// https://html.spec.whatwg.org/multipage/#dom-textarea-placeholder
make_setter!(SetPlaceholder, "placeholder");
// https://html.spec.whatwg.org/multipage/#attr-textarea-readonly
make_bool_getter!(ReadOnly, "readonly");
// https://html.spec.whatwg.org/multipage/#attr-textarea-readonly
make_bool_setter!(SetReadOnly, "readonly");
// https://html.spec.whatwg.org/multipage/#dom-textarea-required
make_bool_getter!(Required, "required");
// https://html.spec.whatwg.org/multipage/#dom-textarea-required
make_bool_setter!(SetRequired, "required");
// https://html.spec.whatwg.org/multipage/#dom-textarea-rows
make_uint_getter!(Rows, "rows", DEFAULT_ROWS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-rows
make_limited_uint_setter!(SetRows, "rows", DEFAULT_ROWS);
// https://html.spec.whatwg.org/multipage/#dom-textarea-wrap
make_getter!(Wrap, "wrap");
// https://html.spec.whatwg.org/multipage/#dom-textarea-wrap
make_setter!(SetWrap, "wrap");
// https://html.spec.whatwg.org/multipage/#dom-textarea-type
fn Type(&self) -> DOMString {
DOMString::from("textarea")
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-defaultvalue
fn DefaultValue(&self) -> DOMString {
self.upcast::<Node>().GetTextContent().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-defaultvalue
fn SetDefaultValue(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value));
// if the element's dirty value flag is false, then the element's
// raw value must be set to the value of the element's textContent IDL attribute
if !self.value_changed.get() {
self.reset();
}
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-value
fn Value(&self) -> DOMString {
self.textinput.borrow().get_content()
}
// https://html.spec.whatwg.org/multipage/#dom-textarea-value
fn SetValue(&self, value: DOMString) {
// TODO move the cursor to the end of the field
self.textinput.borrow_mut().set_content(value);
self.value_changed.set(true);
self.force_relayout();
}
// https://html.spec.whatwg.org/multipage/#dom-lfe-labels
fn Labels(&self) -> Root<NodeList> {
self.upcast::<HTMLElement>().labels()
}
}
impl HTMLTextAreaElement {
// https://html.spec.whatwg.org/multipage/#concept-fe-mutable
pub fn mutable(&self) -> bool {
// https://html.spec.whatwg.org/multipage/#the-textarea-element:concept-fe-mutable
!(self.Disabled() || self.ReadOnly())
}
pub fn reset(&self) {
// https://html.spec.whatwg.org/multipage/#the-textarea-element:concept-form-reset-control
self.SetValue(self.DefaultValue());
self.value_changed.set(false);
}
}
impl HTMLTextAreaElement {
fn force_relayout(&self) {
let doc = document_from_node(self);
doc.content_changed(self.upcast(), NodeDamage::OtherNodeDamage)
}
fn dispatch_change_event(&self) {
let window = window_from_node(self);
let window = window.r();
let event = Event::new(GlobalRef::Window(window),
DOMString::from("input"),
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable);
self.upcast::<EventTarget>().dispatch_event(&event);
}
}
impl VirtualMethods for HTMLTextAreaElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match *attr.local_name() {
atom!("disabled") => {
let el = self.upcast::<Element>();
match mutation {
AttributeMutation::Set(_) => {
el.set_disabled_state(true);
el.set_enabled_state(false);
},
AttributeMutation::Removed => {
el.set_disabled_state(false);
el.set_enabled_state(true);
el.check_ancestors_disabled_state_for_form_control();
}
}
},
atom!("cols") => {
let cols = mutation.new_value(attr).map(|value| {
value.as_uint()
});
self.cols.set(cols.unwrap_or(DEFAULT_COLS));
},
atom!("rows") => {
let rows = mutation.new_value(attr).map(|value| {
value.as_uint()
});
self.rows.set(rows.unwrap_or(DEFAULT_ROWS));
},
_ => {},
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
self.upcast::<Element>().check_ancestors_disabled_state_for_form_control();
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match *name {
atom!("cols") => AttrValue::from_limited_u32(value, DEFAULT_COLS),
atom!("rows") => AttrValue::from_limited_u32(value, DEFAULT_ROWS),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(tree_in_doc);
}
let node = self.upcast::<Node>();
let el = self.upcast::<Element>();
if node.ancestors().any(|ancestor| ancestor.is::<HTMLFieldSetElement>()) {
el.check_ancestors_disabled_state_for_form_control();
} else {
el.check_disabled_attribute();
}
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if !self.value_changed.get() {
self.reset();
}
}
// copied and modified from htmlinputelement.rs
fn handle_event(&self, event: &Event) {
if let Some(s) = self.super_type() {
s.handle_event(event);
}
if event.type_() == atom!("click") && !event.DefaultPrevented() {
//TODO: set the editing position for text inputs
document_from_node(self).request_focus(self.upcast());
} else if event.type_() == atom!("keydown") && !event.DefaultPrevented() {
if let Some(kevent) = event.downcast::<KeyboardEvent>() {
match self.textinput.borrow_mut().handle_keydown(kevent) {
KeyReaction::TriggerDefaultAction => (),
KeyReaction::DispatchInput => {
self.value_changed.set(true);
if event.IsTrusted() {
let window = window_from_node(self);
let window = window.r();
let chan = window.script_chan();
let handler = Trusted::new(window.get_cx(), self, chan.clone());
let dispatcher = ChangeEventRunnable {
element: handler,
};
let _ = chan.send(CommonScriptMsg::RunnableMsg(InputEvent, box dispatcher));
}
self.force_relayout();
event.PreventDefault();
}
KeyReaction::RedrawSelection => {
self.force_relayout();
event.PreventDefault();
}
KeyReaction::Nothing => (),
}
}
}
}
}
impl FormControl for HTMLTextAreaElement {}<|fim▁hole|>pub struct ChangeEventRunnable {
element: Trusted<HTMLTextAreaElement>,
}
impl Runnable for ChangeEventRunnable {
fn handler(self: Box<ChangeEventRunnable>) {
let target = self.element.root();
target.dispatch_change_event();
}
}<|fim▁end|> | |
<|file_name|>rpcrawtransaction.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2014 The Lioncoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "base58.h"
#include "core.h"
#include "init.h"
#include "keystore.h"
#include "main.h"
#include "net.h"
#include "rpcserver.h"
#include "uint256.h"
#ifdef ENABLE_WALLET
#include "wallet.h"
#endif
#include <stdint.h>
#include <boost/assign/list_of.hpp>
#include "json/json_spirit_utils.h"
#include "json/json_spirit_value.h"
using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace json_spirit;
void ScriptPubKeyToJSON(const CScript& scriptPubKey, Object& out, bool fIncludeHex)
{
txnouttype type;
vector<CTxDestination> addresses;
int nRequired;
out.push_back(Pair("asm", scriptPubKey.ToString()));
if (fIncludeHex)
out.push_back(Pair("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end())));
if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired))
{
out.push_back(Pair("type", GetTxnOutputType(type)));
return;
}
out.push_back(Pair("reqSigs", nRequired));
out.push_back(Pair("type", GetTxnOutputType(type)));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CLioncoinAddress(addr).ToString());
out.push_back(Pair("addresses", a));
}
void TxToJSON(const CTransaction& tx, const uint256 hashBlock, Object& entry)
{
entry.push_back(Pair("txid", tx.GetHash().GetHex()));
entry.push_back(Pair("version", tx.nVersion));
entry.push_back(Pair("locktime", (boost::int64_t)tx.nLockTime));
Array vin;
BOOST_FOREACH(const CTxIn& txin, tx.vin)
{
Object in;
if (tx.IsCoinBase())
in.push_back(Pair("coinbase", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
else
{
in.push_back(Pair("txid", txin.prevout.hash.GetHex()));
in.push_back(Pair("vout", (boost::int64_t)txin.prevout.n));
Object o;
o.push_back(Pair("asm", txin.scriptSig.ToString()));
o.push_back(Pair("hex", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
in.push_back(Pair("scriptSig", o));
}
in.push_back(Pair("sequence", (boost::int64_t)txin.nSequence));
vin.push_back(in);
}
entry.push_back(Pair("vin", vin));
Array vout;
for (unsigned int i = 0; i < tx.vout.size(); i++)
{
const CTxOut& txout = tx.vout[i];
Object out;
out.push_back(Pair("value", ValueFromAmount(txout.nValue)));
out.push_back(Pair("n", (boost::int64_t)i));
Object o;
ScriptPubKeyToJSON(txout.scriptPubKey, o, true);
out.push_back(Pair("scriptPubKey", o));
vout.push_back(out);
}
entry.push_back(Pair("vout", vout));
if (hashBlock != 0)
{
entry.push_back(Pair("blockhash", hashBlock.GetHex()));
map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second)
{
CBlockIndex* pindex = (*mi).second;
if (chainActive.Contains(pindex))
{<|fim▁hole|> entry.push_back(Pair("blocktime", (boost::int64_t)pindex->nTime));
}
else
entry.push_back(Pair("confirmations", 0));
}
}
}
Value getrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getrawtransaction \"txid\" ( verbose )\n"
"\nReturn the raw transaction data.\n"
"\nIf verbose=0, returns a string that is serialized, hex-encoded data for 'txid'.\n"
"If verbose is non-zero, returns an Object with information about 'txid'.\n"
"\nArguments:\n"
"1. \"txid\" (string, required) The transaction id\n"
"2. verbose (numeric, optional, default=0) If 0, return a string, other return a json object\n"
"\nResult (if verbose is not set or set to 0):\n"
"\"data\" (string) The serialized, hex-encoded data for 'txid'\n"
"\nResult (if verbose > 0):\n"
"{\n"
" \"hex\" : \"data\", (string) The serialized, hex-encoded data for 'txid'\n"
" \"txid\" : \"id\", (string) The transaction id (same as provided)\n"
" \"version\" : n, (numeric) The version\n"
" \"locktime\" : ttt, (numeric) The lock time\n"
" \"vin\" : [ (array of json objects)\n"
" {\n"
" \"txid\": \"id\", (string) The transaction id\n"
" \"vout\": n, (numeric) \n"
" \"scriptSig\": { (json object) The script\n"
" \"asm\": \"asm\", (string) asm\n"
" \"hex\": \"hex\" (string) hex\n"
" },\n"
" \"sequence\": n (numeric) The script sequence number\n"
" }\n"
" ,...\n"
" ],\n"
" \"vout\" : [ (array of json objects)\n"
" {\n"
" \"value\" : x.xxx, (numeric) The value in btc\n"
" \"n\" : n, (numeric) index\n"
" \"scriptPubKey\" : { (json object)\n"
" \"asm\" : \"asm\", (string) the asm\n"
" \"hex\" : \"hex\", (string) the hex\n"
" \"reqSigs\" : n, (numeric) The required sigs\n"
" \"type\" : \"pubkeyhash\", (string) The type, eg 'pubkeyhash'\n"
" \"addresses\" : [ (json array of string)\n"
" \"lioncoinaddress\" (string) lioncoin address\n"
" ,...\n"
" ]\n"
" }\n"
" }\n"
" ,...\n"
" ],\n"
" \"blockhash\" : \"hash\", (string) the block hash\n"
" \"confirmations\" : n, (numeric) The confirmations\n"
" \"time\" : ttt, (numeric) The transaction time in seconds since epoch (Jan 1 1970 GMT)\n"
" \"blocktime\" : ttt (numeric) The block time in seconds since epoch (Jan 1 1970 GMT)\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("getrawtransaction", "\"mytxid\"")
+ HelpExampleCli("getrawtransaction", "\"mytxid\" 1")
+ HelpExampleRpc("getrawtransaction", "\"mytxid\", 1")
);
uint256 hash = ParseHashV(params[0], "parameter 1");
bool fVerbose = false;
if (params.size() > 1)
fVerbose = (params[1].get_int() != 0);
CTransaction tx;
uint256 hashBlock = 0;
if (!GetTransaction(hash, tx, hashBlock, true))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction");
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << tx;
string strHex = HexStr(ssTx.begin(), ssTx.end());
if (!fVerbose)
return strHex;
Object result;
result.push_back(Pair("hex", strHex));
TxToJSON(tx, hashBlock, result);
return result;
}
#ifdef ENABLE_WALLET
Value listunspent(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 3)
throw runtime_error(
"listunspent ( minconf maxconf [\"address\",...] )\n"
"\nReturns array of unspent transaction outputs\n"
"with between minconf and maxconf (inclusive) confirmations.\n"
"Optionally filter to only include txouts paid to specified addresses.\n"
"Results are an array of Objects, each of which has:\n"
"{txid, vout, scriptPubKey, amount, confirmations}\n"
"\nArguments:\n"
"1. minconf (numeric, optional, default=1) The minimum confirmationsi to filter\n"
"2. maxconf (numeric, optional, default=9999999) The maximum confirmations to filter\n"
"3. \"addresses\" (string) A json array of lioncoin addresses to filter\n"
" [\n"
" \"address\" (string) lioncoin address\n"
" ,...\n"
" ]\n"
"\nResult\n"
"[ (array of json object)\n"
" {\n"
" \"txid\" : \"txid\", (string) the transaction id \n"
" \"vout\" : n, (numeric) the vout value\n"
" \"address\" : \"address\", (string) the lioncoin address\n"
" \"account\" : \"account\", (string) The associated account, or \"\" for the default account\n"
" \"scriptPubKey\" : \"key\", (string) the script key\n"
" \"amount\" : x.xxx, (numeric) the transaction amount in btc\n"
" \"confirmations\" : n (numeric) The number of confirmations\n"
" }\n"
" ,...\n"
"]\n"
"\nExamples\n"
+ HelpExampleCli("listunspent", "")
+ HelpExampleCli("listunspent", "6 9999999 \"[\\\"1PGFqEzfmQch1gKD3ra4k18PNj3tTUUSqg\\\",\\\"1LtvqCaApEdUGFkpKMM4MstjcaL4dKg8SP\\\"]\"")
+ HelpExampleRpc("listunspent", "6, 9999999 \"[\\\"1PGFqEzfmQch1gKD3ra4k18PNj3tTUUSqg\\\",\\\"1LtvqCaApEdUGFkpKMM4MstjcaL4dKg8SP\\\"]\"")
);
RPCTypeCheck(params, list_of(int_type)(int_type)(array_type));
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
int nMaxDepth = 9999999;
if (params.size() > 1)
nMaxDepth = params[1].get_int();
set<CLioncoinAddress> setAddress;
if (params.size() > 2)
{
Array inputs = params[2].get_array();
BOOST_FOREACH(Value& input, inputs)
{
CLioncoinAddress address(input.get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Lioncoin address: ")+input.get_str());
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+input.get_str());
setAddress.insert(address);
}
}
Array results;
vector<COutput> vecOutputs;
assert(pwalletMain != NULL);
pwalletMain->AvailableCoins(vecOutputs, false);
BOOST_FOREACH(const COutput& out, vecOutputs)
{
if (out.nDepth < nMinDepth || out.nDepth > nMaxDepth)
continue;
if (setAddress.size())
{
CTxDestination address;
if (!ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
continue;
if (!setAddress.count(address))
continue;
}
int64_t nValue = out.tx->vout[out.i].nValue;
const CScript& pk = out.tx->vout[out.i].scriptPubKey;
Object entry;
entry.push_back(Pair("txid", out.tx->GetHash().GetHex()));
entry.push_back(Pair("vout", out.i));
CTxDestination address;
if (ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
{
entry.push_back(Pair("address", CLioncoinAddress(address).ToString()));
if (pwalletMain->mapAddressBook.count(address))
entry.push_back(Pair("account", pwalletMain->mapAddressBook[address].name));
}
entry.push_back(Pair("scriptPubKey", HexStr(pk.begin(), pk.end())));
if (pk.IsPayToScriptHash())
{
CTxDestination address;
if (ExtractDestination(pk, address))
{
const CScriptID& hash = boost::get<const CScriptID&>(address);
CScript redeemScript;
if (pwalletMain->GetCScript(hash, redeemScript))
entry.push_back(Pair("redeemScript", HexStr(redeemScript.begin(), redeemScript.end())));
}
}
entry.push_back(Pair("amount",ValueFromAmount(nValue)));
entry.push_back(Pair("confirmations",out.nDepth));
results.push_back(entry);
}
return results;
}
#endif
Value createrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw runtime_error(
"createrawtransaction [{\"txid\":\"id\",\"vout\":n},...] {\"address\":amount,...}\n"
"\nCreate a transaction spending the given inputs and sending to the given addresses.\n"
"Returns hex-encoded raw transaction.\n"
"Note that the transaction's inputs are not signed, and\n"
"it is not stored in the wallet or transmitted to the network.\n"
"\nArguments:\n"
"1. \"transactions\" (string, required) A json array of json objects\n"
" [\n"
" {\n"
" \"txid\":\"id\", (string, required) The transaction id\n"
" \"vout\":n (numeric, required) The output number\n"
" }\n"
" ,...\n"
" ]\n"
"2. \"addresses\" (string, required) a json object with addresses as keys and amounts as values\n"
" {\n"
" \"address\": x.xxx (numeric, required) The key is the lioncoin address, the value is the btc amount\n"
" ,...\n"
" }\n"
"\nResult:\n"
"\"transaction\" (string) hex string of the transaction\n"
"\nExamples\n"
+ HelpExampleCli("createrawtransaction", "\"[{\\\"txid\\\":\\\"myid\\\",\\\"vout\\\":0}]\" \"{\\\"address\\\":0.01}\"")
+ HelpExampleRpc("createrawtransaction", "\"[{\\\"txid\\\":\\\"myid\\\",\\\"vout\\\":0}]\", \"{\\\"address\\\":0.01}\"")
);
RPCTypeCheck(params, list_of(array_type)(obj_type));
Array inputs = params[0].get_array();
Object sendTo = params[1].get_obj();
CTransaction rawTx;
BOOST_FOREACH(const Value& input, inputs)
{
const Object& o = input.get_obj();
uint256 txid = ParseHashO(o, "txid");
const Value& vout_v = find_value(o, "vout");
if (vout_v.type() != int_type)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing vout key");
int nOutput = vout_v.get_int();
if (nOutput < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be positive");
CTxIn in(COutPoint(txid, nOutput));
rawTx.vin.push_back(in);
}
set<CLioncoinAddress> setAddress;
BOOST_FOREACH(const Pair& s, sendTo)
{
CLioncoinAddress address(s.name_);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Lioncoin address: ")+s.name_);
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
setAddress.insert(address);
CScript scriptPubKey;
scriptPubKey.SetDestination(address.Get());
int64_t nAmount = AmountFromValue(s.value_);
CTxOut out(nAmount, scriptPubKey);
rawTx.vout.push_back(out);
}
CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
ss << rawTx;
return HexStr(ss.begin(), ss.end());
}
Value decoderawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decoderawtransaction \"hexstring\"\n"
"\nReturn a JSON object representing the serialized, hex-encoded transaction.\n"
"\nArguments:\n"
"1. \"txid\" (string, required) The transaction hex string\n"
"\nResult:\n"
"{\n"
" \"hex\" : \"data\", (string) The serialized, hex-encoded data for 'txid'\n"
" \"txid\" : \"id\", (string) The transaction id (same as provided)\n"
" \"version\" : n, (numeric) The version\n"
" \"locktime\" : ttt, (numeric) The lock time\n"
" \"vin\" : [ (array of json objects)\n"
" {\n"
" \"txid\": \"id\", (string) The transaction id\n"
" \"vout\": n, (numeric) The output number\n"
" \"scriptSig\": { (json object) The script\n"
" \"asm\": \"asm\", (string) asm\n"
" \"hex\": \"hex\" (string) hex\n"
" },\n"
" \"sequence\": n (numeric) The script sequence number\n"
" }\n"
" ,...\n"
" ],\n"
" \"vout\" : [ (array of json objects)\n"
" {\n"
" \"value\" : x.xxx, (numeric) The value in btc\n"
" \"n\" : n, (numeric) index\n"
" \"scriptPubKey\" : { (json object)\n"
" \"asm\" : \"asm\", (string) the asm\n"
" \"hex\" : \"hex\", (string) the hex\n"
" \"reqSigs\" : n, (numeric) The required sigs\n"
" \"type\" : \"pubkeyhash\", (string) The type, eg 'pubkeyhash'\n"
" \"addresses\" : [ (json array of string)\n"
" \"12tvKAXCxZjSmdNbao16dKXC8tRWfcF5oc\" (string) lioncoin address\n"
" ,...\n"
" ]\n"
" }\n"
" }\n"
" ,...\n"
" ],\n"
" \"blockhash\" : \"hash\", (string) the block hash\n"
" \"confirmations\" : n, (numeric) The confirmations\n"
" \"time\" : ttt, (numeric) The transaction time in seconds since epoch (Jan 1 1970 GMT)\n"
" \"blocktime\" : ttt (numeric) The block time in seconds since epoch (Jan 1 1970 GMT)\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("decoderawtransaction", "\"hexstring\"")
+ HelpExampleRpc("decoderawtransaction", "\"hexstring\"")
);
vector<unsigned char> txData(ParseHexV(params[0], "argument"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
Object result;
TxToJSON(tx, 0, result);
return result;
}
Value decodescript(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decodescript \"hex\"\n"
"\nDecode a hex-encoded script.\n"
"\nArguments:\n"
"1. \"hex\" (string) the hex encoded script\n"
"\nResult:\n"
"{\n"
" \"asm\":\"asm\", (string) Script public key\n"
" \"hex\":\"hex\", (string) hex encoded public key\n"
" \"type\":\"type\", (string) The output type\n"
" \"reqSigs\": n, (numeric) The required signatures\n"
" \"addresses\": [ (json array of string)\n"
" \"address\" (string) lioncoin address\n"
" ,...\n"
" ],\n"
" \"p2sh\",\"address\" (string) script address\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("decodescript", "\"hexstring\"")
+ HelpExampleRpc("decodescript", "\"hexstring\"")
);
RPCTypeCheck(params, list_of(str_type));
Object r;
CScript script;
if (params[0].get_str().size() > 0){
vector<unsigned char> scriptData(ParseHexV(params[0], "argument"));
script = CScript(scriptData.begin(), scriptData.end());
} else {
// Empty scripts are valid
}
ScriptPubKeyToJSON(script, r, false);
r.push_back(Pair("p2sh", CLioncoinAddress(script.GetID()).ToString()));
return r;
}
Value signrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 4)
throw runtime_error(
"signrawtransaction \"hexstring\" ( [{\"txid\":\"id\",\"vout\":n,\"scriptPubKey\":\"hex\",\"redeemScript\":\"hex\"},...] [\"privatekey1\",...] sighashtype )\n"
"\nSign inputs for raw transaction (serialized, hex-encoded).\n"
"The second optional argument (may be null) is an array of previous transaction outputs that\n"
"this transaction depends on but may not yet be in the block chain.\n"
"The third optional argument (may be null) is an array of base58-encoded private\n"
"keys that, if given, will be the only keys used to sign the transaction.\n"
#ifdef ENABLE_WALLET
+ HelpRequiringPassphrase() + "\n"
#endif
"\nArguments:\n"
"1. \"hexstring\" (string, required) The transaction hex string\n"
"2. \"prevtxs\" (string, optional) An json array of previous dependent transaction outputs\n"
" [ (json array of json objects, or 'null' if none provided)\n"
" {\n"
" \"txid\":\"id\", (string, required) The transaction id\n"
" \"vout\":n, (numeric, required) The output number\n"
" \"scriptPubKey\": \"hex\", (string, required) script key\n"
" \"redeemScript\": \"hex\" (string, required) redeem script\n"
" }\n"
" ,...\n"
" ]\n"
"3. \"privatekeys\" (string, optional) A json array of base58-encoded private keys for signing\n"
" [ (json array of strings, or 'null' if none provided)\n"
" \"privatekey\" (string) private key in base58-encoding\n"
" ,...\n"
" ]\n"
"4. \"sighashtype\" (string, optional, default=ALL) The signature has type. Must be one of\n"
" \"ALL\"\n"
" \"NONE\"\n"
" \"SINGLE\"\n"
" \"ALL|ANYONECANPAY\"\n"
" \"NONE|ANYONECANPAY\"\n"
" \"SINGLE|ANYONECANPAY\"\n"
"\nResult:\n"
"{\n"
" \"hex\": \"value\", (string) The raw transaction with signature(s) (hex-encoded string)\n"
" \"complete\": n (numeric) if transaction has a complete set of signature (0 if not)\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("signrawtransaction", "\"myhex\"")
+ HelpExampleRpc("signrawtransaction", "\"myhex\"")
);
RPCTypeCheck(params, list_of(str_type)(array_type)(array_type)(str_type), true);
vector<unsigned char> txData(ParseHexV(params[0], "argument 1"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
vector<CTransaction> txVariants;
while (!ssData.empty())
{
try {
CTransaction tx;
ssData >> tx;
txVariants.push_back(tx);
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
}
if (txVariants.empty())
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Missing transaction");
// mergedTx will end up with all the signatures; it
// starts as a clone of the rawtx:
CTransaction mergedTx(txVariants[0]);
bool fComplete = true;
// Fetch previous transactions (inputs):
CCoinsView viewDummy;
CCoinsViewCache view(viewDummy);
{
LOCK(mempool.cs);
CCoinsViewCache &viewChain = *pcoinsTip;
CCoinsViewMemPool viewMempool(viewChain, mempool);
view.SetBackend(viewMempool); // temporarily switch cache backend to db+mempool view
BOOST_FOREACH(const CTxIn& txin, mergedTx.vin) {
const uint256& prevHash = txin.prevout.hash;
CCoins coins;
view.GetCoins(prevHash, coins); // this is certainly allowed to fail
}
view.SetBackend(viewDummy); // switch back to avoid locking mempool for too long
}
bool fGivenKeys = false;
CBasicKeyStore tempKeystore;
if (params.size() > 2 && params[2].type() != null_type)
{
fGivenKeys = true;
Array keys = params[2].get_array();
BOOST_FOREACH(Value k, keys)
{
CLioncoinSecret vchSecret;
bool fGood = vchSecret.SetString(k.get_str());
if (!fGood)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid private key");
CKey key = vchSecret.GetKey();
tempKeystore.AddKey(key);
}
}
#ifdef ENABLE_WALLET
else
EnsureWalletIsUnlocked();
#endif
// Add previous txouts given in the RPC call:
if (params.size() > 1 && params[1].type() != null_type)
{
Array prevTxs = params[1].get_array();
BOOST_FOREACH(Value& p, prevTxs)
{
if (p.type() != obj_type)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "expected object with {\"txid'\",\"vout\",\"scriptPubKey\"}");
Object prevOut = p.get_obj();
RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type));
uint256 txid = ParseHashO(prevOut, "txid");
int nOut = find_value(prevOut, "vout").get_int();
if (nOut < 0)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "vout must be positive");
vector<unsigned char> pkData(ParseHexO(prevOut, "scriptPubKey"));
CScript scriptPubKey(pkData.begin(), pkData.end());
CCoins coins;
if (view.GetCoins(txid, coins)) {
if (coins.IsAvailable(nOut) && coins.vout[nOut].scriptPubKey != scriptPubKey) {
string err("Previous output scriptPubKey mismatch:\n");
err = err + coins.vout[nOut].scriptPubKey.ToString() + "\nvs:\n"+
scriptPubKey.ToString();
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, err);
}
                // what to do if txid is known, but the actual output isn't?
}
if ((unsigned int)nOut >= coins.vout.size())
coins.vout.resize(nOut+1);
coins.vout[nOut].scriptPubKey = scriptPubKey;
coins.vout[nOut].nValue = 0; // we don't know the actual output value
view.SetCoins(txid, coins);
// if redeemScript given and not using the local wallet (private keys
// given), add redeemScript to the tempKeystore so it can be signed:
if (fGivenKeys && scriptPubKey.IsPayToScriptHash())
{
RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type)("redeemScript",str_type));
Value v = find_value(prevOut, "redeemScript");
if (!(v == Value::null))
{
vector<unsigned char> rsData(ParseHexV(v, "redeemScript"));
CScript redeemScript(rsData.begin(), rsData.end());
tempKeystore.AddCScript(redeemScript);
}
}
}
}
#ifdef ENABLE_WALLET
const CKeyStore& keystore = ((fGivenKeys || !pwalletMain) ? tempKeystore : *pwalletMain);
#else
const CKeyStore& keystore = tempKeystore;
#endif
int nHashType = SIGHASH_ALL;
if (params.size() > 3 && params[3].type() != null_type)
{
static map<string, int> mapSigHashValues =
boost::assign::map_list_of
(string("ALL"), int(SIGHASH_ALL))
(string("ALL|ANYONECANPAY"), int(SIGHASH_ALL|SIGHASH_ANYONECANPAY))
(string("NONE"), int(SIGHASH_NONE))
(string("NONE|ANYONECANPAY"), int(SIGHASH_NONE|SIGHASH_ANYONECANPAY))
(string("SINGLE"), int(SIGHASH_SINGLE))
(string("SINGLE|ANYONECANPAY"), int(SIGHASH_SINGLE|SIGHASH_ANYONECANPAY))
;
string strHashType = params[3].get_str();
if (mapSigHashValues.count(strHashType))
nHashType = mapSigHashValues[strHashType];
else
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid sighash param");
}
bool fHashSingle = ((nHashType & ~SIGHASH_ANYONECANPAY) == SIGHASH_SINGLE);
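    // Illustrative note (not in the original): nHashType is a bit field, e.g.
    // "SINGLE|ANYONECANPAY" is SIGHASH_SINGLE (0x03) | SIGHASH_ANYONECANPAY
    // (0x80) == 0x83, so masking off SIGHASH_ANYONECANPAY isolates the base
    // mode tested here.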
// Sign what we can:
for (unsigned int i = 0; i < mergedTx.vin.size(); i++)
{
CTxIn& txin = mergedTx.vin[i];
CCoins coins;
if (!view.GetCoins(txin.prevout.hash, coins) || !coins.IsAvailable(txin.prevout.n))
{
fComplete = false;
continue;
}
const CScript& prevPubKey = coins.vout[txin.prevout.n].scriptPubKey;
txin.scriptSig.clear();
// Only sign SIGHASH_SINGLE if there's a corresponding output:
if (!fHashSingle || (i < mergedTx.vout.size()))
SignSignature(keystore, prevPubKey, mergedTx, i, nHashType);
// ... and merge in other signatures:
BOOST_FOREACH(const CTransaction& txv, txVariants)
{
txin.scriptSig = CombineSignatures(prevPubKey, mergedTx, i, txin.scriptSig, txv.vin[i].scriptSig);
}
if (!VerifyScript(txin.scriptSig, prevPubKey, mergedTx, i, SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC, 0))
fComplete = false;
}
Object result;
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << mergedTx;
result.push_back(Pair("hex", HexStr(ssTx.begin(), ssTx.end())));
result.push_back(Pair("complete", fComplete));
return result;
}
Value sendrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"sendrawtransaction \"hexstring\" ( allowhighfees )\n"
"\nSubmits raw transaction (serialized, hex-encoded) to local node and network.\n"
"\nAlso see createrawtransaction and signrawtransaction calls.\n"
"\nArguments:\n"
"1. \"hexstring\" (string, required) The hex string of the raw transaction)\n"
"2. allowhighfees (boolean, optional, default=false) Allow high fees\n"
"\nResult:\n"
"\"hex\" (string) The transaction hash in hex\n"
"\nExamples:\n"
"\nCreate a transaction\n"
+ HelpExampleCli("createrawtransaction", "\"[{\\\"txid\\\" : \\\"mytxid\\\",\\\"vout\\\":0}]\" \"{\\\"myaddress\\\":0.01}\"") +
"Sign the transaction, and get back the hex\n"
+ HelpExampleCli("signrawtransaction", "\"myhex\"") +
"\nSend the transaction (signed hex)\n"
+ HelpExampleCli("sendrawtransaction", "\"signedhex\"") +
"\nAs a json rpc call\n"
+ HelpExampleRpc("sendrawtransaction", "\"signedhex\"")
);
// parse hex string from parameter
vector<unsigned char> txData(ParseHexV(params[0], "parameter"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
bool fOverrideFees = false;
if (params.size() > 1)
fOverrideFees = params[1].get_bool();
// deserialize binary data stream
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
uint256 hashTx = tx.GetHash();
bool fHave = false;
CCoinsViewCache &view = *pcoinsTip;
CCoins existingCoins;
{
fHave = view.GetCoins(hashTx, existingCoins);
if (!fHave) {
// push to local node
CValidationState state;
if (!AcceptToMemoryPool(mempool, state, tx, false, NULL, !fOverrideFees))
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX rejected"); // TODO: report validation state
}
}
if (fHave) {
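        // Illustrative note (not in the original): coins still in the memory
        // pool carry a sentinel pseudo-height (MEMPOOL_HEIGHT), so an nHeight
        // below 1000000000 means the transaction was mined into a real block.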
if (existingCoins.nHeight < 1000000000)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "transaction already in block chain");
// Not in block, but already in the memory pool; will drop
// through to re-relay it.
} else {
SyncWithWallets(hashTx, tx, NULL);
}
RelayTransaction(tx, hashTx);
return hashTx.GetHex();
}<|fim▁end|> | entry.push_back(Pair("confirmations", 1 + chainActive.Height() - pindex->nHeight));
entry.push_back(Pair("time", (boost::int64_t)pindex->nTime)); |
<|file_name|>HexagonViewNorthSouth.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2016 Pablo Guardiola Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pguardiola;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Path;
import android.graphics.Region;
import android.util.AttributeSet;
import android.view.View;
public class HexagonViewNorthSouth extends View {
private Path hexagonPath;
private Path hexagonBorderPath;
private float radius;
private float width, height;
private int maskColor;
public HexagonViewNorthSouth(Context context) {
super(context);
init();
}
public HexagonViewNorthSouth(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public HexagonViewNorthSouth(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
public void setRadius(float r) {
this.radius = r;
calculatePath();
}
public void setMaskColor(int color) {
this.maskColor = color;
invalidate();
}
@Override public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
width = MeasureSpec.getSize(widthMeasureSpec);
height = MeasureSpec.getSize(heightMeasureSpec);
radius = height / 2;
calculatePath();
}
@Override public void onDraw(Canvas c) {
super.onDraw(c);
c.clipPath(hexagonBorderPath, Region.Op.DIFFERENCE);
c.drawColor(Color.WHITE);
c.save();
c.clipPath(hexagonPath, Region.Op.DIFFERENCE);
c.drawColor(maskColor);
c.save();
}
private void init() {
hexagonPath = new Path();
hexagonBorderPath = new Path();
maskColor = 0xFFb2c311;
}
private void calculatePath() {
float centerX = width / 2;
float centerY = height / 2;
float adjacent = (float) (Math.sqrt(3) * radius / 2);
float opposite = radius / 2;
float hypotenuse = radius;
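        // Illustrative note (not in the original): for a pointy-top
        // (north-south) regular hexagon the six vertices sit at (cx, cy ± r)
        // and (cx ± r·sin 60°, cy ± r·cos 60°), which is where the adjacent
        // (r·√3/2) and opposite (r/2) legs above come from.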
// North-South
hexagonPath.moveTo(centerX, centerY + hypotenuse);
hexagonPath.lineTo(centerX - adjacent, centerY + opposite);
hexagonPath.lineTo(centerX - adjacent, centerY - opposite);
hexagonPath.lineTo(centerX, centerY - hypotenuse);
hexagonPath.lineTo(centerX + adjacent, centerY - opposite);
hexagonPath.lineTo(centerX + adjacent, centerY + opposite);
hexagonPath.moveTo(centerX, centerY + hypotenuse);<|fim▁hole|> float oppositeBorder = radiusBorder / 2;
float hypotenuseBorder = radiusBorder;
// North-South
hexagonBorderPath.moveTo(centerX, centerY + hypotenuseBorder);
hexagonBorderPath.lineTo(centerX - adjacentBorder, centerY + oppositeBorder);
hexagonBorderPath.lineTo(centerX - adjacentBorder, centerY - oppositeBorder);
hexagonBorderPath.lineTo(centerX, centerY - hypotenuseBorder);
hexagonBorderPath.lineTo(centerX + adjacentBorder, centerY - oppositeBorder);
hexagonBorderPath.lineTo(centerX + adjacentBorder, centerY + oppositeBorder);
hexagonBorderPath.moveTo(centerX, centerY + hypotenuseBorder);
invalidate();
}
}<|fim▁end|> |
float radiusBorder = radius - 5;
float adjacentBorder = (float) (Math.sqrt(3) * radiusBorder / 2); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015, The Radare Project. All rights reserved.
// See the COPYING file at the top-level directory of this distribution.
// Licensed under the BSD 3-Clause License:
// <http://opensource.org/licenses/BSD-3-Clause>
// This file may not be copied, modified, or distributed
// except according to those terms.
//! Module that implements analysis and optimizations on radeco IR.
//!
//!
#[allow(dead_code)]<|fim▁hole|>// pub mod propagate;
pub mod dom;
pub mod constant_propagation;<|fim▁end|> | pub mod valueset; |
<|file_name|>restore.go<|end_file_name|><|fim▁begin|>// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package restore
import (
"context"
"database/sql"
"fmt"
"io"
"math"
"os"
"strings"
"sync"
"time"
"github.com/coreos/go-semver/semver"
"github.com/docker/go-units"
"github.com/google/uuid"
"github.com/pingcap/errors"
"github.com/pingcap/failpoint"
sstpb "github.com/pingcap/kvproto/pkg/import_sstpb"
berrors "github.com/pingcap/tidb/br/pkg/errors"
"github.com/pingcap/tidb/br/pkg/lightning/backend"
"github.com/pingcap/tidb/br/pkg/lightning/backend/importer"
"github.com/pingcap/tidb/br/pkg/lightning/backend/kv"
"github.com/pingcap/tidb/br/pkg/lightning/backend/local"
"github.com/pingcap/tidb/br/pkg/lightning/backend/tidb"
"github.com/pingcap/tidb/br/pkg/lightning/checkpoints"
"github.com/pingcap/tidb/br/pkg/lightning/common"
"github.com/pingcap/tidb/br/pkg/lightning/config"
"github.com/pingcap/tidb/br/pkg/lightning/errormanager"
"github.com/pingcap/tidb/br/pkg/lightning/glue"
"github.com/pingcap/tidb/br/pkg/lightning/log"
"github.com/pingcap/tidb/br/pkg/lightning/metric"
"github.com/pingcap/tidb/br/pkg/lightning/mydump"
"github.com/pingcap/tidb/br/pkg/lightning/tikv"
verify "github.com/pingcap/tidb/br/pkg/lightning/verification"
"github.com/pingcap/tidb/br/pkg/lightning/web"
"github.com/pingcap/tidb/br/pkg/lightning/worker"
"github.com/pingcap/tidb/br/pkg/pdutil"
"github.com/pingcap/tidb/br/pkg/storage"
"github.com/pingcap/tidb/br/pkg/utils"
"github.com/pingcap/tidb/br/pkg/version"
"github.com/pingcap/tidb/br/pkg/version/build"
"github.com/pingcap/tidb/meta/autoid"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/util/collate"
pd "github.com/tikv/pd/client"
"go.uber.org/atomic"
"go.uber.org/multierr"
"go.uber.org/zap"
"modernc.org/mathutil"
)
const (
FullLevelCompact = -1
Level1Compact = 1
)
const (
defaultGCLifeTime = 100 * time.Hour
)
const (
indexEngineID = -1
)
const (
compactStateIdle int32 = iota
compactStateDoing
)
const (
TaskMetaTableName = "task_meta"
TableMetaTableName = "table_meta"
// CreateTableMetadataTable stores the per-table sub-job information used by TiDB Lightning
CreateTableMetadataTable = `CREATE TABLE IF NOT EXISTS %s (
task_id BIGINT(20) UNSIGNED,
table_id BIGINT(64) NOT NULL,
table_name VARCHAR(64) NOT NULL,
row_id_base BIGINT(20) NOT NULL DEFAULT 0,
row_id_max BIGINT(20) NOT NULL DEFAULT 0,
total_kvs_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_bytes_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
checksum_base BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_kvs BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
total_bytes BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
checksum BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
status VARCHAR(32) NOT NULL,
has_duplicates BOOL NOT NULL DEFAULT 0,
PRIMARY KEY (table_id, task_id)
);`
// CreateTaskMetaTable stores the pre-lightning metadata used by TiDB Lightning
CreateTaskMetaTable = `CREATE TABLE IF NOT EXISTS %s (
task_id BIGINT(20) UNSIGNED NOT NULL,
pd_cfgs VARCHAR(2048) NOT NULL DEFAULT '',
status VARCHAR(32) NOT NULL,
state TINYINT(1) NOT NULL DEFAULT 0 COMMENT '0: normal, 1: exited before finish',
source_bytes BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
cluster_avail BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
PRIMARY KEY (task_id)
);`
compactionLowerThreshold = 512 * units.MiB
compactionUpperThreshold = 32 * units.GiB
)
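// For illustration (hypothetical identifiers): the %s placeholders in the two CREATE
// TABLE templates above are filled with the qualified meta-table names, e.g.
// fmt.Sprintf(CreateTaskMetaTable, "`lightning_metadata`.`task_meta`").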
var (
minTiKVVersionForDuplicateResolution = *semver.New("5.2.0")
maxTiKVVersionForDuplicateResolution = version.NextMajorVersion()
)
// DeliverPauser is a shared pauser used to pause the progress of (*chunkRestore).encodeLoop
var DeliverPauser = common.NewPauser()
// nolint:gochecknoinits // TODO: refactor
func init() {
failpoint.Inject("SetMinDeliverBytes", func(v failpoint.Value) {
minDeliverBytes = uint64(v.(int))
})
}
type saveCp struct {
tableName string
merger checkpoints.TableCheckpointMerger
waitCh chan<- error
}
type errorSummary struct {
status checkpoints.CheckpointStatus
err error
}
type errorSummaries struct {
sync.Mutex
logger log.Logger
summary map[string]errorSummary
}
// makeErrorSummaries returns an initialized errorSummaries instance
func makeErrorSummaries(logger log.Logger) errorSummaries {
return errorSummaries{
logger: logger,
summary: make(map[string]errorSummary),
}
}
func (es *errorSummaries) emitLog() {
es.Lock()
defer es.Unlock()
if errorCount := len(es.summary); errorCount > 0 {
logger := es.logger
logger.Error("tables failed to be imported", zap.Int("count", errorCount))
for tableName, errorSummary := range es.summary {
logger.Error("-",
zap.String("table", tableName),
zap.String("status", errorSummary.status.MetricName()),
log.ShortError(errorSummary.err),
)
}
}
}
func (es *errorSummaries) record(tableName string, err error, status checkpoints.CheckpointStatus) {
es.Lock()
defer es.Unlock()
es.summary[tableName] = errorSummary{status: status, err: err}
}
const (
diskQuotaStateIdle int32 = iota
diskQuotaStateChecking
diskQuotaStateImporting
diskQuotaMaxReaders = 1 << 30
)
// diskQuotaLock is essentially a read/write lock. The implementation here is inspired by sync.RWMutex.
// diskQuotaLock removes the unnecessary blocking `RLock` method and adds a non-blocking `TryRLock` method.
type diskQuotaLock struct {
w sync.Mutex // held if there are pending writers
writerSem chan struct{} // semaphore for writers to wait for completing readers
readerCount atomic.Int32 // number of pending readers
readerWait atomic.Int32 // number of departing readers
}
func newDiskQuotaLock() *diskQuotaLock {
return &diskQuotaLock{writerSem: make(chan struct{})}
}
func (d *diskQuotaLock) Lock() {
d.w.Lock()
// Announce to readers there is a pending writer.
r := d.readerCount.Sub(diskQuotaMaxReaders) + diskQuotaMaxReaders
if r != 0 && d.readerWait.Add(r) != 0 {
// Wait for active readers.
<-d.writerSem
}
}
func (d *diskQuotaLock) Unlock() {
d.readerCount.Add(diskQuotaMaxReaders)
d.w.Unlock()
}
func (d *diskQuotaLock) TryRLock() (locked bool) {
r := d.readerCount.Load()
for r >= 0 {
if d.readerCount.CAS(r, r+1) {
return true
}
r = d.readerCount.Load()
}
return false
}
func (d *diskQuotaLock) RUnlock() {
if d.readerCount.Dec() < 0 {
if d.readerWait.Dec() == 0 {
// The last reader unblocks the writer.
d.writerSem <- struct{}{}
}
}
}
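// For illustration, a minimal usage sketch (importLargeEngines and writeSomeRows are
// hypothetical helpers): the importer takes the write side exclusively while writer
// goroutines poll the non-blocking read side.
//
//	lock := newDiskQuotaLock()
//	go func() { // importer
//		lock.Lock()
//		defer lock.Unlock()
//		importLargeEngines()
//	}()
//	for !lock.TryRLock() { // writer
//		time.Sleep(time.Millisecond)
//	}
//	writeSomeRows()
//	lock.RUnlock()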
type Controller struct {
cfg *config.Config
dbMetas []*mydump.MDDatabaseMeta
dbInfos map[string]*checkpoints.TidbDBInfo
tableWorkers *worker.Pool
indexWorkers *worker.Pool
regionWorkers *worker.Pool
ioWorkers *worker.Pool
checksumWorks *worker.Pool
pauser *common.Pauser
backend backend.Backend
tidbGlue glue.Glue
alterTableLock sync.Mutex
sysVars map[string]string
tls *common.TLS
checkTemplate Template
errorSummaries errorSummaries
checkpointsDB checkpoints.DB
saveCpCh chan saveCp
checkpointsWg sync.WaitGroup
closedEngineLimit *worker.Pool
store storage.ExternalStorage
metaMgrBuilder metaMgrBuilder
errorMgr *errormanager.ErrorManager
taskMgr taskMetaMgr
diskQuotaLock *diskQuotaLock
diskQuotaState atomic.Int32
compactState atomic.Int32
status *LightningStatus
}
type LightningStatus struct {
FinishedFileSize atomic.Int64
TotalFileSize atomic.Int64
}
func NewRestoreController(
ctx context.Context,
dbMetas []*mydump.MDDatabaseMeta,
cfg *config.Config,
status *LightningStatus,
s storage.ExternalStorage,
g glue.Glue,
) (*Controller, error) {
return NewRestoreControllerWithPauser(ctx, dbMetas, cfg, status, s, DeliverPauser, g)
}
func NewRestoreControllerWithPauser(
ctx context.Context,
dbMetas []*mydump.MDDatabaseMeta,
cfg *config.Config,
status *LightningStatus,
s storage.ExternalStorage,
pauser *common.Pauser,
g glue.Glue,
) (*Controller, error) {
tls, err := cfg.ToTLS()
if err != nil {
return nil, err
}
cpdb, err := g.OpenCheckpointsDB(ctx, cfg)
if err != nil {
return nil, errors.Annotate(err, "open checkpoint db failed")
}
taskCp, err := cpdb.TaskCheckpoint(ctx)
if err != nil {
return nil, errors.Annotate(err, "get task checkpoint failed")
}
if err := verifyCheckpoint(cfg, taskCp); err != nil {
return nil, errors.Trace(err)
}
// reuse the task id so that the task meta is reused correctly.
if taskCp != nil {
cfg.TaskID = taskCp.TaskID
}
// TODO: support Lightning via SQL
db, err := g.GetDB()
if err != nil {
return nil, errors.Trace(err)
}
errorMgr := errormanager.New(db, cfg)
if err := errorMgr.Init(ctx); err != nil {
return nil, errors.Annotate(err, "failed to init error manager")
}
var backend backend.Backend
switch cfg.TikvImporter.Backend {
case config.BackendImporter:
var err error
backend, err = importer.NewImporter(ctx, tls, cfg.TikvImporter.Addr, cfg.TiDB.PdAddr)
if err != nil {
return nil, errors.Annotate(err, "open importer backend failed")
}
case config.BackendTiDB:
db, err := g.GetDB()
if err != nil {
return nil, errors.Annotate(err, "open tidb backend failed")
}
backend = tidb.NewTiDBBackend(db, cfg.TikvImporter.OnDuplicate, errorMgr)
case config.BackendLocal:
var rLimit local.Rlim_t
rLimit, err = local.GetSystemRLimit()
if err != nil {
return nil, err
}
maxOpenFiles := int(rLimit / local.Rlim_t(cfg.App.TableConcurrency))
// check overflow
if maxOpenFiles < 0 {
maxOpenFiles = math.MaxInt32
}
if cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
if err := tikv.CheckTiKVVersion(ctx, tls, cfg.TiDB.PdAddr, minTiKVVersionForDuplicateResolution, maxTiKVVersionForDuplicateResolution); err != nil {
if berrors.Is(err, berrors.ErrVersionMismatch) {
log.L().Warn("TiKV version doesn't support duplicate resolution. The resolution algorithm will fall back to 'none'", zap.Error(err))
cfg.TikvImporter.DuplicateResolution = config.DupeResAlgNone
} else {
return nil, errors.Annotate(err, "check TiKV version for duplicate resolution failed")
}
}
}
backend, err = local.NewLocalBackend(ctx, tls, cfg, g, maxOpenFiles, errorMgr)
if err != nil {
return nil, errors.Annotate(err, "build local backend failed")
}
err = verifyLocalFile(ctx, cpdb, cfg.TikvImporter.SortedKVDir)
if err != nil {
return nil, err
}
default:
return nil, errors.New("unknown backend: " + cfg.TikvImporter.Backend)
}
var metaBuilder metaMgrBuilder
switch cfg.TikvImporter.Backend {
case config.BackendLocal, config.BackendImporter:
metaBuilder = &dbMetaMgrBuilder{
db: db,
taskID: cfg.TaskID,
schema: cfg.App.MetaSchemaName,
needChecksum: cfg.PostRestore.Checksum != config.OpLevelOff,
}
default:
metaBuilder = noopMetaMgrBuilder{}
}
rc := &Controller{
cfg: cfg,
dbMetas: dbMetas,
tableWorkers: nil,
indexWorkers: nil,
regionWorkers: worker.NewPool(ctx, cfg.App.RegionConcurrency, "region"),
ioWorkers: worker.NewPool(ctx, cfg.App.IOConcurrency, "io"),
checksumWorks: worker.NewPool(ctx, cfg.TiDB.ChecksumTableConcurrency, "checksum"),
pauser: pauser,
backend: backend,
tidbGlue: g,
sysVars: defaultImportantVariables,
tls: tls,
checkTemplate: NewSimpleTemplate(),
errorSummaries: makeErrorSummaries(log.L()),
checkpointsDB: cpdb,
saveCpCh: make(chan saveCp),
closedEngineLimit: worker.NewPool(ctx, cfg.App.TableConcurrency*2, "closed-engine"),
store: s,
metaMgrBuilder: metaBuilder,
errorMgr: errorMgr,
diskQuotaLock: newDiskQuotaLock(),
status: status,
taskMgr: nil,
}
return rc, nil
}
func (rc *Controller) Close() {
rc.backend.Close()
rc.tidbGlue.GetSQLExecutor().Close()
}
func (rc *Controller) Run(ctx context.Context) error {
opts := []func(context.Context) error{
rc.setGlobalVariables,
rc.restoreSchema,
rc.preCheckRequirements,
rc.restoreTables,
rc.fullCompact,
rc.cleanCheckpoints,
}
task := log.L().Begin(zap.InfoLevel, "the whole procedure")
var err error
finished := false
outside:
for i, process := range opts {
err = process(ctx)
if i == len(opts)-1 {
finished = true
}
logger := task.With(zap.Int("step", i), log.ShortError(err))
switch {
case err == nil:
case log.IsContextCanceledError(err):
logger.Info("task canceled")
err = nil
break outside
default:
logger.Error("run failed")
fmt.Fprintf(os.Stderr, "Error: %s\n", err)
break outside // note: break, not continue
}
}
// if the process is cancelled, we should make sure the checkpoints are written to the DB.
if !finished {
rc.waitCheckpointFinish()
}
task.End(zap.ErrorLevel, err)
rc.errorSummaries.emitLog()
return errors.Trace(err)
}
type schemaStmtType int
func (stmtType schemaStmtType) String() string {
switch stmtType {
case schemaCreateDatabase:
return "restore database schema"
case schemaCreateTable:
return "restore table schema"
case schemaCreateView:
return "restore view schema"
}
return "unknown statement of schema"
}
const (
schemaCreateDatabase schemaStmtType = iota
schemaCreateTable
schemaCreateView
)
type schemaJob struct {
dbName string
tblName string // empty for create db jobs
stmtType schemaStmtType
stmts []*schemaStmt
}
type schemaStmt struct {
sql string
}
type restoreSchemaWorker struct {
ctx context.Context
quit context.CancelFunc
jobCh chan *schemaJob
errCh chan error
wg sync.WaitGroup
glue glue.Glue
store storage.ExternalStorage
}
func (worker *restoreSchemaWorker) makeJobs(
dbMetas []*mydump.MDDatabaseMeta,
getTables func(context.Context, string) ([]*model.TableInfo, error),
) error {
defer func() {
close(worker.jobCh)
worker.quit()
}()
var err error
// 1. restore databases, executing the statements concurrently
for _, dbMeta := range dbMetas {
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
stmtType: schemaCreateDatabase,
stmts: make([]*schemaStmt, 0, 1),
}
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: createDatabaseIfNotExistStmt(dbMeta.Name),
})
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
}
err = worker.wait()
if err != nil {
return err
}
// 2. restore tables, executing the statements concurrently
for _, dbMeta := range dbMetas {
// we can ignore the error here; the later check will fail if the schema doesn't match
tables, _ := getTables(worker.ctx, dbMeta.Name)
tableMap := make(map[string]struct{})
for _, t := range tables {
tableMap[t.Name.L] = struct{}{}
}
for _, tblMeta := range dbMeta.Tables {
if _, ok := tableMap[strings.ToLower(tblMeta.Name)]; ok {
// we already have this table in TiDB,
// so we skip the DDL job and let the SchemaIsValid check handle it.
continue
} else if tblMeta.SchemaFile.FileMeta.Path == "" {
return errors.Errorf("table `%s`.`%s` schema not found", dbMeta.Name, tblMeta.Name)
}
sql, err := tblMeta.GetSchema(worker.ctx, worker.store)
if sql != "" {
stmts, err := createTableIfNotExistsStmt(worker.glue.GetParser(), sql, dbMeta.Name, tblMeta.Name)
if err != nil {
return err
}
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
tblName: tblMeta.Name,
stmtType: schemaCreateTable,
stmts: make([]*schemaStmt, 0, len(stmts)),
}
for _, sql := range stmts {
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: sql,
})
}
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
}
if err != nil {
return err
}
}
}
err = worker.wait()
if err != nil {
return err
}
// 3. restore views. Since views can cross database we must restore views after all table schemas are restored.
for _, dbMeta := range dbMetas {
for _, viewMeta := range dbMeta.Views {
sql, err := viewMeta.GetSchema(worker.ctx, worker.store)
if sql != "" {
stmts, err := createTableIfNotExistsStmt(worker.glue.GetParser(), sql, dbMeta.Name, viewMeta.Name)
if err != nil {
return err
}
restoreSchemaJob := &schemaJob{
dbName: dbMeta.Name,
tblName: viewMeta.Name,
stmtType: schemaCreateView,
stmts: make([]*schemaStmt, 0, len(stmts)),
}
for _, sql := range stmts {
restoreSchemaJob.stmts = append(restoreSchemaJob.stmts, &schemaStmt{
sql: sql,
})
}
err = worker.appendJob(restoreSchemaJob)
if err != nil {
return err
}
// we don't restore views concurrently, because doing so may raise an error
err = worker.wait()
if err != nil {
return err
}
}
if err != nil {
return err
}
}
}
return nil
}
func (worker *restoreSchemaWorker) doJob() {
var session *sql.Conn
defer func() {
if session != nil {
_ = session.Close()
}<|fim▁hole|> case <-worker.ctx.Done():
// don't `return` or throw `worker.ctx.Err()` here:
// if we `return`, we can't mark the cancelled jobs as done;
// if we throw `worker.ctx.Err()`, the send will block forever
break loop
case job := <-worker.jobCh:
if job == nil {
// successful exit
return
}
var err error
if session == nil {
session, err = func() (*sql.Conn, error) {
// TODO: support lightning in SQL
db, err := worker.glue.GetDB()
if err != nil {
return nil, errors.Trace(err)
}
return db.Conn(worker.ctx)
}()
if err != nil {
worker.wg.Done()
worker.throw(err)
// don't return
break loop
}
}
logger := log.With(zap.String("db", job.dbName), zap.String("table", job.tblName))
sqlWithRetry := common.SQLWithRetry{
Logger: log.L(),
DB: session,
}
for _, stmt := range job.stmts {
task := logger.Begin(zap.DebugLevel, fmt.Sprintf("execute SQL: %s", stmt.sql))
err = sqlWithRetry.Exec(worker.ctx, "run create schema job", stmt.sql)
task.End(zap.ErrorLevel, err)
if err != nil {
err = errors.Annotatef(err, "%s %s failed", job.stmtType.String(), common.UniqueTable(job.dbName, job.tblName))
worker.wg.Done()
worker.throw(err)
// don't return
break loop
}
}
worker.wg.Done()
}
}
// mark the cancelled jobs as `Done`; a little tricky,
// because we must make sure `worker.wg.Wait()` doesn't block forever
for range worker.jobCh {
worker.wg.Done()
}
}
func (worker *restoreSchemaWorker) wait() error {
// avoid `worker.wg.Wait()` blocking forever after all `doJob` goroutines have exited.
// don't worry about the goroutine below, it never becomes a zombie:
// we have a mechanism to drain cancelled jobs from `worker.jobCh`,
// so every job sent to `worker.jobCh` will eventually be marked done.
waitCh := make(chan struct{})
go func() {
worker.wg.Wait()
close(waitCh)
}()
select {
case err := <-worker.errCh:
return err
case <-worker.ctx.Done():
return worker.ctx.Err()
case <-waitCh:
return nil
}
}
func (worker *restoreSchemaWorker) throw(err error) {
select {
case <-worker.ctx.Done():
// don't throw `worker.ctx.Err()` again; the send would block forever.
return
case worker.errCh <- err:
worker.quit()
}
}
func (worker *restoreSchemaWorker) appendJob(job *schemaJob) error {
worker.wg.Add(1)
select {
case err := <-worker.errCh:
// cancel the job
worker.wg.Done()
return err
case <-worker.ctx.Done():
// cancel the job
worker.wg.Done()
return worker.ctx.Err()
case worker.jobCh <- job:
return nil
}
}
func (rc *Controller) restoreSchema(ctx context.Context) error {
// create the tables from the schema files.
// duplicated creation is handled by the CREATE ... IF NOT EXISTS statements,
// and DataCheck later verifies that the schema in TiDB is valid for the data files.
logTask := log.L().Begin(zap.InfoLevel, "restore all schema")
concurrency := utils.MinInt(rc.cfg.App.RegionConcurrency, 8)
childCtx, cancel := context.WithCancel(ctx)
worker := restoreSchemaWorker{
ctx: childCtx,
quit: cancel,
jobCh: make(chan *schemaJob, concurrency),
errCh: make(chan error),
glue: rc.tidbGlue,
store: rc.store,
}
for i := 0; i < concurrency; i++ {
go worker.doJob()
}
getTableFunc := rc.backend.FetchRemoteTableModels
if !rc.tidbGlue.OwnsSQLExecutor() {
getTableFunc = rc.tidbGlue.GetTables
}
err := worker.makeJobs(rc.dbMetas, getTableFunc)
logTask.End(zap.ErrorLevel, err)
if err != nil {
return err
}
dbInfos, err := LoadSchemaInfo(ctx, rc.dbMetas, getTableFunc)
if err != nil {
return errors.Trace(err)
}
rc.dbInfos = dbInfos
if rc.tidbGlue.OwnsSQLExecutor() {
if err = rc.DataCheck(ctx); err != nil {
return errors.Trace(err)
}
}
// Load new checkpoints
err = rc.checkpointsDB.Initialize(ctx, rc.cfg, dbInfos)
if err != nil {
return errors.Trace(err)
}
failpoint.Inject("InitializeCheckpointExit", func() {
log.L().Warn("exit triggered", zap.String("failpoint", "InitializeCheckpointExit"))
os.Exit(0)
})
go rc.listenCheckpointUpdates()
sysVars := ObtainImportantVariables(ctx, rc.tidbGlue.GetSQLExecutor(), !rc.isTiDBBackend())
// override them with manually set vars
for k, v := range rc.cfg.TiDB.Vars {
sysVars[k] = v
}
rc.sysVars = sysVars
// Estimate the number of chunks for progress reporting
err = rc.estimateChunkCountIntoMetrics(ctx)
if err != nil {
return errors.Trace(err)
}
return nil
}
// verifyCheckpoint checks whether the previous task checkpoint is compatible with the task config
func verifyCheckpoint(cfg *config.Config, taskCp *checkpoints.TaskCheckpoint) error {
if taskCp == nil {
return nil
}
// always check the backend value even with 'check-requirements = false'
retryUsage := "destroy all checkpoints"
if cfg.Checkpoint.Driver == config.CheckpointDriverFile {
retryUsage = fmt.Sprintf("delete the file '%s'", cfg.Checkpoint.DSN)
}
retryUsage += " and remove all restored tables and try again"
if cfg.TikvImporter.Backend != taskCp.Backend {
return errors.Errorf("config 'tikv-importer.backend' value '%s' different from checkpoint value '%s', please %s", cfg.TikvImporter.Backend, taskCp.Backend, retryUsage)
}
if cfg.App.CheckRequirements {
if build.ReleaseVersion != taskCp.LightningVer {
var displayVer string
if len(taskCp.LightningVer) != 0 {
displayVer = fmt.Sprintf("at '%s'", taskCp.LightningVer)
} else {
displayVer = "before v4.0.6/v3.0.19"
}
return errors.Errorf("lightning version is '%s', but checkpoint was created %s, please %s", build.ReleaseVersion, displayVer, retryUsage)
}
errorFmt := "config '%s' value '%s' different from checkpoint value '%s'. You may set 'check-requirements = false' to skip this check or " + retryUsage
if cfg.Mydumper.SourceDir != taskCp.SourceDir {
return errors.Errorf(errorFmt, "mydumper.data-source-dir", cfg.Mydumper.SourceDir, taskCp.SourceDir)
}
if cfg.TikvImporter.Backend == config.BackendLocal && cfg.TikvImporter.SortedKVDir != taskCp.SortedKVDir {
return errors.Errorf(errorFmt, "mydumper.sorted-kv-dir", cfg.TikvImporter.SortedKVDir, taskCp.SortedKVDir)
}
if cfg.TikvImporter.Backend == config.BackendImporter && cfg.TikvImporter.Addr != taskCp.ImporterAddr {
return errors.Errorf(errorFmt, "tikv-importer.addr", cfg.TikvImporter.Backend, taskCp.Backend)
}
if cfg.TiDB.Host != taskCp.TiDBHost {
return errors.Errorf(errorFmt, "tidb.host", cfg.TiDB.Host, taskCp.TiDBHost)
}
if cfg.TiDB.Port != taskCp.TiDBPort {
return errors.Errorf(errorFmt, "tidb.port", cfg.TiDB.Port, taskCp.TiDBPort)
}
if cfg.TiDB.PdAddr != taskCp.PdAddr {
return errors.Errorf(errorFmt, "tidb.pd-addr", cfg.TiDB.PdAddr, taskCp.PdAddr)
}
}
return nil
}
// for the local backend, we should check that the local SST files exist on disk; otherwise we'll lose data
func verifyLocalFile(ctx context.Context, cpdb checkpoints.DB, dir string) error {
targetTables, err := cpdb.GetLocalStoringTables(ctx)
if err != nil {
return errors.Trace(err)
}
for tableName, engineIDs := range targetTables {
for _, engineID := range engineIDs {
_, eID := backend.MakeUUID(tableName, engineID)
file := local.Engine{UUID: eID}
err := file.Exist(dir)
if err != nil {
log.L().Error("can't find local file",
zap.String("table name", tableName),
zap.Int32("engine ID", engineID))
return errors.Trace(err)
}
}
}
return nil
}
func (rc *Controller) estimateChunkCountIntoMetrics(ctx context.Context) error {
estimatedChunkCount := 0.0
estimatedEngineCnt := int64(0)
batchSize := rc.cfg.Mydumper.BatchSize
if batchSize <= 0 {
// if the rows in the source files are not sorted by primary key (when the primary key is numeric or the clustered index is enabled),
// the key ranges of the data engines may overlap, so a bigger engine size can somewhat alleviate this.
batchSize = config.DefaultBatchSize
}
for _, dbMeta := range rc.dbMetas {
for _, tableMeta := range dbMeta.Tables {
tableName := common.UniqueTable(dbMeta.Name, tableMeta.Name)
dbCp, err := rc.checkpointsDB.Get(ctx, tableName)
if err != nil {
return errors.Trace(err)
}
fileChunks := make(map[string]float64)
for engineID, eCp := range dbCp.Engines {
if eCp.Status < checkpoints.CheckpointStatusImported {
estimatedEngineCnt++
}
if engineID == indexEngineID {
continue
}
for _, c := range eCp.Chunks {
if _, ok := fileChunks[c.Key.Path]; !ok {
fileChunks[c.Key.Path] = 0.0
}
remainChunkCnt := float64(c.Chunk.EndOffset-c.Chunk.Offset) / float64(c.Chunk.EndOffset-c.Key.Offset)
fileChunks[c.Key.Path] += remainChunkCnt
}
}
// estimate the engine count if the engine checkpoint is empty
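// for example (hypothetical numbers): TotalSize = 250 MiB with batchSize = 100 MiB
// yields ceil(250/100) = 3 data engines plus 1 index engine, i.e. 4 estimated engines.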
if len(dbCp.Engines) == 0 {
estimatedEngineCnt += ((tableMeta.TotalSize + int64(batchSize) - 1) / int64(batchSize)) + 1
}
for _, fileMeta := range tableMeta.DataFiles {
if cnt, ok := fileChunks[fileMeta.FileMeta.Path]; ok {
estimatedChunkCount += cnt
continue
}
if fileMeta.FileMeta.Type == mydump.SourceTypeCSV {
cfg := rc.cfg.Mydumper
if fileMeta.FileMeta.FileSize > int64(cfg.MaxRegionSize) && cfg.StrictFormat && !cfg.CSV.Header {
estimatedChunkCount += math.Round(float64(fileMeta.FileMeta.FileSize) / float64(cfg.MaxRegionSize))
} else {
estimatedChunkCount++
}
} else {
estimatedChunkCount++
}
}
}
}
metric.ChunkCounter.WithLabelValues(metric.ChunkStateEstimated).Add(estimatedChunkCount)
metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStateEstimated, metric.TableResultSuccess).
Add(float64(estimatedEngineCnt))
rc.tidbGlue.Record(glue.RecordEstimatedChunk, uint64(estimatedChunkCount))
return nil
}
func firstErr(errors ...error) error {
for _, err := range errors {
if err != nil {
return err
}
}
return nil
}
func (rc *Controller) saveStatusCheckpoint(ctx context.Context, tableName string, engineID int32, err error, statusIfSucceed checkpoints.CheckpointStatus) error {
merger := &checkpoints.StatusCheckpointMerger{Status: statusIfSucceed, EngineID: engineID}
logger := log.L().With(zap.String("table", tableName), zap.Int32("engine_id", engineID),
zap.String("new_status", statusIfSucceed.MetricName()), zap.Error(err))
logger.Debug("update checkpoint")
switch {
case err == nil:
break
case !common.IsContextCanceledError(err):
merger.SetInvalid()
rc.errorSummaries.record(tableName, err, statusIfSucceed)
default:
return nil
}
if engineID == checkpoints.WholeTableEngineID {
metric.RecordTableCount(statusIfSucceed.MetricName(), err)
} else {
metric.RecordEngineCount(statusIfSucceed.MetricName(), err)
}
waitCh := make(chan error, 1)
rc.saveCpCh <- saveCp{tableName: tableName, merger: merger, waitCh: waitCh}
select {
case saveCpErr := <-waitCh:
if saveCpErr != nil {
logger.Error("failed to save status checkpoint", log.ShortError(saveCpErr))
}
return saveCpErr
case <-ctx.Done():
return ctx.Err()
}
}
// listenCheckpointUpdates will combine several checkpoints together to reduce database load.
func (rc *Controller) listenCheckpointUpdates() {
rc.checkpointsWg.Add(1)
var lock sync.Mutex
coalesced := make(map[string]*checkpoints.TableCheckpointDiff)
var waiters []chan<- error
hasCheckpoint := make(chan struct{}, 1)
defer close(hasCheckpoint)
go func() {
for range hasCheckpoint {
lock.Lock()
cpd := coalesced
coalesced = make(map[string]*checkpoints.TableCheckpointDiff)
ws := waiters
waiters = nil
lock.Unlock()
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("SlowDownCheckpointUpdate", func() {})
if len(cpd) > 0 {
err := rc.checkpointsDB.Update(cpd)
for _, w := range ws {
w <- err
}
web.BroadcastCheckpointDiff(cpd)
}
rc.checkpointsWg.Done()
}
}()
for scp := range rc.saveCpCh {
lock.Lock()
cpd, ok := coalesced[scp.tableName]
if !ok {
cpd = checkpoints.NewTableCheckpointDiff()
coalesced[scp.tableName] = cpd
}
scp.merger.MergeInto(cpd)
if scp.waitCh != nil {
waiters = append(waiters, scp.waitCh)
}
if len(hasCheckpoint) == 0 {
rc.checkpointsWg.Add(1)
hasCheckpoint <- struct{}{}
}
lock.Unlock()
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfImportedChunk", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.ChunkCheckpointMerger); ok && merger.Checksum.SumKVS() >= uint64(val.(int)) {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfImportedChunk")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfStatusBecomes", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.StatusCheckpointMerger); ok && merger.EngineID >= 0 && int(merger.Status) == val.(int) {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfStatusBecomes")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("FailIfIndexEngineImported", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.StatusCheckpointMerger); ok &&
merger.EngineID == checkpoints.WholeTableEngineID &&
merger.Status == checkpoints.CheckpointStatusIndexImported && val.(int) > 0 {
rc.checkpointsWg.Done()
rc.checkpointsWg.Wait()
panic("forcing failure due to FailIfIndexEngineImported")
}
})
//nolint:scopelint // This would be either INLINED or ERASED, at compile time.
failpoint.Inject("KillIfImportedChunk", func(val failpoint.Value) {
if merger, ok := scp.merger.(*checkpoints.ChunkCheckpointMerger); ok && merger.Checksum.SumKVS() >= uint64(val.(int)) {
if err := common.KillMySelf(); err != nil {
log.L().Warn("KillMySelf() failed to kill itself", log.ShortError(err))
}
}
})
}
rc.checkpointsWg.Done()
}
// buildRunPeriodicActionAndCancelFunc builds the runPeriodicAction func and a cancel func
func (rc *Controller) buildRunPeriodicActionAndCancelFunc(ctx context.Context, stop <-chan struct{}) (func(), func(bool)) {
cancelFuncs := make([]func(bool), 0)
closeFuncs := make([]func(), 0)
// a nil channel blocks forever.
// if a cron duration is zero, we leave the corresponding channel nil to skip that action.
var logProgressChan <-chan time.Time
if rc.cfg.Cron.LogProgress.Duration > 0 {
logProgressTicker := time.NewTicker(rc.cfg.Cron.LogProgress.Duration)
closeFuncs = append(closeFuncs, func() {
logProgressTicker.Stop()
})
logProgressChan = logProgressTicker.C
}
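// the same nil-channel trick is used for switchModeChan and checkQuotaChan below:
// receiving from a nil channel never fires, so the corresponding select case is
// effectively disabled without any extra flags.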
glueProgressTicker := time.NewTicker(3 * time.Second)
closeFuncs = append(closeFuncs, func() {
glueProgressTicker.Stop()
})
var switchModeChan <-chan time.Time
// the tidb backend doesn't need to switch tikv to import mode
if rc.cfg.TikvImporter.Backend != config.BackendTiDB && rc.cfg.Cron.SwitchMode.Duration > 0 {
switchModeTicker := time.NewTicker(rc.cfg.Cron.SwitchMode.Duration)
cancelFuncs = append(cancelFuncs, func(bool) { switchModeTicker.Stop() })
cancelFuncs = append(cancelFuncs, func(do bool) {
if do {
rc.switchToNormalMode(ctx)
}
})
switchModeChan = switchModeTicker.C
}
var checkQuotaChan <-chan time.Time
// only the local backend has disk quota concerns.
if rc.cfg.TikvImporter.Backend == config.BackendLocal && rc.cfg.Cron.CheckDiskQuota.Duration > 0 {
checkQuotaTicker := time.NewTicker(rc.cfg.Cron.CheckDiskQuota.Duration)
cancelFuncs = append(cancelFuncs, func(bool) { checkQuotaTicker.Stop() })
checkQuotaChan = checkQuotaTicker.C
}
return func() {
defer func() {
for _, f := range closeFuncs {
f()
}
}()
if rc.cfg.Cron.SwitchMode.Duration > 0 {
rc.switchToImportMode(ctx)
}
start := time.Now()
for {
select {
case <-ctx.Done():
log.L().Warn("stopping periodic actions", log.ShortError(ctx.Err()))
return
case <-stop:
log.L().Info("everything imported, stopping periodic actions")
return
case <-switchModeChan:
// periodically switch to import mode, as requested by TiKV 3.0
rc.switchToImportMode(ctx)
case <-logProgressChan:
// log the current progress periodically, so OPS will know that we're still working
nanoseconds := float64(time.Since(start).Nanoseconds())
// the estimated chunk count is not accurate (likely underestimated), and the actual pending count is
// not accurate before the last table starts, so using the bigger of the two is a workaround
estimated := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateEstimated))
pending := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStatePending))
if estimated < pending {
estimated = pending
}
finished := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateFinished))
totalTables := metric.ReadCounter(metric.TableCounter.WithLabelValues(metric.TableStatePending, metric.TableResultSuccess))
completedTables := metric.ReadCounter(metric.TableCounter.WithLabelValues(metric.TableStateCompleted, metric.TableResultSuccess))
bytesRead := metric.ReadHistogramSum(metric.RowReadBytesHistogram)
engineEstimated := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStateEstimated, metric.TableResultSuccess))
enginePending := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.ChunkStatePending, metric.TableResultSuccess))
if engineEstimated < enginePending {
engineEstimated = enginePending
}
engineFinished := metric.ReadCounter(metric.ProcessedEngineCounter.WithLabelValues(metric.TableStateImported, metric.TableResultSuccess))
bytesWritten := metric.ReadCounter(metric.BytesCounter.WithLabelValues(metric.TableStateWritten))
bytesImported := metric.ReadCounter(metric.BytesCounter.WithLabelValues(metric.TableStateImported))
var state string
var remaining zap.Field
switch {
case finished >= estimated:
if engineFinished < engineEstimated {
state = "importing"
} else {
state = "post-processing"
}
case finished > 0:
state = "writing"
default:
state = "preparing"
}
// since we can't accurately estimate the extra time cost of importing after all writing is finished,
// we use estimatedWritingProgress * 0.8 + estimatedImportingProgress * 0.2 as the total
// progress.
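// for example (hypothetical numbers): finished = 50 of estimated = 100 chunks gives
// writePercent = 0.5; with bytesWritten = 4 GiB the projected total is 8 GiB, so
// bytesImported = 1 GiB gives importPercent = 0.125 and
// totalPercent = 0.5*0.8 + 0.125*0.2 = 0.425.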
remaining = zap.Skip()
totalPercent := 0.0
if finished > 0 {
writePercent := math.Min(finished/estimated, 1.0)
importPercent := 1.0
if bytesWritten > 0 {
totalBytes := bytesWritten / writePercent
importPercent = math.Min(bytesImported/totalBytes, 1.0)
}
totalPercent = writePercent*0.8 + importPercent*0.2
if totalPercent < 1.0 {
remainNanoseconds := (1.0 - totalPercent) / totalPercent * nanoseconds
remaining = zap.Duration("remaining", time.Duration(remainNanoseconds).Round(time.Second))
}
}
formatPercent := func(finish, estimate float64) string {
speed := ""
if estimate > 0 {
speed = fmt.Sprintf(" (%.1f%%)", finish/estimate*100)
}
return speed
}
// avoid output bytes speed if there are no unfinished chunks
chunkSpeed := zap.Skip()
if bytesRead > 0 {
chunkSpeed = zap.Float64("speed(MiB/s)", bytesRead/(1048576e-9*nanoseconds))
}
// Note: a speed of 28 MiB/s roughly corresponds to 100 GiB/hour.
log.L().Info("progress",
zap.String("total", fmt.Sprintf("%.1f%%", totalPercent*100)),
// zap.String("files", fmt.Sprintf("%.0f/%.0f (%.1f%%)", finished, estimated, finished/estimated*100)),
zap.String("tables", fmt.Sprintf("%.0f/%.0f%s", completedTables, totalTables, formatPercent(completedTables, totalTables))),
zap.String("chunks", fmt.Sprintf("%.0f/%.0f%s", finished, estimated, formatPercent(finished, estimated))),
zap.String("engines", fmt.Sprintf("%.f/%.f%s", engineFinished, engineEstimated, formatPercent(engineFinished, engineEstimated))),
chunkSpeed,
zap.String("state", state),
remaining,
)
case <-checkQuotaChan:
// verify that the total space occupied by the sorted-kv-dir is below the quota;
// otherwise we perform an emergency import.
rc.enforceDiskQuota(ctx)
case <-glueProgressTicker.C:
finished := metric.ReadCounter(metric.ChunkCounter.WithLabelValues(metric.ChunkStateFinished))
rc.tidbGlue.Record(glue.RecordFinishedChunk, uint64(finished))
}
}
}, func(do bool) {
log.L().Info("cancel periodic actions", zap.Bool("do", do))
for _, f := range cancelFuncs {
f(do)
}
}
}
var checksumManagerKey struct{}
const (
pauseGCTTLForDupeRes = time.Hour
pauseGCIntervalForDupeRes = time.Minute
)
func (rc *Controller) keepPauseGCForDupeRes(ctx context.Context) (<-chan struct{}, error) {
tlsOpt := rc.tls.ToPDSecurityOption()
pdCli, err := pd.NewClientWithContext(ctx, []string{rc.cfg.TiDB.PdAddr}, tlsOpt)
if err != nil {
return nil, errors.Trace(err)
}
serviceID := "lightning-duplicate-resolution-" + uuid.New().String()
ttl := int64(pauseGCTTLForDupeRes / time.Second)
var (
safePoint uint64
paused bool
)
// Try to get the minimum safe point across all services as our GC safe point.
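// The handshake, in sketch form: register this service with safePoint = 1 to learn the
// current minimum across all services, then re-register with that minimum; if the second
// call returns a value no larger than what we sent, our safe point is in effect and GC
// stays paused for the TTL.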
for i := 0; i < 10; i++ {
if i > 0 {
time.Sleep(time.Second * 3)
}
minSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, 1)
if err != nil {
pdCli.Close()
return nil, errors.Trace(err)
}
newMinSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, minSafePoint)
if err != nil {
pdCli.Close()
return nil, errors.Trace(err)
}
if newMinSafePoint <= minSafePoint {
safePoint = minSafePoint
paused = true
break
}
log.L().Warn(
"Failed to register GC safe point because the current minimum safe point is newer"+
" than what we assume, will retry newMinSafePoint next time",
zap.Uint64("minSafePoint", minSafePoint),
zap.Uint64("newMinSafePoint", newMinSafePoint),
)
}
if !paused {
pdCli.Close()
return nil, errors.New("failed to pause GC for duplicate resolution after all retries")
}
exitCh := make(chan struct{})
go func(safePoint uint64) {
defer pdCli.Close()
defer close(exitCh)
ticker := time.NewTicker(pauseGCIntervalForDupeRes)
defer ticker.Stop()
for {
select {
case <-ticker.C:
minSafePoint, err := pdCli.UpdateServiceGCSafePoint(ctx, serviceID, ttl, safePoint)
if err != nil {
log.L().Warn("Failed to register GC safe point", zap.Error(err))
continue
}
if minSafePoint > safePoint {
log.L().Warn("The current minimum safe point is newer than what we hold, duplicate records are at"+
"risk of being GC and not detectable",
zap.Uint64("safePoint", safePoint),
zap.Uint64("minSafePoint", minSafePoint),
)
safePoint = minSafePoint
}
case <-ctx.Done():
stopCtx, cancelFunc := context.WithTimeout(context.Background(), time.Second*5)
if _, err := pdCli.UpdateServiceGCSafePoint(stopCtx, serviceID, 0, safePoint); err != nil {
log.L().Warn("Failed to reset safe point ttl to zero", zap.Error(err))
}
// just make compiler happy
cancelFunc()
return
}
}
}(safePoint)
return exitCh, nil
}
func (rc *Controller) restoreTables(ctx context.Context) error {
if rc.cfg.TikvImporter.DuplicateResolution != config.DupeResAlgNone {
subCtx, cancel := context.WithCancel(ctx)
exitCh, err := rc.keepPauseGCForDupeRes(subCtx)
if err != nil {
cancel()
return errors.Trace(err)
}
defer func() {
cancel()
<-exitCh
}()
}
logTask := log.L().Begin(zap.InfoLevel, "restore all tables data")
if rc.tableWorkers == nil {
rc.tableWorkers = worker.NewPool(ctx, rc.cfg.App.TableConcurrency, "table")
}
if rc.indexWorkers == nil {
rc.indexWorkers = worker.NewPool(ctx, rc.cfg.App.IndexConcurrency, "index")
}
// for the local backend, we should disable some PD schedulers and change some settings to
// make region splitting and SST ingestion more stable.
// because the importer backend is mostly used for v3.x clusters, which don't support these APIs,
// we don't do this for the importer backend either.
finishSchedulers := func() {}
// if one lightning instance failed abnormally and we can't determine whether it needs to switch back,
// we do not switch back automatically
cleanupFunc := func() {}
switchBack := false
taskFinished := false
if rc.cfg.TikvImporter.Backend == config.BackendLocal {
logTask.Info("removing PD leader®ion schedulers")
restoreFn, err := rc.taskMgr.CheckAndPausePdSchedulers(ctx)
finishSchedulers = func() {
if restoreFn != nil {
// use context.Background to make sure this restore function can still be executed even if ctx is canceled
restoreCtx := context.Background()
needSwitchBack, needCleanup, err := rc.taskMgr.CheckAndFinishRestore(restoreCtx, taskFinished)
if err != nil {
logTask.Warn("check restore pd schedulers failed", zap.Error(err))
return
}
switchBack = needSwitchBack
if needSwitchBack {
if restoreE := restoreFn(restoreCtx); restoreE != nil {
logTask.Warn("failed to restore removed schedulers, you may need to restore them manually", zap.Error(restoreE))
}
logTask.Info("add back PD leader®ion schedulers")
// clean up task metas
if needCleanup {
logTask.Info("cleanup task metas")
if cleanupErr := rc.taskMgr.Cleanup(restoreCtx); cleanupErr != nil {
logTask.Warn("failed to clean task metas, you may need to restore them manually", zap.Error(cleanupErr))
}
// cleanup table meta and schema db if needed.
cleanupFunc = func() {
if e := rc.taskMgr.CleanupAllMetas(restoreCtx); e != nil {
logTask.Warn("failed to clean table task metas, you may need to restore them manually", zap.Error(e))
}
}
}
}
}
rc.taskMgr.Close()
}
if err != nil {
return errors.Trace(err)
}
}
defer func() {
if switchBack {
cleanupFunc()
}
}()
type task struct {
tr *TableRestore
cp *checkpoints.TableCheckpoint
}
totalTables := 0
for _, dbMeta := range rc.dbMetas {
totalTables += len(dbMeta.Tables)
}
postProcessTaskChan := make(chan task, totalTables)
var wg sync.WaitGroup
var restoreErr common.OnceError
stopPeriodicActions := make(chan struct{})
periodicActions, cancelFunc := rc.buildRunPeriodicActionAndCancelFunc(ctx, stopPeriodicActions)
go periodicActions()
finishFuncCalled := false
defer func() {
if !finishFuncCalled {
finishSchedulers()
cancelFunc(switchBack)
finishFuncCalled = true
}
}()
defer close(stopPeriodicActions)
taskCh := make(chan task, rc.cfg.App.IndexConcurrency)
defer close(taskCh)
manager, err := newChecksumManager(ctx, rc)
if err != nil {
return errors.Trace(err)
}
ctx2 := context.WithValue(ctx, &checksumManagerKey, manager)
for i := 0; i < rc.cfg.App.IndexConcurrency; i++ {
go func() {
for task := range taskCh {
tableLogTask := task.tr.logger.Begin(zap.InfoLevel, "restore table")
web.BroadcastTableCheckpoint(task.tr.tableName, task.cp)
needPostProcess, err := task.tr.restoreTable(ctx2, rc, task.cp)
err = errors.Annotatef(err, "restore table %s failed", task.tr.tableName)
tableLogTask.End(zap.ErrorLevel, err)
web.BroadcastError(task.tr.tableName, err)
metric.RecordTableCount("completed", err)
restoreErr.Set(err)
if needPostProcess {
postProcessTaskChan <- task
}
wg.Done()
}
}()
}
for _, dbMeta := range rc.dbMetas {
dbInfo := rc.dbInfos[dbMeta.Name]
for _, tableMeta := range dbMeta.Tables {
tableInfo := dbInfo.Tables[tableMeta.Name]
tableName := common.UniqueTable(dbInfo.Name, tableInfo.Name)
cp, err := rc.checkpointsDB.Get(ctx, tableName)
if err != nil {
return errors.Trace(err)
}
igCols, err := rc.cfg.Mydumper.IgnoreColumns.GetIgnoreColumns(dbInfo.Name, tableInfo.Name, rc.cfg.Mydumper.CaseSensitive)
if err != nil {
return errors.Trace(err)
}
tr, err := NewTableRestore(tableName, tableMeta, dbInfo, tableInfo, cp, igCols.Columns)
if err != nil {
return errors.Trace(err)
}
wg.Add(1)
select {
case taskCh <- task{tr: tr, cp: cp}:
case <-ctx.Done():
return ctx.Err()
}
}
}
wg.Wait()
// if context is done, should return directly
select {
case <-ctx.Done():
err = restoreErr.Get()
if err == nil {
err = ctx.Err()
}
logTask.End(zap.ErrorLevel, err)
return err
default:
}
// stop periodic tasks for restore table such as pd schedulers and switch-mode tasks.
// this helps the cluster switch back to the normal state more quickly.
// finishSchedulers()
// cancelFunc(switchBack)
// finishFuncCalled = true
taskFinished = true
close(postProcessTaskChan)
// otherwise, we should run all tasks in the post-process task chan
for i := 0; i < rc.cfg.App.TableConcurrency; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for task := range postProcessTaskChan {
metaMgr := rc.metaMgrBuilder.TableMetaMgr(task.tr)
// force all the remaining post-process tasks to be executed
_, err = task.tr.postProcess(ctx2, rc, task.cp, true, metaMgr)
restoreErr.Set(err)
}
}()
}
wg.Wait()
err = restoreErr.Get()
logTask.End(zap.ErrorLevel, err)
return err
}
func (tr *TableRestore) restoreTable(
ctx context.Context,
rc *Controller,
cp *checkpoints.TableCheckpoint,
) (bool, error) {
// 1. Load the table info.
select {
case <-ctx.Done():
return false, ctx.Err()
default:
}
metaMgr := rc.metaMgrBuilder.TableMetaMgr(tr)
// no need to do anything if the chunks are already populated
if len(cp.Engines) > 0 {
tr.logger.Info("reusing engines and files info from checkpoint",
zap.Int("enginesCnt", len(cp.Engines)),
zap.Int("filesCnt", cp.CountChunks()),
)
} else if cp.Status < checkpoints.CheckpointStatusAllWritten {
if err := tr.populateChunks(ctx, rc, cp); err != nil {
return false, errors.Trace(err)
}
// fetch the max chunk row_id as the global max row_id
rowIDMax := int64(0)
for _, engine := range cp.Engines {
if len(engine.Chunks) > 0 && engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax > rowIDMax {
rowIDMax = engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax
}
}
db, _ := rc.tidbGlue.GetDB()
versionStr, err := version.FetchVersion(ctx, db)
if err != nil {
return false, errors.Trace(err)
}
versionInfo := version.ParseServerInfo(versionStr)
// "show table next_row_id" is only available after tidb v4.0.0
if versionInfo.ServerVersion.Major >= 4 &&
(rc.cfg.TikvImporter.Backend == config.BackendLocal || rc.cfg.TikvImporter.Backend == config.BackendImporter) {
// first, insert a new row into the meta table
if err = metaMgr.InitTableMeta(ctx); err != nil {
return false, err
}
checksum, rowIDBase, err := metaMgr.AllocTableRowIDs(ctx, rowIDMax)
if err != nil {
return false, err
}
tr.RebaseChunkRowIDs(cp, rowIDBase)
if checksum != nil {
if cp.Checksum != *checksum {
cp.Checksum = *checksum
rc.saveCpCh <- saveCp{
tableName: tr.tableName,
merger: &checkpoints.TableChecksumMerger{
Checksum: cp.Checksum,
},
}
}
tr.logger.Info("checksum before restore table", zap.Object("checksum", &cp.Checksum))
}
}
if err := rc.checkpointsDB.InsertEngineCheckpoints(ctx, tr.tableName, cp.Engines); err != nil {
return false, errors.Trace(err)
}
web.BroadcastTableCheckpoint(tr.tableName, cp)
// rebase the allocator so it exceeds the number of rows.
if tr.tableInfo.Core.PKIsHandle && tr.tableInfo.Core.ContainsAutoRandomBits() {
cp.AllocBase = mathutil.MaxInt64(cp.AllocBase, tr.tableInfo.Core.AutoRandID)
if err := tr.alloc.Get(autoid.AutoRandomType).Rebase(context.Background(), cp.AllocBase, false); err != nil {
return false, err
}
} else {
cp.AllocBase = mathutil.MaxInt64(cp.AllocBase, tr.tableInfo.Core.AutoIncID)
if err := tr.alloc.Get(autoid.RowIDAllocType).Rebase(context.Background(), cp.AllocBase, false); err != nil {
return false, err
}
}
rc.saveCpCh <- saveCp{
tableName: tr.tableName,
merger: &checkpoints.RebaseCheckpointMerger{
AllocBase: cp.AllocBase,
},
}
}
// 2. Restore engines (if still needed)
err := tr.restoreEngines(ctx, rc, cp)
if err != nil {
return false, errors.Trace(err)
}
err = metaMgr.UpdateTableStatus(ctx, metaStatusRestoreFinished)
if err != nil {
return false, errors.Trace(err)
}
// 3. Post-process. With the last parameter set to false, the analyze step can be delayed and executed later
return tr.postProcess(ctx, rc, cp, false /* force-analyze */, metaMgr)
}
// fullCompact performs a full compaction of the whole data.
func (rc *Controller) fullCompact(ctx context.Context) error {
if !rc.cfg.PostRestore.Compact {
log.L().Info("skip full compaction")
return nil
}
// wait for any existing level-1 compaction to complete first.
task := log.L().Begin(zap.InfoLevel, "wait for completion of existing level 1 compaction")
for !rc.compactState.CAS(compactStateIdle, compactStateDoing) {
time.Sleep(100 * time.Millisecond)
}
task.End(zap.ErrorLevel, nil)
return errors.Trace(rc.doCompact(ctx, FullLevelCompact))
}
func (rc *Controller) doCompact(ctx context.Context, level int32) error {
tls := rc.tls.WithHost(rc.cfg.TiDB.PdAddr)
return tikv.ForAllStores(
ctx,
tls,
tikv.StoreStateDisconnected,
func(c context.Context, store *tikv.Store) error {
return tikv.Compact(c, tls, store.Address, level)
},
)
}
func (rc *Controller) switchToImportMode(ctx context.Context) {
log.L().Info("switch to import mode")
rc.switchTiKVMode(ctx, sstpb.SwitchMode_Import)
}
func (rc *Controller) switchToNormalMode(ctx context.Context) {
log.L().Info("switch to normal mode")
rc.switchTiKVMode(ctx, sstpb.SwitchMode_Normal)
}
func (rc *Controller) switchTiKVMode(ctx context.Context, mode sstpb.SwitchMode) {
// the tidb backend doesn't need to switch tikv to import mode
if rc.isTiDBBackend() {
return
}
// It is fine if we miss some stores which did not switch to Import mode,
// since we're running it periodically, so we exclude disconnected stores.
// But it is essential that all stores are switched back to Normal mode to allow
// normal operation.
var minState tikv.StoreState
if mode == sstpb.SwitchMode_Import {
minState = tikv.StoreStateOffline
} else {
minState = tikv.StoreStateDisconnected
}
tls := rc.tls.WithHost(rc.cfg.TiDB.PdAddr)
// we ignore switch mode failures since they are not fatal.
// no need to log the error, that is already done in tikv.SwitchMode.
_ = tikv.ForAllStores(
ctx,
tls,
minState,
func(c context.Context, store *tikv.Store) error {
return tikv.SwitchMode(c, tls, store.Address, mode)
},
)
}
func (rc *Controller) enforceDiskQuota(ctx context.Context) {
if !rc.diskQuotaState.CAS(diskQuotaStateIdle, diskQuotaStateChecking) {
// do not run multiple disk quota checks / imports simultaneously.
// (we execute the check in the background to avoid blocking the cron thread)
return
}
go func() {
// locker is assigned when we detect the disk quota is exceeded.
// before the disk quota is confirmed exceeded, we keep the diskQuotaLock
// unlocked to avoid periodically interrupting the writer threads.
var locker sync.Locker
defer func() {
rc.diskQuotaState.Store(diskQuotaStateIdle)
if locker != nil {
locker.Unlock()
}
}()
isRetrying := false
for {
// sleep for a cycle if we are retrying because there is nothing new to import.
if isRetrying {
select {
case <-ctx.Done():
return
case <-time.After(rc.cfg.Cron.CheckDiskQuota.Duration):
}
} else {
isRetrying = true
}
quota := int64(rc.cfg.TikvImporter.DiskQuota)
largeEngines, inProgressLargeEngines, totalDiskSize, totalMemSize := rc.backend.CheckDiskQuota(quota)
metric.LocalStorageUsageBytesGauge.WithLabelValues("disk").Set(float64(totalDiskSize))
metric.LocalStorageUsageBytesGauge.WithLabelValues("mem").Set(float64(totalMemSize))
logger := log.With(
zap.Int64("diskSize", totalDiskSize),
zap.Int64("memSize", totalMemSize),
zap.Int64("quota", quota),
zap.Int("largeEnginesCount", len(largeEngines)),
zap.Int("inProgressLargeEnginesCount", inProgressLargeEngines))
if len(largeEngines) == 0 && inProgressLargeEngines == 0 {
logger.Debug("disk quota respected")
return
}
if locker == nil {
// block all writers once we have detected that the disk quota is exceeded.
rc.diskQuotaLock.Lock()
locker = rc.diskQuotaLock
}
logger.Warn("disk quota exceeded")
if len(largeEngines) == 0 {
logger.Warn("all large engines are already importing, keep blocking all writes")
continue
}
// flush all engines so that checkpoints can be updated.
if err := rc.backend.FlushAll(ctx); err != nil {
logger.Error("flush engine for disk quota failed, check again later", log.ShortError(err))
return
}
// at this point, all engines are synchronized on disk.
// we then import the large engines one by one until complete.
// if any engine fails to import, we just try again next time, since the data is still intact.
rc.diskQuotaState.Store(diskQuotaStateImporting)
task := logger.Begin(zap.WarnLevel, "importing large engines for disk quota")
var importErr error
for _, engine := range largeEngines {
// Use a larger split region size to avoid splitting the same region many times.
if err := rc.backend.UnsafeImportAndReset(ctx, engine, int64(config.SplitRegionSize)*int64(config.MaxSplitRegionSizeRatio)); err != nil {
importErr = multierr.Append(importErr, err)
}
}
task.End(zap.ErrorLevel, importErr)
return
}
}()
}
func (rc *Controller) setGlobalVariables(ctx context.Context) error {
// skip for tidb backend to be compatible with MySQL
if rc.isTiDBBackend() {
return nil
}
// set the new collation flag based on the tidb config
enabled := ObtainNewCollationEnabled(ctx, rc.tidbGlue.GetSQLExecutor())
// we should enable/disable new collation here since in server mode, tidb config
// may be different in different tasks
collate.SetNewCollationEnabledForTest(enabled)
return nil
}
func (rc *Controller) waitCheckpointFinish() {
// wait for the checkpoint process to finish so that we can do the cleanup safely
close(rc.saveCpCh)
rc.checkpointsWg.Wait()
}
func (rc *Controller) cleanCheckpoints(ctx context.Context) error {
rc.waitCheckpointFinish()
if !rc.cfg.Checkpoint.Enable {
return nil
}
logger := log.With(
zap.Stringer("keepAfterSuccess", rc.cfg.Checkpoint.KeepAfterSuccess),
zap.Int64("taskID", rc.cfg.TaskID),
)
task := logger.Begin(zap.InfoLevel, "clean checkpoints")
var err error
switch rc.cfg.Checkpoint.KeepAfterSuccess {
case config.CheckpointRename:
err = rc.checkpointsDB.MoveCheckpoints(ctx, rc.cfg.TaskID)
case config.CheckpointRemove:
err = rc.checkpointsDB.RemoveCheckpoint(ctx, "all")
}
task.End(zap.ErrorLevel, err)
return errors.Annotate(err, "clean checkpoints")
}
func (rc *Controller) isLocalBackend() bool {
return rc.cfg.TikvImporter.Backend == config.BackendLocal
}
func (rc *Controller) isTiDBBackend() bool {
return rc.cfg.TikvImporter.Backend == config.BackendTiDB
}
// preCheckRequirements checks
// 1. Cluster resource
// 2. Local node resource
// 3. Cluster region
// 4. Lightning configuration
// before restore tables start.
func (rc *Controller) preCheckRequirements(ctx context.Context) error {
if rc.cfg.App.CheckRequirements {
if err := rc.ClusterIsAvailable(ctx); err != nil {
return errors.Trace(err)
}
if err := rc.StoragePermission(ctx); err != nil {
return errors.Trace(err)
}
}
if err := rc.metaMgrBuilder.Init(ctx); err != nil {
return err
}
taskExist := false
// We still need to sample the source data even if this task already exists, because we need to judge whether the
// source is ordered by row key to decide how to sort the local data.
source, err := rc.estimateSourceData(ctx)
if err != nil {
return errors.Trace(err)
}
if rc.isLocalBackend() {
pdController, err := pdutil.NewPdController(ctx, rc.cfg.TiDB.PdAddr,
rc.tls.TLSConfig(), rc.tls.ToPDSecurityOption())
if err != nil {
return errors.Trace(err)
}
// PdController will be closed when `taskMetaMgr` closes.
rc.taskMgr = rc.metaMgrBuilder.TaskMetaMgr(pdController)
taskExist, err = rc.taskMgr.CheckTaskExist(ctx)
if err != nil {
return errors.Trace(err)
}
if !taskExist {
if err = rc.taskMgr.InitTask(ctx, source); err != nil {
return errors.Trace(err)
}
if rc.cfg.App.CheckRequirements {
err = rc.localResource(source)
if err != nil {
return errors.Trace(err)
}
if err := rc.clusterResource(ctx, source); err != nil {
rc.taskMgr.CleanupTask(ctx)
return errors.Trace(err)
}
if err := rc.checkClusterRegion(ctx); err != nil {
return errors.Trace(err)
}
}
}
}
if rc.tidbGlue.OwnsSQLExecutor() && rc.cfg.App.CheckRequirements {
fmt.Print(rc.checkTemplate.Output())
}
if !rc.checkTemplate.Success() {
if !taskExist && rc.taskMgr != nil {
rc.taskMgr.CleanupTask(ctx)
}
return errors.Errorf("tidb-lightning check failed."+
" Please fix the failed check(s):\n %s", rc.checkTemplate.FailedMsg())
}
return nil
}
// DataCheck checks the data schema; it requires rc.restoreSchema to have finished.
func (rc *Controller) DataCheck(ctx context.Context) error {
var err error
if rc.cfg.App.CheckRequirements {
err = rc.HasLargeCSV(rc.dbMetas)
if err != nil {
return errors.Trace(err)
}
}
checkPointCriticalMsgs := make([]string, 0, len(rc.dbMetas))
schemaCriticalMsgs := make([]string, 0, len(rc.dbMetas))
var msgs []string
for _, dbInfo := range rc.dbMetas {
for _, tableInfo := range dbInfo.Tables {
// if a checkpoint exists, the table will resume importing from the checkpoint,
// so we can skip the TableHasDataInCluster and SchemaIsValid checks.
noCheckpoint := true
if rc.cfg.Checkpoint.Enable {
if msgs, noCheckpoint, err = rc.CheckpointIsValid(ctx, tableInfo); err != nil {
return errors.Trace(err)
}
if len(msgs) != 0 {
checkPointCriticalMsgs = append(checkPointCriticalMsgs, msgs...)
}
}
if rc.cfg.App.CheckRequirements && noCheckpoint && rc.cfg.TikvImporter.Backend != config.BackendTiDB {
if msgs, err = rc.SchemaIsValid(ctx, tableInfo); err != nil {
return errors.Trace(err)
}
if len(msgs) != 0 {
schemaCriticalMsgs = append(schemaCriticalMsgs, msgs...)
}
}
}
}
err = rc.checkCSVHeader(ctx, rc.dbMetas)
if err != nil {
return err
}
if len(checkPointCriticalMsgs) != 0 {
rc.checkTemplate.Collect(Critical, false, strings.Join(checkPointCriticalMsgs, "\n"))
} else {
rc.checkTemplate.Collect(Critical, true, "checkpoints are valid")
}
if len(schemaCriticalMsgs) != 0 {
rc.checkTemplate.Collect(Critical, false, strings.Join(schemaCriticalMsgs, "\n"))
} else {
rc.checkTemplate.Collect(Critical, true, "table schemas are valid")
}
return nil
}
type chunkRestore struct {
parser mydump.Parser
index int
chunk *checkpoints.ChunkCheckpoint
}
func newChunkRestore(
ctx context.Context,
index int,
cfg *config.Config,
chunk *checkpoints.ChunkCheckpoint,
ioWorkers *worker.Pool,
store storage.ExternalStorage,
tableInfo *checkpoints.TidbTableInfo,
) (*chunkRestore, error) {
blockBufSize := int64(cfg.Mydumper.ReadBlockSize)
var reader storage.ReadSeekCloser
var err error
if chunk.FileMeta.Type == mydump.SourceTypeParquet {
reader, err = mydump.OpenParquetReader(ctx, store, chunk.FileMeta.Path, chunk.FileMeta.FileSize)
} else {
reader, err = store.Open(ctx, chunk.FileMeta.Path)
}
if err != nil {
return nil, errors.Trace(err)
}
var parser mydump.Parser
switch chunk.FileMeta.Type {
case mydump.SourceTypeCSV:
hasHeader := cfg.Mydumper.CSV.Header && chunk.Chunk.Offset == 0
// Create a utf8mb4 convertor to encode and decode data with the charset of CSV files.
charsetConvertor, err := mydump.NewCharsetConvertor(cfg.Mydumper.DataCharacterSet, cfg.Mydumper.DataInvalidCharReplace)
if err != nil {
return nil, err
}
parser, err = mydump.NewCSVParser(&cfg.Mydumper.CSV, reader, blockBufSize, ioWorkers, hasHeader, charsetConvertor)
if err != nil {
return nil, errors.Trace(err)
}
case mydump.SourceTypeSQL:
parser = mydump.NewChunkParser(cfg.TiDB.SQLMode, reader, blockBufSize, ioWorkers)
case mydump.SourceTypeParquet:
parser, err = mydump.NewParquetParser(ctx, store, reader, chunk.FileMeta.Path)
if err != nil {
return nil, errors.Trace(err)
}
default:
panic(fmt.Sprintf("file '%s' with unknown source type '%s'", chunk.Key.Path, chunk.FileMeta.Type.String()))
}
if err = parser.SetPos(chunk.Chunk.Offset, chunk.Chunk.PrevRowIDMax); err != nil {
return nil, errors.Trace(err)
}
if len(chunk.ColumnPermutation) > 0 {
parser.SetColumns(getColumnNames(tableInfo.Core, chunk.ColumnPermutation))
}
return &chunkRestore{
parser: parser,
index: index,
chunk: chunk,
}, nil
}
func (cr *chunkRestore) close() {
cr.parser.Close()
}
func getColumnNames(tableInfo *model.TableInfo, permutation []int) []string {
colIndexes := make([]int, 0, len(permutation))
for i := 0; i < len(permutation); i++ {
colIndexes = append(colIndexes, -1)
}
colCnt := 0
for i, p := range permutation {
if p >= 0 {
colIndexes[p] = i
colCnt++
}
}
names := make([]string, 0, colCnt)
for _, idx := range colIndexes {
// skip columns with index -1
if idx >= 0 {
// original fields contains _tidb_rowid field
if idx == len(tableInfo.Columns) {
names = append(names, model.ExtraHandleName.O)
} else {
names = append(names, tableInfo.Columns[idx].Name.O)
}
}
}
return names
}
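// Worked example (hypothetical schema, not from the source): for a table with
// columns (a, b, c) and a data file whose header is "b,a", the permutation
// would be [1, 0, -1, -1] (a sits at file position 1, b at position 0, c and
// _tidb_rowid are absent), and getColumnNames returns ["b", "a"] -- the
// column names in file order.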
var (
maxKVQueueSize = 32 // Cache at most this number of rows before blocking the encode loop
minDeliverBytes uint64 = 96 * units.KiB // 96 KB (data + index). batch at least this amount of bytes to reduce number of messages
)
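// Illustrative math (derived from the defaults above, not a measured figure):
// with minDeliverBytes at 96 KiB, a stream of 1 KiB rows (data + index) is
// flushed to the engines roughly every 96 rows, while maxKVQueueSize caps the
// channel at 32 pending KV packets before the encode loop blocks.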
type deliveredKVs struct {
kvs kv.Row // if kvs is nil, it indicates we've got the last message.
columns []string
offset int64
rowID int64
}
type deliverResult struct {
totalDur time.Duration
err error
}
//nolint:nakedret // TODO: refactor
func (cr *chunkRestore) deliverLoop(
ctx context.Context,
kvsCh <-chan []deliveredKVs,
t *TableRestore,
engineID int32,
dataEngine, indexEngine *backend.LocalEngineWriter,
rc *Controller,
) (deliverTotalDur time.Duration, err error) {
var channelClosed bool
deliverLogger := t.logger.With(
zap.Int32("engineNumber", engineID),
zap.Int("fileIndex", cr.index),
zap.Stringer("path", &cr.chunk.Key),
zap.String("task", "deliver"),
)
// Fetch enough KV pairs from the source.
dataKVs := rc.backend.MakeEmptyRows()
indexKVs := rc.backend.MakeEmptyRows()
dataSynced := true
for !channelClosed {
var dataChecksum, indexChecksum verify.KVChecksum
var columns []string
var kvPacket []deliveredKVs
// initialize these two fields from the checkpoint's current values, so that even
// if no kv pairs are delivered, the chunk checkpoint stays the same
offset := cr.chunk.Chunk.Offset
rowID := cr.chunk.Chunk.PrevRowIDMax
populate:
for dataChecksum.SumSize()+indexChecksum.SumSize() < minDeliverBytes {
select {
case kvPacket = <-kvsCh:
if len(kvPacket) == 0 {
channelClosed = true
break populate
}
for _, p := range kvPacket {
p.kvs.ClassifyAndAppend(&dataKVs, &dataChecksum, &indexKVs, &indexChecksum)
columns = p.columns
offset = p.offset
rowID = p.rowID
}
case <-ctx.Done():
err = ctx.Err()
return
}
}
err = func() error {
// We use `TryRLock` with sleep here to avoid blocking current goroutine during importing when disk-quota is
// triggered, so that we can save chunkCheckpoint as soon as possible after `FlushEngine` is called.
// This implementation may not be very elegant or even completely correct, but it is currently a relatively
// simple and effective solution.
for !rc.diskQuotaLock.TryRLock() {
// try to update the chunk checkpoint; this helps save the checkpoint soon after importing when disk-quota is triggered
if !dataSynced {
dataSynced = cr.maybeSaveCheckpoint(rc, t, engineID, cr.chunk, dataEngine, indexEngine)
}
time.Sleep(time.Millisecond)
}
defer rc.diskQuotaLock.RUnlock()
// Write KVs into the engine
start := time.Now()
if err = dataEngine.WriteRows(ctx, columns, dataKVs); err != nil {
if !common.IsContextCanceledError(err) {
deliverLogger.Error("write to data engine failed", log.ShortError(err))
}
return errors.Trace(err)
}
if err = indexEngine.WriteRows(ctx, columns, indexKVs); err != nil {
if !common.IsContextCanceledError(err) {
deliverLogger.Error("write to index engine failed", log.ShortError(err))
}
return errors.Trace(err)
}
deliverDur := time.Since(start)
deliverTotalDur += deliverDur
metric.BlockDeliverSecondsHistogram.Observe(deliverDur.Seconds())
metric.BlockDeliverBytesHistogram.WithLabelValues(metric.BlockDeliverKindData).Observe(float64(dataChecksum.SumSize()))
metric.BlockDeliverBytesHistogram.WithLabelValues(metric.BlockDeliverKindIndex).Observe(float64(indexChecksum.SumSize()))
metric.BlockDeliverKVPairsHistogram.WithLabelValues(metric.BlockDeliverKindData).Observe(float64(dataChecksum.SumKVS()))
metric.BlockDeliverKVPairsHistogram.WithLabelValues(metric.BlockDeliverKindIndex).Observe(float64(indexChecksum.SumKVS()))
return nil
}()
if err != nil {
return
}
dataSynced = false
dataKVs = dataKVs.Clear()
indexKVs = indexKVs.Clear()
// Update the table, and save a checkpoint.
// (the write to the importer is effective immediately, thus update these here)
// No need to apply a lock since this is the only thread updating `cr.chunk.**`.
// In local mode, we should write these checkpoint after engine flushed.
cr.chunk.Checksum.Add(&dataChecksum)
cr.chunk.Checksum.Add(&indexChecksum)
cr.chunk.Chunk.Offset = offset
cr.chunk.Chunk.PrevRowIDMax = rowID
if dataChecksum.SumKVS() != 0 || indexChecksum.SumKVS() != 0 {
// No need to save checkpoint if nothing was delivered.
dataSynced = cr.maybeSaveCheckpoint(rc, t, engineID, cr.chunk, dataEngine, indexEngine)
}
failpoint.Inject("SlowDownWriteRows", func() {
deliverLogger.Warn("Slowed down write rows")
})
failpoint.Inject("FailAfterWriteRows", nil)
// TODO: for the local backend, we may save checkpoints more frequently, e.g. after
// writing 10GB of kv pairs to the data engine we could flush both the data & index
// engines, and then safely update the current checkpoint.
failpoint.Inject("LocalBackendSaveCheckpoint", func() {
if !rc.isLocalBackend() && (dataChecksum.SumKVS() != 0 || indexChecksum.SumKVS() != 0) {
// No need to save checkpoint if nothing was delivered.
saveCheckpoint(rc, t, engineID, cr.chunk)
}
})
}
return
}
func (cr *chunkRestore) maybeSaveCheckpoint(
rc *Controller,
t *TableRestore,
engineID int32,
chunk *checkpoints.ChunkCheckpoint,
data, index *backend.LocalEngineWriter,
) bool {
if data.IsSynced() && index.IsSynced() {
saveCheckpoint(rc, t, engineID, chunk)
return true
}
return false
}
func saveCheckpoint(rc *Controller, t *TableRestore, engineID int32, chunk *checkpoints.ChunkCheckpoint) {
// We need to update the AllocBase every time we've finished a file.
// The AllocBase is determined by the maximum of the "handle" (_tidb_rowid
// or integer primary key), which can only be obtained by reading all data.
var base int64
if t.tableInfo.Core.PKIsHandle && t.tableInfo.Core.ContainsAutoRandomBits() {
base = t.alloc.Get(autoid.AutoRandomType).Base() + 1
} else {
base = t.alloc.Get(autoid.RowIDAllocType).Base() + 1
}
rc.saveCpCh <- saveCp{
tableName: t.tableName,
merger: &checkpoints.RebaseCheckpointMerger{
AllocBase: base,
},
}
rc.saveCpCh <- saveCp{
tableName: t.tableName,
merger: &checkpoints.ChunkCheckpointMerger{
EngineID: engineID,
Key: chunk.Key,
Checksum: chunk.Checksum,
Pos: chunk.Chunk.Offset,
RowID: chunk.Chunk.PrevRowIDMax,
ColumnPermutation: chunk.ColumnPermutation,
},
}
}
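// Example (hypothetical numbers): if the row-ID allocator's Base() is 41999
// once a file has been fully read, the merger above rebases AllocBase to
// 42000, so an import resumed from this checkpoint allocates handles from there.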
//nolint:nakedret // TODO: refactor
func (cr *chunkRestore) encodeLoop(
ctx context.Context,
kvsCh chan<- []deliveredKVs,
t *TableRestore,
logger log.Logger,
kvEncoder kv.Encoder,
deliverCompleteCh <-chan deliverResult,
rc *Controller,
) (readTotalDur time.Duration, encodeTotalDur time.Duration, err error) {
send := func(kvs []deliveredKVs) error {
select {
case kvsCh <- kvs:
return nil
case <-ctx.Done():
return ctx.Err()
case deliverResult, ok := <-deliverCompleteCh:
if deliverResult.err == nil && !ok {
deliverResult.err = ctx.Err()
}
if deliverResult.err == nil {
deliverResult.err = errors.New("unexpected premature fulfillment")
logger.DPanic("unexpected: deliverCompleteCh prematurely fulfilled with no error", zap.Bool("chIsOpen", ok))
}
return errors.Trace(deliverResult.err)
}
}
pauser, maxKvPairsCnt := rc.pauser, rc.cfg.TikvImporter.MaxKVPairs
initializedColumns, reachEOF := false, false
for !reachEOF {
if err = pauser.Wait(ctx); err != nil {
return
}
offset, _ := cr.parser.Pos()
if offset >= cr.chunk.Chunk.EndOffset {
break
}
var readDur, encodeDur time.Duration
canDeliver := false
kvPacket := make([]deliveredKVs, 0, maxKvPairsCnt)
curOffset := offset
var newOffset, rowID int64
var kvSize uint64
outLoop:
for !canDeliver {
readDurStart := time.Now()
err = cr.parser.ReadRow()
columnNames := cr.parser.Columns()
newOffset, rowID = cr.parser.Pos()
switch errors.Cause(err) {
case nil:
if !initializedColumns {
if len(cr.chunk.ColumnPermutation) == 0 {
if err = t.initializeColumns(columnNames, cr.chunk); err != nil {
return
}
}
initializedColumns = true
}
case io.EOF:
reachEOF = true
break outLoop
default:
err = errors.Annotatef(err, "in file %s at offset %d", &cr.chunk.Key, newOffset)
return
}
readDur += time.Since(readDurStart)
encodeDurStart := time.Now()
lastRow := cr.parser.LastRow()
// sql -> kv
kvs, encodeErr := kvEncoder.Encode(logger, lastRow.Row, lastRow.RowID, cr.chunk.ColumnPermutation, cr.chunk.Key.Path, curOffset)
encodeDur += time.Since(encodeDurStart)
hasIgnoredEncodeErr := false
if encodeErr != nil {
rowText := tidb.EncodeRowForRecord(t.encTable, rc.cfg.TiDB.SQLMode, lastRow.Row, cr.chunk.ColumnPermutation)
encodeErr = rc.errorMgr.RecordTypeError(ctx, logger, t.tableName, cr.chunk.Key.Path, newOffset, rowText, encodeErr)
err = errors.Annotatef(encodeErr, "in file %s at offset %d", &cr.chunk.Key, newOffset)
hasIgnoredEncodeErr = true
}
cr.parser.RecycleRow(lastRow)
curOffset = newOffset
if err != nil {
return
}
if hasIgnoredEncodeErr {
continue
}
kvPacket = append(kvPacket, deliveredKVs{kvs: kvs, columns: columnNames, offset: newOffset, rowID: rowID})
kvSize += kvs.Size()
failpoint.Inject("mock-kv-size", func(val failpoint.Value) {
kvSize += uint64(val.(int))
})
// pebble cannot accept more than 4.0GB of kv in one batch.
// when importing a sql file whose individual kvs are larger than 4GB / maxKvPairsCnt,
// a full batch would exceed that limit and panic pebble, so add this size check.
if kvSize >= minDeliverBytes || len(kvPacket) >= maxKvPairsCnt || newOffset == cr.chunk.Chunk.EndOffset {
canDeliver = true
kvSize = 0
}
}
encodeTotalDur += encodeDur
metric.RowEncodeSecondsHistogram.Observe(encodeDur.Seconds())
readTotalDur += readDur
metric.RowReadSecondsHistogram.Observe(readDur.Seconds())
metric.RowReadBytesHistogram.Observe(float64(newOffset - offset))
if len(kvPacket) != 0 {
deliverKvStart := time.Now()
if err = send(kvPacket); err != nil {
return
}
metric.RowKVDeliverSecondsHistogram.Observe(time.Since(deliverKvStart).Seconds())
}
}
err = send([]deliveredKVs{})
return
}
func (cr *chunkRestore) restore(
ctx context.Context,
t *TableRestore,
engineID int32,
dataEngine, indexEngine *backend.LocalEngineWriter,
rc *Controller,
) error {
// Create the encoder.
kvEncoder, err := rc.backend.NewEncoder(t.encTable, &kv.SessionOptions{
SQLMode: rc.cfg.TiDB.SQLMode,
Timestamp: cr.chunk.Timestamp,
SysVars: rc.sysVars,
// use chunk.PrevRowIDMax as the auto random seed, so it can stay the same value after recover from checkpoint.
AutoRandomSeed: cr.chunk.Chunk.PrevRowIDMax,
})
if err != nil {
return err
}
kvsCh := make(chan []deliveredKVs, maxKVQueueSize)
deliverCompleteCh := make(chan deliverResult)
defer func() {
kvEncoder.Close()
kvEncoder = nil
close(kvsCh)
}()
go func() {
defer close(deliverCompleteCh)
dur, err := cr.deliverLoop(ctx, kvsCh, t, engineID, dataEngine, indexEngine, rc)
select {
case <-ctx.Done():
case deliverCompleteCh <- deliverResult{dur, err}:
}
}()
logTask := t.logger.With(
zap.Int32("engineNumber", engineID),
zap.Int("fileIndex", cr.index),
zap.Stringer("path", &cr.chunk.Key),
).Begin(zap.InfoLevel, "restore file")
readTotalDur, encodeTotalDur, err := cr.encodeLoop(ctx, kvsCh, t, logTask.Logger, kvEncoder, deliverCompleteCh, rc)
if err != nil {
return err
}
select {
case deliverResult, ok := <-deliverCompleteCh:
if ok {
logTask.End(zap.ErrorLevel, deliverResult.err,
zap.Duration("readDur", readTotalDur),
zap.Duration("encodeDur", encodeTotalDur),
zap.Duration("deliverDur", deliverResult.totalDur),
zap.Object("checksum", &cr.chunk.Checksum),
)
return errors.Trace(deliverResult.err)
}
// otherwise, this must have been caused by ctx cancellation
return ctx.Err()
case <-ctx.Done():
return ctx.Err()
}
}
// index.js
const readdirp = require('readdirp'),
path = require('path'),
fs = require('graceful-fs'),
_ = require('lodash'),
frontMatterParser = require('./parsers/front_matter_parser'),
markdownParser = require('./parsers/markdown_parser'),
fileParser = require('./parsers/file_parser'),
linkParser = require('./parsers/link_parser'),
linkAttributeParser = require('./parsers/link_attribute_parser'),
parsers = [
frontMatterParser,
markdownParser,
fileParser,
linkAttributeParser
],
directoryFilters = ['!node_modules', '!lib', '!Archive'],
fileFilters = '*.md',
F = {
failed: null
},
Ignored = {
files: []
};
function loadIgnored() {
let err, files;
try {
files = fs.readFileSync('../commit_analyzer_ignore.txt', 'utf-8');
files = files.split('\n');
console.log("\n\nIgnoring " + (files.join(', ')));
return _.each(files, function(file) {
return Ignored.files.push(file);
});
} catch (error) {
err = error;
return console.log("\n\nunable to find commit_analyzer_ignore.txt file; not ignoring any files...\n\n");
}
}
function readOpts() {
return {
root: path.join(__dirname, '..'),
fileFilter: fileFilters,
directoryFilter: directoryFilters
};
}
function fileIsIgnored(file) {
return _.indexOf(Ignored.files, file) >= 0;
}
function analyze() {
loadIgnored();
return readdirp(
readOpts(),
function(file) {
if (fileIsIgnored(file.name)) {
return;
}
return _.each(parsers, function(parser) {
const failed = F.failed;
return F.failed = parser.parse(file, failed);
});
},
function(err, res) {
if (F.failed) {
return process.exit(1);
} else {
return process.exit(0);
}
}
);
}
analyze();
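// Example ignore file (hypothetical contents for ../commit_analyzer_ignore.txt,
// one filename per line, as loadIgnored() expects):
// README.md
// CHANGELOG.md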
// pubsub-macros.rs
use jsonrpc_core;
use jsonrpc_pubsub;
use serde_json;
#[macro_use]
extern crate jsonrpc_derive;
use jsonrpc_core::futures::channel::mpsc;
use jsonrpc_pubsub::typed::Subscriber;
use jsonrpc_pubsub::{PubSubHandler, PubSubMetadata, Session, SubscriptionId};
use std::sync::Arc;
pub enum MyError {}
impl From<MyError> for jsonrpc_core::Error {
fn from(_e: MyError) -> Self {
unreachable!()
}
}
type Result<T> = ::std::result::Result<T, MyError>;
#[rpc]
pub trait Rpc {
type Metadata;
/// Hello subscription.
#[pubsub(subscription = "hello", subscribe, name = "hello_subscribe", alias("hello_alias"))]
fn subscribe(&self, a: Self::Metadata, b: Subscriber<String>, c: u32, d: Option<u64>);
/// Hello subscription through different method.
#[pubsub(subscription = "hello", subscribe, name = "hello_subscribe_second")]
fn subscribe_second(&self, a: Self::Metadata, b: Subscriber<String>, e: String);
/// Unsubscribe from hello subscription.
#[pubsub(subscription = "hello", unsubscribe, name = "hello_unsubscribe")]
fn unsubscribe(&self, a: Option<Self::Metadata>, b: SubscriptionId) -> Result<bool>;
/// A regular rpc method alongside pubsub.
#[rpc(name = "add")]
fn add(&self, a: u64, b: u64) -> Result<u64>;
/// A notification alongside pubsub.
#[rpc(name = "notify")]
fn notify(&self, a: u64);
}
#[derive(Default)]
struct RpcImpl;
impl Rpc for RpcImpl {
type Metadata = Metadata;
fn subscribe(&self, _meta: Self::Metadata, subscriber: Subscriber<String>, _pre: u32, _trailing: Option<u64>) {
let _sink = subscriber.assign_id(SubscriptionId::Number(5));
}
fn subscribe_second(&self, _meta: Self::Metadata, subscriber: Subscriber<String>, _e: String) {
let _sink = subscriber.assign_id(SubscriptionId::Number(6));
}
fn unsubscribe(&self, _meta: Option<Self::Metadata>, _id: SubscriptionId) -> Result<bool> {
Ok(true)
}
fn add(&self, a: u64, b: u64) -> Result<u64> {
Ok(a + b)
}
fn notify(&self, a: u64) {
println!("Received `notify` with value: {}", a);
}
}
#[derive(Clone, Default)]
struct Metadata;
impl jsonrpc_core::Metadata for Metadata {}
impl PubSubMetadata for Metadata {
fn session(&self) -> Option<Arc<Session>> {
let (tx, _rx) = mpsc::unbounded();
Some(Arc::new(Session::new(tx)))
}
}
#[test]
fn test_invalid_trailing_pubsub_params() {
let mut io = PubSubHandler::default();
let rpc = RpcImpl::default();
io.extend_with(rpc.to_delegate());
// when
let meta = Metadata;
let req = r#"{"jsonrpc":"2.0","id":1,"method":"hello_subscribe","params":[]}"#;
let res = io.handle_request_sync(req, meta);
let expected = r#"{
"jsonrpc": "2.0",
"error": {
"code": -32602,
"message": "`params` should have at least 1 argument(s)"
},
"id": 1
}"#;
let expected: jsonrpc_core::Response = serde_json::from_str(expected).unwrap();
let result: jsonrpc_core::Response = serde_json::from_str(&res.unwrap()).unwrap();
assert_eq!(expected, result);
}
#[test]
fn test_subscribe_with_alias() {
let mut io = PubSubHandler::default();
let rpc = RpcImpl::default();
io.extend_with(rpc.to_delegate());
// when
let meta = Metadata;
let req = r#"{"jsonrpc":"2.0","id":1,"method":"hello_alias","params":[1]}"#;
let res = io.handle_request_sync(req, meta);
let expected = r#"{
"jsonrpc": "2.0",
"result": 5,
"id": 1
}"#;
let expected: jsonrpc_core::Response = serde_json::from_str(expected).unwrap();
let result: jsonrpc_core::Response = serde_json::from_str(&res.unwrap()).unwrap();
assert_eq!(expected, result);
}
#[test]
fn test_subscribe_alternate_method() {
let mut io = PubSubHandler::default();
let rpc = RpcImpl::default();
io.extend_with(rpc.to_delegate());
// when
let meta = Metadata;
let req = r#"{"jsonrpc":"2.0","id":1,"method":"hello_subscribe_second","params":["Data"]}"#;
let res = io.handle_request_sync(req, meta);
let expected = r#"{
"jsonrpc": "2.0",
"result": 6,
"id": 1
}"#;
let expected: jsonrpc_core::Response = serde_json::from_str(expected).unwrap();
let result: jsonrpc_core::Response = serde_json::from_str(&res.unwrap()).unwrap();
assert_eq!(expected, result);
}
// ObjectCloner.java
package util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
* Taken from
* http://www.javaworld.com/article/2077578/learn-java/java-tip-76--an-alternative-to-the-deep-copy-technique.html
*
* @author David Miller (maybe)
*/
public class ObjectCloner
{
// so that nobody can accidentally create an ObjectCloner object
private ObjectCloner() {
}
// returns a deep copy of an object
static public Object deepCopy(Object oldObj) throws Exception
{
ObjectOutputStream oos = null;
ObjectInputStream ois = null;
try
{
ByteArrayOutputStream bos =
new ByteArrayOutputStream(); // A
oos = new ObjectOutputStream(bos); // B
// serialize and pass the object
oos.writeObject(oldObj); // C
oos.flush(); // D
ByteArrayInputStream bin =
new ByteArrayInputStream(bos.toByteArray()); // E
ois = new ObjectInputStream(bin); // F
// return the new object
return ois.readObject(); // G
} catch (Exception e)
{
System.out.println("Exception in ObjectCloner = " + e);
throw (e);
} finally
{
oos.close();
ois.close();
}
}
}
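// Usage sketch (hypothetical Person class; it, and everything reachable from it,
// must implement java.io.Serializable or writeObject will fail):
// Person copy = (Person) ObjectCloner.deepCopy(original);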
#!/usr/bin/env python3
# conf.py
# -*- coding: utf-8 -*-
#
# CherryMusic documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 1 23:32:37 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
import cherrymusicserver as cherry
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'CherryMusic'
copyright = '2013, Tom Wallroth, with Tilman Boerner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = os.path.splitext(cherry.VERSION)[0]
# The full version, including alpha/beta/rc tags.
release = cherry.VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
html_theme_options = {
# 'textcolor': '#333333',
'headingcolor': '#892601',
'linkcolor': '#2c5792',
'visitedlinkcolor': '#0c3762',
# 'hoverlinkcolor': '#0c3762',
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
html_title = 'CherryMusic %s documentation' % (cherry.VERSION,)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cherrymusicdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CherryMusic.tex', 'CherryMusic Documentation',
'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'CherryMusic', 'CherryMusic Documentation',
['Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CherryMusic', 'CherryMusic Documentation',
'Author', 'CherryMusic', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'CherryMusic'
epub_author = 'Author'
epub_publisher = 'Author'
epub_copyright = '2013, Author'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
// XDivision.java
package org.wingx;
import java.awt.Color;
import org.wings.*;
import org.wings.style.CSSAttributeSet;
import org.wings.style.CSSProperty;
import org.wings.style.CSSStyle;
import org.wings.style.CSSStyleSheet;
import org.wings.style.Selector;
import org.wings.style.Style;
public class XDivision
extends SContainer
implements LowLevelEventListener
{
String title;
SIcon icon;
/**
* Is the XDivision shaded?
*/
boolean shaded;
/**
* Is the title clickable? Default is false.
*/
protected boolean isTitleClickable = false;
public static final Selector SELECTOR_TITLE = new Selector("xdiv.title");
/**
* Creates a XDivision instance with the specified LayoutManager
* @param l the LayoutManager
*/
public XDivision(SLayoutManager l) {
super(l);
}
/**
* Creates a XDivision instance
*/
public XDivision() {
}
public XDivision(String title) {
this.title = title;
}
/**
* Returns the title of the XDivision.
* @return String the title
*/
public String getTitle() {
return title;
}
/**
* Sets the title of the XDivision.
* @param title the title
*/
public void setTitle(String title) {
String oldVal = this.title;
reloadIfChange(this.title, title);
this.title = title;
propertyChangeSupport.firePropertyChange("title", oldVal, this.title);
}
/**
* Sets the title-font of the XDivision.
* @param titleFont the font for the title
*/
public void setTitleFont( org.wings.SFont titleFont) {
SFont oldVal = this.getTitleFont();
CSSAttributeSet attributes = CSSStyleSheet.getAttributes(titleFont);
Style style = getDynamicStyle(SELECTOR_TITLE);
if (style == null) {
addDynamicStyle(new CSSStyle(SELECTOR_TITLE, attributes));
}
else {
style.remove(CSSProperty.FONT);
style.remove(CSSProperty.FONT_FAMILY);
style.remove(CSSProperty.FONT_SIZE);
style.remove(CSSProperty.FONT_STYLE);
style.remove(CSSProperty.FONT_WEIGHT);
style.putAll(attributes);
}
propertyChangeSupport.firePropertyChange("titleFont", oldVal, this.getTitleFont());
}
/**
* Returns the title-font of the XDivision.
* @return SFont the font for the title
*/
public SFont getTitleFont() {
return dynamicStyles == null || dynamicStyles.get(SELECTOR_TITLE) == null ? null : CSSStyleSheet.getFont((CSSAttributeSet) dynamicStyles.get(SELECTOR_TITLE));
}
/**
* Sets the title-color of the XDivision.
* @param titleColor the color for the title
*/
public void setTitleColor( Color titleColor ) {
Color oldVal = this.getTitleColor();
setAttribute( SELECTOR_TITLE, CSSProperty.COLOR, CSSStyleSheet.getAttribute( titleColor ) );
propertyChangeSupport.firePropertyChange("titleColor", oldVal, this.getTitleColor());
}
/**
* Returns the title-color of the XDivision.
* @return titleColor the color for the title
*/
public Color getTitleColor() {
return dynamicStyles == null || dynamicStyles.get(SELECTOR_TITLE) == null ? null : CSSStyleSheet.getForeground((CSSAttributeSet) dynamicStyles.get(SELECTOR_TITLE));
}
/**
* Determines whether or not the title is clickable.
* @param clickable true if the title is clickable
*/
public void setTitleClickable( boolean clickable ) {
boolean oldVal = this.isTitleClickable;
this.isTitleClickable = clickable;
propertyChangeSupport.firePropertyChange("titleClickable", oldVal, this.isTitleClickable);
}
/**
* Returns true if the title is clickable.
* @return boolean true if the title is clickable
*/
public boolean isTitleClickable() {
return this.isTitleClickable;
}
public SIcon getIcon() {
return icon;
}
public void setIcon(SIcon icon) {
SIcon oldVal = this.icon;
reloadIfChange(this.icon, icon);
this.icon = icon;
propertyChangeSupport.firePropertyChange("icon", oldVal, this.icon);
}
/**
* Returns true if the XDivision is shaded.
* @return boolean true if the XDivision is shaded
*/
public boolean isShaded() {
return shaded;
}
/**
* Determines whether or not the XDivision is shaded.
* @param shaded true if the XDivision is shaded
*/
public void setShaded(boolean shaded) {
if (this.shaded != shaded) {
reload();
this.shaded = shaded;
propertyChangeSupport.firePropertyChange("shaded", !this.shaded, this.shaded);
setRecursivelyVisible(isRecursivelyVisible());
}
}
@Override
public void processLowLevelEvent(String name, String... values) {
if (values.length == 1 && "t".equals(values[0])) {
setShaded(!shaded);
}
/*
TODO: first focusable component
if (!shaded && getComponentCount() > 0)
getComponent(0).requestFocus();
else
requestFocus();
*/
}
@Override
public void fireIntermediateEvents() {
}
@Override
public boolean isEpochCheckEnabled() {
return false;
}
@Override
protected boolean isShowingChildren() {
return !shaded;
}
}
// mfn.menu.js
/*
@Name: Horizontal multilevel menu
@Author: Muffin Group
@WWW: www.muffingroup.com
@Version: 1.5.2
*/
(function($){
$.fn.extend({
muffingroup_menu: function(options) {
var menu = $(this);
var defaults = {
delay : 100,
hoverClass : 'hover',
arrows : true,
animation : 'fade',
addLast : true
options = $.extend(defaults, options);
// add class if menu item has sumbenu
menu.find("li:has(ul)")
.addClass("submenu")
.append("<span class='menu-toggle'>") // responsive menu toggle
.append("<span class='menu-arr-bottom'></span><span class='menu-arr-top'></span>") // border arrows
;
// add class if submanu item has another sumbenu
if( options.arrows ) {
menu.find( "li ul li:has(ul) > a" ).append( "<span class='menu-arrow'><i class='icon-chevron-right'></i></span>" );
}
// add bullets in each top-level menu item
menu.children( "li:not(:last)" ).append( "<em>•</em>" );
// hover
menu.find("li").hover(function() {
$(this).addClass(options.hoverClass);
if (options.animation === "fade") {
$(this).children("ul").fadeIn(options.delay);
} else if (options.animation === "toggle") {
$(this).children("ul").stop(true,true).slideDown(options.delay);
}
}, function(){
$(this).removeClass(options.hoverClass);
if (options.animation === "fade") {
$(this).children("ul").fadeOut(options.delay);
} else if (options.animation === "toggle") {
$(this).children("ul").stop(true,true).slideUp(options.delay);
}
});
// add class .last-item to last sumbenu item
$(".submenu ul li:last-child", menu).addClass("last-item");
// addLast
if(options.addLast) {
$("> li:last-child", menu)
.addClass("last")
.prev()
.addClass("last");
}
}
});
})(jQuery);
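// Usage sketch (hypothetical markup: <ul id="main-menu"> with nested <ul>s):
// jQuery(function ($) {
// $('#main-menu').muffingroup_menu({ animation: 'toggle', delay: 150 });
// });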
// app.js
'use strict';
// Declare app level module which depends on views, and components
var accountModule = angular.module('accountModule', []);
var partnerModule = angular.module('partnerModule', []);
var dodoModule = angular.module('dodoModule', []);
angular.module('myApp', [
'ngMaterial',
'ngRoute',
'myApp.version',
'accountModule',
'partnerModule',
'dodoModule'
]).
config(['$routeProvider', function ($routeProvider) {
$routeProvider.otherwise({redirectTo: '/view1'});
}]).factory('storageService', ['$log', '$rootScope', function ($log, $rootScope) {
var localStorageMaxCount = 10;
var storageService = {
save: function (storageName, data) {
localStorage[storageName] = JSON.stringify(data);
if (localStorage.length > localStorageMaxCount) {
$log.warn('local storage count (length) is over 10, this may be a bug, please check it out - Message from StorageService');
}
$rootScope.$broadcast(storageName, data);
return data;
},
load: function (storageName) {
var storedData = localStorage[storageName];
if (typeof storedData !== "undefined") {
return JSON.parse(storedData);
} else {
return undefined;
}
},
clear: function (storageName) {
delete localStorage[storageName];
$rootScope.$broadcast(storageName);
return undefined;
},
swipeAll: function () {
localStorage.clear();
return 'localStorage is cleared!';
},
status: function () {
$log.info('Current status -> localstorage: ', localStorage);
}
};
return storageService;
}]).config(['$sceProvider', function ($sceProvider) {
// Completely disable SCE. For demonstration purposes only!
// Do not use in new projects.
$sceProvider.enabled(false);
}]).run(['storageService', '$location', '$http', function (storageService, $location, $http) {
var currentUser = storageService.load('currentUser');
if (typeof currentUser == "undefined") {
$location.path('/account/login');
} else {
$http.defaults.headers.common["X-Parse-Session-Token"] = currentUser.sessionToken;
}
}])
.config(function ($mdThemingProvider) {
$mdThemingProvider.theme('default')
.primaryPalette('pink')
.accentPalette('orange');
});
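// Usage sketch (hypothetical controller with storageService injected):
// storageService.save('currentUser', user); // persists and $broadcasts 'currentUser'
// var user = storageService.load('currentUser'); // undefined when nothing is stored
// storageService.clear('currentUser'); // deletes the key and broadcasts again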
// ChromeapiPlugupCardTerminalFactory.js
/*
************************************************************************
Copyright (c) 2013 UBINITY SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*************************************************************************
*/
var ChromeapiPlugupCardTerminalFactory = Class.extend(CardTerminalFactory, {
/** @lends ChromeapiPlugupCardTerminalFactory.prototype */
/**
* @class Implementation of the {@link CardTerminalFactory} using the Chrome API for Plug-up Dongle
* @constructs
* @augments CardTerminalFactory
*/
initialize: function(pid, usagePage, ledgerTransport, vid) {
this.pid = pid;
this.vid = vid;
this.usagePage = usagePage;
this.ledgerTransport = ledgerTransport;
},
list_async: function(pid, usagePage) {
if (typeof chromeDevice == "undefined") {
throw "Content script is not available";
}
return chromeDevice.enumerateDongles_async(this.pid, this.usagePage, this.vid)
.then(function(result) {
return result.deviceList;
});
},
waitInserted: function() {
throw "Not implemented"
},
getCardTerminal: function(device) {
return new ChromeapiPlugupCardTerminal(device, undefined, this.ledgerTransport);
}
});
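// Usage sketch (the PID/usage-page values are placeholders, not real product IDs):
// var factory = new ChromeapiPlugupCardTerminalFactory(0x1807, 0xffa0);
// factory.list_async().then(function (devices) {
// var terminal = factory.getCardTerminal(devices[0]);
// });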
#!/usr/bin/env python
# hcat_client.py
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from hcat import hcat
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
from resource_management.core.logger import Logger
from resource_management.core.exceptions import ClientComponentHasNoStatus
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.script.script import Script
class HCatClient(Script):
def install(self, env):
import params
self.install_packages(env)
self.configure(env)
def configure(self, env):
import params
env.set_params(params)
hcat()
def status(self, env):
raise ClientComponentHasNoStatus()
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HCatClientWindows(HCatClient):
pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HCatClientDefault(HCatClient):
def get_component_name(self):
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
# update after daemons, this ensures that the hcat directories are correct on hosts
# which do not include the WebHCat daemon
return "hive-webhcat"
def pre_upgrade_restart(self, env, upgrade_type=None):
"""
Execute <stack-selector-tool> before reconfiguring this client to the new stack version.
<|fim▁hole|> """
Logger.info("Executing Hive HCat Client Stack Upgrade pre-restart")
import params
env.set_params(params)
# this function should not execute if the stack version does not support rolling upgrade
if not (params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version)):
return
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
# update after daemons, this ensures that the hcat directories are correct on hosts
# which do not include the WebHCat daemon
stack_select.select("hive-webhcat", params.version)
if __name__ == "__main__":
HCatClient().execute()
// app.e2e-spec.ts
import { AuthMeanPage } from './app.po';
describe('auth-mean App', () => {
let page: AuthMeanPage;
beforeEach(() => {
page = new AuthMeanPage();
});
it('should display message saying app works', () => {
page.navigateTo();
expect(page.getParagraphText()).toEqual('app works!');
});
});
# test_service.py
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from climate.api.v1.oshosts import service as service_api
from climate import tests
class RPCApiTestCase(tests.TestCase):
def setUp(self):
super(RPCApiTestCase, self).setUp()
self.s_api = service_api
self.fake_list = []
self.fake_computehost = {}
fake_get_computehosts = self.patch(self.s_api.API, "get_computehosts")
fake_get_computehosts.return_value = self.fake_list
self.patch(self.s_api.API, "create_computehost").return_value = True
fake_get_computehost = self.patch(self.s_api.API, "get_computehost")
fake_get_computehost.return_value = self.fake_computehost
self.patch(self.s_api.API, "update_computehost").return_value = True
self.patch(self.s_api.API, "delete_computehost").return_value = True
def test_get_computehost(self):
pass
def test_create_computehost(self):
pass
def test_update_computehost(self):
pass
def test_delete_computehost(self):
pass
# gap_analysis.py
# -*- encoding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
from datetime import datetime
import time
from osv import fields, osv
from tools.translate import _
from tools import ustr
#import tools
class gap_analysis_effort(osv.Model):
_name = "gap_analysis.effort"
_description = "Gap Analysis Efforts"
_columns = {
'name': fields.char('Effort', size=4, required=True,),
'unknown': fields.boolean('Undefined duration ?', help='If checked, when this effort is used, the user would have to specify the duration manually.'),
'duration': fields.float('Duration (hour)', help='Duration in hour for this effort.', required=True,),
}
def onchange_unknown(self, cr, uid, ids, unknown):
val = {}
val['unknown'] = unknown
if not unknown:
val['duration'] = 0.0
return {'value': val}
_order = 'name'
class gap_analysis_workload_type(osv.Model):
_name = "gap_analysis.workload.type"
_description = "Gap Analysis Workload Type"
_columns = {
'name': fields.char('Name', size=64, required=True, translate=True),
'category': fields.selection([('Functional Analysis','Functional'), ('Technical Analysis','Technical')], 'Analysis', required=True,),
'code': fields.char('Code for Report', size=8, required=True, translate=True, help="Set the code if name is too long (eg: in reports)."),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of workload type."),
'duration': fields.float('Duration (hour)', help='Default duration in hour for this type of workload.', required=True,),
}
_defaults = {
'sequence': 10,
'category': 'Functional Analysis',
'duration': 4,
}
_order = 'sequence'
class gap_analysis_workload(osv.Model):
_name = "gap_analysis.workload"
_description = "Gap Analysis Workload"
_columns = {
'gap_line_id': fields.many2one('gap_analysis.line', 'Gap-analysis Line', ondelete='cascade', select=True, readonly=True),
'fct_id': fields.many2one('gap_analysis.functionality', 'Gap-analysis Functionality Template', ondelete='cascade', select=True, readonly=True),
'type': fields.many2one('gap_analysis.workload.type', 'Type', required=True, select=True),
'duration': fields.float('Duration (hour)', help='Duration in hour for this task.', required=True,),
}
def onchange_type_id(self, cr, uid, ids, type_id):
val = {}
my_type = self.pool.get('gap_analysis.workload.type').browse(cr, uid, type_id)
val['duration'] = my_type.duration
return {'value': val}
class gap_analysis_functionality_category(osv.Model):
_inherit = "product.category"
_name = "gap_analysis.functionality.category"
_description = "Gap Analysis Functionality Categories"
def _category_to_update(self, cr, uid, ids, fields=None, arg=None, context=None):
if type(ids) != type([]):
ids = [ids]
return self.pool.get('gap_analysis.functionality.category').search(cr, uid, [], order='parent_left') or []
def _name_get_full_path(self, cursor, uid, ids, fields, arg, context=None):
result = {}
for category in self.browse(cursor, uid, ids):
full_path = ''
current_category = category
while current_category:
if full_path=='':
full_path = ustr(current_category.name)
else:
full_path = ustr(current_category.name) + ' / ' + full_path
current_category = current_category.parent_id or False
result[category.id] = full_path
return result
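# Example (hypothetical records): a root category "Sales" with a child
# "Quotations" yields full_path "Sales / Quotations" for the child, since
# names are prepended while walking up the parent chain.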
_columns = {
'parent_id': fields.many2one('gap_analysis.functionality.category','Parent Category', select=True, ondelete='cascade'),
'child_id': fields.one2many('gap_analysis.functionality.category', 'parent_id', string='Child Categories'),
'code': fields.char('Code', size=8, required=True, help="Use for functionality sequencing."),
'full_path': fields.function(_name_get_full_path, type="char", method=True, size=2048, store={'gap_analysis.functionality.category': (_category_to_update, ['name','parent_id'], 10)}, string='Name'),
}
def _check_recursion(self, cr, uid, ids, context=None):
level = 100
while len(ids):
cr.execute('select distinct parent_id from gap_analysis_functionality_category where id IN %s',(tuple(ids),))
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
if not level:
return False
level -= 1
return True
_constraints = [
(_check_recursion, 'Error ! You cannot create recursive categories.', ['parent_id'])
]
_parent_name = "parent_id"
_parent_store = True
_parent_order = 'sequence, name'
_order = 'parent_left'
class gap_analysis_functionality(osv.Model):
_name = "gap_analysis.functionality"
_description = "Gap Analysis Functionalities"
_columns = {
'name': fields.char('Functionality', size=256, required=True, translate=True),
'description': fields.text('Description'),
'category': fields.many2one('gap_analysis.functionality.category', 'Category', required=True, select=True),
'is_tmpl': fields.boolean('Template ?', help='This Functionality is a Template ?'),
'workloads': fields.one2many('gap_analysis.workload', 'fct_id', 'Default Workloads'),
'openerp_fct': fields.many2one('gap_analysis.openerp', 'Default OpenERP feature', select=True),
'critical': fields.integer('Default Critical Level', help='Indicator to specify the importance of this functionality in the project.'),
'testing': fields.float('Test (hour)'),
'effort': fields.many2one('gap_analysis.effort', 'Default Effort', help="Development Effort for this functionality."),
'duration_wk': fields.float('Default Duration (hour)', help='Since this effort has no pre-defined duration, you must set one.'),
'unknown_wk': fields.boolean('Must set the duration manually ? (Default)',),
}
def onchange_effort_id(self, cr, uid, ids, effort_id, unknown_wk):
val = {}
my_effort = self.pool.get('gap_analysis.effort').browse(cr, uid, effort_id)
val['unknown_wk'] = my_effort.unknown
return {'value': val}
def write(self, cr, uid, ids, vals, context=None):
if 'is_tmpl' in vals and vals['is_tmpl'] == True:
vals['proposed'] = False
return super(gap_analysis_functionality, self).write(cr, uid, ids, vals, context=context)
class gap_analysis_openerp(osv.Model):
_name = "gap_analysis.openerp"
_description = "Gap Analysis OpenERP features"
_columns = {
'name': fields.char('OpenERP feature', size=256, required=True, translate=True),
}
class gap_analysis(osv.Model):
_name = "gap_analysis"
_description = "Gap Analysis"
def _estimated_time_cost(self, cursor, uid, ids, fields, arg, context=None):
result = {}
for gap in self.browse(cursor, uid, ids):
res = {}
res['estimated_time'] = 0.0
res['estimated_cost'] = 0.0
for gap_line in gap.gap_lines:
if gap_line.keep:
res['estimated_time'] += gap_line.total_time
res['estimated_cost'] += gap_line.total_cost
result[gap.id] = res
return result
def _sorted_distinct_workloads(self, cursor, uid, ids, arg, context=None):
result = {}
for gap in self.browse(cursor, uid, ids):
types = []
line_ids = [l.id for l in gap.gap_lines]
if line_ids:
cursor.execute("SELECT id, code FROM gap_analysis_workload_type T WHERE id in (SELECT DISTINCT(W.type) FROM gap_analysis_workload W WHERE W.gap_line_id IN %s) ORDER BY T.sequence ASC",(tuple(line_ids),))
types = cursor.fetchall()
return types
def button_dummy(self, cr, uid, ids, context=None):
gapline_pool = self.pool.get('gap_analysis.line')
gap_cat_pool = self.pool.get('gap_analysis.functionality.category')
if type(ids) != type([]):
ids = [ids]
for gap_id in ids:
cr.execute("SELECT DISTINCT c.code FROM gap_analysis_line l, gap_analysis_functionality_category c WHERE l.category=c.id AND l.gap_id = %s",(gap_id,))
categ_codes = map(lambda x: x[0], cr.fetchall()) or []
for code in categ_codes:
idx = 1
seq = 999
cr.execute("SELECT id FROM gap_analysis_functionality_category WHERE id IN (SELECT DISTINCT c.id FROM gap_analysis_line l, gap_analysis_functionality_category c WHERE l.category=c.id AND c.code = %s AND l.gap_id = %s) ORDER BY parent_left",(code, gap_id,))
categ_ids = map(lambda x: x[0], cr.fetchall()) or []
for categ in gap_cat_pool.browse(cr, uid, categ_ids):
current_categ = categ
seq = ''
while current_categ:
seq = str(current_categ.sequence) + seq
current_categ = current_categ.parent_id or False
line_ids = gapline_pool.search(cr, uid, [('category','=',categ.id),('gap_id','=',gap_id)], order='critical desc, effort asc') or []
for line_id in line_ids:
code_line = code
code_line += str(idx).rjust(3, '0')
gapline_pool.write(cr, uid, [line_id], {'code':code_line,'seq':seq})
idx += 1
return True
def import_from_tmpl(self, cr, uid, ids, context=None):
return {
'name': _('Import from Template'),
'view_type': 'form',
'view_mode': 'form',
'view_id': False,
'res_model': 'gap_analysis.import_from_tmpl',
'context': context,
'type': 'ir.actions.act_window',
'target': 'new',
'res_id': False,
}
def _get_lines(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('gap_analysis.line').browse(cr, uid, ids, context=context):
result[line.gap_id.id] = True
return result.keys()
def action_change(self, cr, uid, ids, context=None):
for o in self.browse(cr, uid, ids):
self.write(cr, uid, [o.id], {'state':'draft', 'date_confirm': False})
return True
def action_done(self, cr, uid, ids, context=None):
for o in self.browse(cr, uid, ids):
self.write(cr, uid, [o.id], {'state': 'done', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
return True
def action_cancel(self, cr, uid, ids, context=None):
for o in self.browse(cr, uid, ids):
self.write(cr, uid, [o.id], {'state': 'cancel'})
return True
def copy(self, cr, uid, id, default=None, context=None):
raise osv.except_osv(_('Warning'), _("Copying a Gap Analysis is currently not allowed."))
return False
def onchange_project_id(self, cr, uid, ids, project_id):
val = {}
my_project = self.pool.get('project.project').browse(cr, uid, project_id)
if my_project.partner_id:
val['partner_id'] = my_project.partner_id.id
return {'value': val}
_columns = {
'reference': fields.char('Reference', size=64, required=True, readonly=True, states={'draft': [('readonly', False)]}, select=True),
'name': fields.char('Name', size=256, required=True, readonly=True, states={'draft': [('readonly', False)]}),
'state': fields.selection([('draft', 'Draft'), ('done', 'Done'), ('cancel', 'Cancelled')], 'State', readonly=True, help="Gives the state of the gap-analysis.", select=True),
'note': fields.text('Note'),
'date_create': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which the gap-analysis is created."),
'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which the gap-analysis is confirmed."),
'user_id': fields.many2one('res.users', 'Analyst', readonly=True, states={'draft': [('readonly', False)]}, select=True),
'partner_id': fields.many2one('res.partner', 'Customer', select=True, readonly=True, states={'draft': [('readonly', False)]}, ),
'gap_lines': fields.one2many('gap_analysis.line', 'gap_id', 'Functionalities', readonly=True, states={'draft': [('readonly', False)]}),
'estimated_time': fields.function(_estimated_time_cost, type='float', multi="gapsums", string='Estimated Time', store = False),
'estimated_cost': fields.function(_estimated_time_cost, type='float', multi="gapsums", string='Estimated Selling Price', store = False),
'project_id': fields.many2one('project.project', 'Project'),
'partner_id': fields.many2one('res.partner', 'Partner'),
'is_tmpl': fields.boolean('Template ?', help='This Gap Analysis is a Template ?'),
'tech_cost': fields.float('Technical Analysis Price', help='Default Price per hour for Technical Analysis.'),
'func_cost': fields.float('Functional Analysis Price', help='Default Price per hour for Functional Analysis.'),
'dev_cost': fields.float('Effort Price', help='Price per hour for Effort.'),
'user_functional': fields.many2one('res.users', 'Default Functional Analyst'),
'user_technical': fields.many2one('res.users', 'Default Technical Analyst'),
'user_dev': fields.many2one('res.users', 'Default Developer'),
'user_test': fields.many2one('res.users', 'Default Tester'),
}
_defaults = {
'state': 'draft',
'user_id': lambda obj, cr, uid, context: uid,
'user_functional': lambda obj, cr, uid, context: uid,
'reference': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'gap_analysis'),
'date_create': fields.date.context_today,
'tech_cost': 500.0,
'func_cost': 500.0,
'dev_cost': 250.0,
}
_sql_constraints = [
('reference_uniq', 'unique(reference)', 'Reference must be unique !'),
]
_order = 'name desc'
class gap_analysis_line(osv.Model):
_name = "gap_analysis.line"
_description = "Gap-analysis Lines"
def _estimated_line_time_cost(self, cursor, uid, ids, fields, arg, context=None):
result = {}
gap = False
for gap_line in self.browse(cursor, uid, ids):
res = {}
res['total_time'] = 0
res['total_cost'] = 0
if not gap:
gap = self.pool.get("gap_analysis").browse(cursor, uid, gap_line.gap_id.id)
if gap_line.effort:
if gap_line.effort.unknown:
thistime = gap_line.duration_wk
else:
thistime = gap_line.effort.duration
res['total_time'] = thistime
res['total_cost'] = (gap.dev_cost * thistime)
for workload in gap_line.workloads:
if workload.type.category == "Technical Analysis":
workload_cost = gap.tech_cost
else:
workload_cost = gap.func_cost
res['total_time'] += workload.duration
res['total_cost'] += (workload.duration * workload_cost)
if gap_line.testing:
res['total_time'] += gap_line.testing
res['total_cost'] += (gap_line.testing * gap.tech_cost)
result[gap_line.id] = res
return result
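    # Worked example (hypothetical figures, not from any real analysis): an
    # effort of 8h with dev_cost 250.0, one "Technical Analysis" workload of
    # 2h at tech_cost 500.0 and 1h of testing (also billed at tech_cost) give
    # total_time = 8 + 2 + 1 = 11.0 and
    # total_cost = 8*250.0 + 2*500.0 + 1*500.0 = 3500.0.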
def _get_lines_from_workload(self, cr, uid, ids, context=None):
result = {}
for workload in self.pool.get('gap_analysis.workload').browse(cr, uid, ids, context=context):
result[workload.gap_line_id.id] = True
return result.keys()
    def _total_workloads(self, cursor, uid, ids, arg, context=None):
        result = {}
        for line in self.browse(cursor, uid, ids):
            amount = 0
            for w in line.workloads:
                if w.type.id == arg:
                    amount += w.duration
            # Accumulate per record and return a dict, as fields.function expects.
            result[line.id] = amount
        return result
def onchange_functionality_id(self, cr, uid, ids, functionality_id, gap_line_id):
        val = {}
        if not functionality_id:
            return {'value': val}
        functionality_tmpl = self.pool.get('gap_analysis.functionality').browse(cr, uid, functionality_id)
if functionality_tmpl.effort:
val['effort'] = functionality_tmpl.effort.id
if functionality_tmpl.category:
val['category'] = functionality_tmpl.category.id
if functionality_tmpl.testing:
val['testing'] = functionality_tmpl.testing
if functionality_tmpl.unknown_wk:
val['unknown_wk'] = functionality_tmpl.unknown_wk
if functionality_tmpl.duration_wk:
val['duration_wk'] = functionality_tmpl.duration_wk
if functionality_tmpl.critical:
val['critical'] = functionality_tmpl.critical
if functionality_tmpl.openerp_fct:
val['openerp_fct'] = functionality_tmpl.openerp_fct.id
if functionality_tmpl.workloads:
workload_pool = self.pool.get('gap_analysis.workload')
my_workloads = []
for workload in functionality_tmpl.workloads:
workload_vals = {'type':workload.type.id,'duration':workload.duration,}
if gap_line_id:
workload_vals['gap_line_id'] = gap_line_id
workload_id = workload_pool.create(cr, uid, workload_vals)
if workload_id:
my_workloads.append(workload_id)
if my_workloads:
val['workloads'] = my_workloads
return {'value': val}
    def onchange_effort_id(self, cr, uid, ids, effort_id, unknown_wk):
        val = {}
        if effort_id:
            my_effort = self.pool.get('gap_analysis.effort').browse(cr, uid, effort_id)
            val['unknown_wk'] = my_effort.unknown
        return {'value': val}
_columns = {
'gap_id': fields.many2one('gap_analysis', 'Gap-analysis', required=True, ondelete='cascade', select=True, readonly=True),
'seq': fields.char('Sequence', size=48),
'code': fields.char('Code', size=6),
'functionality': fields.many2one('gap_analysis.functionality', 'Functionality', required=True, select=True),
'category': fields.many2one('gap_analysis.functionality.category', 'Category', required=True, select=True),
'workloads': fields.one2many('gap_analysis.workload', 'gap_line_id', 'Workloads'),
        'total_time': fields.function(_estimated_line_time_cost, method=True, type='float', multi=True, string='Estimated Time',
            store={
                'gap_analysis.line': (lambda self, cr, uid, ids, c={}: ids, ['testing', 'workloads', 'duration_wk', 'effort', 'unknown_wk'], 10),
                'gap_analysis.workload': (_get_lines_from_workload, ['workload', 'duration'], 10),
            }),
        'total_cost': fields.function(_estimated_line_time_cost, method=True, type='float', multi=True, string='Estimated Selling Price',
            store={
                'gap_analysis.line': (lambda self, cr, uid, ids, c={}: ids, ['testing', 'workloads', 'duration_wk', 'effort', 'unknown_wk'], 10),
                'gap_analysis.workload': (_get_lines_from_workload, ['workload', 'duration'], 10),
            }),
'openerp_fct': fields.many2one('gap_analysis.openerp', 'OpenERP feature', select=True),
'contributors': fields.char('Contributor', size=256, help='Who is/are your main contact(s) to define this functionality.'),
        'keep': fields.boolean('Keep ?', help='Keep the functionality in the Gap Analysis. If unchecked, the functionality will be printed in the report but not used for the price calculation.'),
'critical': fields.integer('Critical Level', help='Indicator to specify the importance of this functionality in the project.'),
'testing': fields.float('Test (hour)'),
'effort': fields.many2one('gap_analysis.effort', 'Effort', help="Development Effort for this functionality."),
'duration_wk': fields.float('Duration (hour)', help='Since this effort has no pre-defined duration, you must set one.'),
'unknown_wk': fields.boolean('Must set the duration manually ?',),
}
_defaults = {
'unknown_wk': False,
'keep': True,
'critical': 1,
}
_order = 'seq asc, code asc'
_rec_name = 'code'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<|file_name|>test_validate_invalid.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2021 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator, ValidationResult
from ansible.module_utils.errors import AnsibleValidationErrorMultiple
from ansible.module_utils.six import PY2
# Each item is id, argument_spec, parameters, expected, unsupported parameters, error test string
INVALID_SPECS = [
(
'invalid-list',
{'packages': {'type': 'list'}},
{'packages': {'key': 'value'}},
{'packages': {'key': 'value'}},
set(),
"unable to convert to list: <class 'dict'> cannot be converted to a list",
),
(
'invalid-dict',
{'users': {'type': 'dict'}},
{'users': ['one', 'two']},
{'users': ['one', 'two']},
set(),
"unable to convert to dict: <class 'list'> cannot be converted to a dict",
),
(
'invalid-bool',
{'bool': {'type': 'bool'}},
{'bool': {'k': 'v'}},
{'bool': {'k': 'v'}},
set(),
"unable to convert to bool: <class 'dict'> cannot be converted to a bool",
),
(
'invalid-float',
{'float': {'type': 'float'}},
{'float': 'hello'},
{'float': 'hello'},
set(),
"unable to convert to float: <class 'str'> cannot be converted to a float",
),
(
'invalid-bytes',
{'bytes': {'type': 'bytes'}},
{'bytes': 'one'},
{'bytes': 'one'},
set(),
"unable to convert to bytes: <class 'str'> cannot be converted to a Byte value",
),
(
'invalid-bits',
{'bits': {'type': 'bits'}},
{'bits': 'one'},
{'bits': 'one'},
set(),
"unable to convert to bits: <class 'str'> cannot be converted to a Bit value",
),
(
'invalid-jsonargs',
{'some_json': {'type': 'jsonarg'}},
{'some_json': set()},
{'some_json': set()},
set(),
"unable to convert to jsonarg: <class 'set'> cannot be converted to a json string",
),
(
'invalid-parameter',
{'name': {}},
{
'badparam': '',
'another': '',
},
{
'name': None,
'badparam': '',
'another': '',
},
set(('another', 'badparam')),
"another, badparam. Supported parameters include: name.",
),
(
'invalid-elements',
{'numbers': {'type': 'list', 'elements': 'int'}},
{'numbers': [55, 33, 34, {'key': 'value'}]},
{'numbers': [55, 33, 34]},
set(),
"Elements value for option 'numbers' is of type <class 'dict'> and we were unable to convert to int: <class 'dict'> cannot be converted to an int"
),
(
'required',
{'req': {'required': True}},
{},
{'req': None},
set(),
"missing required arguments: req"
)
]
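# A new case slots into the same six-element shape. A hypothetical example
# for a failed int conversion (not part of the suite) would look like:
# (
#     'invalid-int',
#     {'count': {'type': 'int'}},
#     {'count': 'many'},
#     {'count': 'many'},
#     set(),
#     "unable to convert to int: <class 'str'> cannot be converted to an int",
# ),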
@pytest.mark.parametrize(
('arg_spec', 'parameters', 'expected', 'unsupported', 'error'),
(i[1:] for i in INVALID_SPECS),
ids=[i[0] for i in INVALID_SPECS]
)
def test_invalid_spec(arg_spec, parameters, expected, unsupported, error):
v = ArgumentSpecValidator(arg_spec)
result = v.validate(parameters)
with pytest.raises(AnsibleValidationErrorMultiple) as exc_info:
raise result.errors
if PY2:
error = error.replace('class', 'type')
assert isinstance(result, ValidationResult)
assert error in exc_info.value.msg
    assert error in result.error_messages[0]
    assert result.unsupported_parameters == unsupported
    assert result.validated_parameters == expected
<|file_name|>course_module.py<|end_file_name|><|fim▁begin|>"""
Django module container for classes and operations related to the "Course Module" content type
"""
import logging
from cStringIO import StringIO
from lxml import etree
from path import Path as path
from pytz import utc
import requests
from datetime import datetime
from lazy import lazy
from xmodule import course_metadata_utils
from xmodule.course_metadata_utils import DEFAULT_START_DATE
from xmodule.exceptions import UndefinedContext
from xmodule.seq_module import SequenceDescriptor, SequenceModule
from xmodule.graders import grader_from_conf
from xmodule.tabs import CourseTabList, InvalidTabsException
from xmodule.mixin import LicenseMixin
import json
from xblock.core import XBlock
from xblock.fields import Scope, List, String, Dict, Boolean, Integer, Float
from .fields import Date
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
CATALOG_VISIBILITY_CATALOG_AND_ABOUT = "both"
CATALOG_VISIBILITY_ABOUT = "about"
CATALOG_VISIBILITY_NONE = "none"
class StringOrDate(Date):
def from_json(self, value):
"""
Parse an optional metadata key containing a time or a string:
if present, assume it's a string if it doesn't parse.
"""
try:
result = super(StringOrDate, self).from_json(value)
except ValueError:
return value
if result is None:
return value
else:
return result
def to_json(self, value):
"""
Convert a time struct or string to a string.
"""
        try:
            result = super(StringOrDate, self).to_json(value)
        except Exception:
            # The value is a plain string that the Date serialiser rejects.
            return value
if result is None:
return value
else:
return result
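# Illustrative behaviour of StringOrDate (hypothetical values): from_json
# parses "2015-09-01" into a time value, while a non-date marker such as
# "TBD" is returned unchanged because the Date parser raises ValueError on it.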
edx_xml_parser = etree.XMLParser(dtd_validation=False, load_dtd=False,
remove_comments=True, remove_blank_text=True)
_cached_toc = {}
class Textbook(object):
def __init__(self, title, book_url):
self.title = title
self.book_url = book_url
@lazy
def start_page(self):
return int(self.table_of_contents[0].attrib['page'])
@lazy
def end_page(self):
# The last page should be the last element in the table of contents,
# but it may be nested. So recurse all the way down the last element
last_el = self.table_of_contents[-1]
while last_el.getchildren():
last_el = last_el[-1]
return int(last_el.attrib['page'])
@lazy
def table_of_contents(self):
"""
Accesses the textbook's table of contents (default name "toc.xml") at the URL self.book_url
Returns XML tree representation of the table of contents
"""
toc_url = self.book_url + 'toc.xml'
# cdodge: I've added this caching of TOC because in Mongo-backed instances (but not Filesystem stores)
# course modules have a very short lifespan and are constantly being created and torn down.
# Since this module in the __init__() method does a synchronous call to AWS to get the TOC
# this is causing a big performance problem. So let's be a bit smarter about this and cache
# each fetch and store in-mem for 10 minutes.
# NOTE: I have to get this onto sandbox ASAP as we're having runtime failures. I'd like to swing back and
# rewrite to use the traditional Django in-memory cache.
try:
# see if we already fetched this
if toc_url in _cached_toc:
(table_of_contents, timestamp) = _cached_toc[toc_url]
age = datetime.now(utc) - timestamp
# expire every 10 minutes
if age.seconds < 600:
return table_of_contents
        except Exception:
            # Cache lookup is best-effort; fall through to a fresh fetch.
            pass
# Get the table of contents from S3
log.info("Retrieving textbook table of contents from %s", toc_url)
try:
r = requests.get(toc_url)
except Exception as err:
msg = 'Error %s: Unable to retrieve textbook table of contents at %s' % (err, toc_url)
log.error(msg)
raise Exception(msg)
# TOC is XML. Parse it
try:
table_of_contents = etree.fromstring(r.text)
except Exception as err:
msg = 'Error %s: Unable to parse XML for textbook table of contents at %s' % (err, toc_url)
log.error(msg)
raise Exception(msg)
        # Remember the freshly fetched TOC so requests within the next 10 minutes hit the cache.
        _cached_toc[toc_url] = (table_of_contents, datetime.now(utc))
        return table_of_contents
def __eq__(self, other):
return (self.title == other.title and
self.book_url == other.book_url)
def __ne__(self, other):
return not self == other
class TextbookList(List):
def from_json(self, values):
textbooks = []
for title, book_url in values:
try:
textbooks.append(Textbook(title, book_url))
except:
# If we can't get to S3 (e.g. on a train with no internet), don't break
# the rest of the courseware.
log.exception("Couldn't load textbook ({0}, {1})".format(title, book_url))
continue
return textbooks
def to_json(self, values):
json_data = []
for val in values:
if isinstance(val, Textbook):
json_data.append((val.title, val.book_url))
elif isinstance(val, tuple):
json_data.append(val)
else:
continue
return json_data
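# A minimal sketch of the JSON shape TextbookList round-trips, assuming two
# hypothetical books:
#   [["Course Reader", "https://example.com/reader/"],
#    ["Lab Manual", "https://example.com/lab/"]]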
class CourseFields(object):
lti_passports = List(
display_name=_("LTI Passports"),
help=_('Enter the passports for course LTI tools in the following format: "id:client_key:client_secret".'),
scope=Scope.settings
)
textbooks = TextbookList(
help=_("List of pairs of (title, url) for textbooks used in this course"),
default=[],
scope=Scope.content
)
wiki_slug = String(help=_("Slug that points to the wiki for this course"), scope=Scope.content)
enrollment_start = Date(help=_("Date that enrollment for this class is opened"), scope=Scope.settings)
enrollment_end = Date(help=_("Date that enrollment for this class is closed"), scope=Scope.settings)
start = Date(
help=_("Start time when this module is visible"),
default=DEFAULT_START_DATE,
scope=Scope.settings
)
end = Date(help=_("Date that this class ends"), scope=Scope.settings)
    individual_end_days = Integer(
        help=_("Number of days from the base date until the course ends"),
        scope=Scope.settings
    )
    individual_end_hours = Integer(
        help=_("Number of hours from the base date until the course ends"),
        scope=Scope.settings
    )
    individual_end_minutes = Integer(
        help=_("Number of minutes from the base date until the course ends"),
        scope=Scope.settings
    )
cosmetic_display_price = Integer(
display_name=_("Cosmetic Course Display Price"),
help=_(
"The cost displayed to students for enrolling in the course. If a paid course registration price is "
"set by an administrator in the database, that price will be displayed instead of this one."
),
default=0,
scope=Scope.settings,
)
    deadline_start = Date(help=_("Date when this course is closed; this setting only affects the course list"), scope=Scope.settings)
    terminate_start = Date(help=_("Date when this course terminates (the course is hidden)"), scope=Scope.settings)
advertised_start = String(
display_name=_("Course Advertised Start Date"),
help=_(
"Enter the date you want to advertise as the course start date, if this date is different from the set "
"start date. To advertise the set start date, enter null."
),
scope=Scope.settings
)
pre_requisite_courses = List(
display_name=_("Pre-Requisite Courses"),
help=_("Pre-Requisite Course key if this course has a pre-requisite course"),
scope=Scope.settings
)
grading_policy = Dict(
help=_("Grading policy definition for this class"),
default={
"GRADER": [
{
"type": "Homework",
"min_count": 12,
"drop_count": 2,
"short_label": "HW",
"weight": 0.15,
},
{
"type": "Lab",
"min_count": 12,
"drop_count": 2,
"weight": 0.15,
},
{
"type": "Midterm Exam",
"short_label": "Midterm",
"min_count": 1,
"drop_count": 0,
"weight": 0.3,
},
{
"type": "Final Exam",
"short_label": "Final",
"min_count": 1,
"drop_count": 0,
"weight": 0.4,
}
],
"GRADE_CUTOFFS": {
"Pass": 0.5,
},
},
scope=Scope.content
)
show_calculator = Boolean(
display_name=_("Show Calculator"),
help=_("Enter true or false. When true, students can see the calculator in the course."),
default=False,
scope=Scope.settings
)
display_name = String(
help=_("Enter the name of the course as it should appear in the edX.org course list."),
default="Empty",
display_name=_("Course Display Name"),
scope=Scope.settings
)
course_edit_method = String(
display_name=_("Course Editor"),
help=_('Enter the method by which this course is edited ("XML" or "Studio").'),
default="Studio",
scope=Scope.settings,
deprecated=True # Deprecated because someone would not edit this value within Studio.
)
tabs = CourseTabList(help="List of tabs to enable in this course", scope=Scope.settings, default=[])
end_of_course_survey_url = String(
display_name=_("Course Survey URL"),
help=_("Enter the URL for the end-of-course survey. If your course does not have a survey, enter null."),
scope=Scope.settings,
deprecated=True # We wish to remove this entirely, TNL-3399
)
discussion_blackouts = List(
display_name=_("Discussion Blackout Dates"),
help=_(
'Enter pairs of dates between which students cannot post to discussion forums. Inside the provided '
'brackets, enter an additional set of square brackets surrounding each pair of dates you add. '
'Format each pair of dates as ["YYYY-MM-DD", "YYYY-MM-DD"]. To specify times as well as dates, '
'format each pair as ["YYYY-MM-DDTHH:MM", "YYYY-MM-DDTHH:MM"]. Be sure to include the "T" between '
'the date and time. For example, an entry defining two blackout periods looks like this, including '
'the outer pair of square brackets: [["2015-09-15", "2015-09-21"], ["2015-10-01", "2015-10-08"]] '
),
scope=Scope.settings
)
discussion_topics = Dict(
display_name=_("Discussion Topic Mapping"),
help=_(
'Enter discussion categories in the following format: "CategoryName": '
'{"id": "i4x-InstitutionName-CourseNumber-course-CourseRun"}. For example, one discussion '
'category may be "Lydian Mode": {"id": "i4x-UniversityX-MUS101-course-2015_T1"}. The "id" '
'value for each category must be unique. In "id" values, the only special characters that are '
'supported are underscore, hyphen, and period.'
),
scope=Scope.settings
)
discussion_sort_alpha = Boolean(
display_name=_("Discussion Sorting Alphabetical"),
scope=Scope.settings, default=False,
help=_(
"Enter true or false. If true, discussion categories and subcategories are sorted alphabetically. "
"If false, they are sorted chronologically."
)
)
announcement = Date(
display_name=_("Course Announcement Date"),
help=_("Enter the date to announce your course."),
scope=Scope.settings
)
cohort_config = Dict(
display_name=_("Cohort Configuration"),
help=_(
"Enter policy keys and values to enable the cohort feature, define automated student assignment to "
"groups, or identify any course-wide discussion topics as private to cohort members."
),
scope=Scope.settings
)
is_new = Boolean(
display_name=_("Course Is New"),
help=_(
"Enter true or false. If true, the course appears in the list of new courses on edx.org, and a New! "
"badge temporarily appears next to the course image."
),
scope=Scope.settings
)
mobile_available = Boolean(
display_name=_("Mobile Course Available"),
help=_("Enter true or false. If true, the course will be available to mobile devices."),
default=False,
scope=Scope.settings
)
video_upload_pipeline = Dict(
display_name=_("Video Upload Credentials"),
help=_("Enter the unique identifier for your course's video files provided by edX."),
scope=Scope.settings
)
facebook_url = String(
help=_(
"Enter the URL for the official course Facebook group. "
"If you provide a URL, the mobile app includes a button that students can tap to access the group."
),
default=None,
display_name=_("Facebook URL"),
scope=Scope.settings
)
no_grade = Boolean(
display_name=_("Course Not Graded"),
help=_("Enter true or false. If true, the course will not be graded."),
default=False,
scope=Scope.settings
)
disable_progress_graph = Boolean(
display_name=_("Disable Progress Graph"),
help=_("Enter true or false. If true, students cannot view the progress graph."),
default=False,
scope=Scope.settings
)
pdf_textbooks = List(
display_name=_("PDF Textbooks"),
help=_("List of dictionaries containing pdf_textbook configuration"), scope=Scope.settings
)
html_textbooks = List(
display_name=_("HTML Textbooks"),
help=_(
"For HTML textbooks that appear as separate tabs in the courseware, enter the name of the tab (usually "
"the name of the book) as well as the URLs and titles of all the chapters in the book."
),
scope=Scope.settings
)
remote_gradebook = Dict(
display_name=_("Remote Gradebook"),
help=_(
"Enter the remote gradebook mapping. Only use this setting when "
"REMOTE_GRADEBOOK_URL has been specified."
),
scope=Scope.settings
)
enable_ccx = Boolean(
# Translators: Custom Courses for edX (CCX) is an edX feature for re-using course content. CCX Coach is
# a role created by a course Instructor to enable a person (the "Coach") to manage the custom course for
# his students.
display_name=_("Enable CCX"),
help=_(
# Translators: Custom Courses for edX (CCX) is an edX feature for re-using course content. CCX Coach is
# a role created by a course Instructor to enable a person (the "Coach") to manage the custom course for
# his students.
"Allow course instructors to assign CCX Coach roles, and allow coaches to manage Custom Courses on edX."
" When false, Custom Courses cannot be created, but existing Custom Courses will be preserved."
),
default=False,
scope=Scope.settings
)
allow_anonymous = Boolean(
display_name=_("Allow Anonymous Discussion Posts"),
help=_("Enter true or false. If true, students can create discussion posts that are anonymous to all users."),
scope=Scope.settings, default=True
)
allow_anonymous_to_peers = Boolean(
display_name=_("Allow Anonymous Discussion Posts to Peers"),
help=_(
"Enter true or false. If true, students can create discussion posts that are anonymous to other "
"students. This setting does not make posts anonymous to course staff."
),
scope=Scope.settings, default=False
)
advanced_modules = List(
display_name=_("Advanced Module List"),
help=_("Enter the names of the advanced components to use in your course."),
scope=Scope.settings
)
has_children = True
info_sidebar_name = String(
display_name=_("Course Info Sidebar Name"),
help=_(
"Enter the heading that you want students to see above your course handouts on the Course Info page. "
"Your course handouts appear in the right panel of the page."
),
scope=Scope.settings, default='Course Handouts')
show_timezone = Boolean(
help=_(
"True if timezones should be shown on dates in the courseware. "
"Deprecated in favor of due_date_display_format."
),
scope=Scope.settings, default=True
)
due_date_display_format = String(
display_name=_("Due Date Display Format"),
help=_(
"Enter the format for due dates. The default is Mon DD, YYYY. Enter \"%m-%d-%Y\" for MM-DD-YYYY, "
"\"%d-%m-%Y\" for DD-MM-YYYY, \"%Y-%m-%d\" for YYYY-MM-DD, or \"%Y-%d-%m\" for YYYY-DD-MM."
),
scope=Scope.settings, default=None
)
enrollment_domain = String(
display_name=_("External Login Domain"),
help=_("Enter the external login method students can use for the course."),
scope=Scope.settings
)
certificates_show_before_end = Boolean(
display_name=_("Certificates Downloadable Before End"),
help=_(
"Enter true or false. If true, students can download certificates before the course ends, if they've "
"met certificate requirements."
),
scope=Scope.settings,
default=False,
deprecated=True
)
certificates_display_behavior = String(
display_name=_("Certificates Display Behavior"),
help=_(
"Enter end, early_with_info, or early_no_info. After certificate generation, students who passed see a "
"link to their certificates on the dashboard and students who did not pass see information about the "
"grading configuration. The default is early_with_info, which displays this certificate information to "
"all students as soon as certificates are generated. To display this certificate information to all "
"students after the course end date, enter end. To display only the links to passing students "
"as soon as certificates are generated, enter early_no_info."
),
scope=Scope.settings,
default="early_with_info"
)
course_image = String(
display_name=_("Course About Page Image"),
help=_(
"Edit the name of the course image file. You must upload this file on the Files & Uploads page. "
"You can also set the course image on the Settings & Details page."
),
scope=Scope.settings,
# Ensure that courses imported from XML keep their image
default="images_course_image.jpg"
)
custom_logo = String(
display_name=_("Custom Logo Image"),
help=_(
"Edit the name of the custom logo image file. You must upload this file on the Files & Uploads page. "
"You can also set the custom logo image on the Settings & Details page."
),
scope=Scope.settings,
# Ensure that courses imported from XML keep their image
default=""
)
issue_badges = Boolean(
display_name=_("Issue Open Badges"),
help=_(
"Issue Open Badges badges for this course. Badges are generated when certificates are created."
),
scope=Scope.settings,
default=True
)
## Course level Certificate Name overrides.
cert_name_short = String(
help=_(
"Use this setting only when generating PDF certificates. "
"Between quotation marks, enter the short name of the course to use on the certificate that "
"students receive when they complete the course."
),
display_name=_("Certificate Name (Short)"),
scope=Scope.settings,
default=""
)
cert_name_long = String(
help=_(
"Use this setting only when generating PDF certificates. "
"Between quotation marks, enter the long name of the course to use on the certificate that students "
"receive when they complete the course."
),
display_name=_("Certificate Name (Long)"),
scope=Scope.settings,
default=""
)
cert_html_view_enabled = Boolean(
display_name=_("Certificate Web/HTML View Enabled"),
help=_("If true, certificate Web/HTML views are enabled for the course."),
scope=Scope.settings,
default=False,
)
cert_html_view_overrides = Dict(
        # Translators: This field is the container for course-specific certificate configuration values
display_name=_("Certificate Web/HTML View Overrides"),
# Translators: These overrides allow for an alternative configuration of the certificate web view
help=_("Enter course-specific overrides for the Web/HTML template parameters here (JSON format)"),
scope=Scope.settings,
)
# Specific certificate information managed via Studio (should eventually fold other cert settings into this)
certificates = Dict(
        # Translators: This field is the container for course-specific certificate configuration values
display_name=_("Certificate Configuration"),
# Translators: These overrides allow for an alternative configuration of the certificate web view
help=_("Enter course-specific configuration information here (JSON format)"),
scope=Scope.settings,
)
# An extra property is used rather than the wiki_slug/number because
# there are courses that change the number for different runs. This allows
# courses to share the same css_class across runs even if they have
# different numbers.
#
# TODO get rid of this as soon as possible or potentially build in a robust
# way to add in course-specific styling. There needs to be a discussion
# about the right way to do this, but arjun will address this ASAP. Also
# note that the courseware template needs to change when this is removed.
css_class = String(
display_name=_("CSS Class for Course Reruns"),
help=_("Allows courses to share the same css class across runs even if they have different numbers."),
scope=Scope.settings, default="",
deprecated=True
)
# TODO: This is a quick kludge to allow CS50 (and other courses) to
# specify their own discussion forums as external links by specifying a
# "discussion_link" in their policy JSON file. This should later get
# folded in with Syllabus, Course Info, and additional Custom tabs in a
# more sensible framework later.
discussion_link = String(
display_name=_("Discussion Forum External Link"),
help=_("Allows specification of an external link to replace discussion forums."),
scope=Scope.settings,
deprecated=True
)
# TODO: same as above, intended to let internal CS50 hide the progress tab
# until we get grade integration set up.
# Explicit comparison to True because we always want to return a bool.
hide_progress_tab = Boolean(
display_name=_("Hide Progress Tab"),
help=_("Allows hiding of the progress tab."),
scope=Scope.settings,
deprecated=True
)
display_organization = String(
display_name=_("Course Organization Display String"),
help=_(
"Enter the course organization that you want to appear in the courseware. This setting overrides the "
"organization that you entered when you created the course. To use the organization that you entered "
"when you created the course, enter null."
),
scope=Scope.settings
)
display_coursenumber = String(
display_name=_("Course Number Display String"),
help=_(
"Enter the course number that you want to appear in the courseware. This setting overrides the course "
"number that you entered when you created the course. To use the course number that you entered when "
"you created the course, enter null."
),
scope=Scope.settings,
default=""
)
max_student_enrollments_allowed = Integer(
display_name=_("Course Maximum Student Enrollment"),
help=_(
"Enter the maximum number of students that can enroll in the course. To allow an unlimited number of "
"students, enter null."
),
scope=Scope.settings
)
allow_public_wiki_access = Boolean(
display_name=_("Allow Public Wiki Access"),
help=_(
"Enter true or false. If true, edX users can view the course wiki even "<|fim▁hole|> )
invitation_only = Boolean(
display_name=_("Invitation Only"),
help=_("Whether to restrict enrollment to invitation by the course staff."),
default=False,
scope=Scope.settings
)
course_survey_name = String(
display_name=_("Pre-Course Survey Name"),
help=_("Name of SurveyForm to display as a pre-course survey to the user."),
default=None,
scope=Scope.settings,
deprecated=True
)
course_survey_required = Boolean(
display_name=_("Pre-Course Survey Required"),
help=_(
"Specify whether students must complete a survey before they can view your course content. If you "
"set this value to true, you must add a name for the survey to the Course Survey Name setting above."
),
default=False,
scope=Scope.settings,
deprecated=True
)
catalog_visibility = String(
display_name=_("Course Visibility In Catalog"),
help=_(
"Defines the access permissions for showing the course in the course catalog. This can be set to one "
"of three values: 'both' (show in catalog and allow access to about page), 'about' (only allow access "
"to about page), 'none' (do not show in catalog and do not allow access to an about page)."
),
default=CATALOG_VISIBILITY_CATALOG_AND_ABOUT,
scope=Scope.settings,
values=[
{"display_name": _("Both"), "value": CATALOG_VISIBILITY_CATALOG_AND_ABOUT},
{"display_name": _("About"), "value": CATALOG_VISIBILITY_ABOUT},
{"display_name": _("None"), "value": CATALOG_VISIBILITY_NONE}]
)
entrance_exam_enabled = Boolean(
display_name=_("Entrance Exam Enabled"),
help=_(
"Specify whether students must complete an entrance exam before they can view your course content. "
"Note, you must enable Entrance Exams for this course setting to take effect."
),
default=False,
scope=Scope.settings,
)
entrance_exam_minimum_score_pct = Float(
display_name=_("Entrance Exam Minimum Score (%)"),
help=_(
"Specify a minimum percentage score for an entrance exam before students can view your course content. "
"Note, you must enable Entrance Exams for this course setting to take effect."
),
default=65,
scope=Scope.settings,
)
entrance_exam_id = String(
display_name=_("Entrance Exam ID"),
help=_("Content module identifier (location) of entrance exam."),
default=None,
scope=Scope.settings,
)
is_course_hidden = Boolean(
display_name=_("Course Is Hidden"),
default=False,
help=_(
"Enter true or false. If true, the course is hidden."
),
scope=Scope.settings,
deprecated=True
)
course_order = String(
display_name=_("Course Order"),
help=_("Course Order"),
default=None,
scope=Scope.settings,
)
course_category = List(
display_name=_("Course Category"),
help=_("Course Category"),
default=[],
scope=Scope.settings,
)
course_category_order = String(
display_name=_("Course Category Order"),
help=_("Course Category Order"),
default=None,
scope=Scope.settings,
)
course_category2 = String(
display_name=_("Course Category2"),
help=_("Course Category2"),
default=None,
scope=Scope.settings,
)
course_category_order2 = String(
display_name=_("Course Category Order2"),
help=_("Course Category Order2"),
default=None,
scope=Scope.settings,
)
is_f2f_course = Boolean(
display_name=_("Face-to-Face Course"),
default=False,
help=_("Enter true or false. If true, course is f2f classroom."),
scope=Scope.settings,
)
is_f2f_course_sell = Boolean(
display_name=_("Sell Face-to-Face Course"),
default=False,
help=_("Enter true or false. If true, f2f classroom is for sale."),
scope=Scope.settings,
)
course_canonical_name = String(
display_name=_("Course Canonical Name"),
help=_("Course Canonical Name."),
default="",
scope=Scope.settings,
)
    course_contents_provider = String(
display_name=_("Course Contents Provider"),
help=_("Course contents provider."),
default="",
scope=Scope.settings,
)
teacher_name = String(
display_name=_("Teacher Name"),
help=_("Teacher name"),
default="",
scope=Scope.settings,
)
course_span = String(
display_name=_("Course Span"),
help=_("Offer period of the course."),
default="",
scope=Scope.settings,
)
social_sharing_url = String(
display_name=_("Social Media Sharing URL"),
help=_(
"If dashboard social sharing and custom course URLs are enabled, you can provide a URL "
"(such as the URL to a course About page) that social media sites can link to. URLs must "
"be fully qualified. For example: http://www.edx.org/course/Introduction-to-MOOCs-ITM001"
),
default=None,
scope=Scope.settings,
)
language = String(
display_name=_("Course Language"),
help=_("Specify the language of your course."),
default=None,
scope=Scope.settings
)
teams_configuration = Dict(
display_name=_("Teams Configuration"),
help=_(
"Enter configuration for the teams feature. Expects two entries: max_team_size and topics, where "
"topics is a list of topics."
),
scope=Scope.settings,
deprecated=True, # Deprecated until the teams feature is made generally available
)
enable_proctored_exams = Boolean(
display_name=_("Enable Proctored Exams"),
help=_(
"Enter true or false. If this value is true, proctored exams are enabled in your course. "
"Note that enabling proctored exams will also enable timed exams."
),
default=False,
scope=Scope.settings
)
enable_timed_exams = Boolean(
display_name=_("Enable Timed Exams"),
help=_(
"Enter true or false. If this value is true, timed exams are enabled in your course."
),
default=False,
scope=Scope.settings
)
minimum_grade_credit = Float(
display_name=_("Minimum Grade for Credit"),
help=_(
"The minimum grade that a learner must earn to receive credit in the course, "
"as a decimal between 0.0 and 1.0. For example, for 75%, enter 0.75."
),
default=0.8,
scope=Scope.settings,
)
self_paced = Boolean(
display_name=_("Self Paced"),
help=_(
"Set this to \"true\" to mark this course as self-paced. Self-paced courses do not have "
"due dates for assignments, and students can progress through the course at any rate before "
"the course ends."
),
default=False,
scope=Scope.settings
)
show_playback_tab = Boolean(
display_name=_("Show Playback Tab"),
help=_("Allows showing of the playback tab."),
default=False,
scope=Scope.settings,
)
show_attendance_tab = Boolean(
display_name=_("Show Attendance Tab"),
help=_("Allows showing of the attendance tab."),
default=False,
scope=Scope.settings
)
is_status_managed = Boolean(
display_name=_("Set course as status management target."),
help=_("Select 'True' to manage the status of this course."),
default=False,
scope=Scope.settings
)
target_library = List(
help=_(
"Target library list for the course."
),
scope=Scope.settings,
default=[]
)
playback_rate_1x_only = Boolean(
display_name=_("Hide Playback Rate"),
default=False,
help=_("JW Player playbackrate setting 1.0x only."),
scope=Scope.settings,
)
new_icon_display_days = Integer(
display_name=_("New Icon Display Days"),
        help=_(
            "The number of days to display the new icon for a date section on the info page."
        ),
default=7,
scope=Scope.settings,
)
class CourseModule(CourseFields, SequenceModule): # pylint: disable=abstract-method
"""
The CourseDescriptor needs its module_class to be a SequenceModule, but some code that
expects a CourseDescriptor to have all its fields can fail if it gets a SequenceModule instead.
This class is to make sure that all the fields are present in all cases.
"""
class CourseDescriptor(CourseFields, SequenceDescriptor, LicenseMixin):
"""
The descriptor for the course XModule
"""
module_class = CourseModule
def __init__(self, *args, **kwargs):
"""
Expects the same arguments as XModuleDescriptor.__init__
"""
super(CourseDescriptor, self).__init__(*args, **kwargs)
_ = self.runtime.service(self, "i18n").ugettext
if self.wiki_slug is None:
self.wiki_slug = self.location.course
if self.due_date_display_format is None and self.show_timezone is False:
# For existing courses with show_timezone set to False (and no due_date_display_format specified),
# set the due_date_display_format to what would have been shown previously (with no timezone).
# Then remove show_timezone so that if the user clears out the due_date_display_format,
# they get the default date display.
self.due_date_display_format = "DATE_TIME"
del self.show_timezone
# NOTE: relies on the modulestore to call set_grading_policy() right after
# init. (Modulestore is in charge of figuring out where to load the policy from)
# NOTE (THK): This is a last-minute addition for Fall 2012 launch to dynamically
# disable the syllabus content for courses that do not provide a syllabus
if self.system.resources_fs is None:
self.syllabus_present = False
else:
self.syllabus_present = self.system.resources_fs.exists(path('syllabus'))
self._grading_policy = {}
self.set_grading_policy(self.grading_policy)
if self.discussion_topics == {}:
self.discussion_topics = {_('General'): {'id': self.location.html_id()}}
try:
if not getattr(self, "tabs", []):
CourseTabList.initialize_default(self)
except InvalidTabsException as err:
raise type(err)('{msg} For course: {course_id}'.format(msg=err.message, course_id=unicode(self.id)))
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(CourseDescriptor, self).non_editable_metadata_fields
# The only supported mode is currently 'random'.
# Add the mode field to non_editable_metadata_fields so that it doesn't
# render in the edit form.
non_editable_fields.extend([CourseFields.target_library])
return non_editable_fields
def set_grading_policy(self, course_policy):
"""
The JSON object can have the keys GRADER and GRADE_CUTOFFS. If either is
missing, it reverts to the default.
"""
if course_policy is None:
course_policy = {}
# Load the global settings as a dictionary
grading_policy = self.grading_policy
# BOY DO I HATE THIS grading_policy CODE ACROBATICS YET HERE I ADD MORE (dhm)--this fixes things persisted w/
# defective grading policy values (but not None)
if 'GRADER' not in grading_policy:
grading_policy['GRADER'] = CourseFields.grading_policy.default['GRADER']
if 'GRADE_CUTOFFS' not in grading_policy:
grading_policy['GRADE_CUTOFFS'] = CourseFields.grading_policy.default['GRADE_CUTOFFS']
# Override any global settings with the course settings
grading_policy.update(course_policy)
# Here is where we should parse any configurations, so that we can fail early
# Use setters so that side effecting to .definitions works
self.raw_grader = grading_policy['GRADER'] # used for cms access
self.grade_cutoffs = grading_policy['GRADE_CUTOFFS']
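    # Example override, assuming a hypothetical course policy: passing
    # {"GRADE_CUTOFFS": {"A": 0.9, "Pass": 0.5}} keeps the default GRADER list
    # but replaces GRADE_CUTOFFS wholesale, because update() merges only at
    # the top level of the policy dict.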
@classmethod
def read_grading_policy(cls, paths, system):
"""Load a grading policy from the specified paths, in order, if it exists."""
# Default to a blank policy dict
policy_str = '{}'
for policy_path in paths:
if not system.resources_fs.exists(policy_path):
continue
log.debug("Loading grading policy from {0}".format(policy_path))
try:
with system.resources_fs.open(policy_path) as grading_policy_file:
policy_str = grading_policy_file.read()
# if we successfully read the file, stop looking at backups
break
except IOError:
msg = "Unable to load course settings file from '{0}'".format(policy_path)
log.warning(msg)
return policy_str
@classmethod
def from_xml(cls, xml_data, system, id_generator):
instance = super(CourseDescriptor, cls).from_xml(xml_data, system, id_generator)
# bleh, have to parse the XML here to just pull out the url_name attribute
# I don't think it's stored anywhere in the instance.
course_file = StringIO(xml_data.encode('ascii', 'ignore'))
xml_obj = etree.parse(course_file, parser=edx_xml_parser).getroot()
policy_dir = None
url_name = xml_obj.get('url_name', xml_obj.get('slug'))
if url_name:
policy_dir = 'policies/' + url_name
# Try to load grading policy
paths = ['grading_policy.json']
if policy_dir:
paths = [policy_dir + '/grading_policy.json'] + paths
try:
policy = json.loads(cls.read_grading_policy(paths, system))
except ValueError:
system.error_tracker("Unable to decode grading policy as json")
policy = {}
# now set the current instance. set_grading_policy() will apply some inheritance rules
instance.set_grading_policy(policy)
return instance
@classmethod
def definition_from_xml(cls, xml_object, system):
textbooks = []
for textbook in xml_object.findall("textbook"):
textbooks.append((textbook.get('title'), textbook.get('book_url')))
xml_object.remove(textbook)
# Load the wiki tag if it exists
wiki_slug = None
wiki_tag = xml_object.find("wiki")
if wiki_tag is not None:
wiki_slug = wiki_tag.attrib.get("slug", default=None)
xml_object.remove(wiki_tag)
definition, children = super(CourseDescriptor, cls).definition_from_xml(xml_object, system)
definition['textbooks'] = textbooks
definition['wiki_slug'] = wiki_slug
# load license if it exists
definition = LicenseMixin.parse_license_from_xml(definition, xml_object)
return definition, children
def definition_to_xml(self, resource_fs):
xml_object = super(CourseDescriptor, self).definition_to_xml(resource_fs)
        if len(self.textbooks) > 0:
            for textbook in self.textbooks:
                # One <textbook> element per book; reusing a single element
                # would keep only the last title/book_url pair.
                textbook_xml_object = etree.Element('textbook')
                textbook_xml_object.set('title', textbook.title)
                textbook_xml_object.set('book_url', textbook.book_url)
                xml_object.append(textbook_xml_object)
if self.wiki_slug is not None:
wiki_xml_object = etree.Element('wiki')
wiki_xml_object.set('slug', self.wiki_slug)
xml_object.append(wiki_xml_object)
# handle license specifically. Default the course to have a license
# of "All Rights Reserved", if a license is not explicitly set.
self.add_license_to_xml(xml_object, default="all-rights-reserved")
return xml_object
def has_ended(self):
"""
Returns True if the current time is after the specified course end date.
Returns False if there is no end date specified.
"""
return course_metadata_utils.has_course_ended(self.end)
def has_terminated(self):
"""
Returns True if the current time is after the specified course terminated date.
Returns False if there is no terminated date specified.
"""
# backward compatibility
if self.is_course_hidden:
return True
if self.terminate_start is None:
return False
return datetime.now(utc) > self.terminate_start
def is_course_deadline(self):
"""
        Returns True if the current time is after the specified course deadline date.
        Returns False if there is no deadline date specified.
"""
if self.deadline_start is None:
return False
return datetime.now(utc) > self.deadline_start
def may_certify(self):
"""
Return whether it is acceptable to show the student a certificate download link.
"""
return course_metadata_utils.may_certify_for_course(
self.certificates_display_behavior,
self.certificates_show_before_end,
self.has_ended()
)
def has_started(self):
return course_metadata_utils.has_course_started(self.start)
@property
def grader(self):
return grader_from_conf(self.raw_grader)
@property
def raw_grader(self):
# force the caching of the xblock value so that it can detect the change
# pylint: disable=pointless-statement
self.grading_policy['GRADER']
return self._grading_policy['RAW_GRADER']
@raw_grader.setter
def raw_grader(self, value):
# NOTE WELL: this change will not update the processed graders. If we need that, this needs to call grader_from_conf
self._grading_policy['RAW_GRADER'] = value
self.grading_policy['GRADER'] = value
@property
def grade_cutoffs(self):
return self._grading_policy['GRADE_CUTOFFS']
@grade_cutoffs.setter
def grade_cutoffs(self, value):
self._grading_policy['GRADE_CUTOFFS'] = value
# XBlock fields don't update after mutation
policy = self.grading_policy
policy['GRADE_CUTOFFS'] = value
self.grading_policy = policy
@property
def lowest_passing_grade(self):
return min(self._grading_policy['GRADE_CUTOFFS'].values())
@property
def is_cohorted(self):
"""
Return whether the course is cohorted.
Note: No longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
config = self.cohort_config
if config is None:
return False
return bool(config.get("cohorted"))
@property
def auto_cohort(self):
"""
Return whether the course is auto-cohorted.
Note: No longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
if not self.is_cohorted:
return False
return bool(self.cohort_config.get(
"auto_cohort", False))
@property
def auto_cohort_groups(self):
"""
Return the list of groups to put students into. Returns [] if not
specified. Returns specified list even if is_cohorted and/or auto_cohort are
false.
Note: No longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
if self.cohort_config is None:
return []
else:
return self.cohort_config.get("auto_cohort_groups", [])
@property
def top_level_discussion_topic_ids(self):
"""
Return list of topic ids defined in course policy.
"""
topics = self.discussion_topics
return [d["id"] for d in topics.values()]
@property
def cohorted_discussions(self):
"""
Return the set of discussions that is explicitly cohorted. It may be
the empty set. Note that all inline discussions are automatically
cohorted based on the course's is_cohorted setting.
Note: No longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
config = self.cohort_config
if config is None:
return set()
return set(config.get("cohorted_discussions", []))
@property
def always_cohort_inline_discussions(self):
"""
        This allows changing the default behavior of inline discussion cohorting. By
        setting this to False, all inline discussions are non-cohorted unless their
        ids are specified in cohorted_discussions.
Note: No longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
config = self.cohort_config
if config is None:
return True
return bool(config.get("always_cohort_inline_discussions", True))
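    # A minimal cohort_config sketch with hypothetical values, covering the
    # properties above:
    #   {"cohorted": True, "auto_cohort": True,
    #    "auto_cohort_groups": ["Group A", "Group B"],
    #    "cohorted_discussions": ["topic-1"],
    #    "always_cohort_inline_discussions": False}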
@property
def is_newish(self):
"""
        Returns whether the course has been flagged as new. If
        there is no flag, returns a heuristic value based on the
        announcement and start dates.
"""
flag = self.is_new
if flag is None:
# Use a heuristic if the course has not been flagged
announcement, start, now = course_metadata_utils.sorting_dates(
self.start, self.advertised_start, self.announcement
)
if announcement and (now - announcement).days < 30:
                # The course has been announced for less than a month
return True
elif (now - start).days < 1:
# The course has not started yet
return True
else:
return False
elif isinstance(flag, basestring):
return flag.lower() in ['true', 'yes', 'y']
else:
return bool(flag)
@property
def sorting_score(self):
"""
        Returns a tuple that can be used to sort the courses according
        to how "new" they are. The "newness" score is computed using a
heuristic that takes into account the announcement and
(advertised) start dates of the course if available.
The lower the number the "newer" the course.
"""
return course_metadata_utils.sorting_score(self.start, self.advertised_start, self.announcement)
@lazy
def grading_context(self):
"""
This returns a dictionary with keys necessary for quickly grading
a student. They are used by grades.grade()
The grading context has two keys:
graded_sections - This contains the sections that are graded, as
well as all possible children modules that can affect the
grading. This allows some sections to be skipped if the student
hasn't seen any part of it.
The format is a dictionary keyed by section-type. The values are
arrays of dictionaries containing
"section_descriptor" : The section descriptor
"xmoduledescriptors" : An array of xmoduledescriptors that
could possibly be in the section, for any student
all_descriptors - This contains a list of all xmodules that can
            affect grading a student. This is used to efficiently fetch
all the xmodule state for a FieldDataCache without walking
the descriptor tree again.
"""
# If this descriptor has been bound to a student, return the corresponding
# XModule. If not, just use the descriptor itself
try:
module = getattr(self, '_xmodule', None)
if not module:
module = self
except UndefinedContext:
module = self
def possibly_scored(usage_key):
"""Can this XBlock type can have a score or children?"""
return usage_key.block_type in self.block_types_affecting_grading
all_descriptors = []
graded_sections = {}
def yield_descriptor_descendents(module_descriptor):
for child in module_descriptor.get_children(usage_key_filter=possibly_scored):
yield child
for module_descriptor in yield_descriptor_descendents(child):
yield module_descriptor
for chapter in self.get_children():
for section in chapter.get_children():
if section.graded:
xmoduledescriptors = list(yield_descriptor_descendents(section))
xmoduledescriptors.append(section)
# The xmoduledescriptors included here are only the ones that have scores.
section_description = {
'section_descriptor': section,
'xmoduledescriptors': [child for child in xmoduledescriptors if child.has_score]
}
section_format = section.format if section.format is not None else ''
graded_sections[section_format] = graded_sections.get(section_format, []) + [section_description]
all_descriptors.extend(xmoduledescriptors)
all_descriptors.append(section)
return {'graded_sections': graded_sections,
'all_descriptors': all_descriptors, }
@lazy
def block_types_affecting_grading(self):
"""Return all block types that could impact grading (i.e. scored, or having children)."""
return frozenset(
cat for (cat, xblock_class) in XBlock.load_classes() if (
getattr(xblock_class, 'has_score', False) or getattr(xblock_class, 'has_children', False)
)
)
@staticmethod
def make_id(org, course, url_name):
return '/'.join([org, course, url_name])
@property
def id(self):
"""Return the course_id for this course"""
return self.location.course_key
def start_datetime_text(self, format_string="SHORT_DATE", time_zone=utc):
"""
        Returns the desired text corresponding to the course's start date and time in the specified time zone,
        defaulting to UTC. Prefers .advertised_start, then falls back to .start.
"""
i18n = self.runtime.service(self, "i18n")
return course_metadata_utils.course_start_datetime_text(
self.start,
self.advertised_start,
format_string,
time_zone,
i18n.ugettext,
i18n.strftime
)
@property
def start_date_is_still_default(self):
"""
Checks if the start date set for the course is still default, i.e. .start has not been modified,
and .advertised_start has not been set.
"""
return course_metadata_utils.course_start_date_is_default(
self.start,
self.advertised_start
)
def end_datetime_text(self, format_string="SHORT_DATE", time_zone=utc):
"""
Returns the end date or date_time for the course formatted as a string.
"""
return course_metadata_utils.course_end_datetime_text(
self.end,
format_string,
time_zone,
self.runtime.service(self, "i18n").strftime
)
def get_discussion_blackout_datetimes(self):
"""
Get a list of dicts with start and end fields with datetime values from
the discussion_blackouts setting
"""
date_proxy = Date()
try:
ret = [
{"start": date_proxy.from_json(start), "end": date_proxy.from_json(end)}
for start, end
in filter(None, self.discussion_blackouts)
]
for blackout in ret:
if not blackout["start"] or not blackout["end"]:
raise ValueError
return ret
except (TypeError, ValueError):
log.exception(
"Error parsing discussion_blackouts %s for course %s",
self.discussion_blackouts,
self.id
)
return []
@property
def forum_posts_allowed(self):
"""
Return whether forum posts are allowed by the discussion_blackouts
setting
"""
blackouts = self.get_discussion_blackout_datetimes()
now = datetime.now(utc)
for blackout in blackouts:
if blackout["start"] <= now <= blackout["end"]:
return False
return True
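    # Sketch, assuming one hypothetical blackout of
    # [["2015-09-15", "2015-09-21"]]: forum_posts_allowed is False for any
    # `now` inside that window (endpoints included, per the <= comparisons)
    # and True otherwise.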
@property
def number(self):
"""
Returns this course's number.
This is a "number" in the sense of the "course numbers" that you see at
lots of universities. For example, given a course
"Intro to Computer Science" with the course key "edX/CS-101/2014", the
course number would be "CS-101"
"""
return course_metadata_utils.number_for_course_location(self.location)
@property
def display_number_with_default(self):
"""
Return a display course number if it has been specified, otherwise return the 'course' that is in the location
"""
if self.display_coursenumber:
return self.display_coursenumber
return self.number
@property
def org(self):
return self.location.org
@property
def display_org_with_default(self):
"""
Return a display organization if it has been specified, otherwise return the 'org' that is in the location
"""
if self.display_organization:
return self.display_organization
return self.org
@property
def video_pipeline_configured(self):
"""
Returns whether the video pipeline advanced setting is configured for this course.
"""
return (
self.video_upload_pipeline is not None and
'course_video_upload_token' in self.video_upload_pipeline
)
def clean_id(self, padding_char='='):
"""
Returns a unique deterministic base32-encoded ID for the course.
The optional padding_char parameter allows you to override the "=" character used for padding.
"""
return course_metadata_utils.clean_course_key(self.location.course_key, padding_char)
@property
def teams_enabled(self):
"""
Returns whether or not teams has been enabled for this course.
Currently, teams are considered enabled when at least one topic has been configured for the course.
"""
if self.teams_configuration:
return len(self.teams_configuration.get('topics', [])) > 0
return False
@property
def teams_max_size(self):
"""
Returns the max size for teams if teams has been configured, else None.
"""
return self.teams_configuration.get('max_team_size', None)
@property
def teams_topics(self):
"""
Returns the topics that have been configured for teams for this course, else None.
"""
return self.teams_configuration.get('topics', None)
def get_user_partitions_for_scheme(self, scheme):
"""
Retrieve all user partitions defined in the course for a particular
partition scheme.
Arguments:
scheme (object): The user partition scheme.
Returns:
list of `UserPartition`
"""
return [
p for p in self.user_partitions
if p.scheme == scheme
]
def set_user_partitions_for_scheme(self, partitions, scheme):
"""
Set the user partitions for a particular scheme.
Preserves partitions associated with other schemes.
Arguments:
scheme (object): The user partition scheme.
Returns:
list of `UserPartition`
"""
other_partitions = [
p for p in self.user_partitions # pylint: disable=access-member-before-definition
if p.scheme != scheme
]
self.user_partitions = other_partitions + partitions # pylint: disable=attribute-defined-outside-init
@property
def can_toggle_course_pacing(self):
"""
Whether or not the course can be set to self-paced at this time.
Returns:
bool: False if the course has already started, True otherwise.
"""
return datetime.now(utc) <= self.start
class CourseSummary(object):
"""
A lightweight course summary class, which constructs split/mongo course summary without loading
the course. It is used at cms for listing courses to global staff user.
"""
course_info_fields = ['display_name', 'display_coursenumber', 'display_organization']
def __init__(self, course_locator, display_name=u"Empty", display_coursenumber=None, display_organization=None):
"""
Initialize and construct course summary
Arguments:
course_locator (CourseLocator): CourseLocator object of the course.
            display_name (unicode): display name of the course. When a course is created from the console,
                the course block has no `display_name` key, so "Empty" is used as the default once the
                course is loaded. display_name may also be explicitly set to None in Course Advanced
                Settings; in that case None is kept rather than replaced with "Empty".
display_coursenumber (unicode|None): Course number that is specified & appears in the courseware
display_organization (unicode|None): Course organization that is specified & appears in the courseware
"""
self.display_coursenumber = display_coursenumber
self.display_organization = display_organization
self.display_name = display_name
self.id = course_locator # pylint: disable=invalid-name
self.location = course_locator.make_usage_key('course', 'course')
@property
def display_org_with_default(self):
"""
Return a display organization if it has been specified, otherwise return the 'org' that
is in the location
"""
if self.display_organization:
return self.display_organization
return self.location.org
@property
def display_number_with_default(self):
"""
Return a display course number if it has been specified, otherwise return the 'course' that
is in the location
"""
if self.display_coursenumber:
return self.display_coursenumber
return self.location.course<|fim▁end|> | "if they're not enrolled in the course."
),
default=False,
scope=Scope.settings |
<|file_name|>be_equivalent_to_matcher.go<|end_file_name|><|fim▁begin|>package matchers
import (<|fim▁hole|> "fmt"
"github.com/bfontaine/go-tchoutchou/Godeps/_workspace/src/github.com/onsi/gomega/format"
"reflect"
)
type BeEquivalentToMatcher struct {
Expected interface{}
}
func (matcher *BeEquivalentToMatcher) Match(actual interface{}) (success bool, err error) {
if actual == nil && matcher.Expected == nil {
return false, fmt.Errorf("Both actual and expected must not be nil.")
}
convertedActual := actual
if actual != nil && matcher.Expected != nil && reflect.TypeOf(actual).ConvertibleTo(reflect.TypeOf(matcher.Expected)) {
convertedActual = reflect.ValueOf(actual).Convert(reflect.TypeOf(matcher.Expected)).Interface()
}
return reflect.DeepEqual(convertedActual, matcher.Expected), nil
}
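// Illustrative note (not part of the original file): Match converts the
// actual value to the expected value's type before comparing, so numeric
// conversions can truncate. Assuming the usual gomega wiring, both of the
// following would pass:
//
//     Expect(float32(1)).To(BeEquivalentTo(1))  // float32(1) -> int(1)
//     Expect(1.9).To(BeEquivalentTo(1))         // float64(1.9) -> int(1), truncated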
func (matcher *BeEquivalentToMatcher) FailureMessage(actual interface{}) (message string) {
return format.Message(actual, "to be equivalent to", matcher.Expected)
}
func (matcher *BeEquivalentToMatcher) NegatedFailureMessage(actual interface{}) (message string) {
return format.Message(actual, "not to be equivalent to", matcher.Expected)
}<|fim▁end|> | |
<|file_name|>replica_group.js<|end_file_name|><|fim▁begin|>/*!
* Copyright (C) 2015 SequoiaDB Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var constants = require('./const');
var Node = require('./node');
var util = require('util');
var ReplicaGroup = function (conn, name, groupId) {
Object.defineProperty(this, 'conn', {
value: conn,
enumerable: false
});
this.name = name;
this.groupId = groupId;
this.isCatalog = (name === constants.CATALOG_GROUP);
};
/** \fn stop(callback)
* \brief Stop the current replica group
* \return True on success or False on failure
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.stop = function (callback) {
this.stopStart(false, callback);
};
/** \fn start(callback)
* \brief Start the current replica group
* \return True on success or False on failure
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.start = function (callback) {
this.stopStart(true, callback);
};
/** \fn getNodeCount(callback)
* \brief Get the number of nodes in the current group
* \return The node count
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getNodeCount = function (callback) {
this.getDetail(function (err, detail) {
if (err) {
return callback(err);
}
var nodes = detail[constants.FIELD_GROUP];
callback(null, (nodes && nodes.length) || 0);
});
};
/** \fn getDetail(callback)
* \brief Get the detail information of the current group
* \return The detail information as a JSON object
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getDetail = function (callback) {
var matcher = {};
matcher[constants.FIELD_GROUPNAME] = this.name;
matcher[constants.FIELD_GROUPID] = this.groupId;
this.conn.getList(constants.SDB_LIST_GROUPS, matcher, {}, {}, function (err, cursor) {
if (err) {
return callback(err);
}
if (cursor) {
cursor.next(function (err, detail) {
if (err) {
return callback(err);
}
if (detail) {
callback(null, detail);
} else {
callback(new Error('SDB_CLS_GRP_NOT_EXIST'));
}
});
} else {
callback(new Error('SDB_SYS'));
}
});
};
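// Illustrative usage sketch (variable names are made up):
//
//     group.getDetail(function (err, detail) {
//       if (err) { return console.error(err); }
//       // `detail` carries the fields read via constants.FIELD_PRIMARYNODE
//       // and constants.FIELD_GROUP in getMaster()/getSlave() below.
//     });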
/** \fn createNode(hostname, port, dbpath, map, callback)
* \brief Create the replica node
* \param hostName The host name of node
* \param port The port of node
* \param dbpath The database path of node
* \param map The other configure information of node
* \return The Node object
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.createNode = function (hostname, port, dbpath, map, callback) {
if (!hostname || port < 0 || port > 65535 || !dbpath) {
throw new Error('SDB_INVALIDARG');
}
var command = constants.ADMIN_PROMPT + constants.CREATE_CMD + " " +
constants.NODE;
var matcher = {};
matcher[constants.FIELD_GROUPNAME] = this.name;
// TODO: mutating the caller's map by deleting properties is not ideal
delete map[constants.FIELD_GROUPNAME];
matcher[constants.FIELD_HOSTNAME] = hostname;
delete map[constants.FIELD_HOSTNAME];
matcher[constants.SVCNAME] = '' + port;
delete map[constants.SVCNAME];
matcher[constants.DBPATH] = dbpath;
delete map[constants.DBPATH];
util._extend(matcher, map);
var that = this;
this.conn.sendAdminCommand(command, matcher, {}, {}, {}, function (err) {
if (err) {
return callback(err);
}
that.getNode(hostname, port, callback);
});
};
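// Illustrative usage sketch (host, port and path are made up):
//
//     group.createNode('dbserver-1', 11830, '/opt/sequoiadb/data/11830', {},
//       function (err, node) {
//         if (err) { return console.error(err); }
//         // `node` is the Node instance resolved via getNode() above.
//       });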
/** \fn removeNode(hostname, port, configure, callback)
* \brief Remove the specified replica node
* \param hostName The host name of node
* \param port The port of node
* \param configure The configurations for the replica node
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.removeNode = function (hostname, port, configure, callback) {
if (!hostname || port < 0 || port > 65535) {
throw new Error("SDB_INVALIDARG");
}
var command = constants.ADMIN_PROMPT + constants.REMOVE_CMD + " " +
constants.NODE;
var config = {};
config[constants.FIELD_GROUPNAME] = this.name;
config[constants.FIELD_HOSTNAME] = hostname;
config[constants.SVCNAME] = '' + port;
if (configure) {
var keys = Object.keys(configure);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (key === constants.FIELD_GROUPNAME ||
key === constants.FIELD_HOSTNAME ||
key === constants.SVCNAME) {
continue;
}
config[key] = configure[key];
}
}
this.conn.sendAdminCommand(command, config, {}, {}, {}, function (err) {
callback(err);
});
};
/** \fn getMaster(callback)
* \brief Get the master node of current group
* \return The fitted node or null
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getMaster = function (callback) {
var that = this;
this.getDetail(function (err, detail) {
if (err) {
return callback(err);
}
var primaryNode = detail[constants.FIELD_PRIMARYNODE];
var nodes = detail[constants.FIELD_GROUP];
if (typeof primaryNode !== 'number' || !Array.isArray(nodes)) {
return callback(new Error("SDB_SYS"));
}
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
var nodeId = node[constants.FIELD_NODEID];
if (typeof nodeId !== 'number') {
return callback(new Error("SDB_SYS"));
}
if (nodeId === primaryNode) {
var extracted = that.extractNode(node);
return callback(null, extracted);
}
}
callback(null, null);
});
};
/** \fn getSlave(callback)
* \brief Get the slave node of current group
* \return The fitted node or null
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getSlave = function (callback) {
var that = this;
this.getDetail(function (err, detail) {
if (err) {
return callback(err);
}
var primaryID = detail[constants.FIELD_PRIMARYNODE];
var nodes = detail[constants.FIELD_GROUP];
if (typeof primaryID !== 'number' || !Array.isArray(nodes)) {
return callback(new Error("SDB_SYS"));
}
var slaves = [];
var primaryNode;
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
var nodeId = node[constants.FIELD_NODEID];
if (typeof nodeId !== 'number') {
return callback(new Error("SDB_SYS"));
}
if (nodeId !== primaryID) {
slaves.push(node);
} else {
primaryNode = node;
}
}
if (slaves.length > 0) {
// pick a pseudo-random slave based on the current time
var index = (new Date().getTime()) % slaves.length;
callback(null, that.extractNode(slaves[index]));
} else {
callback(null, that.extractNode(primaryNode));
}
});
};
/** \fn getNodeByName(nodename, callback)
* \brief Get the node by node name
* \param nodeName The node name
* \return The fitted node or null
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getNodeByName = function (nodename, callback) {
if (!nodename || nodename.indexOf(constants.NODE_NAME_SERVICE_SEP) === -1) {
throw new Error("SDB_INVALIDARG");
}
var parts = nodename.split(constants.NODE_NAME_SERVICE_SEP);
var hostname = parts[0];
var port = parseInt(parts[1], 10);
if (!hostname || !port) {
throw new Error("SDB_INVALIDARG");
}
this.getNode(hostname, port, callback);
};
/** \fn getNode(hostname, port, callback)
* \brief Get the node by host name and port
* \param hostName The host name
* \param port The port
* \return The fitted node or null
* \exception SequoiaDB.Error
* \exception System.Exception
*/
ReplicaGroup.prototype.getNode = function (hostname, port, callback) {
var that = this;
this.getDetail(function (err, detail) {
if (err) {
return callback(err);
}
var nodes = detail[constants.FIELD_GROUP];
if (!Array.isArray(nodes)) {
return callback(new Error("SDB_SYS"));
}
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
var _hostname = node[constants.FIELD_HOSTNAME];
if (typeof _hostname !== 'string') {
return callback(new Error("SDB_SYS"));
}
if (hostname === _hostname) {
var extracted = that.extractNode(node);
if (extracted.port === port) {
return callback(null, extracted);
}
}
}
callback(null, null);
});
};
ReplicaGroup.prototype.extractNode = function (node) {
var hostname = node[constants.FIELD_HOSTNAME];
if (typeof hostname !== 'string') {
throw new Error("SDB_SYS");
}
var nodeId = node[constants.FIELD_NODEID];
if (typeof nodeId !== 'number') {
throw new Error("SDB_SYS");
}
var svcs = node[constants.FIELD_SERVICE];
if (!Array.isArray(svcs)) {
throw new Error("SDB_SYS");
}
for (var i = 0; i < svcs.length; i++) {<|fim▁hole|> if (typeof type !== 'number') {
throw new Error("SDB_SYS");
}
if (type === 0) {
var serviceName = svc[constants.FIELD_NAME];
return new Node(this, hostname, parseInt(serviceName, 10), nodeId);
}
}
return null;
};
ReplicaGroup.prototype.stopStart = function (start, callback) {
var command = constants.ADMIN_PROMPT +
(start ? constants.ACTIVE_CMD
: constants.SHUTDOWN_CMD) + " " + constants.GROUP;
var matcher = {};
matcher[constants.FIELD_GROUPNAME] = this.name;
matcher[constants.FIELD_GROUPID] = this.groupId;
this.conn.sendAdminCommand(command, matcher, {}, {}, {}, function (err) {
callback(null, !err);
});
};
module.exports = ReplicaGroup;<|fim▁end|> | var svc = svcs[i];
var type = svc[constants.FIELD_SERVICE_TYPE]; |
<|file_name|>datafactory.py<|end_file_name|><|fim▁begin|># This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, datatables, common, database, helpers
import datetime
class DataFactory(object):
"""
Retrieve and process data from the monitor database
"""
<|fim▁hole|> data_tables = datatables.DataTables()
group_by = ['session_history.reference_id'] if grouping else ['session_history.id']
columns = ['session_history.reference_id',
'session_history.id',
'started AS date',
'MIN(started) AS started',
'MAX(stopped) AS stopped',
'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \
SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS duration',
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter',
'session_history.user_id',
'session_history.user',
'(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) \
AS friendly_name',
'platform',
'player',
'ip_address',
'session_history.media_type',
'session_history_metadata.rating_key',
'session_history_metadata.parent_rating_key',
'session_history_metadata.grandparent_rating_key',
'session_history_metadata.full_title',
'session_history_metadata.parent_title',
'session_history_metadata.year',
'session_history_metadata.media_index',
'session_history_metadata.parent_media_index',
'session_history_metadata.thumb',
'session_history_metadata.parent_thumb',
'session_history_metadata.grandparent_thumb',
'((CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) / \
(CASE WHEN session_history_metadata.duration IS NULL THEN 1.0 \
ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete',
'session_history_media_info.video_decision',
'session_history_media_info.audio_decision',
'COUNT(*) AS group_count',
'GROUP_CONCAT(session_history.id) AS group_ids'
]
try:
query = data_tables.ssp_query(table_name='session_history',
columns=columns,
custom_where=custom_where,
group_by=group_by,
join_types=['LEFT OUTER JOIN',
'JOIN',
'JOIN'],
join_tables=['users',
'session_history_metadata',
'session_history_media_info'],
join_evals=[['session_history.user_id', 'users.user_id'],
['session_history.id', 'session_history_metadata.id'],
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_history: %s." % e)
return {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
'data': 'null',
'error': 'Unable to execute database query.'}
history = query['result']
filter_duration = 0
total_duration = self.get_total_duration(custom_where=custom_where)
rows = []
for item in history:
filter_duration += int(item['duration'])
if item['media_type'] == 'episode' and item['parent_thumb']:
thumb = item['parent_thumb']
elif item['media_type'] == 'episode':
thumb = item['grandparent_thumb']
else:
thumb = item['thumb']
if item['percent_complete'] >= watched_percent:
watched_status = 1
elif item['percent_complete'] >= watched_percent/2:
watched_status = 0.5
else:
watched_status = 0
# Rename Mystery platform names
platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
row = {'reference_id': item['reference_id'],
'id': item['id'],
'date': item['date'],
'started': item['started'],
'stopped': item['stopped'],
'duration': item['duration'],
'paused_counter': item['paused_counter'],
'user_id': item['user_id'],
'user': item['user'],
'friendly_name': item['friendly_name'],
'platform': platform,
'player': item['player'],
'ip_address': item['ip_address'],
'media_type': item['media_type'],
'rating_key': item['rating_key'],
'parent_rating_key': item['parent_rating_key'],
'grandparent_rating_key': item['grandparent_rating_key'],
'full_title': item['full_title'],
'parent_title': item['parent_title'],
'year': item['year'],
'media_index': item['media_index'],
'parent_media_index': item['parent_media_index'],
'thumb': thumb,
'video_decision': item['video_decision'],
'audio_decision': item['audio_decision'],
'percent_complete': int(round(item['percent_complete'])),
'watched_status': watched_status,
'group_count': item['group_count'],
'group_ids': item['group_ids']
}
rows.append(row)
output = {'recordsFiltered': query['filteredCount'],
'recordsTotal': query['totalCount'],
'data': rows,
'draw': query['draw'],
'filter_duration': helpers.human_duration(filter_duration, sig='dhm'),
'total_duration': helpers.human_duration(total_duration, sig='dhm')
}
return output
def get_home_stats(self, grouping=0, time_range='30', stats_type=0, stats_count='5', stats_cards=[], notify_watched_percent='85'):
monitor_db = database.MonitorDatabase()
group_by = 'session_history.reference_id' if grouping else 'session_history.id'
sort_type = 'total_plays' if stats_type == 0 else 'total_duration'
home_stats = []
for stat in stats_cards:
if stat == 'top_tv':
top_tv = []
try:
query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "episode" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.grandparent_title ' \
'ORDER BY %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
return None
for item in result:
row = {'title': item['grandparent_title'],
'total_plays': item['total_plays'],
'total_duration': item['total_duration'],
'users_watched': '',
'rating_key': item['grandparent_rating_key'],
'last_play': item['last_watch'],
'grandparent_thumb': item['grandparent_thumb'],
'thumb': '',
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
top_tv.append(row)
home_stats.append({'stat_id': stat,
'stat_type': sort_type,
'rows': top_tv})
elif stat == 'popular_tv':
popular_tv = []
try:
query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, ' \
'COUNT(DISTINCT t.user_id) AS users_watched, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "episode" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.grandparent_title ' \
'ORDER BY users_watched DESC, %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
return None
for item in result:
row = {'title': item['grandparent_title'],
'users_watched': item['users_watched'],
'rating_key': item['grandparent_rating_key'],
'last_play': item['last_watch'],
'total_plays': item['total_plays'],
'grandparent_thumb': item['grandparent_thumb'],
'thumb': '',
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
popular_tv.append(row)
home_stats.append({'stat_id': stat,
'rows': popular_tv})
elif stat == 'top_movies':
top_movies = []
try:
query = 'SELECT t.id, t.full_title, t.rating_key, t.thumb, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "movie" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.full_title ' \
'ORDER BY %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
return None
for item in result:
row = {'title': item['full_title'],
'total_plays': item['total_plays'],
'total_duration': item['total_duration'],
'users_watched': '',
'rating_key': item['rating_key'],
'last_play': item['last_watch'],
'grandparent_thumb': '',
'thumb': item['thumb'],
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
top_movies.append(row)
home_stats.append({'stat_id': stat,
'stat_type': sort_type,
'rows': top_movies})
elif stat == 'popular_movies':
popular_movies = []
try:
query = 'SELECT t.id, t.full_title, t.rating_key, t.thumb, ' \
'COUNT(DISTINCT t.user_id) AS users_watched, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "movie" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.full_title ' \
'ORDER BY users_watched DESC, %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
return None
for item in result:
row = {'title': item['full_title'],
'users_watched': item['users_watched'],
'rating_key': item['rating_key'],
'last_play': item['last_watch'],
'total_plays': item['total_plays'],
'grandparent_thumb': '',
'thumb': item['thumb'],
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
popular_movies.append(row)
home_stats.append({'stat_id': stat,
'rows': popular_movies})
elif stat == 'top_music':
top_music = []
try:
query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "track" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.grandparent_title ' \
'ORDER BY %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
return None
for item in result:
row = {'title': item['grandparent_title'],
'total_plays': item['total_plays'],
'total_duration': item['total_duration'],
'users_watched': '',
'rating_key': item['grandparent_rating_key'],
'last_play': item['last_watch'],
'grandparent_thumb': item['grandparent_thumb'],
'thumb': '',
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
top_music.append(row)
home_stats.append({'stat_id': stat,
'stat_type': sort_type,
'rows': top_music})
elif stat == 'popular_music':
popular_music = []
try:
query = 'SELECT t.id, t.grandparent_title, t.grandparent_rating_key, t.grandparent_thumb, ' \
'COUNT(DISTINCT t.user_id) AS users_watched, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) as total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND session_history.media_type = "track" ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.grandparent_title ' \
'ORDER BY users_watched DESC, %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
return None
for item in result:
row = {'title': item['grandparent_title'],
'users_watched': item['users_watched'],
'rating_key': item['grandparent_rating_key'],
'last_play': item['last_watch'],
'total_plays': item['total_plays'],
'grandparent_thumb': item['grandparent_thumb'],
'thumb': '',
'user': '',
'friendly_name': '',
'platform_type': '',
'platform': '',
'row_id': item['id']
}
popular_music.append(row)
home_stats.append({'stat_id': stat,
'rows': popular_music})
elif stat == 'top_users':
top_users = []
try:
query = 'SELECT t.user, t.user_id, t.user_thumb, t.custom_thumb, ' \
'(CASE WHEN t.friendly_name IS NULL THEN t.username ELSE t.friendly_name END) ' \
' AS friendly_name, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d, users.thumb AS user_thumb, users.custom_avatar_url AS custom_thumb ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' LEFT OUTER JOIN users ON session_history.user_id = users.user_id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.user_id ' \
'ORDER BY %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
return None
for item in result:
if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']:
user_thumb = item['custom_thumb']
elif item['user_thumb']:
user_thumb = item['user_thumb']
else:
user_thumb = common.DEFAULT_USER_THUMB
row = {'user': item['user'],
'user_id': item['user_id'],
'friendly_name': item['friendly_name'],
'total_plays': item['total_plays'],
'total_duration': item['total_duration'],
'last_play': item['last_watch'],
'user_thumb': user_thumb,
'grandparent_thumb': '',
'users_watched': '',
'rating_key': '',
'title': '',
'platform_type': '',
'platform': '',
'row_id': ''
}
top_users.append(row)
home_stats.append({'stat_id': stat,
'stat_type': sort_type,
'rows': top_users})
elif stat == 'top_platforms':
top_platform = []
try:
query = 'SELECT t.platform, ' \
'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \
'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \
' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \
' AS d ' \
' FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' GROUP BY %s) AS t ' \
'GROUP BY t.platform ' \
'ORDER BY %s DESC ' \
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
return None
for item in result:
# Rename Mystery platform names
platform_type = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform'])
row = {'platform': item['platform'],
'total_plays': item['total_plays'],
'total_duration': item['total_duration'],
'last_play': item['last_watch'],
'platform_type': platform_type,
'title': '',
'thumb': '',
'grandparent_thumb': '',
'users_watched': '',
'rating_key': '',
'user': '',
'friendly_name': '',
'row_id': ''
}
top_platform.append(row)
home_stats.append({'stat_id': stat,
'stat_type': sort_type,
'rows': top_platform})
elif stat == 'last_watched':
last_watched = []
try:
query = 'SELECT t.id, t.full_title, t.rating_key, t.thumb, t.grandparent_thumb, ' \
't.user, t.user_id, t.custom_avatar_url as user_thumb, t.player, ' \
'(CASE WHEN t.friendly_name IS NULL THEN t.username ELSE t.friendly_name END) ' \
' AS friendly_name, ' \
'MAX(t.started) AS last_watch, ' \
'((CASE WHEN t.view_offset IS NULL THEN 0.1 ELSE t.view_offset * 1.0 END) / ' \
' (CASE WHEN t.duration IS NULL THEN 1.0 ELSE t.duration * 1.0 END) * 100) ' \
' AS percent_complete ' \
'FROM (SELECT * FROM session_history ' \
' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
' LEFT OUTER JOIN users ON session_history.user_id = users.user_id ' \
' WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
' >= datetime("now", "-%s days", "localtime") ' \
' AND (session_history.media_type = "movie" ' \
' OR session_history_metadata.media_type = "episode") ' \
' GROUP BY %s) AS t ' \
'WHERE percent_complete >= %s ' \
'GROUP BY t.id ' \
'ORDER BY last_watch DESC ' \
'LIMIT %s' % (time_range, group_by, notify_watched_percent, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e)
return None
for item in result:
if not item['grandparent_thumb']:
thumb = item['thumb']
else:
thumb = item['grandparent_thumb']
row = {'row_id': item['id'],
'user': item['user'],
'friendly_name': item['friendly_name'],
'user_id': item['user_id'],
'user_thumb': item['user_thumb'],
'title': item['full_title'],
'rating_key': item['rating_key'],
'thumb': thumb,
'grandparent_thumb': item['grandparent_thumb'],
'last_watch': item['last_watch'],
'player': item['player']
}
last_watched.append(row)
home_stats.append({'stat_id': stat,
'rows': last_watched})
elif stat == 'most_concurrent':
def calc_most_concurrent(title, result):
'''
Function to calculate most concurrent streams
Input: Stat title, SQLite query result
Output: Dict {title, count, started, stopped}
'''
# Tag start times with 'B' and stop times with 'A' so that, after the
# lexicographic sort below, a stream stopping at the same second another
# one starts is processed as stopped first ('A' sorts before 'B').
times = []
for item in result:
times.append({'time': str(item['started']) + 'B', 'count': 1})
times.append({'time': str(item['stopped']) + 'A', 'count': -1})
times = sorted(times, key=lambda k: k['time'])
count = 0
last_count = 0
last_start = 0
concurrent = {'title': title,
'count': 0,
'started': None,
'stopped': None
}
for d in times:
if d['count'] == 1:
count += d['count']
if count >= last_count:
last_start = d['time']
else:
if count >= last_count:
last_count = count
concurrent['count'] = count
concurrent['started'] = last_start[:-1]
concurrent['stopped'] = d['time'][:-1]
count += d['count']
return concurrent
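# Worked example (illustrative): two sessions spanning [10, 40] and [20, 30]
# become the tagged events 10B, 40A, 20B, 30A. Sorted, the sweep sees
# 10B (count 1), 20B (count 2), 30A (peak recorded), then 40A, so the result
# is {'count': 2, 'started': '20', 'stopped': '30'}.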
most_concurrent = []
try:
base_query = 'SELECT session_history.started, session_history.stopped ' \
'FROM session_history ' \
'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \
'WHERE datetime(stopped, "unixepoch", "localtime") ' \
'>= datetime("now", "-%s days", "localtime") ' % time_range
title = 'Concurrent Streams'
query = base_query
result = monitor_db.select(query)
if result:
most_concurrent.append(calc_most_concurrent(title, result))
title = 'Concurrent Transcodes'
query = base_query \
+ 'AND (session_history_media_info.video_decision = "transcode" ' \
'OR session_history_media_info.audio_decision = "transcode") '
result = monitor_db.select(query)
if result:
most_concurrent.append(calc_most_concurrent(title, result))
title = 'Concurrent Direct Streams'
query = base_query \
+ 'AND (session_history_media_info.video_decision != "transcode" ' \
'AND session_history_media_info.audio_decision = "copy") '
result = monitor_db.select(query)
if result:
most_concurrent.append(calc_most_concurrent(title, result))
title = 'Concurrent Direct Plays'
query = base_query \
+ 'AND (session_history_media_info.video_decision = "direct play" ' \
'OR session_history_media_info.audio_decision = "direct play") '
result = monitor_db.select(query)
if result:
most_concurrent.append(calc_most_concurrent(title, result))
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." % e)
return None
home_stats.append({'stat_id': stat,
'rows': most_concurrent})
return home_stats
def get_library_stats(self, library_cards=[]):
monitor_db = database.MonitorDatabase()
library_stats = []
for section_id in library_cards:
if section_id.isdigit():
try:
query = 'SELECT section_id, section_name, section_type, thumb, count, parent_count, child_count ' \
'FROM library_sections ' \
'WHERE section_id = %s ' % section_id
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_library_stats: %s." % e)
return None
for item in result:
library = {'section_id': item['section_id'],
'section_name': item['section_name'],
'section_type': item['section_type'],
'thumb': item['thumb'],
'count': item['count'],
'parent_count': item['parent_count'],
'child_count': item['child_count']
}
library_stats.append(library)
return library_stats
def get_stream_details(self, row_id=None):
monitor_db = database.MonitorDatabase()
if row_id:
query = 'SELECT container, bitrate, video_resolution, width, height, aspect_ratio, video_framerate, ' \
'video_codec, audio_codec, audio_channels, video_decision, transcode_video_codec, transcode_height, ' \
'transcode_width, audio_decision, transcode_audio_codec, transcode_audio_channels, media_type, ' \
'title, grandparent_title ' \
'from session_history_media_info ' \
'join session_history_metadata on session_history_media_info.id = session_history_metadata.id ' \
'where session_history_media_info.id = ?'
result = monitor_db.select(query, args=[row_id])
else:
return None
stream_output = {}
for item in result:
stream_output = {'container': item['container'],
'bitrate': item['bitrate'],
'video_resolution': item['video_resolution'],
'width': item['width'],
'height': item['height'],
'aspect_ratio': item['aspect_ratio'],
'video_framerate': item['video_framerate'],
'video_codec': item['video_codec'],
'audio_codec': item['audio_codec'],
'audio_channels': item['audio_channels'],
'transcode_video_dec': item['video_decision'],
'transcode_video_codec': item['transcode_video_codec'],
'transcode_height': item['transcode_height'],
'transcode_width': item['transcode_width'],
'transcode_audio_dec': item['audio_decision'],
'transcode_audio_codec': item['transcode_audio_codec'],
'transcode_audio_channels': item['transcode_audio_channels'],
'media_type': item['media_type'],
'title': item['title'],
'grandparent_title': item['grandparent_title']
}
return stream_output
def get_metadata_details(self, rating_key):
monitor_db = database.MonitorDatabase()
if rating_key:
query = 'SELECT session_history_metadata.rating_key, session_history_metadata.parent_rating_key, ' \
'session_history_metadata.grandparent_rating_key, session_history_metadata.title, ' \
'session_history_metadata.parent_title, session_history_metadata.grandparent_title, ' \
'session_history_metadata.full_title, library_sections.section_name, ' \
'session_history_metadata.media_index, session_history_metadata.parent_media_index, ' \
'session_history_metadata.section_id, session_history_metadata.thumb, ' \
'session_history_metadata.parent_thumb, session_history_metadata.grandparent_thumb, ' \
'session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, ' \
'session_history_metadata.originally_available_at, session_history_metadata.added_at, ' \
'session_history_metadata.updated_at, session_history_metadata.last_viewed_at, ' \
'session_history_metadata.content_rating, session_history_metadata.summary, ' \
'session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, ' \
'session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, ' \
'session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, ' \
'session_history_media_info.container, session_history_media_info.bitrate, ' \
'session_history_media_info.video_codec, session_history_media_info.video_resolution, ' \
'session_history_media_info.video_framerate, session_history_media_info.audio_codec, ' \
'session_history_media_info.audio_channels ' \
'FROM session_history_metadata ' \
'JOIN library_sections ON session_history_metadata.section_id = library_sections.section_id ' \
'JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id ' \
'WHERE session_history_metadata.rating_key = ?'
result = monitor_db.select(query=query, args=[rating_key])
else:
result = []
metadata = {}
for item in result:
directors = item['directors'].split(';') if item['directors'] else []
writers = item['writers'].split(';') if item['writers'] else []
actors = item['actors'].split(';') if item['actors'] else []
genres = item['genres'].split(';') if item['genres'] else []
metadata = {'media_type': item['media_type'],
'rating_key': item['rating_key'],
'parent_rating_key': item['parent_rating_key'],
'grandparent_rating_key': item['grandparent_rating_key'],
'grandparent_title': item['grandparent_title'],
'parent_media_index': item['parent_media_index'],
'parent_title': item['parent_title'],
'media_index': item['media_index'],
'studio': item['studio'],
'title': item['title'],
'content_rating': item['content_rating'],
'summary': item['summary'],
'tagline': item['tagline'],
'rating': item['rating'],
'duration': item['duration'],
'year': item['year'],
'thumb': item['thumb'],
'parent_thumb': item['parent_thumb'],
'grandparent_thumb': item['grandparent_thumb'],
'art': item['art'],
'originally_available_at': item['originally_available_at'],
'added_at': item['added_at'],
'updated_at': item['updated_at'],
'last_viewed_at': item['last_viewed_at'],
'guid': item['guid'],
'writers': writers,
'directors': directors,
'genres': genres,
'actors': actors,
'library_name': item['section_name'],
'section_id': item['section_id'],
'container': item['container'],
'bitrate': item['bitrate'],
'video_codec': item['video_codec'],
'video_resolution': item['video_resolution'],
'video_framerate': item['video_framerate'],
'audio_codec': item['audio_codec'],
'audio_channels': item['audio_channels']
}
return metadata
def get_total_duration(self, custom_where=None):
monitor_db = database.MonitorDatabase()
# Build the WHERE clause from (column, value) pairs. Values are interpolated
# directly into the SQL string, so custom_where must come from trusted
# internal callers only.
if custom_where:
where = 'WHERE ' + ' AND '.join([w[0] + ' = "' + w[1] + '"' for w in custom_where])
else:
where = ''
try:
query = 'SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - ' \
'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration ' \
'FROM session_history ' \
'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \
'%s ' % where
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_total_duration: %s." % e)
return None
total_duration = 0
for item in result:
total_duration = item['total_duration']
return total_duration
def get_session_ip(self, session_key=''):
monitor_db = database.MonitorDatabase()
if session_key:
query = 'SELECT ip_address FROM sessions WHERE session_key = %d' % int(session_key)
result = monitor_db.select(query)
else:
return None
ip_address = 'N/A'
for item in result:
ip_address = item['ip_address']
return ip_address
def get_search_query(self, rating_key=''):
monitor_db = database.MonitorDatabase()
if rating_key:
query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \
'media_index, parent_media_index, year, media_type ' \
'FROM session_history_metadata ' \
'WHERE rating_key = ? ' \
'OR parent_rating_key = ? ' \
'OR grandparent_rating_key = ? ' \
'LIMIT 1'
result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key])
else:
result = []
query = {}
query_string = None
media_type = None
for item in result:
title = item['title']
parent_title = item['parent_title']
grandparent_title = item['grandparent_title']
media_index = item['media_index']
parent_media_index = item['parent_media_index']
year = item['year']
if str(item['rating_key']) == rating_key:
query_string = item['title']
media_type = item['media_type']
elif str(item['parent_rating_key']) == rating_key:
if item['media_type'] == 'episode':
query_string = item['grandparent_title']
media_type = 'season'
elif item['media_type'] == 'track':
query_string = item['parent_title']
media_type = 'album'
elif str(item['grandparent_rating_key']) == rating_key:
if item['media_type'] == 'episode':
query_string = item['grandparent_title']
media_type = 'show'
elif item['media_type'] == 'track':
query_string = item['grandparent_title']
media_type = 'artist'
if query_string and media_type:
query = {'query_string': query_string,
'title': title,
'parent_title': parent_title,
'grandparent_title': grandparent_title,
'media_index': media_index,
'parent_media_index': parent_media_index,
'year': year,
'media_type': media_type,
'rating_key': rating_key
}
else:
return None
return query
def get_rating_keys_list(self, rating_key='', media_type=''):
monitor_db = database.MonitorDatabase()
if media_type == 'movie':
key_list = {0: {'rating_key': int(rating_key)}}
return key_list
if media_type == 'artist' or media_type == 'album' or media_type == 'track':
match_type = 'title'
else:
match_type = 'index'
# Get the grandparent rating key
try:
query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key ' \
'FROM session_history_metadata ' \
'WHERE rating_key = ? ' \
'OR parent_rating_key = ? ' \
'OR grandparent_rating_key = ? ' \
'LIMIT 1'
result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key])
grandparent_rating_key = result[0]['grandparent_rating_key']
except Exception as e:
logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e)
return {}
query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \
'media_index, parent_media_index ' \
'FROM session_history_metadata ' \
'WHERE {0} = ? ' \
'GROUP BY {1} '
# get grandparent_rating_keys
grandparents = {}
result = monitor_db.select(query=query.format('grandparent_rating_key', 'grandparent_rating_key'),
args=[grandparent_rating_key])
for item in result:
# get parent_rating_keys
parents = {}
result = monitor_db.select(query=query.format('grandparent_rating_key', 'parent_rating_key'),
args=[item['grandparent_rating_key']])
for item in result:
# get rating_keys
children = {}
result = monitor_db.select(query=query.format('parent_rating_key', 'rating_key'),
args=[item['parent_rating_key']])
for item in result:
key = item['media_index']
children.update({key: {'rating_key': item['rating_key']}})
key = item['parent_media_index'] if match_type == 'index' else item['parent_title']
parents.update({key:
{'rating_key': item['parent_rating_key'],
'children': children}
})
key = 0 if match_type == 'index' else item['grandparent_title']
grandparents.update({key:
{'rating_key': item['grandparent_rating_key'],
'children': parents}
})
key_list = grandparents
return key_list
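# Illustrative shape of the returned key_list for a show (made-up values):
#
#     {0: {'rating_key': 1111, 'children': {
#         1: {'rating_key': 2222, 'children': {
#             1: {'rating_key': 3331},
#             2: {'rating_key': 3332}}}}}}
#
# i.e. grandparent -> parents keyed by media index -> children keyed by
# media index (titles are used as keys instead when match_type is 'title').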
def delete_session_history_rows(self, row_id=None):
monitor_db = database.MonitorDatabase()
if row_id and row_id.isdigit():
logger.info(u"PlexPy DataFactory :: Deleting row id %s from the session history database." % row_id)
session_history_del = \
monitor_db.action('DELETE FROM session_history WHERE id = ?', [row_id])
session_history_media_info_del = \
monitor_db.action('DELETE FROM session_history_media_info WHERE id = ?', [row_id])
session_history_metadata_del = \
monitor_db.action('DELETE FROM session_history_metadata WHERE id = ?', [row_id])
return 'Deleted rows %s.' % row_id
else:
return 'Unable to delete rows. Input row not valid.'
def update_metadata(self, old_key_list='', new_key_list='', media_type=''):
from plexpy import pmsconnect
pms_connect = pmsconnect.PmsConnect()
monitor_db = database.MonitorDatabase()
# helper to map old rating keys to their new counterparts
def get_pairs(old, new):
pairs = {}
for k, v in old.iteritems():
if k in new:
pairs.update({v['rating_key']: new[k]['rating_key']})
if 'children' in old[k]:
pairs.update(get_pairs(old[k]['children'], new[k]['children']))
return pairs
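# Illustrative sketch: given old/new key lists shaped like the output of
# get_rating_keys_list() above, get_pairs flattens matching entries into
# {old_rating_key: new_rating_key}. With made-up values:
#
#     get_pairs({0: {'rating_key': 1, 'children': {}}},
#               {0: {'rating_key': 9, 'children': {}}})   # -> {1: 9}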
# map old rating keys to new rating keys
mapping = {}
if old_key_list and new_key_list:
mapping = get_pairs(old_key_list, new_key_list)
if mapping:
logger.info(u"PlexPy DataFactory :: Updating metadata in the database.")
for old_key, new_key in mapping.iteritems():
result = pms_connect.get_metadata_details(new_key)
if result:
metadata = result['metadata']
if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist':
# check grandparent_rating_key (2 tables)
monitor_db.action('UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
[new_key, old_key])
monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?',
[new_key, old_key])
elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album':
# check parent_rating_key (2 tables)
monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?',
[new_key, old_key])
monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?',
[new_key, old_key])
else:
# check rating_key (2 tables)
monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?',
[new_key, old_key])
monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?',
[new_key, old_key])
# update session_history_metadata table
self.update_metadata_details(old_key, new_key, metadata)
return 'Updated metadata in database.'
else:
return 'Unable to update metadata in database. No changes were made.'
def update_metadata_details(self, old_rating_key='', new_rating_key='', metadata=None):
if metadata:
# Create full_title
if metadata['media_type'] == 'episode' or metadata['media_type'] == 'track':
full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title'])
else:
full_title = metadata['title']
directors = ";".join(metadata['directors'])
writers = ";".join(metadata['writers'])
actors = ";".join(metadata['actors'])
genres = ";".join(metadata['genres'])
#logger.info(u"PlexPy DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key)
monitor_db = database.MonitorDatabase()
# Update the session_history_metadata table
query = 'UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, ' \
'grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, full_title = ?, ' \
'media_index = ?, parent_media_index = ?, section_id = ?, thumb = ?, parent_thumb = ?, ' \
'grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, ' \
'added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, ' \
'tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, ' \
'genres = ?, studio = ? ' \
'WHERE rating_key = ?'
args = [metadata['rating_key'], metadata['parent_rating_key'], metadata['grandparent_rating_key'],
metadata['title'], metadata['parent_title'], metadata['grandparent_title'], full_title,
metadata['media_index'], metadata['parent_media_index'], metadata['section_id'], metadata['thumb'],
metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'], metadata['media_type'],
metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'],
metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'],
metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres,
metadata['studio'],
old_rating_key]
monitor_db.action(query=query, args=args)<|fim▁end|> | def __init__(self):
pass
def get_datatables_history(self, kwargs=None, custom_where=None, grouping=0, watched_percent=85): |
<|file_name|>hydrogen.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
hydrogen
~~~~~~~~
Hydrogen is an extremely lightweight workflow enhancement tool for Python
web applications, providing bower/npm-like functionality for both pip and
bower packages.
:author: David Gidwani <[email protected]>
:license: BSD, see LICENSE for details
"""
import atexit
from collections import defaultdict
from functools import update_wrapper
import json
import os
import re
import shutil
import sys
import tempfile
import yaml
import zipfile
import click
import envoy
from pathlib import Path, PurePath
from pathspec import GitIgnorePattern, PathSpec
from pip._vendor import pkg_resources
import requests
import rfc6266
import semver
__version__ = "0.0.1-alpha"
prog_name = "hydrogen"
app_dir = click.get_app_dir(prog_name)
github_api_uri = "https://api.github.com"
debug = True
# borrowed from werkzeug._compat
PY2 = sys.version_info[0] == 2
if PY2:
from urlparse import urlparse
text_type = unicode # noqa: Undefined in py3
else:
from urllib.parse import urlparse
text_type = str
class InvalidRequirementSpecError(Exception):
pass
class InvalidPackageError(Exception):
pass
class PackageNotFoundError(Exception):
pass
class VersionNotFoundError(Exception):
pass
def get_installed_pypackages():
return {p.project_name.lower(): p for p in pkg_resources.working_set}
def success(message, **kwargs):
kwargs["fg"] = kwargs.get("fg", "green")
click.secho(message, **kwargs)
def warning(message, **kwargs):
kwargs["fg"] = kwargs.get("fg", "red")
click.secho(u"warning: {}".format(message), **kwargs)
def error(message, level="error", exit_code=1, **kwargs):
kwargs["fg"] = kwargs.get("fg", "red")
click.secho(u"error: {}".format(message), **kwargs)
sys.exit(exit_code)
def fatal(message, **kwargs):
error(message, level="fatal", **kwargs)
def secure_filename(filename):
r"""Borrowed from :mod:`werkzeug.utils`, under the BSD 3-clause license.
Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability.
On windows systems the function also makes sure that the file is not
named after one of the special device files.
>>> secure_filename("My cool movie.mov")
'My_cool_movie.mov'
>>> secure_filename("../../../etc/passwd")
'etc_passwd'
>>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
'i_contain_cool_umlauts.txt'
The function might return an empty filename. It's your responsibility
to ensure that the filename is unique and that you generate random
filename if the function returned an empty one.
:param filename: the filename to secure
"""
_filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]')
_windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4',
'LPT1', 'LPT2', 'LPT3', 'PRN', 'NUL')
if isinstance(filename, text_type):
from unicodedata import normalize
filename = normalize('NFKD', filename).encode('ascii', 'ignore')
if not PY2:
filename = filename.decode('ascii')
for sep in os.path.sep, os.path.altsep:
if sep:
filename = filename.replace(sep, ' ')
filename = str(_filename_ascii_strip_re.sub('', '_'.join(
filename.split()))).strip('._')
# on nt a couple of special files are present in each folder. We
# have to ensure that the target file is not such a filename. In
# this case we prepend an underline
if os.name == 'nt' and filename and \
filename.split('.')[0].upper() in _windows_device_files:
filename = '_' + filename
return filename
def get(url, session=None, silent=not debug, **kwargs):
"""Retrieve a given URL and log response.
:param session: a :class:`requests.Session` object.
:param silent: if **True**, response status and URL will not be printed.
"""
session = session or requests
kwargs["verify"] = kwargs.get("verify", True)
r = session.get(url, **kwargs)
if not silent:
status_code = click.style(
str(r.status_code),
fg="green" if r.status_code in (200, 304) else "red")
click.echo(status_code + " " + url)
if r.status_code == 404:
raise PackageNotFoundError
return r
def download_file(url, dest=None, chunk_size=1024, replace="ask",
label="Downloading {dest_basename} ({size:.2f}MB)",
expected_extension=None):
"""Download a file from a given URL and display progress.
:param dest: If the destination exists and is a directory, the filename
will be guessed from the Content-Disposition header. If the destination
is an existing file, the user will either be prompted to overwrite, or
the file will be replaced (depending on the value of **replace**). If
the destination does not exist, it will be used as the filename.
:param int chunk_size: bytes read in at a time.
:param replace: If `False`, an existing destination file will not be
overwritten.
:param label: a string which is formatted and displayed as the progress bar
label. Variables provided include *dest_basename*, *dest*, and *size*.
:param expected_extension: if set, the filename will be sanitized to ensure
it has the given extension. The extension should not start with a dot
(`.`).
"""
dest = Path(dest or url.split("/")[-1])
response = get(url, stream=True)
if (dest.exists()
and dest.is_dir()
and "Content-Disposition" in response.headers):
content_disposition = rfc6266.parse_requests_response(response)
if expected_extension is not None:
filename = content_disposition.filename_sanitized(expected_extension)
else:
filename = content_disposition.filename_unsafe
filename = secure_filename(filename)
dest = dest / filename
if dest.exists() and not dest.is_dir():
if (replace is False
or replace == "ask"
and not click.confirm("Replace {}?".format(dest))):
return str(dest)
size = int(response.headers.get("content-length", 0))
label = label.format(dest=dest, dest_basename=dest.name,
size=size/1024.0/1024)
with click.open_file(str(dest), "wb") as f:
content_iter = response.iter_content(chunk_size=chunk_size)
with click.progressbar(content_iter, length=size/1024,
label=label) as bar:
for chunk in bar:<|fim▁hole|> return str(dest)
def get_dir_from_zipfile(zip_file, fallback=None):
"""Return the name of the root folder in a zip file.
:param zip_file: a :class:`zipfile.ZipFile` instance.
:param fallback: if `None`, the name of the zip file is used. This is
returned if the zip file contains more than one top-level directory,
or none at all.
"""
fallback = fallback or zip_file.filename
directories = [name for name in zip_file.namelist() if name.endswith("/")
and len(PurePath(name).parts) == 1]
return fallback if len(directories) != 1 else directories[0]
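# Illustrative example (hypothetical archive layout): for a GitHub-style
# zip whose entries all live under "project-1.0/", this returns that
# directory name; otherwise the fallback is returned.
#
#     with zipfile.ZipFile("project-1.0.zip") as zf:
#         root = get_dir_from_zipfile(zf)   # "project-1.0/"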
def mkdtemp(suffix="", prefix=__name__ + "_", dir=None, cleanup=True,
on_cleanup_error=None):
"""Create a temporary directory and register a handler to cleanup on exit.
:param suffix: suffix of the temporary directory, defaults to empty.
:param prefix: prefix of the temporary directory, defaults to `__name__`
and an underscore.
:param dir: if provided, the directory will be created in `dir` rather than
the system default temp directory.
:param cleanup: if `True`, an atexit handler will be registered to remove
the temp directory on exit.
:param on_cleanup_error: a callback which is called if the atexit handler
encounters an exception. It is passed three parameters: *function*,
*path*, and *excinfo*. For more information, see the :mod:`atexit`
documentation.
"""
path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
if cleanup:
if on_cleanup_error is None:
def on_cleanup_error(function, path, excinfo):
click.secho("warning: failed to remove file or directory: {}\n"
"please delete it manually.".format(path),
fg="red")
atexit.register(shutil.rmtree, path=path, onerror=on_cleanup_error)
return path
class Requirement(object):
"""Represents a single package requirement.
.. note::
This class overrides `__hash__` in order to ensure that package
names remain unique when in a set.
.. todo::
Extend :class:`pkg_resources.Requirement` for Python requirements.
"""
# TODO: support multiple version specs (e.g. >=1.0,<=2.0)
spec_regex = r"(.+?)\s*(?:([<>~=]?=)\s*(.+?))?$"
def __init__(self, package, version):
"""Construct a new requirement.
:param package: the package name.
:param version: a semver compatible version specification.
"""
self.package = package
self.version = version
if self.version and not re.match(r"[<=>~]", version[:2]):
self.version = "=={}".format(self.version)
@classmethod
def coerce(cls, string):
"""Create a :class:`Requirement` object from a given package spec."""
match = re.match(cls.spec_regex, string)
if not match:
raise InvalidRequirementSpecError("could not parse requirement")
package = match.group(1)
if all(match.group(2, 3)):
version = "".join(match.group(2, 3))
else:
version = None
return cls(package, version)
def load_installed_version(self):
installed_packages = get_installed_pypackages()
if self.package in installed_packages:
self.version = "=={}".format(
installed_packages[self.package].version)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
other.package == self.package)
def __hash__(self):
return hash(self.package)
def __str__(self):
return "".join([self.package, self.version or ""])
def __repr__(self):
return "<Requirement(package={package}, version='{version}')>".format(
package=self.package, version=self.version)
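# Illustrative sketch, not part of the original module: Requirement.coerce
# accepts both bare names and version specs --
#
#     Requirement.coerce("flask")         # version is None
#     Requirement.coerce("flask>=0.10")   # version is ">=0.10"
#     Requirement("flask", "0.10")        # bare versions are pinned: "==0.10"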
class Requirements(set):
"""Represents a set of requirements."""
def __init__(self, filename=None):
self.filename = None
if filename:
self.load(filename)
def add(self, elem, replace=False):
"""Add a requirement.
:param elem: a string or :class:`Requirement` instance.
:param replace: if `True`, packages in the set with the same name will
be removed first.
"""
if isinstance(elem, text_type):
elem = Requirement.coerce(elem)
if replace and elem in self:
self.remove(elem)
super(Requirements, self).add(elem)
def load(self, requirements_file=None):
"""Load or reload requirements from a requirements.txt file.
:param requirements_file: if not given, the filename used from
initialization will be read again.
"""
if requirements_file is None:
requirements_file = self.filename
if requirements_file is None:
raise ValueError("no filename provided")
elif isinstance(requirements_file, text_type):
requirements_file = Path(requirements_file)
self.clear()
with requirements_file.open() as f:
self.loads(f.read())
if isinstance(requirements_file, (text_type, Path)):
self.filename = requirements_file
def loads(self, requirements_text):
lines = re.findall(Requirement.spec_regex,
requirements_text,
re.MULTILINE)
for line in lines:
self.add(Requirement(line[0], "".join(line[1:])))
def remove(self, elem):
"""Remove a requirement.
:param elem: a string or :class:`Requirement` instance.
"""
if isinstance(elem, text_type):
for requirement in self:
if requirement.package == elem:
return super(Requirements, self).remove(requirement)
return super(Requirements, self).remove(elem)
def __str__(self):
return "\n".join([str(x) for x in self])
def __repr__(self):
return "<Requirements({})>".format(self.filename.name or "")
class NamedRequirements(Requirements):
def __init__(self, name, filename=None):
self.name = name
super(NamedRequirements, self).__init__(filename=filename)
def __repr__(self):
return "<NamedRequirements({}{})>".format(
self.name,
", filename='{}'".format(self.filename.name) if self.filename
else "")
class GroupedRequirements(defaultdict):
default_groups = ["all", "dev", "bower", "bower-dev"]
default_pip_files = {
"all": "requirements.txt",
"dev": "dev-requirements.txt"
}
def __init__(self, groups=None):
super(GroupedRequirements, self).__init__(NamedRequirements)
self.groups = groups or self.default_groups
self.filename = None
self.create_default_groups()
def clear(self):
super(GroupedRequirements, self).clear()
self.create_default_groups()
def create_default_groups(self):
for group in self.groups:
group = group.replace(" ", "_").lower()
self[group] = NamedRequirements(group)
def load_pip_requirements(self, files_map=None, freeze=True):
if files_map is None:
files_map = self.default_pip_files
for group, requirements_txt in files_map.items():
path = Path(requirements_txt)
if not path.exists() and group.lower() == "all" and freeze:
cmd = envoy.run("pip freeze")
self[group].loads(cmd.std_out)
elif path.exists():
self[group].load(path)
def load(self, filename, create_if_missing=True):
filename = Path(filename)
if not filename.exists() and create_if_missing:
self.load_pip_requirements()
with filename.open("w") as f:
f.write(yaml.dump(self.serialized, default_flow_style=False,
encoding=None))
self.filename = filename
return self.save(filename)
with filename.open() as f:
            for group, requirements in yaml.safe_load(f.read()).items():
for requirement in requirements:
self[group].add(Requirement.coerce(requirement))
self.filename = filename
def save(self, filename=None):
filename = Path(filename) if filename is not None else self.filename
with filename.open("w") as f:
f.write(self.yaml)
@property
def serialized(self):
to_ret = {}
for group, requirements in self.items():
to_ret[group] = [str(requirement) for requirement in requirements]
return to_ret
@property
def yaml(self):
return yaml.dump(self.serialized, default_flow_style=False,
encoding=None)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
else:
ret = self[key] = self.default_factory(name=key)
return ret
class Bower(object):
bower_base_uri = "https://bower.herokuapp.com"
@classmethod
def get_package_url(cls, package, session=None, silent=False):
response = get("{}/packages/{}".format(cls.bower_base_uri, package))
return response.json().get("url", None)
@classmethod
def clean_semver(cls, version_spec):
        return re.sub(r"([<>=~])\s+?v?", "\\1", version_spec,
                      flags=re.IGNORECASE)
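    # Illustrative sketch, not part of the original source: clean_semver
    # strips whitespace and a leading "v" from bower-style specs, e.g.
    # ">= v1.2.3" -> ">=1.2.3", so the result can be fed to semver.match.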
class Hydrogen(object):
def __init__(self, assets_dir=None, requirements_file="requirements.yml"):
self.assets_dir = assets_dir or Path(".") / "assets"
self.requirements = GroupedRequirements()
self.requirements.load(requirements_file)
self.temp_dir = mkdtemp()
def extract_bower_zipfile(self, zip_file, dest, expected_version=None):
bower_json = None
root = None
deps_installed = []
for info in zip_file.infolist():
if PurePath(info.filename).name == "bower.json":
with zip_file.open(info) as f:
bower_json = json.load(f)
root = str(PurePath(info.filename).parent)
break
        if bower_json is None:
            click.secho("error: no bower.json found in package archive",
                        fg="red")
            raise InvalidPackageError
        version = bower_json["version"]
if expected_version is not None:
expected_version = Bower.clean_semver(expected_version)
if not semver.match(version, expected_version):
click.secho("error: versions do not match ({} =/= {})".format(
version, expected_version))
raise InvalidPackageError
if "dependencies" in bower_json:
for package, version in bower_json["dependencies"].items():
url = Bower.get_package_url(package)
deps_installed.extend(self.get_bower_package(
url, dest=dest, version=version))
        ignore_patterns = [GitIgnorePattern(ig)
                           for ig in bower_json.get("ignore", [])]
path_spec = PathSpec(ignore_patterns)
namelist = [path for path in zip_file.namelist()
if PurePath(path).parts[0] == root]
ignored = list(path_spec.match_files(namelist))
for path in namelist:
dest_path = PurePath(
bower_json["name"],
*PurePath(path).parts[1:])
            if path in ignored:
                continue
            # Skip entries nested inside a directory that has already been
            # marked as ignored.
            if any(parent in ignored for parent in PurePath(path).parents):
                continue
if path.endswith("/"):
if list(path_spec.match_files([str(dest_path)])):
ignored.append(PurePath(path))
elif not (dest / dest_path).is_dir():
(dest / dest_path).mkdir(parents=True)
else:
target_path = dest / dest_path.parent / dest_path.name
source = zip_file.open(path)
target = target_path.open("wb")
with source, target:
shutil.copyfileobj(source, target)
deps_installed.append((bower_json["name"], bower_json["version"]))
return deps_installed
def get_bower_package(self, url, dest=None, version=None,
process_deps=True):
dest = dest or Path(".") / "assets"
parsed_url = urlparse(url)
if parsed_url.scheme == "git" or parsed_url.path.endswith(".git"):
if parsed_url.netloc == "github.com":
user, repo = parsed_url.path[1:-4].split("/")
response = get(github_api_uri +
"/repos/{}/{}/tags".format(user, repo))
tags = response.json()
target = None
if not len(tags):
click.secho("fatal: no tags exist for {}/{}".format(
user, repo), fg="red")
raise InvalidPackageError
if version is None:
target = tags[0]
else:
for tag in tags:
if semver.match(tag["name"],
Bower.clean_semver(version)):
target = tag
break
if not target:
click.secho(
"fatal: failed to find matching tag for "
"{user}/{repo} {version}".format(user, repo, version),
fg="red")
raise VersionNotFoundError
click.secho("installing {}/{}#{}".format(
user, repo, tags[0]["name"]), fg="green")
return self.get_bower_package(
url=target["zipball_url"],
dest=dest,
version=version)
            click.echo("git clone {url}".format(url=url))
            cmd = envoy.run('git clone {url} "{dest}"'.format(
                url=url, dest=dest))
            # The clone itself works, but handling a cloned repository's
            # bower.json is not implemented yet.
            raise NotImplementedError
elif parsed_url.scheme in ("http", "https"):
zip_dest = download_file(url, dest=self.temp_dir,
label="{dest_basename}",
expected_extension="zip")
with zipfile.ZipFile(zip_dest, "r") as pkg:
return self.extract_bower_zipfile(pkg, dest,
expected_version=version)
# pkg.extractall(str(dest))
else:
click.secho("protocol currently unsupported :(")
sys.exit(1)
def install_bower(self, package, save=True, save_dev=False):
"""Installs a bower package.
:param save: if `True`, pins the package to the Hydrogen requirements
YAML file.
:param save_dev: if `True`, pins the package as a development
dependency to the Hydrogen requirements YAML file.
:param return: a list of tuples, containing all installed package names
and versions, including any dependencies.
"""
requirement = Requirement.coerce(package)
url = Bower.get_package_url(requirement.package)
installed = []
for name, _ in self.get_bower_package(url):
installed.append(Requirement(name, requirement.version))
for requirement in installed:
if save:
self.requirements["bower"].add(requirement, replace=True)
if save_dev:
self.requirements["bower-dev"].add(requirement, replace=True)
success("installed {}".format(str(requirement)))
if save or save_dev:
self.requirements.save()
return installed
def install_pip(self, package, save=True, save_dev=False):
"""Installs a pip package.
:param save: if `True`, pins the package to the Hydrogen requirements
YAML file.
:param save_dev: if `True`, pins the package as a development
dependency to the Hydrogen requirements YAML file.
:param return: a **single** :class:`Requirement` object, representing
the installed version of the given package.
"""
requirement = Requirement.coerce(package)
click.echo("pip install " + requirement.package)
cmd = envoy.run("pip install {}".format(str(requirement)))
if cmd.status_code == 0:
installed_packages = get_installed_pypackages()
package = installed_packages[requirement.package]
requirement.version = "=={}".format(package.version)
if save:
self.requirements["all"].add(requirement)
if save_dev:
self.requirements["dev"].add(requirement)
if save or save_dev:
self.requirements.save()
return requirement
else:
fatal(cmd.std_err)
def groups_option(f):
new_func = click.option("-g", "--groups",
help="Comma-separated list of requirement groups "
"to include.")(f)
return update_wrapper(new_func, f)
@click.group()
@click.version_option(prog_name=prog_name)
@click.pass_context
def main(ctx):
which = "where" if sys.platform == "win32" else "which"
if envoy.run(which + " git").status_code != 0:
click.secho("fatal: git not found in PATH", fg="red")
sys.exit(1)
ctx.obj = Hydrogen()
@main.command()
@click.pass_obj
@click.option("output_yaml", "--yaml", "-y", is_flag=True,
help="Show requirements in YAML format.")
@click.option("--resolve", "-r", is_flag=True,
help="Resolve version numbers for ambiguous packages.")
@groups_option
def freeze(h, output_yaml, resolve, groups):
"""Output installed packages."""
if not groups:
groups = filter(lambda group: not group.lower().startswith("bower"),
h.requirements.keys())
else:
groups = [text_type.strip(group) for group in groups.split(",")]
if output_yaml:
for requirements in h.requirements.values():
for requirement in requirements:
if resolve and not requirement.version:
requirement.load_installed_version()
click.echo(h.requirements.yaml)
else:
for group in groups:
if not h.requirements[group]:
continue
click.echo("# {}".format(group))
for requirement in h.requirements[group]:
if resolve and not requirement.version:
requirement.load_installed_version()
click.echo(str(requirement))
@main.command()
@click.pass_obj
@click.option("--pip/--bower", default=True)
@groups_option
@click.option("--save", is_flag=True)
@click.option("--save-dev", is_flag=True)
@click.argument("packages", nargs=-1)
def install(h, pip, groups, save, save_dev, packages):
"""Install a pip or bower package."""
if groups:
groups = [text_type.strip(group) for group in groups.split(",")]
else:
groups = h.requirements.keys()
if not packages:
for group in groups:
if group not in h.requirements:
warning("{} not in requirements".format(group))
continue
install = (h.install_bower if group.startswith("bower")
else h.install_pip)
for requirement in h.requirements[group]:
install(str(requirement), save=False, save_dev=False)
if pip:
for package in packages:
h.install_pip(package, save=save, save_dev=save_dev)
else:
for package in packages:
h.install_bower(package, save=save, save_dev=save_dev)
if __name__ == "__main__":
main()<|fim▁end|> | if chunk:
f.write(chunk)
f.flush() |
<|file_name|>IndexedTriangleStripSet.js<|end_file_name|><|fim▁begin|>/* -*- Mode: JavaScript; coding: utf-8; tab-width: 3; indent-tabs-mode: tab; c-basic-offset: 3 -*-
*******************************************************************************
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* Copyright create3000, Scheffelstraße 31a, Leipzig, Germany 2011.
*
* All rights reserved. Holger Seelig <[email protected]>.
*
 * The copyright notice above does not evidence any actual or intended
* publication of such source code, and is an unpublished work by create3000.
* This material contains CONFIDENTIAL INFORMATION that is the property of
* create3000.
*
* No permission is granted to copy, distribute, or create derivative works from
* the contents of this software, in whole or in part, without the prior written
* permission of create3000.
*
* NON-MILITARY USE ONLY
*
* All create3000 software are effectively free software with a non-military use
* restriction. It is free. Well commented source is provided. You may reuse the
 * source in any way you please with the exception that anything that uses it
 * must be marked to indicate it contains 'non-military use only' components.
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* Copyright 2015, 2016 Holger Seelig <[email protected]>.
*
* This file is part of the Cobweb Project.
*
* Cobweb is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 3 only, as published by the
* Free Software Foundation.
*
* Cobweb is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License version 3 for more
* details (a copy is included in the LICENSE file that accompanied this code).
*<|fim▁hole|> * along with Cobweb. If not, see <http://www.gnu.org/licenses/gpl.html> for a
* copy of the GPLv3 License.
*
* For Silvio, Joy and Adi.
*
******************************************************************************/
define ([
"jquery",
"cobweb/Fields",
"cobweb/Basic/X3DFieldDefinition",
"cobweb/Basic/FieldDefinitionArray",
"cobweb/Components/Rendering/X3DComposedGeometryNode",
"cobweb/Bits/X3DConstants",
],
function ($,
Fields,
X3DFieldDefinition,
FieldDefinitionArray,
X3DComposedGeometryNode,
X3DConstants)
{
"use strict";
function IndexedTriangleStripSet (executionContext)
{
X3DComposedGeometryNode .call (this, executionContext);
this .addType (X3DConstants .IndexedTriangleStripSet);
this .triangleIndex = [ ];
}
IndexedTriangleStripSet .prototype = $.extend (Object .create (X3DComposedGeometryNode .prototype),
{
constructor: IndexedTriangleStripSet,
fieldDefinitions: new FieldDefinitionArray ([
new X3DFieldDefinition (X3DConstants .inputOutput, "metadata", new Fields .SFNode ()),
new X3DFieldDefinition (X3DConstants .initializeOnly, "solid", new Fields .SFBool (true)),
new X3DFieldDefinition (X3DConstants .initializeOnly, "ccw", new Fields .SFBool (true)),
new X3DFieldDefinition (X3DConstants .initializeOnly, "colorPerVertex", new Fields .SFBool (true)),
new X3DFieldDefinition (X3DConstants .initializeOnly, "normalPerVertex", new Fields .SFBool (true)),
new X3DFieldDefinition (X3DConstants .initializeOnly, "index", new Fields .MFInt32 ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "attrib", new Fields .MFNode ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "fogCoord", new Fields .SFNode ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "color", new Fields .SFNode ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "texCoord", new Fields .SFNode ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "normal", new Fields .SFNode ()),
new X3DFieldDefinition (X3DConstants .inputOutput, "coord", new Fields .SFNode ()),
]),
getTypeName: function ()
{
return "IndexedTriangleStripSet";
},
getComponentName: function ()
{
return "Rendering";
},
getContainerField: function ()
{
return "geometry";
},
initialize: function ()
{
X3DComposedGeometryNode .prototype .initialize .call (this);
this .index_ .addInterest ("set_index__", this);
this .set_index__ ();
},
set_index__: function ()
{
// Build coordIndex
var
index = this .index_ .getValue (),
triangleIndex = this .triangleIndex;
triangleIndex .length = 0;
// Build coordIndex
for (var i = 0, length = index .length; i < length; ++ i)
{
var first = index [i] .getValue ();
if (first < 0)
continue;
if (++ i < length)
{
var second = index [i] .getValue ();
if (second < 0)
continue;
++ i;
for (var face = 0; i < length; ++ i, ++ face)
{
var third = index [i] .getValue ();
if (third < 0)
break;
triangleIndex .push (first, second, third);
if (face & 1)
second = third;
else
first = third;
}
}
}
},
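      // Illustrative note, not part of the original source: for an index
      // field of [0, 1, 2, 3, -1] the loop above unrolls the strip into the
      // triangles (0, 1, 2) and (2, 1, 3); the alternating first/second
      // replacement keeps a consistent vertex order while walking the strip,
      // and -1 terminates the current strip.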
getPolygonIndex: function (index)
{
return this .triangleIndex [index];
},
build: function ()
{
X3DComposedGeometryNode .prototype .build .call (this, 3, this .triangleIndex .length, 3, this .triangleIndex .length);
},
});
return IndexedTriangleStripSet;
});<|fim▁end|> | * You should have received a copy of the GNU General Public License version 3 |
<|file_name|>BeanParamParser.java<|end_file_name|><|fim▁begin|>package org.jboss.resteasy.reactive.client.processor.beanparam;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.BEAN_PARAM;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.COOKIE_PARAM;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.FORM_PARAM;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.HEADER_PARAM;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.PATH_PARAM;
import static org.jboss.resteasy.reactive.common.processor.ResteasyReactiveDotNames.QUERY_PARAM;
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.FieldInfo;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.MethodInfo;
import org.jboss.jandex.Type;
import org.jboss.resteasy.reactive.common.processor.JandexUtil;
public class BeanParamParser {
public static List<Item> parse(ClassInfo beanParamClass, IndexView index) {
Set<ClassInfo> processedBeanParamClasses = Collections.newSetFromMap(new IdentityHashMap<>());
return parseInternal(beanParamClass, index, processedBeanParamClasses);
}
private static List<Item> parseInternal(ClassInfo beanParamClass, IndexView index,<|fim▁hole|> Set<ClassInfo> processedBeanParamClasses) {
if (!processedBeanParamClasses.add(beanParamClass)) {
throw new IllegalArgumentException("Cycle detected in BeanParam annotations; already processed class "
+ beanParamClass.name());
}
try {
List<Item> resultList = new ArrayList<>();
// Parse class tree recursively
if (!JandexUtil.DOTNAME_OBJECT.equals(beanParamClass.superName())) {
resultList
.addAll(parseInternal(index.getClassByName(beanParamClass.superName()), index,
processedBeanParamClasses));
}
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, QUERY_PARAM,
(annotationValue, fieldInfo) -> new QueryParamItem(annotationValue,
new FieldExtractor(null, fieldInfo.name(), fieldInfo.declaringClass().name().toString()),
fieldInfo.type()),
(annotationValue, getterMethod) -> new QueryParamItem(annotationValue, new GetterExtractor(getterMethod),
getterMethod.returnType())));
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, BEAN_PARAM,
(annotationValue, fieldInfo) -> {
Type type = fieldInfo.type();
if (type.kind() == Type.Kind.CLASS) {
List<Item> subBeanParamItems = parseInternal(index.getClassByName(type.asClassType().name()), index,
processedBeanParamClasses);
return new BeanParamItem(subBeanParamItems,
new FieldExtractor(null, fieldInfo.name(), fieldInfo.declaringClass().name().toString()));
} else {
throw new IllegalArgumentException("BeanParam annotation used on a field that is not an object: "
+ beanParamClass.name() + "." + fieldInfo.name());
}
},
(annotationValue, getterMethod) -> {
Type returnType = getterMethod.returnType();
List<Item> items = parseInternal(index.getClassByName(returnType.name()), index,
processedBeanParamClasses);
return new BeanParamItem(items, new GetterExtractor(getterMethod));
}));
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, COOKIE_PARAM,
(annotationValue, fieldInfo) -> new CookieParamItem(annotationValue,
new FieldExtractor(null, fieldInfo.name(),
fieldInfo.declaringClass().name().toString()),
fieldInfo.type().name().toString()),
(annotationValue, getterMethod) -> new CookieParamItem(annotationValue,
new GetterExtractor(getterMethod), getterMethod.returnType().name().toString())));
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, HEADER_PARAM,
(annotationValue, fieldInfo) -> new HeaderParamItem(annotationValue,
new FieldExtractor(null, fieldInfo.name(), fieldInfo.declaringClass().name().toString()),
fieldInfo.type().name().toString()),
(annotationValue, getterMethod) -> new HeaderParamItem(annotationValue,
new GetterExtractor(getterMethod), getterMethod.returnType().name().toString())));
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, PATH_PARAM,
(annotationValue, fieldInfo) -> new PathParamItem(annotationValue, fieldInfo.type().name().toString(),
new FieldExtractor(null, fieldInfo.name(), fieldInfo.declaringClass().name().toString())),
(annotationValue, getterMethod) -> new PathParamItem(annotationValue,
getterMethod.returnType().name().toString(),
new GetterExtractor(getterMethod))));
resultList.addAll(paramItemsForFieldsAndMethods(beanParamClass, FORM_PARAM,
(annotationValue, fieldInfo) -> new FormParamItem(annotationValue,
fieldInfo.type().name().toString(),
new FieldExtractor(null, fieldInfo.name(), fieldInfo.declaringClass().name().toString())),
(annotationValue, getterMethod) -> new FormParamItem(annotationValue,
getterMethod.returnType().name().toString(),
new GetterExtractor(getterMethod))));
return resultList;
} finally {
processedBeanParamClasses.remove(beanParamClass);
}
}
private static MethodInfo getGetterMethod(ClassInfo beanParamClass, MethodInfo methodInfo) {
MethodInfo getter = null;
if (methodInfo.parameters().size() > 0) { // should be setter
// find the corresponding getter:
String setterName = methodInfo.name();
if (setterName.startsWith("set")) {
                getter = beanParamClass.method(setterName.replaceFirst("^set", "get"));
}
} else if (methodInfo.name().startsWith("get")) {
getter = methodInfo;
}
if (getter == null) {
throw new IllegalArgumentException(
"No getter corresponding to " + methodInfo.declaringClass().name() + "#" + methodInfo.name() + " found");
}
return getter;
}
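    // Illustrative note, not part of the original source: for a setter such
    // as setUserId(String id), the lookup above resolves the zero-argument
    // getter getUserId() on the bean class; when no matching getter exists,
    // an IllegalArgumentException is raised.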
private static <T extends Item> List<T> paramItemsForFieldsAndMethods(ClassInfo beanParamClass, DotName parameterType,
BiFunction<String, FieldInfo, T> fieldExtractor, BiFunction<String, MethodInfo, T> methodExtractor) {
return ParamTypeAnnotations.of(beanParamClass, parameterType).itemsForFieldsAndMethods(fieldExtractor, methodExtractor);
}
private BeanParamParser() {
}
private static class ParamTypeAnnotations {
private final ClassInfo beanParamClass;
private final List<AnnotationInstance> annotations;
private ParamTypeAnnotations(ClassInfo beanParamClass, DotName parameterType) {
this.beanParamClass = beanParamClass;
List<AnnotationInstance> relevantAnnotations = beanParamClass.annotations().get(parameterType);
this.annotations = relevantAnnotations == null
? Collections.emptyList()
: relevantAnnotations.stream().filter(this::isFieldOrMethodAnnotation).collect(Collectors.toList());
}
private static ParamTypeAnnotations of(ClassInfo beanParamClass, DotName parameterType) {
return new ParamTypeAnnotations(beanParamClass, parameterType);
}
private <T extends Item> List<T> itemsForFieldsAndMethods(BiFunction<String, FieldInfo, T> itemFromFieldExtractor,
BiFunction<String, MethodInfo, T> itemFromMethodExtractor) {
return annotations.stream()
.map(annotation -> toItem(annotation, itemFromFieldExtractor, itemFromMethodExtractor))
.collect(Collectors.toList());
}
private <T extends Item> T toItem(AnnotationInstance annotation,
BiFunction<String, FieldInfo, T> itemFromFieldExtractor,
BiFunction<String, MethodInfo, T> itemFromMethodExtractor) {
String annotationValue = annotation.value() == null ? null : annotation.value().asString();
return annotation.target().kind() == AnnotationTarget.Kind.FIELD
? itemFromFieldExtractor.apply(annotationValue, annotation.target().asField())
: itemFromMethodExtractor.apply(annotationValue,
getGetterMethod(beanParamClass, annotation.target().asMethod()));
}
private boolean isFieldOrMethodAnnotation(AnnotationInstance annotation) {
return annotation.target().kind() == AnnotationTarget.Kind.FIELD
|| annotation.target().kind() == AnnotationTarget.Kind.METHOD;
}
}
}<|fim▁end|> | |
<|file_name|>constraints.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
TDDA constraint discovery and verification is provided for a number
of DB-API (PEP-0249) compliant databases, and also for a number of other
(NoSQL) databases.
The top-level functions are:
:py:func:`tdda.constraints.discover_db_table`:
Discover constraints from a single database table.
:py:func:`tdda.constraints.verify_db_table`:
Verify (check) a single database table, against a set of previously
discovered constraints.
:py:func:`tdda.constraints.detect_db_table`:
For detection of failing records in a single database table,
but not yet implemented for databases.
"""
import sys
from tdda.constraints.base import (
DatasetConstraints,
Verification,
)
from tdda.constraints.baseconstraints import (
BaseConstraintCalculator,
BaseConstraintDetector,
BaseConstraintVerifier,
BaseConstraintDiscoverer,
MAX_CATEGORIES,
)
from tdda.constraints.db.drivers import DatabaseHandler
from tdda import rexpy
if sys.version_info[0] >= 3:
long = int
class DatabaseConstraintCalculator(BaseConstraintCalculator):
def __init__(self, tablename, testing=False):
self.tablename = tablename
self.testing = testing
def is_null(self, value):
return self.db_value_is_null(value)
def to_datetime(self, value):
return self.db_value_to_datetime(value)
def column_exists(self, colname):
return colname in self.get_column_names()
def get_column_names(self):
return self.get_database_column_names(self.tablename)
def get_nrecords(self):
return self.get_database_nrows(self.tablename)
def types_compatible(self, x, y, colname=None):
return types_compatible(x, y, colname if not self.testing else None)
def calc_min(self, colname):
return self.get_database_min(self.tablename, colname)
def calc_max(self, colname):
return self.get_database_max(self.tablename, colname)
def calc_min_length(self, colname):
return self.get_database_min_length(self.tablename, colname)
def calc_max_length(self, colname):
return self.get_database_max_length(self.tablename, colname)
def calc_tdda_type(self, colname):
return self.get_database_column_type(self.tablename, colname)
def calc_null_count(self, colname):
return self.get_database_nnull(self.tablename, colname)
def calc_non_null_count(self, colname):
return self.get_database_nnonnull(self.tablename, colname)
def calc_nunique(self, colname):
return self.get_database_nunique(self.tablename, colname)
def calc_unique_values(self, colname, include_nulls=True):
return self.get_database_unique_values(self.tablename, colname,
include_nulls=include_nulls)
def calc_non_integer_values_count(self, colname):
raise Exception('database should not require non_integer_values_count')
def calc_all_non_nulls_boolean(self, colname):
raise Exception('database should not require all_non_nulls_boolean')
def find_rexes(self, colname, values=None, seed=None):
if not values:
values = self.get_database_unique_values(self.tablename, colname)
return rexpy.extract(sorted(values), seed=seed)
def calc_rex_constraint(self, colname, constraint, detect=False):
return not self.get_database_rex_match(self.tablename, colname,
constraint.value)
class DatabaseConstraintDetector(BaseConstraintDetector):
"""
No-op implementation of the Constraint Detector methods for
databases.
"""
def __init__(self, tablename):
pass
class DatabaseConstraintVerifier(DatabaseConstraintCalculator,
DatabaseConstraintDetector,
BaseConstraintVerifier,
DatabaseHandler):
"""
A :py:class:`DatabaseConstraintVerifier` object provides methods
for verifying every type of constraint against a single database table.
"""
def __init__(self, dbtype, db, tablename, epsilon=None,
type_checking='strict', testing=False):
"""
Inputs:
*dbtype*:
Type of database.
*db*:
A DB-API database connection object (as obtained from
a call to the connect() method on the underlying database
driver).
*tablename*:
A table name, referring to a table that exists in the
database and is accessible. It can either be a simple
name, or a schema-qualified name of the form `schema.name`.
"""
DatabaseHandler.__init__(self, dbtype, db)
tablename = self.resolve_table(tablename)
DatabaseConstraintCalculator.__init__(self, tablename, testing)
DatabaseConstraintDetector.__init__(self, tablename)
BaseConstraintVerifier.__init__(self, epsilon=epsilon,
type_checking=type_checking)
class DatabaseVerification(Verification):
"""
A :py:class:`DatabaseVerification` object is the variant of
the :py:class:`tdda.constraints.base.Verification` object used for
verification of constraints on a database table.
"""
def __init__(self, *args, **kwargs):
Verification.__init__(self, *args, **kwargs)
class DatabaseConstraintDiscoverer(DatabaseConstraintCalculator,
BaseConstraintDiscoverer,
DatabaseHandler):
"""
A :py:class:`DatabaseConstraintDiscoverer` object is used to discover
constraints on a single database table.
"""
def __init__(self, dbtype, db, tablename, inc_rex=False, seed=None):
DatabaseHandler.__init__(self, dbtype, db)
tablename = self.resolve_table(tablename)
DatabaseConstraintCalculator.__init__(self, tablename)
BaseConstraintDiscoverer.__init__(self, inc_rex=inc_rex, seed=seed)
self.tablename = tablename
def types_compatible(x, y, colname):
"""
Returns boolean indicating whether the coarse_type of *x* and *y* are
the same, for scalar values. The int and long types are considered to
be the same.
For databases, coarse types are pretty much the same as the column types,
except that different sizes of integer are all considered to be ints.
If *colname* is provided, and the check fails, a warning is issued
to stderr.
"""
tx = int if type(x) is long else type(x)
ty = int if type(y) is long else type(y)
ok = tx == ty
if not ok and colname:
print('Warning: Failing incompatible types constraint for field %s '
'of type %s.\n(Constraint value %s of type %s.)'
% (colname, type(x), y, type(y)), file=sys.stderr)
return ok
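# Illustrative sketch, not part of the original module: with the long-to-int
# normalisation above, differently sized integers compare as compatible while
# mixed numeric kinds do not --
#
#     types_compatible(1, 2, None)     # True  (int vs int)
#     types_compatible(1, 2.0, None)   # False (int vs real; a warning is
#                                      # printed if a column name is given)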
def verify_db_table(dbtype, db, tablename, constraints_path, epsilon=None,
type_checking='strict', testing=False, report='all',
**kwargs):
"""
Verify that (i.e. check whether) the database table provided
satisfies the constraints in the JSON .tdda file provided.
Mandatory Inputs:
*dbtype*:
Type of database.
*db*:
A database object
*tablename*:
A database table name, to be checked.
*constraints_path*:
The path to a JSON .tdda file (possibly
generated by the discover_constraints
function, below) containing constraints
to be checked.
Optional Inputs:
*epsilon*:
When checking minimum and maximum values
for numeric fields, this provides a
tolerance. The tolerance is a proportion
of the constraint value by which the
constraint can be exceeded without causing
a constraint violation to be issued.
For example, with epsilon set to 0.01 (i.e. 1%),
values can be up to 1% larger than a max constraint
without generating constraint failure,
and minimum values can be up to 1% smaller
that the minimum constraint value without
generating a constraint failure. (These
are modified, as appropriate, for negative
values.)
If not specified, an *epsilon* of 0 is used,
so there is no tolerance.
NOTE: A consequence of the fact that these
are proportionate is that min/max values
of zero do not have any tolerance, i.e.
the wrong sign always generates a failure.
*type_checking*:
``strict`` or ``sloppy``. For databases (unlike
Pandas DataFrames), this defaults to 'strict'.
If this is set to sloppy, a database "real"
column c will only be allowed to satisfy a
an "int" type constraint.
*report*:
``all`` or ``fields``.
This controls the behaviour of the
:py:meth:`~~tdda.constraints.db.constraints.DatabaseVerification.__str__`
method on the resulting
:py:class:`~tdda.constraints.db.constraints.DatabaseVerification`
object (but not its content).
The default is ``all``, which means that
all fields are shown, together with the
verification status of each constraint
for that field.
If report is set to ``fields``, only fields for
which at least one constraint failed are shown.
*testing*:
Boolean flag. Should only be set to ``True``
when being run as part of an automated test.
It suppresses type-compatibility warnings.
Returns:
:py:class:`~tdda.constraints.db.constraints.DatabaseVerification` object.
This object has attributes:
    - *passes* --- Number of passing constraints
- *failures* --- Number of failing constraints
Example usage::
import pgdb
from tdda.constraints import verify_db_table
dbspec = 'localhost:databasename:username:password'
tablename = 'schemaname.tablename'
db = pgdb.connect(dbspec)
        v = verify_db_table('postgres', db, tablename, 'myconstraints.tdda')
print('Constraints passing:', v.passes)
print('Constraints failing: %d\\n' % v.failures)
print(str(v))
"""
dbv = DatabaseConstraintVerifier(dbtype, db, tablename, epsilon=epsilon,
type_checking=type_checking,
testing=testing)
if not dbv.check_table_exists(tablename):
print('No table %s' % tablename, file=sys.stderr)
sys.exit(1)
constraints = DatasetConstraints(loadpath=constraints_path)
return dbv.verify(constraints,
VerificationClass=DatabaseVerification,
report=report, **kwargs)
def detect_db_table(dbtype, db, tablename, constraints_path, epsilon=None,
type_checking='strict', testing=False, **kwargs):
"""
For detection of failures from verification of constraints, but
not yet implemented for database tables.
"""
raise NotImplementedError('Detection is not implemented (yet) '
'for databases.')
def discover_db_table(dbtype, db, tablename, inc_rex=False, seed=None):
"""
Automatically discover potentially useful constraints that characterize
the database table provided.
Input:
*dbtype*:
Type of database.
*db*:
a database object
*tablename*:
a table name
Possible return values:
- :py:class:`~tdda.constraints.base.DatasetConstraints` object
- ``None`` --- (if no constraints were found).
This function goes through each column in the table and, where
    appropriate, generates constraints that describe (and are satisfied
by) this dataframe.
Assuming it generates at least one constraint for at least one field
it returns a :py:class:`tdda.constraints.base.DatasetConstraints` object.
This includes a 'fields' attribute, keyed on the column name.
The returned :py:class:`~tdda.constraints.base.DatasetConstraints` object
    includes a :py:meth:`~tdda.constraints.base.DatasetConstraints.to_json`
method, which converts the constraints into JSON for saving as a tdda
constraints file. By convention, such JSON files use a '.tdda'
extension.
The JSON constraints file can be used to check whether other datasets
also satisfy the constraints.
The kinds of constraints (potentially) generated for each field (column)
are:
*type*:
the (coarse, TDDA) type of the field. One of
'bool', 'int', 'real', 'string' or 'date'.
*min*:
for non-string fields, the minimum value in the column.
Not generated for all-null columns.
*max*:
for non-string fields, the maximum value in the column.
Not generated for all-null columns.
*min_length*:
For string fields, the length of the shortest string(s)
in the field.
*max_length*:
For string fields, the length of the longest string(s)
in the field.
*sign*:
If all the values in a numeric field have consistent sign,
a sign constraint will be written with a value chosen from:
- positive --- For all values *v* in field: `v > 0`
- non-negative --- For all values *v* in field: `v >= 0`
- zero --- For all values *v* in field: `v == 0`
- non-positive --- For all values *v* in field: `v <= 0`
- negative --- For all values *v* in field: `v < 0`
- null --- For all values *v* in field: `v is null`
*max_nulls*:
The maximum number of nulls allowed in the field.
- If the field has no nulls, a constraint
will be written with max_nulls set to zero.
- If the field has a single null, a constraint will
be written with max_nulls set to one.
- If the field has more than 1 null, no constraint
will be generated.
*no_duplicates*:
For string fields (only, for now), if every
non-null value in the field is different,
this constraint will be generated (with value ``True``);
otherwise no constraint will be generated. So this constraint
indicates that all the **non-null** values in a string
field are distinct (unique).
*allowed_values*:
For string fields only, if there are
:py:const:`MAX_CATEGORIES` or fewer distinct string
values in the dataframe, an AllowedValues constraint
listing them will be generated.
:py:const:`MAX_CATEGORIES` is currently "hard-wired" to 20.
Regular Expression constraints are not (currently) generated for fields
in database tables.
Example usage::
import pgdb
from tdda.constraints import discover_db_table
dbspec = 'localhost:databasename:username:password'
tablename = 'schemaname.tablename'
db = pgdb.connect(dbspec)
constraints = discover_db_table('postgres', db, tablename)
with open('myconstraints.tdda', 'w') as f:
f.write(constraints.to_json())
"""
disco = DatabaseConstraintDiscoverer(dbtype, db, tablename,
inc_rex=inc_rex, seed=seed)
if not disco.check_table_exists(tablename):
print('No table %s' % tablename, file=sys.stderr)<|fim▁hole|> if constraints:
nrows = disco.get_nrows(tablename)
constraints.set_stats(n_records=nrows, n_selected=nrows)
constraints.set_dates_user_host_creator()
constraints.set_rdbms('%s:%s:%s:%s' % (dbtype or '', db.host or '',
db.user, db.database))
constraints.set_source(tablename, tablename)
return constraints<|fim▁end|> | sys.exit(1)
constraints = disco.discover() |
<|file_name|>api.spec.ts<|end_file_name|><|fim▁begin|>import {
it,
describe,
expect,
inject,
beforeEachProviders
} from 'angular2/testing';
import {Api} from './api';
describe('Api Service', () => {
beforeEachProviders(() => [Api]);
it('should ...', inject([Api], (api:Api) => {
expect(api.title).toBe('Modern Cookbook');<|fim▁hole|><|fim▁end|> | }));
}); |
<|file_name|>lib.in.rs<|end_file_name|><|fim▁begin|>#[cfg(not(feature = "sqlite"))]
mod annotations;
mod deserialization;
mod insert;
mod schema;<|fim▁hole|><|fim▁end|> | mod update; |
<|file_name|>api.py<|end_file_name|><|fim▁begin|><|fim▁hole|># proxy module
from __future__ import absolute_import
from mayavi.core.api import *<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from decimal import Decimal
from django.http import HttpResponse, HttpRequest
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.test import TestCase, Client
from ....cart.app import cart_app
from ....cart.models import CART_SESSION_KEY
from ....cart.tests import TestCart
from ....contrib.delivery.simplepost.models import PostShippingType
from ....order import handler as order_handler
from ....order.models import Order
from ....payment import ConfirmationFormNeeded
from ....payment.tests import TestPaymentProvider
from ....product.tests import DeadParrot
from ..common.decorators import require_order
from ..common.views import prepare_order, reactivate_order
from . import views
urlpatterns = patterns('',
url(r'^cart/', include(cart_app.urls)),
url(r'^checkout/', include('satchless.contrib.checkout.multistep.urls')),
url(r'^order/', include('satchless.order.urls')),
)
class TestPaymentProviderWithConfirmation(TestPaymentProvider):
def confirm(self, order):
raise ConfirmationFormNeeded(action='http://test.payment.gateway.example.com')
class CheckoutTest(TestCase):
urls = 'satchless.contrib.checkout.multistep.tests'
def _setup_settings(self, custom_settings):
original_settings = {}
for setting_name, value in custom_settings.items():
if hasattr(settings, setting_name):
original_settings[setting_name] = getattr(settings, setting_name)
setattr(settings, setting_name, value)
return original_settings
def _teardown_settings(self, original_settings, custom_settings=None):
custom_settings = custom_settings or {}
for setting_name, value in custom_settings.items():
if setting_name in original_settings:
setattr(settings, setting_name, value)
else:
delattr(settings, setting_name)
def setUp(self):
self.macaw = DeadParrot.objects.create(slug='macaw',
species="Hyacinth Macaw")
self.cockatoo = DeadParrot.objects.create(slug='cockatoo',
species="White Cockatoo")
self.macaw_blue = self.macaw.variants.create(color='blue', looks_alive=False)
self.macaw_blue_fake = self.macaw.variants.create(color='blue', looks_alive=True)
self.cockatoo_white_a = self.cockatoo.variants.create(color='white', looks_alive=True)
self.cockatoo_white_d = self.cockatoo.variants.create(color='white', looks_alive=False)
self.cockatoo_blue_a = self.cockatoo.variants.create(color='blue', looks_alive=True)
self.cockatoo_blue_d = self.cockatoo.variants.create(color='blue', looks_alive=False)
self.custom_settings = {
'SATCHLESS_DELIVERY_PROVIDERS': ['satchless.contrib.delivery.simplepost.PostDeliveryProvider'],
'SATCHLESS_ORDER_PARTITIONERS': ['satchless.contrib.order.partitioner.simple'],
'SATCHLESS_PAYMENT_PROVIDERS': [TestPaymentProviderWithConfirmation],
'SATCHLESS_DJANGO_PAYMENT_TYPES': ['dummy'],
'PAYMENT_VARIANTS': {'dummy': ('payments.dummy.DummyProvider', {'url': '/', })},
}
self.original_settings = self._setup_settings(self.custom_settings)
order_handler.init_queues()
self.anon_client = Client()
PostShippingType.objects.create(price=12, typ='polecony', name='list polecony')
PostShippingType.objects.create(price=20, typ='list', name='List zwykly')
def tearDown(self):
self._teardown_settings(self.original_settings, self.custom_settings)
order_handler.init_queues()
def _test_status(self, url, method='get', *args, **kwargs):
status_code = kwargs.pop('status_code', 200)
client = kwargs.pop('client_instance', Client())
data = kwargs.pop('data', {})
response = getattr(client, method)(url, data=data, follow=False)
self.assertEqual(response.status_code, status_code,
'Incorrect status code for: %s, (%s, %s)! Expected: %s, received: %s. HTML:\n\n%s' % (
url.decode('utf-8'), args, kwargs, status_code, response.status_code,
response.content.decode('utf-8')))
return response
def _get_or_create_cart_for_client(self, client, typ='satchless_cart'):
self._test_status(reverse('satchless-cart-view'), client_instance=self.anon_client)
return TestCart.objects.get(pk=self.anon_client.session[CART_SESSION_KEY % typ], typ=typ)
def _get_order_from_session(self, session):
order_pk = session.get('satchless_order', None)
if order_pk:
return Order.objects.get(pk=order_pk)
return None
def _get_order_items(self, order):
order_items = set()
for group in order.groups.all():
order_items.update(group.items.values_list('product_variant', 'quantity'))
return order_items
def test_order_from_cart_view_creates_proper_order(self):
cart = self._get_or_create_cart_for_client(self.anon_client)
cart.replace_item(self.macaw_blue, 1)
cart.replace_item(self.macaw_blue_fake, Decimal('2.45'))
cart.replace_item(self.cockatoo_white_a, Decimal('2.45'))
self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
order = self._get_order_from_session(self.anon_client.session)
self.assertNotEqual(order, None)
order_items = self._get_order_items(order)
self.assertEqual(set(cart.items.values_list('variant', 'quantity')), order_items)
def test_order_is_updated_after_cart_changes(self):
cart = self._get_or_create_cart_for_client(self.anon_client)
cart.replace_item(self.macaw_blue, 1)
cart.replace_item(self.macaw_blue_fake, Decimal('2.45'))
cart.replace_item(self.cockatoo_white_a, Decimal('2.45'))
self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
order = self._get_order_from_session(self.anon_client.session)
order_items = self._get_order_items(order)
# compare cart and order
self.assertEqual(set(cart.items.values_list('variant', 'quantity')), order_items)
# update cart
cart.add_item(self.macaw_blue, 100)
cart.add_item(self.macaw_blue_fake, 100)
self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
old_order = order
order = self._get_order_from_session(self.anon_client.session)
# order should be reused
self.assertEqual(old_order.pk, order.pk)
self.assertNotEqual(order, None)
order_items = self._get_order_items(order)
# compare cart and order
self.assertEqual(set(cart.items.values_list('variant', 'quantity')), order_items)
def test_prepare_order_creates_order_and_redirects_to_checkout_when_cart_is_not_empty(self):
cart = self._get_or_create_cart_for_client(self.anon_client)
cart.replace_item(self.macaw_blue, 1)
response = self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
order_pk = self.anon_client.session.get('satchless_order', None)
order = Order.objects.get(pk=order_pk)
self.assertRedirects(response, reverse(views.checkout,
kwargs={'order_token':
order.token}))
def test_prepare_order_redirects_to_cart_when_cart_is_empty(self):
self._get_or_create_cart_for_client(self.anon_client)
response = self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
# 'satchless_cart' is taken from multistep/urls.py:
# url(r'^prepare-order/$', prepare_order, {'typ': 'satchless_cart'}...)
self.assertRedirects(response, reverse('satchless-cart-view'))
def test_prepare_order_redirects_to_checkout_when_order_exists(self):
order = self._create_order(self.anon_client)
response = self._test_status(reverse(prepare_order), method='post',
client_instance=self.anon_client, status_code=302)
self.assertRedirects(response, reverse(views.checkout,
kwargs={'order_token':
order.token}))
def _create_cart(self, client):
cart = self._get_or_create_cart_for_client(client)
cart.replace_item(self.macaw_blue, 1)
cart.replace_item(self.macaw_blue_fake, Decimal('2.45'))
cart.replace_item(self.cockatoo_white_a, Decimal('2.45'))
return cart
def _create_order(self, client):
self._create_cart(client)
self._test_status(reverse(prepare_order), method='post',
client_instance=client, status_code=302)
return self._get_order_from_session(client.session)
def test_order_is_deleted_when_all_cart_items_are_deleted(self):
order = self._create_order(self.anon_client)
for cart_item in order.cart.items.all():
self.assertTrue(Order.objects.filter(pk=order.pk).exists())
order.cart.replace_item(cart_item.variant, 0)
self.assertFalse(Order.objects.filter(pk=order.pk).exists())
def test_checkout_view(self):
order = self._create_order(self.anon_client)
response = self._test_status(reverse(views.checkout,
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
status_code=200)
group = order.groups.get()
dtypes = order_handler.get_delivery_types(group)
dtype = dtypes[0][0]
df = response.context['delivery_formset']
data = {'billing_first_name': 'First',
'billing_last_name': 'Last',
'billing_street_address_1': 'Via Rodeo 1',
'billing_city': 'Beverly Hills',
'billing_country': 'US',
'billing_country_area': 'AZ',
'billing_phone': '555-555-5555',
'billing_postal_code': '90210'}
data[df.add_prefix('INITIAL_FORMS')] = '1'
data[df.add_prefix('MAX_NUM_FORMS')] = ''
data[df.add_prefix('TOTAL_FORMS')] = '1'
for form in df.forms:
data[form.add_prefix('delivery_type')] = dtype
data[form.add_prefix('id')] = group.id
self._test_status(reverse(views.checkout, kwargs={'order_token':
order.token}),
data=data, status_code=302,
client_instance=self.anon_client, method='post')
self.assertEqual(order.groups.get().delivery_type, dtype)
def test_delivery_details_view(self):
order = self._create_order(self.anon_client)
group = order.groups.get()
dtypes = order_handler.get_delivery_types(group)
group.delivery_type = dtypes[0][0]
group.save()
self._test_status(reverse(views.delivery_details,
kwargs={'order_token': order.token}),
client_instance=self.anon_client, method='get')
def test_delivery_details_view_redirects_to_checkout_when_delivery_type_is_missing(self):
order = self._create_order(self.anon_client)
response = self._test_status(reverse(views.delivery_details,
kwargs={'order_token':
order.token}),
status_code=302,
client_instance=self.anon_client,
method='get')
self.assertRedirects(response, reverse(views.checkout,
kwargs={'order_token':
order.token}))
def test_payment_view_redirects_to_payment_choice_view_when_payment_type_is_missing(self):
order = self._create_order(self.anon_client)
response = self._test_status(reverse(views.payment_details,
kwargs={'order_token':
order.token}),
status_code=302,
client_instance=self.anon_client,
method='get')
self.assertRedirects(response, reverse(views.payment_choice,
kwargs={'order_token':
order.token}))
def test_checkout_views_redirects_to_confirmation_page_when_order_has_payment_pending_status(self):
order = self._create_order(self.anon_client)
order.set_status('payment-pending')
self._test_status(reverse(views.payment_details,
kwargs={'order_token':
order.token}),
status_code=302,
client_instance=self.anon_client,
method='get')
def test_reactive_order_view_changes_order_status_to_checkout(self):
order = self._create_order(self.anon_client)
order.set_status('payment-failed')
self._test_status(reverse(reactivate_order,
kwargs={'order_token':
order.token}),
status_code=302,
client_instance=self.anon_client,
method='post')
self.assertEqual(Order.objects.get(pk=order.pk).status, 'checkout')
def test_reactive_order_view_redirects_to_checkout_for_correct_order(self):
order = self._create_order(self.anon_client)
order.set_status('payment-failed')
response = self._test_status(reverse(reactivate_order,
kwargs={'order_token':
order.token}),
status_code=302,
client_instance=self.anon_client,
method='post')
self.assertRedirects(response, reverse('satchless-checkout', args=(order.token,)))
def test_require_order_decorator(self):
def assertRedirects(response, path):
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], path)
def view_factory(status):
@require_order(status=status)
def view(request, order_token):
return HttpResponse()
return view
request = HttpRequest()
order = self._create_order(self.anon_client)
# decorator should not redirect if status is correct
for status, name in Order.STATUS_CHOICES:
view = view_factory(status)
order.set_status(status)
self.assertTrue(view(request, order_token=order.token).status_code, 200)
view = view_factory('non-existing-status')
order.set_status('payment-pending')<|fim▁hole|>
order.set_status('checkout')
assertRedirects(view(request, order_token=order.token),
reverse('satchless-checkout', args=(order.token,)))
for status in ('payment-failed', 'delivery', 'payment-complete', 'cancelled'):
order.set_status(status)
assertRedirects(view(request, order_token=order.token),
reverse('satchless-order-view', args=(order.token,)))
assertRedirects(view(request, order_token='non-existing-order-token'),
reverse('satchless-cart-view'))<|fim▁end|> | assertRedirects(view(request, order_token=order.token),
reverse('satchless-checkout-confirmation', args=(order.token,))) |
<|file_name|>toggle_markup.java<|end_file_name|><|fim▁begin|>package com.actelion.research.orbit.imageAnalysis.components.icons;
import java.awt.*;
import java.awt.geom.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.lang.ref.WeakReference;
import java.util.Base64;
import java.util.Stack;
import javax.imageio.ImageIO;
import javax.swing.SwingUtilities;
import javax.swing.plaf.UIResource;
import org.pushingpixels.neon.api.icon.ResizableIcon;
import org.pushingpixels.neon.api.icon.ResizableIconUIResource;
/**
* This class has been automatically generated using <a
* href="https://github.com/kirill-grouchnikov/radiance">Photon SVG transcoder</a>.
*/
public class toggle_markup implements ResizableIcon {
private Shape shape = null;
private GeneralPath generalPath = null;
private Paint paint = null;
private Stroke stroke = null;
private Shape clip = null;
private Stack<AffineTransform> transformsStack = new Stack<>();
private void _paint0(Graphics2D g,float origAlpha) {
transformsStack.push(g.getTransform());
//
g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha));
transformsStack.push(g.getTransform());
g.transform(new AffineTransform(1.0666667222976685f, 0.0f, 0.0f, 1.0666667222976685f, -0.0f, -0.0f));
// _0
g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha));
transformsStack.push(g.getTransform());
g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, -343.7007751464844f));
// _0_0
g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha));
transformsStack.push(g.getTransform());
g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f));
// _0_0_0
paint = new Color(255, 0, 255, 255);
stroke = new BasicStroke(25.0f,0,0,4.0f,null,0.0f);
shape = new Rectangle2D.Double(86.42857360839844, 424.5050964355469, 187.14285278320312, 205.0);
g.setPaint(paint);
g.setStroke(stroke);
g.draw(shape);
g.setTransform(transformsStack.pop());
g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha));
transformsStack.push(g.getTransform());
g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f));
// _0_0_1
paint = new Color(0, 255, 255, 255);
stroke = new BasicStroke(25.0f,0,0,4.0f,null,0.0f);
if (generalPath == null) {
generalPath = new GeneralPath();
} else {
generalPath.reset();
}
generalPath.moveTo(450.7143f, 462.36224f);
generalPath.lineTo(425.0f, 703.7908f);
generalPath.lineTo(236.42857f, 766.6479f);
generalPath.lineTo(96.42857f, 826.6479f);
generalPath.lineTo(84.28571f, 947.3622f);
generalPath.lineTo(412.85715f, 1023.0765f);
generalPath.lineTo(482.85715f, 902.3622f);
generalPath.lineTo(620.0f, 989.5051f);
generalPath.lineTo(637.8571f, 420.93365f);
generalPath.closePath();
shape = generalPath;
g.setPaint(paint);
g.setStroke(stroke);
g.draw(shape);
g.setTransform(transformsStack.pop());
g.setTransform(transformsStack.pop());
g.setTransform(transformsStack.pop());
g.setTransform(transformsStack.pop());
}
@SuppressWarnings("unused")
private void innerPaint(Graphics2D g) {
float origAlpha = 1.0f;
Composite origComposite = g.getComposite();
if (origComposite instanceof AlphaComposite) {
AlphaComposite origAlphaComposite =
(AlphaComposite)origComposite;
if (origAlphaComposite.getRule() == AlphaComposite.SRC_OVER) {
origAlpha = origAlphaComposite.getAlpha();
}
}
_paint0(g, origAlpha);
shape = null;
generalPath = null;
paint = null;
stroke = null;<|fim▁hole|> clip = null;
transformsStack.clear();
}
/**
* Returns the X of the bounding box of the original SVG image.
*
* @return The X of the bounding box of the original SVG image.
*/
public static double getOrigX() {
return 75.46253967285156;
}
/**
* Returns the Y of the bounding box of the original SVG image.
*
* @return The Y of the bounding box of the original SVG image.
*/
public static double getOrigY() {
return 65.65621185302734;
}
/**
* Returns the width of the bounding box of the original SVG image.
*
* @return The width of the bounding box of the original SVG image.
*/
public static double getOrigWidth() {
return 618.7836303710938;
}
/**
* Returns the height of the bounding box of the original SVG image.
*
* @return The height of the bounding box of the original SVG image.
*/
public static double getOrigHeight() {
return 674.2141723632812;
}
/** The current width of this resizable icon. */
private int width;
/** The current height of this resizable icon. */
private int height;
/**
* Creates a new transcoded SVG image. This is marked as private to indicate that app
* code should be using the {@link #of(int, int)} method to obtain a pre-configured instance.
*/
private toggle_markup() {
this.width = (int) getOrigWidth();
this.height = (int) getOrigHeight();
}
@Override
public int getIconHeight() {
return height;
}
@Override
public int getIconWidth() {
return width;
}
@Override
public synchronized void setDimension(Dimension newDimension) {
this.width = newDimension.width;
this.height = newDimension.height;
}
@Override
public synchronized void paintIcon(Component c, Graphics g, int x, int y) {
Graphics2D g2d = (Graphics2D) g.create();
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
RenderingHints.VALUE_INTERPOLATION_BICUBIC);
g2d.translate(x, y);
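        // Uniform scale: use the smaller of the two ratios so the original
        // aspect ratio is preserved within the requested width/height box.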
double coef1 = (double) this.width / getOrigWidth();
double coef2 = (double) this.height / getOrigHeight();
double coef = Math.min(coef1, coef2);
g2d.clipRect(0, 0, this.width, this.height);
g2d.scale(coef, coef);
g2d.translate(-getOrigX(), -getOrigY());
if (coef1 != coef2) {
if (coef1 < coef2) {
int extraDy = (int) ((getOrigWidth() - getOrigHeight()) / 2.0);
g2d.translate(0, extraDy);
} else {
int extraDx = (int) ((getOrigHeight() - getOrigWidth()) / 2.0);
g2d.translate(extraDx, 0);
}
}
Graphics2D g2ForInner = (Graphics2D) g2d.create();
innerPaint(g2ForInner);
g2ForInner.dispose();
g2d.dispose();
}
/**
* Returns a new instance of this icon with specified dimensions.
*
* @param width Required width of the icon
* @param height Required height of the icon
* @return A new instance of this icon with specified dimensions.
*/
public static ResizableIcon of(int width, int height) {
toggle_markup base = new toggle_markup();
base.width = width;
base.height = height;
return base;
}
/**
* Returns a new {@link UIResource} instance of this icon with specified dimensions.
*
* @param width Required width of the icon
* @param height Required height of the icon
* @return A new {@link UIResource} instance of this icon with specified dimensions.
*/
public static ResizableIconUIResource uiResourceOf(int width, int height) {
toggle_markup base = new toggle_markup();
base.width = width;
base.height = height;
return new ResizableIconUIResource(base);
}
/**
* Returns a factory that returns instances of this icon on demand.
*
* @return Factory that returns instances of this icon on demand.
*/
public static Factory factory() {
return toggle_markup::new;
}
}<|fim▁end|> | |
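For orientation, the paintIcon scaling above is just a fit-within-box rule; a small Python sketch of the same computation, with the sample numbers taken from the getOrig* methods above:

# Fit-within-box: scale by the smaller ratio so neither axis overflows.
def fit_scale(box_w, box_h, orig_w, orig_h):
    return min(box_w / orig_w, box_h / orig_h)

print(fit_scale(16, 16, 618.78, 674.21))  # ~0.0237: height is the limiting axis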
<|file_name|>UART01.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# uart-eg01.py
#
# to run on the other end of the UART
# screen /dev/ttyUSB1 115200
import serial
def readlineCR(uart):
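    # Collect bytes until a carriage return arrives; note that with
    # timeout=1 a silent peer makes uart.read() return b'', so the loop
    # simply keeps polling rather than raising.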
line = b''
while True:
byte = uart.read()
line += byte
if byte == b'\r':
return line
uart = serial.Serial('/dev/ttyUSB0', baudrate=115200, timeout=1)
while True:
uart.write(b'\r\nSay something: ')
line = readlineCR(uart)<|fim▁hole|> lineStr = '\r\nYou sent : {}'.format(line.decode('utf-8'))
uart.write(lineStr.encode('utf-8'))
else:
uart.write(b'\r\nexiting\r\n')
uart.close()
exit(0)<|fim▁end|> | if line != b'exit\r': |
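A hypothetical scripted counterpart to the `screen /dev/ttyUSB1 115200` session mentioned in the header comment; the port name and the message framing are assumptions, not part of the original:

import serial

# Talk to uart-eg01.py from the other adapter: send CR-terminated lines,
# read back the echo, and finish with the 'exit' command it recognises.
peer = serial.Serial('/dev/ttyUSB1', baudrate=115200, timeout=1)
try:
    for message in (b'hello\r', b'exit\r'):
        peer.read(64)                 # drain the "Say something: " prompt
        peer.write(message)
        print(peer.read(64))          # whatever the far side replied
finally:
    peer.close()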
<|file_name|>test_layout_objects.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.template import Context
from django.utils.translation import ugettext as _
from django.utils.translation import activate, deactivate
from .compatibility import get_template_from_string
from .conftest import only_bootstrap
from .forms import CheckboxesTestForm, TestForm
from crispy_forms.bootstrap import (
PrependedAppendedText, AppendedText, PrependedText, InlineRadios,
Tab, TabHolder, AccordionGroup, Accordion, Alert, InlineCheckboxes,
FieldWithButtons, StrictButton
)
from crispy_forms.helper import FormHelper
from crispy_forms.layout import (
Layout, HTML, Field, MultiWidgetField
)
from crispy_forms.utils import render_crispy_form
def test_field_with_custom_template():
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Field('email', template='custom_field_template.html')
)
html = render_crispy_form(test_form)
assert '<h1>Special custom field</h1>' in html
def test_multiwidget_field():
template = get_template_from_string("""
{% load crispy_forms_tags %}
{% crispy form %}
""")
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
MultiWidgetField(
'datetime_field',
attrs=(
{'rel': 'test_dateinput'},
{'rel': 'test_timeinput', 'style': 'width: 30px;', 'type': "hidden"}
)
)
)
c = Context({'form': test_form})
html = template.render(c)
assert html.count('class="dateinput') == 1
assert html.count('rel="test_dateinput"') == 1
assert html.count('rel="test_timeinput"') == 1
assert html.count('style="width: 30px;"') == 1
assert html.count('type="hidden"') == 1
def test_field_type_hidden():
template = get_template_from_string("""
{% load crispy_forms_tags %}
{% crispy test_form %}
""")
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Field('email', type="hidden", data_test=12),
Field('datetime_field'),
)
c = Context({
'test_form': test_form,
})
html = template.render(c)
# Check form parameters
assert html.count('data-test="12"') == 1
assert html.count('name="email"') == 1
assert html.count('class="dateinput') == 1
assert html.count('class="timeinput') == 1
def test_field_wrapper_class(settings):
form = TestForm()
form.helper = FormHelper()
form.helper.layout = Layout(Field('email', wrapper_class="testing"))
html = render_crispy_form(form)
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('class="control-group testing"') == 1
elif settings.CRISPY_TEMPLATE_PACK == 'bootstrap3':
assert html.count('class="form-group testing"') == 1
elif settings.CRISPY_TEMPLATE_PACK == 'bootstrap4':
assert html.count('class="form-group row testing"') == 1
def test_html_with_carriage_returns(settings):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
HTML("""
if (a==b){
// some comment
a+1;
foo();
}
""")
)
html = render_crispy_form(test_form)
if settings.CRISPY_TEMPLATE_PACK == 'uni_form':
assert html.count('\n') == 23
elif settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('\n') == 25
else:
assert html.count('\n') == 27
def test_i18n():
activate('es')
form = TestForm()
form.helper = FormHelper()
form.helper.layout = Layout(
HTML(_("Enter a valid value."))
)
html = render_crispy_form(form)
assert "Introduzca un valor correcto" in html
deactivate()
@only_bootstrap
class TestBootstrapLayoutObjects(object):
def test_custom_django_widget(self):
class CustomRadioSelect(forms.RadioSelect):
pass
class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
pass
# Make sure an inherited RadioSelect still gets rendered as radio inputs
form = CheckboxesTestForm()
form.fields['inline_radios'].widget = CustomRadioSelect()
form.helper = FormHelper()
form.helper.layout = Layout('inline_radios')
html = render_crispy_form(form)
assert 'class="radio"' in html
# Make sure an inherited CheckboxSelectMultiple still gets rendered as checkboxes
form.fields['checkboxes'].widget = CustomCheckboxSelectMultiple()
form.helper.layout = Layout('checkboxes')
html = render_crispy_form(form)
assert 'class="checkbox"' in html
def test_prepended_appended_text(self, settings):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
PrependedAppendedText('email', '@', 'gmail.com'),
AppendedText('password1', '#'),
PrependedText('password2', '$'),
)
html = render_crispy_form(test_form)
# Check form parameters
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('<span class="add-on">@</span>') == 1
assert html.count('<span class="add-on">gmail.com</span>') == 1
assert html.count('<span class="add-on">#</span>') == 1
assert html.count('<span class="add-on">$</span>') == 1
if settings.CRISPY_TEMPLATE_PACK in ['bootstrap3', 'bootstrap4']:
assert html.count('<span class="input-group-addon">@</span>') == 1
assert html.count(
'<span class="input-group-addon">gmail.com</span>') == 1
assert html.count('<span class="input-group-addon">#</span>') == 1
assert html.count('<span class="input-group-addon">$</span>') == 1
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap3':
test_form.helper.layout = Layout(
PrependedAppendedText('email', '@', 'gmail.com',
css_class='input-lg'), )
html = render_crispy_form(test_form)
assert '<input class="input-lg' in html
assert '<span class="input-group-addon input-lg' in html
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap4':
test_form.helper.layout = Layout(
PrependedAppendedText('email', '@', 'gmail.com',
css_class='form-control-lg'), )
html = render_crispy_form(test_form)
assert '<input class="form-control-lg' in html
assert '<span class="input-group-addon' in html
def test_inline_radios(self, settings):
test_form = CheckboxesTestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
InlineRadios('inline_radios')
)
html = render_crispy_form(test_form)
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('radio inline"') == 2
elif settings.CRISPY_TEMPLATE_PACK in ['bootstrap3', 'bootstrap4']:
assert html.count('radio-inline"') == 2
def test_accordion_and_accordiongroup(self, settings):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Accordion(
AccordionGroup(
'one',
'first_name'
),
AccordionGroup(
'two',
'password1',
'password2'
)
)
)
html = render_crispy_form(test_form)
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('<div class="accordion"') == 1
assert html.count('<div class="accordion-group">') == 2
assert html.count('<div class="accordion-heading">') == 2
else:
assert html.count('<div class="panel panel-default"') == 2
assert html.count('<div class="panel-group"') == 1
assert html.count('<div class="panel-heading">') == 2
assert html.count('<div id="one"') == 1
assert html.count('<div id="two"') == 1
assert html.count('name="first_name"') == 1
assert html.count('name="password1"') == 1
assert html.count('name="password2"') == 1
def test_accordion_active_false_not_rendered(self, settings):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Accordion(
AccordionGroup(
'one',
'first_name',
),
# there is no ``active`` kwarg here.
)
)
# With no ``active`` kwarg, the first group should render open by default.
html = render_crispy_form(test_form)
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
accordion_class = "accordion-body"
else:
accordion_class = "panel-collapse"
assert html.count('<div id="one" class="%s collapse in"' % accordion_class) == 1
test_form.helper.layout = Layout(
Accordion(
AccordionGroup(
'one',
'first_name',
active=False, # now ``active`` manually set as False
),
)
)
# This time, it shouldn't be there at all.
html = render_crispy_form(test_form)
assert html.count('<div id="one" class="%s collapse in"' % accordion_class) == 0<|fim▁hole|> test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Alert(content='Testing...')
)
html = render_crispy_form(test_form)
assert html.count('<div class="alert"') == 1
assert html.count('<button type="button" class="close"') == 1
assert html.count('Testing...') == 1
def test_alert_block(self):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
Alert(content='Testing...', block=True)
)
html = render_crispy_form(test_form)
assert html.count('<div class="alert alert-block"') == 1
assert html.count('Testing...') == 1
def test_tab_and_tab_holder(self):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
TabHolder(
Tab(
'one',
'first_name',
css_id="custom-name",
css_class="first-tab-class"
),
Tab(
'two',
'password1',
'password2'
)
)
)
html = render_crispy_form(test_form)
assert html.count(
'<li class="tab-pane active"><a href="#custom-name" data-toggle="tab">One</a></li>'
) == 1
assert html.count('class="tab-pane first-tab-class active"') == 1
assert html.count('<li class="tab-pane') == 2
assert html.count('tab-pane') == 4
assert html.count('<div id="custom-name"') == 1
assert html.count('<div id="two"') == 1
assert html.count('name="first_name"') == 1
assert html.count('name="password1"') == 1
assert html.count('name="password2"') == 1
def test_tab_helper_reuse(self):
# this is a proper form, according to the docs.
# note that the helper is a class property here,
# shared between all instances
class TestForm(forms.Form):
val1 = forms.CharField(required=False)
val2 = forms.CharField(required=True)
helper = FormHelper()
helper.layout = Layout(
TabHolder(
Tab('one', 'val1',),
Tab('two', 'val2',)
)
)
# first render of form => everything is fine
test_form = TestForm()
html = render_crispy_form(test_form)
# second render of form => first tab should be active,
# but not duplicate class
test_form = TestForm()
html = render_crispy_form(test_form)
assert html.count('class="tab-pane active active"') == 0
# render a new form, now with errors
test_form = TestForm(data={'val1': 'foo'})
html = render_crispy_form(test_form)
# tab 1 should not be active
assert html.count('<div id="one" \n class="tab-pane active') == 0
# tab 2 should be active
assert html.count('<div id="two" \n class="tab-pane active') == 1
def test_radio_attrs(self):
form = CheckboxesTestForm()
form.fields['inline_radios'].widget.attrs = {'class': "first"}
form.fields['checkboxes'].widget.attrs = {'class': "second"}
html = render_crispy_form(form)
assert 'class="first"' in html
assert 'class="second"' in html
def test_field_with_buttons(self, settings):
form = TestForm()
form.helper = FormHelper()
form.helper.layout = Layout(
FieldWithButtons(
Field('password1', css_class="span4"),
StrictButton("Go!", css_id="go-button"),
StrictButton("No!", css_class="extra"),
StrictButton("Test", type="submit", name="whatever", value="something"),
css_class="extra",
autocomplete="off"
)
)
html = render_crispy_form(form)
form_group_class = 'control-group'
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap3':
form_group_class = 'form-group'
elif settings.CRISPY_TEMPLATE_PACK == 'bootstrap4':
form_group_class = 'form-group row'
assert html.count('class="%s extra"' % form_group_class) == 1
assert html.count('autocomplete="off"') == 1
assert html.count('class="span4') == 1
assert html.count('id="go-button"') == 1
assert html.count("Go!") == 1
assert html.count("No!") == 1
assert html.count('class="btn"') == 2
assert html.count('class="btn extra"') == 1
assert html.count('type="submit"') == 1
assert html.count('name="whatever"') == 1
assert html.count('value="something"') == 1
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('class="input-append"') == 1
elif settings.CRISPY_TEMPLATE_PACK in ['bootstrap3', 'bootstrap4']:
assert html.count('class="input-group-btn') == 1
def test_hidden_fields(self):
form = TestForm()
# All fields hidden
for field in form.fields:
form.fields[field].widget = forms.HiddenInput()
form.helper = FormHelper()
form.helper.layout = Layout(
AppendedText('password1', 'foo'),
PrependedText('password2', 'bar'),
PrependedAppendedText('email', 'bar'),
InlineCheckboxes('first_name'),
InlineRadios('last_name'),
)
html = render_crispy_form(form)
assert html.count("<input") == 5
assert html.count('type="hidden"') == 5
assert html.count('<label') == 0
def test_multiplecheckboxes(self, settings):
test_form = CheckboxesTestForm()
html = render_crispy_form(test_form)
assert html.count('checked="checked"') == 6
test_form.helper = FormHelper(test_form)
test_form.helper[1].wrap(InlineCheckboxes, inline=True)
html = render_crispy_form(test_form)
if settings.CRISPY_TEMPLATE_PACK == 'bootstrap':
assert html.count('checkbox inline"') == 3
assert html.count('inline"') == 3
elif settings.CRISPY_TEMPLATE_PACK in ['bootstrap3', 'bootstrap4']:
assert html.count('checkbox-inline"') == 3
assert html.count('inline="True"') == 4<|fim▁end|> |
def test_alert(self): |
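Stepping back from the test suite above, a minimal sketch of the helper-plus-layout pattern it exercises, assuming a configured Django settings module with django-crispy-forms installed:

from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field
from crispy_forms.utils import render_crispy_form

class ContactForm(forms.Form):
    email = forms.EmailField()

# Attach a helper, describe the layout, then render and assert on the HTML,
# exactly as the tests above do with render_crispy_form.
form = ContactForm()
form.helper = FormHelper()
form.helper.layout = Layout(Field('email', css_class='span4'))
html = render_crispy_form(form)
assert 'name="email"' in html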
<|file_name|>parseOptions.py<|end_file_name|><|fim▁begin|>from modules.Utils import runCommand
import re
import sys
def parseOptions(command):
so, se, rc = runCommand("gofed %s --help" % command)
if rc != 0:
return []
options = []
option_f = False
for line in so.split("\n"):
if line == "Options:":
option_f = True
continue
if option_f == True:
if line == "":
break
# line must start with two spaces and minus
if len(line) < 3:
continue
if line[:3] != " -":
continue<|fim▁hole|>
if parts == []:
continue
# do we have both short and long options?
opts = map(lambda i: i.strip().split(' ')[0].split('=')[0], parts)
for opt in opts:
options.append(opt)
return sorted(options)
if __name__ == "__main__":
if len(sys.argv) != 2:
		print ""
		exit(1)
command = sys.argv[1]
options = parseOptions(command)
if options == []:
print command + ":"
else:
print command + ":" + " ".join(options)<|fim▁end|> |
line = line.strip()
parts = line.split(' ')[0].split(',') |
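A hedged smoke test for parseOptions: if pasted into this same module, rebinding the global runCommand lets the parser run without a gofed install; the help text below is invented for illustration.

# Hypothetical stub standing in for modules.Utils.runCommand; returns
# (stdout, stderr, return_code) the way parseOptions consumes it above.
def fake_runCommand(cmd):
    help_text = (
        "Usage: gofed lint [options]\n"
        "Options:\n"
        "  -h, --help     show this help\n"
        "  -v, --verbose  be chatty\n"
    )
    return help_text, "", 0

runCommand = fake_runCommand      # parseOptions resolves this name at call time
print(parseOptions("lint"))       # option names parsed from the stubbed text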