_id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q4500
|
_py_expand_long
|
train
|
def _py_expand_long(subsequence, sequence, max_l_dist):
"""Partial match expansion, optimized for long sub-sequences."""
# The additional optimization in this version is to limit the part of
# the sub-sequence inspected for each sequence character. The start and
# end of the iteration are limited to the range where the scores are
# smaller than the maximum allowed distance. Additionally, once a good
# expansion has been found, the range is further reduced to where the
# scores are smaller than the score of the best expansion found so far.
subseq_len = len(subsequence)
if subseq_len == 0:
return (0, 0)
# Initialize the scores array with values for just skipping sub-sequence
# chars.
scores = list(range(1, subseq_len + 1))
min_score = subseq_len
min_score_idx = -1
max_good_score = max_l_dist
new_needle_idx_range_start = 0
new_needle_idx_range_end = subseq_len - 1
for seq_index, char in enumerate(sequence):
# calculate scores, one for each character in the sub-sequence
needle_idx_range_start = new_needle_idx_range_start
needle_idx_range_end = min(subseq_len, new_needle_idx_range_end + 1)
a = seq_index
c = a + 1
if c <= max_good_score:
new_needle_idx_range_start = 0
new_needle_idx_range_end = 0
else:
new_needle_idx_range_start = None
new_needle_idx_range_end = -1
for subseq_index in range(needle_idx_range_start, needle_idx_range_end):
b = scores[subseq_index]
c = scores[subseq_index] = min(
a + (char != subsequence[subseq_index]),
b + 1,
c + 1,
)
a = b
if c <= max_good_score:
if new_needle_idx_range_start is None:
new_needle_idx_range_start = subseq_index
new_needle_idx_range_end = max(
new_needle_idx_range_end,
subseq_index + 1 + (max_good_score - c),
)
# bail early when it is impossible to find a better expansion
if new_needle_idx_range_start is None:
break
# keep the minimum score found for matches of the entire sub-sequence
if needle_idx_range_end == subseq_len and c <= min_score:
min_score = c
min_score_idx = seq_index
if min_score < max_good_score:
max_good_score = min_score
return (min_score, min_score_idx + 1) if min_score <= max_l_dist else (None, None)
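# Illustrative note (not part of the original source): with the scores array
# above, a perfect match reports distance 0 and the matched length, e.g.
#   _py_expand_long('abc', 'abc', 1)  ->  (0, 3)
# while a sub-sequence that cannot be expanded within max_l_dist edits
# yields (None, None).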
|
python
|
{
"resource": ""
}
|
q4501
|
_validate_hands
|
train
|
def _validate_hands(hands, missing):
'''
Validates hands, based on values that
are supposed to be missing from them.
:param list hands: list of Hand objects to validate
:param list missing: list of sets that indicate the values
that are supposed to be missing from
the respective Hand objects
:return: True if no Hand objects contain values that they
are supposed to be missing; False otherwise
'''
for h, m in zip(hands, missing):
for value in m:
if dominoes.hand.contains_value(h, value):
return False
return True
|
python
|
{
"resource": ""
}
|
q4502
|
Game._update_valid_moves
|
train
|
def _update_valid_moves(self):
'''
Updates self.valid_moves according to the latest game state.
Assumes that the board and all hands are non-empty.
'''
left_end = self.board.left_end()
right_end = self.board.right_end()
moves = []
for d in self.hands[self.turn]:
if left_end in d:
moves.append((d, True))
# do not double count moves if both of the board's ends have
# the same value, and a domino can be placed on both of them
if right_end in d and left_end != right_end:
moves.append((d, False))
self.valid_moves = tuple(moves)
|
python
|
{
"resource": ""
}
|
q4503
|
Game.make_move
|
train
|
def make_move(self, d, left):
'''
Plays a domino from the hand of the player whose turn it is onto one
end of the game board. If the game does not end, the turn is advanced
to the next player who has a valid move.
Making a move is transactional - if the operation fails at any point,
the game will return to its state before the operation began.
:param Domino d: domino to be played
:param bool left: end of the board on which to play the
domino (True for left, False for right)
:return: a Result object if the game ends; None otherwise
:raises GameOverException: if the game has already ended
:raises NoSuchDominoException: if the domino to be played is not in
the hand of the player whose turn it is
:raises EndsMismatchException: if the domino cannot be placed on
the specified position in the board
'''
if self.result is not None:
raise dominoes.GameOverException('Cannot make a move - the game is over!')
i = self.hands[self.turn].play(d)
try:
self.board.add(d, left)
except dominoes.EndsMismatchException as error:
# return the domino to the hand if it cannot be placed on the board
self.hands[self.turn].draw(d, i)
raise error
# record the move
self.moves.append((d, left))
# check if the game ended due to a player running out of dominoes
if not self.hands[self.turn]:
self.valid_moves = ()
self.result = dominoes.Result(
self.turn, True, pow(-1, self.turn) * sum(_remaining_points(self.hands))
)
return self.result
# advance the turn to the next player with a valid move.
# if no player has a valid move, the game is stuck. also,
# record all the passes.
passes = []
stuck = True
for _ in self.hands:
self.turn = next_player(self.turn)
self._update_valid_moves()
if self.valid_moves:
self.moves.extend(passes)
stuck = False
break
else:
passes.append(None)
if stuck:
player_points = _remaining_points(self.hands)
team_points = [player_points[0] + player_points[2],
player_points[1] + player_points[3]]
if team_points[0] < team_points[1]:
self.result = dominoes.Result(self.turn, False, sum(team_points))
elif team_points[0] == team_points[1]:
self.result = dominoes.Result(self.turn, False, 0)
else:
self.result = dominoes.Result(self.turn, False, -sum(team_points))
return self.result
|
python
|
{
"resource": ""
}
|
q4504
|
Game.missing_values
|
train
|
def missing_values(self):
'''
Computes the values that must be missing from each
player's hand, based on when they have passed.
:return: a list of sets, each one containing the
values that must be missing from the
corresponding player's hand
'''
missing = [set() for _ in self.hands]
# replay the game from the beginning
board = dominoes.SkinnyBoard()
player = self.starting_player
for move in self.moves:
if move is None:
# pass - update the missing values
missing[player].update([board.left_end(), board.right_end()])
else:
# not a pass - update the board
board.add(*move)
# move on to the next player
player = next_player(player)
return missing
|
python
|
{
"resource": ""
}
|
q4505
|
Game.random_possible_hands
|
train
|
def random_possible_hands(self):
'''
Returns random possible hands for all players, given the information
known by the player whose turn it is. This information includes the
current player's hand, the sizes of the other players' hands, and the
moves played by every player, including the passes.
:return: a list of possible Hand objects, corresponding to each player
'''
# compute values that must be missing from
# each hand, to rule out impossible hands
missing = self.missing_values()
# get the dominoes that are in all of the other hands. note that, even
# though we are 'looking' at the other hands to get these dominoes, we
# are not 'cheating' because these dominoes could also be computed by
# subtracting the dominoes that have been played (which are public
# knowledge) and the dominoes in the current player's hand from the
# initial set of dominoes
other_dominoes = [d for p, h in enumerate(self.hands) for d in h if p != self.turn]
while True:
# generator for a shuffled shallow copy of other_dominoes
shuffled_dominoes = (d for d in random.sample(other_dominoes, len(other_dominoes)))
# generate random hands by partitioning the shuffled dominoes according
# to how many dominoes need to be in each of the other hands. since we
# know the current player's hand, we just use a shallow copy of it
hands = []
for player, hand in enumerate(self.hands):
if player != self.turn:
hand = [next(shuffled_dominoes) for _ in hand]
hands.append(dominoes.Hand(hand))
# only return the hands if they are possible, according to the values we
# know to be missing from each hand. if the hands are not possible, try
# generating random hands again
if _validate_hands(hands, missing):
return hands
|
python
|
{
"resource": ""
}
|
q4506
|
Game.all_possible_hands
|
train
|
def all_possible_hands(self):
'''
Yields all possible hands for all players, given the information
known by the player whose turn it is. This information includes the
current player's hand, the sizes of the other players' hands, and the
moves played by every player, including the passes.
:yields: a list of possible Hand objects, corresponding to each player
'''
# compute values that must be missing from
# each hand, to rule out impossible hands
missing = self.missing_values()
# get the dominoes that are in all of the other hands. note that, even
# though we are 'looking' at the other hands to get these dominoes, we
# are not 'cheating' because these dominoes could also be computed by
# subtracting the dominoes that have been played (which are public
# knowledge) and the dominoes in the current player's hand from the
# initial set of dominoes
other_dominoes = {d for p, h in enumerate(self.hands) for d in h if p != self.turn}
# get the lengths of all the other hands, so
# that we know how many dominoes to place in each
other_hand_lengths = [len(h) for p, h in enumerate(self.hands) if p != self.turn]
# iterate over all possible hands that the other players might have
for possible_hands in _all_possible_partitionings(other_dominoes, other_hand_lengths):
# given possible hands for all players, this is a generator for
# tuples containing the dominoes that are in the other players' hands
possible_hands = (h for h in possible_hands)
# build a list containing possible hands for all players. since we
# know the current player's hand, we just use a shallow copy of it
hands = []
for player, hand in enumerate(self.hands):
if player != self.turn:
hand = next(possible_hands)
hands.append(dominoes.Hand(hand))
# only yield the hands if they are possible, according
# to the values we know to be missing from each hand
if _validate_hands(hands, missing):
yield hands
|
python
|
{
"resource": ""
}
|
q4507
|
random
|
train
|
def random(game):
'''
Prefers moves randomly.
:param Game game: game to play
:return: None
'''
game.valid_moves = tuple(sorted(game.valid_moves, key=lambda _: rand.random()))
|
python
|
{
"resource": ""
}
|
q4508
|
bota_gorda
|
train
|
def bota_gorda(game):
'''
Prefers to play dominoes with higher point values.
:param Game game: game to play
:return: None
'''
game.valid_moves = tuple(sorted(game.valid_moves, key=lambda m: -(m[0].first + m[0].second)))
|
python
|
{
"resource": ""
}
|
q4509
|
make_moves
|
train
|
def make_moves(game, player=dominoes.players.identity):
'''
For each of a Game object's valid moves, yields
a tuple containing the move and the Game object
obtained by playing the move on the original Game
object. The original Game object will be modified.
:param Game game: the game to make moves on
:param callable player: a player to call on the
game before making any
moves, to determine the
order in which they get
made. The identity
player is the default.
'''
# game is over - do not yield anything
if game.result is not None:
return
# determine the order in which to make moves
player(game)
# copy the original game before making all
# but the last move
for move in game.valid_moves[:-1]:
new_game = copy.deepcopy(game)
new_game.make_move(*move)
yield move, new_game
# don't copy the original game before making
# the last move
move = game.valid_moves[-1]
game.make_move(*move)
yield move, game
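# Illustrative usage sketch (not part of the original source):
#   for move, successor in make_moves(copy.deepcopy(game)):
#       score = evaluate(successor)  # `evaluate` is a hypothetical heuristic
# Passing a deep copy keeps the caller's game intact, since the original
# Game object is modified when the last move is made.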
|
python
|
{
"resource": ""
}
|
q4510
|
alphabeta
|
train
|
def alphabeta(game, alpha_beta=(-float('inf'), float('inf')),
player=dominoes.players.identity):
'''
Runs minimax search with alpha-beta pruning on the provided game.
:param Game game: game to search
:param tuple alpha_beta: a tuple of two floats that indicate
the initial values of alpha and beta,
respectively. The default is (-inf, inf).
:param callable player: player used to sort moves to be explored.
Ordering better moves first may significantly
reduce the amount of moves that need to be
explored. The identity player is the default.
'''
# base case - game is over
if game.result is not None:
return [], game.result.points
if game.turn % 2:
# minimizing player
best_value = float('inf')
op = operator.lt
update = lambda ab, v: (ab[0], min(ab[1], v))
else:
# maximizing player
best_value = -float('inf')
op = operator.gt
update = lambda ab, v: (max(ab[0], v), ab[1])
# recursive case - game is not over
for move, new_game in make_moves(game, player):
moves, value = alphabeta(new_game, alpha_beta, player)
if op(value, best_value):
best_value = value
best_moves = moves
best_moves.insert(0, move)
alpha_beta = update(alpha_beta, best_value)
if alpha_beta[1] <= alpha_beta[0]:
# alpha-beta cutoff
break
return best_moves, best_value
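# Illustrative usage sketch (not part of the original source):
#   best_moves, best_value = alphabeta(copy.deepcopy(game))
# returns an optimal move sequence and its point value under perfect play,
# with players 0 and 2 maximizing and players 1 and 3 minimizing.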
|
python
|
{
"resource": ""
}
|
q4511
|
Series.next_game
|
train
|
def next_game(self):
'''
Advances the series to the next game, if possible. Also updates
each team's score with points from the most recently completed game.
:return: the next game, if the previous game did not end the series;
None otherwise
:raises SeriesOverException: if the series has already ended
:raises GameInProgressException: if the last game has not yet finished
'''
if self.is_over():
raise dominoes.SeriesOverException(
'Cannot start a new game - series ended with a score of {} to {}'.format(*self.scores)
)
result = self.games[-1].result
if result is None:
raise dominoes.GameInProgressException(
'Cannot start a new game - the latest one has not finished!'
)
# update each team's score with the points from the previous game
if result.points >= 0:
self.scores[0] += result.points
else:
self.scores[1] -= result.points
# return None if the series is now over
if self.is_over():
return
# determine the starting player for the next game
if result.won or pow(-1, result.player) * result.points > 0:
starting_player = result.player
elif not result.points:
starting_player = self.games[-1].starting_player
else: # pow(-1, result.player) * result.points < 0
starting_player = dominoes.game.next_player(result.player)
# start the next game
self.games.append(dominoes.Game.new(starting_player=starting_player))
return self.games[-1]
|
python
|
{
"resource": ""
}
|
q4512
|
SkinnyBoard.add
|
train
|
def add(self, d, left):
'''
Adds the provided domino to the specified end of the board.
:param Domino d: domino to add
:param bool left: end of the board to which to add the
domino (True for left, False for right)
:return: None
:raises EndsMismatchException: if the values do not match
'''
if left:
self._add_left(d)
else:
self._add_right(d)
|
python
|
{
"resource": ""
}
|
q4513
|
Hand.play
|
train
|
def play(self, d):
'''
Removes a domino from the hand.
:param Domino d: domino to remove from the hand
:return: the index within the hand of the played domino
:raises NoSuchDominoException: if the domino is not in the hand
'''
try:
i = self._dominoes.index(d)
except ValueError:
raise dominoes.NoSuchDominoException('Cannot make move -'
' {} is not in hand!'.format(d))
self._dominoes.pop(i)
return i
|
python
|
{
"resource": ""
}
|
q4514
|
Hand.draw
|
train
|
def draw(self, d, i=None):
'''
Adds a domino to the hand.
:param Domino d: domino to add to the hand
:param int i: index at which to add the domino;
by default adds to the end of the hand
:return: None
'''
if i is None:
self._dominoes.append(d)
else:
self._dominoes.insert(i, d)
|
python
|
{
"resource": ""
}
|
q4515
|
create_centerlines
|
train
|
def create_centerlines(src, dst, density=0.5):
"""
Create centerlines and save them to an ESRI Shapefile.
Reads polygons from the `src` ESRI Shapefile, creates Centerline
objects with the specified `density` parameter and writes them to
the `dst` ESRI Shapefile.
Only Polygon features are converted to centerlines. Features of
different types are skipped.
Args:
src (str): source ESRI Shapefile
dst (str): destination ESRI Shapefile
density (:obj:`float`, optional): the Centerline's density.
Defaults to 0.5 (meters)
Returns:
None
"""
try:
DST_DRIVER = get_ogr_driver(filepath=dst)
except ValueError:
raise
with fiona.Env():
with fiona.open(src, mode='r') as source:
SCHEMA = source.schema.copy()
SCHEMA.update({'geometry': 'MultiLineString'})
with fiona.open(
dst,
mode='w',
driver=DST_DRIVER.GetName(),
schema=SCHEMA,
crs=source.crs,
encoding=source.encoding) as destination:
for record in source:
geom = record.get('geometry')
input_geom = shape(geom)
if not is_valid_geometry(geometry=input_geom):
continue
attributes = record.get('properties')
try:
centerline_obj = Centerline(
input_geom=input_geom,
interpolation_dist=density,
**attributes
)
except RuntimeError as err:
logging.warning(
"ignoring record that could not be processed: %s",
err
)
continue
centerline_dict = {
'geometry': mapping(centerline_obj),
'properties': {
k: v
for k, v in centerline_obj.__dict__.items()
if k in attributes.keys()
}
}
destination.write(centerline_dict)
return None
|
python
|
{
"resource": ""
}
|
q4516
|
Centerline._create_centerline
|
train
|
def _create_centerline(self):
"""
Calculate the centerline of a polygon.
Densifies the border of a polygon which is then represented by a Numpy
array of points necessary for creating the Voronoi diagram. Once the
diagram is created, the ridges located within the polygon are
joined and returned.
Returns:
a union of lines that are located within the polygon.
"""
border = array(self.__densify_border())
vor = Voronoi(border)
vertex = vor.vertices
lst_lines = []
for j, ridge in enumerate(vor.ridge_vertices):
if -1 not in ridge:
line = LineString([
(vertex[ridge[0]][0] + self._minx,
vertex[ridge[0]][1] + self._miny),
(vertex[ridge[1]][0] + self._minx,
vertex[ridge[1]][1] + self._miny)])
if line.within(self._input_geom) and len(line.coords[0]) > 1:
lst_lines.append(line)
nr_lines = len(lst_lines)
if nr_lines < 2:
raise RuntimeError((
"Number of produced ridges is too small: {}"
", this might be caused by too large interpolation distance."
).format(nr_lines))
return unary_union(lst_lines)
|
python
|
{
"resource": ""
}
|
q4517
|
Centerline.__densify_border
|
train
|
def __densify_border(self):
"""
Densify the border of a polygon.
The border is densified by a given factor (by default: 0.5).
The complexity of the polygon's geometry is evaluated in order
to densify the borders of its interior rings as well.
Returns:
list: a list of points where each point is represented by
a list of its reduced coordinates
Example:
[[X1, Y1], [X2, Y2], ..., [Xn, Yn]]
"""
if isinstance(self._input_geom, MultiPolygon):
polygons = [polygon for polygon in self._input_geom]
else:
polygons = [self._input_geom]
points = []
for polygon in polygons:
if len(polygon.interiors) == 0:
exterior = LineString(polygon.exterior)
points += self.__fixed_interpolation(exterior)
else:
exterior = LineString(polygon.exterior)
points += self.__fixed_interpolation(exterior)
for j in range(len(polygon.interiors)):
interior = LineString(polygon.interiors[j])
points += self.__fixed_interpolation(interior)
return points
|
python
|
{
"resource": ""
}
|
q4518
|
Centerline.__fixed_interpolation
|
train
|
def __fixed_interpolation(self, line):
"""
Place additional points on the border at the specified distance.
By default the distance is 0.5 (meters) which means that the first
point will be placed 0.5 m from the starting point, the second
point will be placed at the distance of 1.0 m from the first
point, etc. The loop breaks when the summarized distance exceeds
the length of the line.
Args:
line (shapely.geometry.LineString): object
Returns:
list: a list of points where each point is represented by
a list of its reduced coordinates
Example:
[[X1, Y1], [X2, Y2], ..., [Xn, Yn]]
"""
STARTPOINT = [line.xy[0][0] - self._minx, line.xy[1][0] - self._miny]
ENDPOINT = [line.xy[0][-1] - self._minx, line.xy[1][-1] - self._miny]
count = self._interpolation_dist
newline = [STARTPOINT]
while count < line.length:
point = line.interpolate(count)
newline.append([point.x - self._minx, point.y - self._miny])
count += self._interpolation_dist
newline.append(ENDPOINT)
return newline
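# Illustrative note (not part of the original source): with the default
# density of 0.5 m, a border segment of length 1.2 m yields four points:
# the start point, interpolated points at 0.5 m and 1.0 m, and the end point.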
|
python
|
{
"resource": ""
}
|
q4519
|
is_valid_geometry
|
train
|
def is_valid_geometry(geometry):
"""
Confirm that the geometry type is of type Polygon or MultiPolygon.
Args:
geometry (BaseGeometry): BaseGeometry instance (e.g. Polygon)
Returns:
bool
"""
if isinstance(geometry, Polygon) or isinstance(geometry, MultiPolygon):
return True
else:
return False
|
python
|
{
"resource": ""
}
|
q4520
|
get_ogr_driver
|
train
|
def get_ogr_driver(filepath):
"""
Get the OGR driver from the provided file extension.
Args:
filepath (str): path to the file from which to derive the driver
Returns:
osgeo.ogr.Driver
Raises:
ValueError: no driver is found
"""
filename, file_extension = os.path.splitext(filepath)
EXTENSION = file_extension[1:]
ogr_driver_count = ogr.GetDriverCount()
for idx in range(ogr_driver_count):
driver = ogr.GetDriver(idx)
driver_extension = driver.GetMetadataItem(str('DMD_EXTENSION')) or ''
driver_extensions = driver.GetMetadataItem(str('DMD_EXTENSIONS')) or ''
if EXTENSION == driver_extension or EXTENSION in driver_extensions:
return driver
else:
msg = 'No driver found for the following file extension: {}'.format(
EXTENSION)
raise ValueError(msg)
|
python
|
{
"resource": ""
}
|
q4521
|
get_numwords
|
train
|
def get_numwords():
"""Convert number words to integers in a given text."""
numwords = {'and': (1, 0), 'a': (1, 1), 'an': (1, 1)}
for idx, word in enumerate(UNITS):
numwords[word] = (1, idx)
for idx, word in enumerate(TENS):
numwords[word] = (1, idx * 10)
for idx, word in enumerate(SCALES):
numwords[word] = (10 ** (idx * 3 or 2), 0)
all_numbers = ur'|'.join(ur'\b%s\b' % i for i in numwords.keys() if i)
return all_numbers, numwords
|
python
|
{
"resource": ""
}
|
q4522
|
get_units_regex
|
train
|
def get_units_regex():
"""Build a compiled regex object."""
op_keys = sorted(OPERATORS.keys(), key=len, reverse=True)
unit_keys = sorted(l.UNITS.keys(), key=len, reverse=True)
symbol_keys = sorted(l.SYMBOLS.keys(), key=len, reverse=True)
exponent = ur'(?:(?:\^?\-?[0-9%s]*)(?:\ cubed|\ squared)?)(?![a-zA-Z])' % \
SUPERSCRIPTS
all_ops = '|'.join([r'%s' % re.escape(i) for i in op_keys])
all_units = '|'.join([ur'%s' % re.escape(i) for i in unit_keys])
all_symbols = '|'.join([ur'%s' % re.escape(i) for i in symbol_keys])
pattern = ur'''
(?P<prefix>(?:%s)(?![a-zA-Z]))? # Currencies, mainly
(?P<value>%s)-? # Number
(?:(?P<operator1>%s)?(?P<unit1>(?:%s)%s)?) # Operator + Unit (1)
(?:(?P<operator2>%s)?(?P<unit2>(?:%s)%s)?) # Operator + Unit (2)
(?:(?P<operator3>%s)?(?P<unit3>(?:%s)%s)?) # Operator + Unit (3)
(?:(?P<operator4>%s)?(?P<unit4>(?:%s)%s)?) # Operator + Unit (4)
''' % tuple([all_symbols, RAN_PATTERN] + 4 * [all_ops, all_units,
exponent])
regex = re.compile(pattern, re.VERBOSE | re.IGNORECASE)
return regex
|
python
|
{
"resource": ""
}
|
q4523
|
get_dimension_permutations
|
train
|
def get_dimension_permutations(entities, dimensions):
"""Get all possible dimensional definitions for an entity."""
new_dimensions = defaultdict(int)
for item in dimensions:
new = entities[item['base']].dimensions
if new:
for new_item in new:
new_dimensions[new_item['base']] += new_item['power'] * \
item['power']
else:
new_dimensions[item['base']] += item['power']
final = [[{'base': i[0], 'power': i[1]} for i in new_dimensions.items()]]
final.append(dimensions)
final = [sorted(i, key=lambda x: x['base']) for i in final]
candidates = []
for item in final:
if item not in candidates:
candidates.append(item)
return candidates
|
python
|
{
"resource": ""
}
|
q4524
|
load_entities
|
train
|
def load_entities():
"""Load entities from JSON file."""
path = os.path.join(TOPDIR, 'entities.json')
entities = json.load(open(path))
names = [i['name'] for i in entities]
try:
assert len(set(names)) == len(entities)
except AssertionError:
raise Exception('Entities with same name: %s' % [i for i in names if
names.count(i) > 1])
entities = dict((k['name'], c.Entity(name=k['name'],
dimensions=k['dimensions'],
uri=k['URI'])) for k in entities)
dimensions_ent = defaultdict(list)
for ent in entities:
if not entities[ent].dimensions:
continue
perms = get_dimension_permutations(entities, entities[ent].dimensions)
for perm in perms:
key = get_key_from_dimensions(perm)
dimensions_ent[key].append(entities[ent])
return entities, dimensions_ent
|
python
|
{
"resource": ""
}
|
q4525
|
get_dimensions_units
|
train
|
def get_dimensions_units(names):
"""Create dictionary of unit dimensions."""
dimensions_uni = {}
for name in names:
key = get_key_from_dimensions(names[name].dimensions)
dimensions_uni[key] = names[name]
plain_dimensions = [{'base': name, 'power': 1}]
key = get_key_from_dimensions(plain_dimensions)
dimensions_uni[key] = names[name]
if not names[name].dimensions:
names[name].dimensions = plain_dimensions
names[name].dimensions = [{'base': names[i['base']].name,
'power': i['power']} for i in
names[name].dimensions]
return dimensions_uni
|
python
|
{
"resource": ""
}
|
q4526
|
load_units
|
train
|
def load_units():
"""Load units from JSON file."""
names = {}
lowers = defaultdict(list)
symbols = defaultdict(list)
surfaces = defaultdict(list)
for unit in json.load(open(os.path.join(TOPDIR, 'units.json'))):
try:
assert unit['name'] not in names
except AssertionError:
msg = 'Two units with same name in units.json: %s' % unit['name']
raise Exception(msg)
obj = c.Unit(name=unit['name'], surfaces=unit['surfaces'],
entity=ENTITIES[unit['entity']], uri=unit['URI'],
symbols=unit['symbols'], dimensions=unit['dimensions'])
names[unit['name']] = obj
for symbol in unit['symbols']:
surfaces[symbol].append(obj)
lowers[symbol.lower()].append(obj)
if unit['entity'] == 'currency':
symbols[symbol].append(obj)
for surface in unit['surfaces']:
surfaces[surface].append(obj)
lowers[surface.lower()].append(obj)
split = surface.split()
index = None
if ' per ' in surface:
index = split.index('per') - 1
elif 'degree ' in surface:
index = split.index('degree')
if index is not None:
plural = ' '.join([i if num != index else
PLURALS.plural(split[index]) for num, i in
enumerate(split)])
else:
plural = PLURALS.plural(surface)
if plural != surface:
surfaces[plural].append(obj)
lowers[plural.lower()].append(obj)
dimensions_uni = get_dimensions_units(names)
return names, surfaces, lowers, symbols, dimensions_uni
|
python
|
{
"resource": ""
}
|
q4527
|
download_wiki
|
train
|
def download_wiki():
"""Download WikiPedia pages of ambiguous units."""
ambiguous = [i for i in l.UNITS.items() if len(i[1]) > 1]
ambiguous += [i for i in l.DERIVED_ENT.items() if len(i[1]) > 1]
pages = set([(j.name, j.uri) for i in ambiguous for j in i[1]])
print
objs = []
for num, page in enumerate(pages):
obj = {'url': page[1]}
obj['_id'] = obj['url'].replace('https://en.wikipedia.org/wiki/', '')
obj['clean'] = obj['_id'].replace('_', ' ')
print '---> Downloading %s (%d of %d)' % \
(obj['clean'], num + 1, len(pages))
obj['text'] = wikipedia.page(obj['clean']).content
obj['unit'] = page[0]
objs.append(obj)
path = os.path.join(l.TOPDIR, 'wiki.json')
os.remove(path)
json.dump(objs, open(path, 'w'), indent=4, sort_keys=True)
print '\n---> All done.\n'
|
python
|
{
"resource": ""
}
|
q4528
|
clean_text
|
train
|
def clean_text(text):
"""Clean text for TFIDF."""
new_text = re.sub(ur'\p{P}+', ' ', text)
new_text = [stem(i) for i in new_text.lower().split() if not
re.findall(r'[0-9]', i)]
new_text = ' '.join(new_text)
return new_text
|
python
|
{
"resource": ""
}
|
q4529
|
disambiguate_entity
|
train
|
def disambiguate_entity(key, text):
"""Resolve ambiguity between entities with same dimensionality."""
new_ent = l.DERIVED_ENT[key][0]
if len(l.DERIVED_ENT[key]) > 1:
transformed = TFIDF_MODEL.transform([text])
scores = CLF.predict_proba(transformed).tolist()[0]
scores = sorted(zip(scores, TARGET_NAMES), key=lambda x: x[0],
reverse=True)
names = [i.name for i in l.DERIVED_ENT[key]]
scores = [i for i in scores if i[1] in names]
try:
new_ent = l.ENTITIES[scores[0][1]]
except IndexError:
logging.debug('\tAmbiguity not resolved for "%s"', str(key))
return new_ent
|
python
|
{
"resource": ""
}
|
q4530
|
disambiguate_unit
|
train
|
def disambiguate_unit(unit, text):
"""
Resolve ambiguity.
Distinguish between units that have same names, symbols or abbreviations.
"""
new_unit = l.UNITS[unit]
if not new_unit:
new_unit = l.LOWER_UNITS[unit.lower()]
if not new_unit:
raise KeyError('Could not find unit "%s"' % unit)
if len(new_unit) > 1:
transformed = TFIDF_MODEL.transform([clean_text(text)])
scores = CLF.predict_proba(transformed).tolist()[0]
scores = sorted(zip(scores, TARGET_NAMES), key=lambda x: x[0],
reverse=True)
names = [i.name for i in new_unit]
scores = [i for i in scores if i[1] in names]
try:
final = l.UNITS[scores[0][1]][0]
logging.debug('\tAmbiguity resolved for "%s" (%s)', unit, scores)
except IndexError:
logging.debug('\tAmbiguity not resolved for "%s"', unit)
final = new_unit[0]
else:
final = new_unit[0]
return final
|
python
|
{
"resource": ""
}
|
q4531
|
clean_surface
|
train
|
def clean_surface(surface, span):
"""Remove spurious characters from a quantity's surface."""
surface = surface.replace('-', ' ')
no_start = ['and', ' ']
no_end = [' and', ' ']
found = True
while found:
found = False
for word in no_start:
if surface.lower().startswith(word):
surface = surface[len(word):]
span = (span[0] + len(word), span[1])
found = True
for word in no_end:
if surface.lower().endswith(word):
surface = surface[:-len(word)]
span = (span[0], span[1] - len(word))
found = True
if not surface:
return None, None
split = surface.lower().split()
if split[0] in ['one', 'a', 'an'] and len(split) > 1 and split[1] in \
r.UNITS + r.TENS:
span = (span[0] + len(surface.split()[0]) + 1, span[1])
surface = ' '.join(surface.split()[1:])
return surface, span
|
python
|
{
"resource": ""
}
|
q4532
|
build_unit_name
|
train
|
def build_unit_name(dimensions):
"""Build the name of the unit from its dimensions."""
name = ''
for unit in dimensions:
if unit['power'] < 0:
name += 'per '
power = abs(unit['power'])
if power == 1:
name += unit['base']
elif power == 2:
name += 'square ' + unit['base']
elif power == 3:
name += 'cubic ' + unit['base']
elif power > 3:
name += unit['base'] + ' to the %g' % power
name += ' '
name = name.strip()
logging.debug(u'\tUnit inferred name: %s', name)
return name
|
python
|
{
"resource": ""
}
|
q4533
|
get_unit_from_dimensions
|
train
|
def get_unit_from_dimensions(dimensions, text):
"""Reconcile a unit based on its dimensionality."""
key = l.get_key_from_dimensions(dimensions)
try:
unit = l.DERIVED_UNI[key]
except KeyError:
logging.debug(u'\tCould not find unit for: %s', key)
unit = c.Unit(name=build_unit_name(dimensions),
dimensions=dimensions,
entity=get_entity_from_dimensions(dimensions, text))
return unit
|
python
|
{
"resource": ""
}
|
q4534
|
parse_unit
|
train
|
def parse_unit(item, group, slash):
"""Parse surface and power from unit text."""
surface = item.group(group).replace('.', '')
power = re.findall(r'\-?[0-9%s]+' % r.SUPERSCRIPTS, surface)
if power:
power = [r.UNI_SUPER[i] if i in r.UNI_SUPER else i for i
in power]
power = ''.join(power)
new_power = (-1 * int(power) if slash else int(power))
surface = re.sub(r'\^?\-?[0-9%s]+' % r.SUPERSCRIPTS, '', surface)
elif re.findall(r'\bcubed\b', surface):
new_power = (-3 if slash else 3)
surface = re.sub(r'\bcubed\b', '', surface).strip()
elif re.findall(r'\bsquared\b', surface):
new_power = (-2 if slash else 2)
surface = re.sub(r'\bsquared\b', '', surface).strip()
else:
new_power = (-1 if slash else 1)
return surface, new_power
|
python
|
{
"resource": ""
}
|
q4535
|
get_unit
|
train
|
def get_unit(item, text):
"""Extract unit from regex hit."""
group_units = [1, 4, 6, 8, 10]
group_operators = [3, 5, 7, 9]
item_units = [item.group(i) for i in group_units if item.group(i)]
if len(item_units) == 0:
unit = l.NAMES['dimensionless']
else:
dimensions, slash = [], False
for group in sorted(group_units + group_operators):
if not item.group(group):
continue
if group in group_units:
surface, power = parse_unit(item, group, slash)
if clf.USE_CLF:
base = clf.disambiguate_unit(surface, text).name
else:
base = l.UNITS[surface][0].name
dimensions += [{'base': base, 'power': power}]
elif not slash:
slash = any(i in item.group(group) for i in [u'/', u' per '])
unit = get_unit_from_dimensions(dimensions, text)
logging.debug(u'\tUnit: %s', unit)
logging.debug(u'\tEntity: %s', unit.entity)
return unit
|
python
|
{
"resource": ""
}
|
q4536
|
get_surface
|
train
|
def get_surface(shifts, orig_text, item, text):
"""Extract surface from regex hit."""
span = item.span()
logging.debug(u'\tInitial span: %s ("%s")', span, text[span[0]:span[1]])
real_span = (span[0] - shifts[span[0]], span[1] - shifts[span[1] - 1])
surface = orig_text[real_span[0]:real_span[1]]
logging.debug(u'\tShifted span: %s ("%s")', real_span, surface)
while any(surface.endswith(i) for i in [' ', '-']):
surface = surface[:-1]
real_span = (real_span[0], real_span[1] - 1)
while surface.startswith(' '):
surface = surface[1:]
real_span = (real_span[0] + 1, real_span[1])
logging.debug(u'\tFinal span: %s ("%s")', real_span, surface)
return surface, real_span
|
python
|
{
"resource": ""
}
|
q4537
|
is_quote_artifact
|
train
|
def is_quote_artifact(orig_text, span):
"""Distinguish between quotes and units."""
res = False
cursor = re.finditer(r'("|\')[^ .,:;?!()*+-].*?("|\')', orig_text)
for item in cursor:
if item.span()[1] == span[1]:
res = True
return res
|
python
|
{
"resource": ""
}
|
q4538
|
MeteorClient.login
|
train
|
def login(self, user, password, token=None, callback=None):
"""Login with a username and password
Arguments:
user - username or email address
password - the password for the account
Keyword Arguments:
token - meteor resume token
callback - callback function containing error as first argument and login data"""
# TODO: keep the tokenExpires around so we know the next time
# we need to authenticate
# hash the password
hashed = hashlib.sha256(password).hexdigest()
# handle username or email address
if '@' in user:
user_object = {
'email': user
}
else:
user_object = {
'username': user
}
password_object = {
'algorithm': 'sha-256',
'digest': hashed
}
self._login_token = token
self._login_data = {'user': user_object, 'password': password_object}
if token:
self._resume(token, callback=callback)
else:
self._login(self._login_data, callback=callback)
|
python
|
{
"resource": ""
}
|
q4539
|
MeteorClient.logout
|
train
|
def logout(self, callback=None):
"""Logout a user
Keyword Arguments:
callback - callback function called when the user has been logged out"""
self.ddp_client.call('logout', [], callback=callback)
self.emit('logged_out')
|
python
|
{
"resource": ""
}
|
q4540
|
MeteorClient.call
|
train
|
def call(self, method, params, callback=None):
"""Call a remote method
Arguments:
method - remote method name
params - remote method parameters
Keyword Arguments:
callback - callback function containing return data"""
self._wait_for_connect()
self.ddp_client.call(method, params, callback=callback)
|
python
|
{
"resource": ""
}
|
q4541
|
MeteorClient.unsubscribe
|
train
|
def unsubscribe(self, name):
"""Unsubscribe from a collection
Arguments:
name - the name of the publication"""
self._wait_for_connect()
if name not in self.subscriptions:
raise MeteorClientException('No subscription for {}'.format(name))
self.ddp_client.unsubscribe(self.subscriptions[name]['id'])
del self.subscriptions[name]
self.emit('unsubscribed', name)
|
python
|
{
"resource": ""
}
|
q4542
|
MeteorClient.find
|
train
|
def find(self, collection, selector={}):
"""Find data in a collection
Arguments:
collection - collection to search
Keyword Arguments:
selector - the query (default returns all items in a collection)"""
results = []
for _id, doc in self.collection_data.data.get(collection, {}).items():
doc.update({'_id': _id})
if selector == {}:
results.append(doc)
for key, value in selector.items():
if key in doc and doc[key] == value:
results.append(doc)
return results
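# Illustrative usage sketch (not part of the original source):
#   open_tasks = client.find('todos', selector={'done': False})
# returns every locally cached document in the (hypothetical) 'todos'
# collection whose 'done' field equals False, each with its '_id' included.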
|
python
|
{
"resource": ""
}
|
q4543
|
MeteorClient.find_one
|
train
|
def find_one(self, collection, selector={}):
"""Return one item from a collection
Arguments:
collection - collection to search
Keyword Arguments:
selector - the query (default returns first item found)"""
for _id, doc in self.collection_data.data.get(collection, {}).items():
doc.update({'_id': _id})
if selector == {}:
return doc
for key, value in selector.items():
if key in doc and doc[key] == value:
return doc
return None
|
python
|
{
"resource": ""
}
|
q4544
|
MeteorClient.remove
|
train
|
def remove(self, collection, selector, callback=None):
"""Remove an item from a collection
Arguments:
collection - the collection to be modified
selector - Specifies which documents to remove
Keyword Arguments:
callback - Optional. If present, called with an error object as its argument."""
self.call("/" + collection + "/remove", [selector], callback=callback)
|
python
|
{
"resource": ""
}
|
q4545
|
WebSocket.close
|
train
|
def close(self, status=1000, reason=u''):
"""
Send Close frame to the client. The underlying socket is only closed
when the client acknowledges the Close frame.
status is the closing identifier.
reason is the reason for the close.
"""
try:
if self.closed is False:
close_msg = bytearray()
close_msg.extend(struct.pack("!H", status))
if _check_unicode(reason):
close_msg.extend(reason.encode('utf-8'))
else:
close_msg.extend(reason)
self._send_message(False, CLOSE, close_msg)
finally:
self.closed = True
|
python
|
{
"resource": ""
}
|
q4546
|
deflate_and_encode
|
train
|
def deflate_and_encode(plantuml_text):
"""zlib compress the plantuml text and encode it for the plantuml server.
"""
zlibbed_str = zlib.compress(plantuml_text.encode('utf-8'))
compressed_string = zlibbed_str[2:-4]
return encode(compressed_string.decode('latin-1'))
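# Illustrative usage sketch (not part of the original source), assuming a
# reachable PlantUML server:
#   url = 'http://www.plantuml.com/plantuml/png/' + \
#         deflate_and_encode('@startuml\nAlice -> Bob\n@enduml')
# The resulting URL can be fetched to render the diagram as a PNG.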
|
python
|
{
"resource": ""
}
|
q4547
|
encode
|
train
|
def encode(data):
"""encode the plantuml data which may be compresses in the proper
encoding for the plantuml server
"""
res = ""
for i in range(0,len(data), 3):
if (i+2==len(data)):
res += _encode3bytes(ord(data[i]), ord(data[i+1]), 0)
elif (i+1==len(data)):
res += _encode3bytes(ord(data[i]), 0, 0)
else:
res += _encode3bytes(ord(data[i]), ord(data[i+1]), ord(data[i+2]))
return res
|
python
|
{
"resource": ""
}
|
q4548
|
AbstractPEMObject.sha1_hexdigest
|
train
|
def sha1_hexdigest(self):
# type: () -> str
"""
A SHA-1 digest of the whole object for easy differentiation.
.. versionadded:: 18.1.0
"""
if self._sha1_hexdigest is None:
self._sha1_hexdigest = hashlib.sha1(self._pem_bytes).hexdigest()
return self._sha1_hexdigest
|
python
|
{
"resource": ""
}
|
q4549
|
make
|
train
|
def make(cls, **kwargs):
"""Create a container.
Reports extra keys as well as missing ones.
Thanks to habnabit for the idea!
"""
cls_attrs = {f.name: f for f in attr.fields(cls)}
unknown = {k: v for k, v in kwargs.items() if k not in cls_attrs}
if len(unknown) > 0:
_LOGGER.warning(
"Got unknowns for %s: %s - please create an issue!", cls.__name__, unknown
)
missing = [k for k in cls_attrs if k not in kwargs]
data = {k: v for k, v in kwargs.items() if k in cls_attrs}
# initialize missing values to avoid passing default=None
# for the attrs attribute definitions
for m in missing:
default = cls_attrs[m].default
if isinstance(default, attr.Factory):
if not default.takes_self:
data[m] = default.factory()
else:
raise NotImplementedError
else:
_LOGGER.debug("Missing key %s with no default for %s", m, cls.__name__)
data[m] = None
# initialize and store raw data for debug purposes
inst = cls(**data)
setattr(inst, "raw", kwargs)
return inst
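# Illustrative usage sketch (not part of the original source): for a
# hypothetical attrs class `Power` with fields `status` and `standbyDetail`,
#   Power.make(status='active', bogus=1)
# warns about the unknown 'bogus' key, fills the missing 'standbyDetail'
# from its attrs default (or None), and stores the raw kwargs on `inst.raw`.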
|
python
|
{
"resource": ""
}
|
q4550
|
Device.create_post_request
|
train
|
async def create_post_request(self, method: str, params: Dict = None):
"""Call the given method over POST.
:param method: Name of the method
:param params: dict of parameters
:return: JSON object
"""
if params is None:
params = {}
headers = {"Content-Type": "application/json"}
payload = {
"method": method,
"params": [params],
"id": next(self.idgen),
"version": "1.0",
}
if self.debug > 1:
_LOGGER.debug("> POST %s with body: %s", self.guide_endpoint, payload)
async with aiohttp.ClientSession(headers=headers) as session:
res = await session.post(self.guide_endpoint, json=payload, headers=headers)
if self.debug > 1:
_LOGGER.debug("Received %s: %s" % (res.status_code, res.text))
if res.status != 200:
raise SongpalException(
"Got a non-ok (status %s) response for %s" % (res.status, method),
error=(await res.json())["error"],
)
res = await res.json()
# TODO handle exceptions from POST? This used to raise SongpalException
# on requests.RequestException (Unable to get APIs).
if "error" in res:
raise SongpalException("Got an error for %s" % method, error=res["error"])
if self.debug > 1:
_LOGGER.debug("Got %s: %s", method, pf(res))
return res
|
python
|
{
"resource": ""
}
|
q4551
|
Device.get_supported_methods
|
train
|
async def get_supported_methods(self):
"""Get information about supported methods.
Calling this as the first thing before doing anything else is
necessary to fill the available services table.
"""
response = await self.request_supported_methods()
if "result" in response:
services = response["result"][0]
_LOGGER.debug("Got %s services!" % len(services))
for x in services:
serv = await Service.from_payload(
x, self.endpoint, self.idgen, self.debug, self.force_protocol
)
if serv is not None:
self.services[x["service"]] = serv
else:
_LOGGER.warning("Unable to create service %s", x["service"])
for service in self.services.values():
if self.debug > 1:
_LOGGER.debug("Service %s", service)
for api in service.methods:
# self.logger.debug("%s > %s" % (service, api))
if self.debug > 1:
_LOGGER.debug("> %s" % api)
return self.services
return None
|
python
|
{
"resource": ""
}
|
q4552
|
Device.set_power
|
train
|
async def set_power(self, value: bool):
"""Toggle the device on and off."""
if value:
status = "active"
else:
status = "off"
# TODO WoL works when quickboot is not enabled
return await self.services["system"]["setPowerStatus"](status=status)
|
python
|
{
"resource": ""
}
|
q4553
|
Device.get_play_info
|
train
|
async def get_play_info(self) -> PlayInfo:
"""Return of the device."""
info = await self.services["avContent"]["getPlayingContentInfo"]({})
return PlayInfo.make(**info.pop())
|
python
|
{
"resource": ""
}
|
q4554
|
Device.get_power_settings
|
train
|
async def get_power_settings(self) -> List[Setting]:
"""Get power settings."""
return [
Setting.make(**x)
for x in await self.services["system"]["getPowerSettings"]({})
]
|
python
|
{
"resource": ""
}
|
q4555
|
Device.set_power_settings
|
train
|
async def set_power_settings(self, target: str, value: str) -> None:
"""Set power settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["system"]["setPowerSettings"](params)
|
python
|
{
"resource": ""
}
|
q4556
|
Device.get_googlecast_settings
|
train
|
async def get_googlecast_settings(self) -> List[Setting]:
"""Get Googlecast settings."""
return [
Setting.make(**x)
for x in await self.services["system"]["getWuTangInfo"]({})
]
|
python
|
{
"resource": ""
}
|
q4557
|
Device.set_googlecast_settings
|
train
|
async def set_googlecast_settings(self, target: str, value: str):
"""Set Googlecast settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["system"]["setWuTangInfo"](params)
|
python
|
{
"resource": ""
}
|
q4558
|
Device.get_settings
|
train
|
async def get_settings(self) -> List[SettingsEntry]:
"""Get a list of available settings.
See :func:request_settings_tree: for raw settings.
"""
settings = await self.request_settings_tree()
return [SettingsEntry.make(**x) for x in settings["settings"]]
|
python
|
{
"resource": ""
}
|
q4559
|
Device.get_misc_settings
|
train
|
async def get_misc_settings(self) -> List[Setting]:
"""Return miscellaneous settings such as name and timezone."""
misc = await self.services["system"]["getDeviceMiscSettings"](target="")
return [Setting.make(**x) for x in misc]
|
python
|
{
"resource": ""
}
|
q4560
|
Device.set_misc_settings
|
train
|
async def set_misc_settings(self, target: str, value: str):
"""Change miscellaneous settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["system"]["setDeviceMiscSettings"](params)
|
python
|
{
"resource": ""
}
|
q4561
|
Device.get_sleep_timer_settings
|
train
|
async def get_sleep_timer_settings(self) -> List[Setting]:
"""Get sleep timer settings."""
return [
Setting.make(**x)
for x in await self.services["system"]["getSleepTimerSettings"]({})
]
|
python
|
{
"resource": ""
}
|
q4562
|
Device.get_storage_list
|
train
|
async def get_storage_list(self) -> List[Storage]:
"""Return information about connected storage devices."""
return [
Storage.make(**x)
for x in await self.services["system"]["getStorageList"]({})
]
|
python
|
{
"resource": ""
}
|
q4563
|
Device.get_update_info
|
train
|
async def get_update_info(self, from_network=True) -> SoftwareUpdateInfo:
"""Get information about updates."""
if from_network:
from_network = "true"
else:
from_network = "false"
# from_network = ""
info = await self.services["system"]["getSWUpdateInfo"](network=from_network)
return SoftwareUpdateInfo.make(**info)
|
python
|
{
"resource": ""
}
|
q4564
|
Device.get_inputs
|
train
|
async def get_inputs(self) -> List[Input]:
"""Return list of available outputs."""
res = await self.services["avContent"]["getCurrentExternalTerminalsStatus"]()
return [Input.make(services=self.services, **x) for x in res if 'meta:zone:output' not in x['meta']]
|
python
|
{
"resource": ""
}
|
q4565
|
Device.get_zones
|
train
|
async def get_zones(self) -> List[Zone]:
"""Return list of available zones."""
res = await self.services["avContent"]["getCurrentExternalTerminalsStatus"]()
zones = [Zone.make(services=self.services, **x) for x in res if 'meta:zone:output' in x['meta']]
if not zones:
raise SongpalException("Device has no zones")
return zones
|
python
|
{
"resource": ""
}
|
q4566
|
Device.get_setting
|
train
|
async def get_setting(self, service: str, method: str, target: str):
"""Get a single setting for service.
:param service: Service to query.
:param method: Getter method for the setting, read from ApiMapping.
:param target: Setting to query.
:return: JSON response from the device.
"""
return await self.services[service][method](target=target)
|
python
|
{
"resource": ""
}
|
q4567
|
Device.get_bluetooth_settings
|
train
|
async def get_bluetooth_settings(self) -> List[Setting]:
"""Get bluetooth settings."""
bt = await self.services["avContent"]["getBluetoothSettings"]({})
return [Setting.make(**x) for x in bt]
|
python
|
{
"resource": ""
}
|
q4568
|
Device.set_bluetooth_settings
|
train
|
async def set_bluetooth_settings(self, target: str, value: str) -> None:
"""Set bluetooth settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["avContent"]["setBluetoothSettings"](params)
|
python
|
{
"resource": ""
}
|
q4569
|
Device.set_custom_eq
|
train
|
async def set_custom_eq(self, target: str, value: str) -> None:
"""Set custom EQ settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["audio"]["setCustomEqualizerSettings"](params)
|
python
|
{
"resource": ""
}
|
q4570
|
Device.get_supported_playback_functions
|
train
|
async def get_supported_playback_functions(
self, uri=""
) -> List[SupportedFunctions]:
"""Return list of inputs and their supported functions."""
return [
SupportedFunctions.make(**x)
for x in await self.services["avContent"]["getSupportedPlaybackFunction"](
uri=uri
)
]
|
python
|
{
"resource": ""
}
|
q4571
|
Device.get_playback_settings
|
train
|
async def get_playback_settings(self) -> List[Setting]:
"""Get playback settings such as shuffle and repeat."""
return [
Setting.make(**x)
for x in await self.services["avContent"]["getPlaybackModeSettings"]({})
]
|
python
|
{
"resource": ""
}
|
q4572
|
Device.set_playback_settings
|
train
|
async def set_playback_settings(self, target, value) -> None:
"""Set playback settings such a shuffle and repeat."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["avContent"]["setPlaybackModeSettings"](params)
|
python
|
{
"resource": ""
}
|
q4573
|
Device.get_schemes
|
train
|
async def get_schemes(self) -> List[Scheme]:
"""Return supported uri schemes."""
return [
Scheme.make(**x)
for x in await self.services["avContent"]["getSchemeList"]()
]
|
python
|
{
"resource": ""
}
|
q4574
|
Device.get_source_list
|
train
|
async def get_source_list(self, scheme: str = "") -> List[Source]:
"""Return available sources for playback."""
res = await self.services["avContent"]["getSourceList"](scheme=scheme)
return [Source.make(**x) for x in res]
|
python
|
{
"resource": ""
}
|
q4575
|
Device.get_content_count
|
train
|
async def get_content_count(self, source: str):
"""Return file listing for source."""
params = {"uri": source, "type": None, "target": "all", "view": "flat"}
return ContentInfo.make(
**await self.services["avContent"]["getContentCount"](params)
)
|
python
|
{
"resource": ""
}
|
q4576
|
Device.get_contents
|
train
|
async def get_contents(self, uri) -> List[Content]:
"""Request content listing recursively for the given URI.
:param uri: URI for the source.
:return: List of Content objects.
"""
contents = [
Content.make(**x)
for x in await self.services["avContent"]["getContentList"](uri=uri)
]
contentlist = []
for content in contents:
if content.contentKind == "directory" and content.index >= 0:
# print("got directory %s" % content.uri)
res = await self.get_contents(content.uri)
contentlist.extend(res)
else:
contentlist.append(content)
# print("%s%s" % (' ' * depth, content))
return contentlist
|
python
|
{
"resource": ""
}
|
q4577
|
Device.get_volume_information
|
train
|
async def get_volume_information(self) -> List[Volume]:
"""Get the volume information."""
res = await self.services["audio"]["getVolumeInformation"]({})
volume_info = [Volume.make(services=self.services, **x) for x in res]
if len(volume_info) < 1:
logging.warning("Unable to get volume information")
elif len(volume_info) > 1:
logging.debug("The device seems to have more than one volume setting.")
return volume_info
|
python
|
{
"resource": ""
}
|
q4578
|
Device.get_sound_settings
|
train
|
async def get_sound_settings(self, target="") -> List[Setting]:
"""Get the current sound settings.
:param str target: settings target, defaults to all.
"""
res = await self.services["audio"]["getSoundSettings"]({"target": target})
return [Setting.make(**x) for x in res]
|
python
|
{
"resource": ""
}
|
q4579
|
Device.get_soundfield
|
train
|
async def get_soundfield(self) -> Setting:
"""Get the current sound field settings."""
res = await self.services["audio"]["getSoundSettings"]({"target": "soundField"})
return Setting.make(**res[0])
|
python
|
{
"resource": ""
}
|
q4580
|
Device.set_sound_settings
|
train
|
async def set_sound_settings(self, target: str, value: str):
"""Change a sound setting."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["audio"]["setSoundSettings"](params)
|
python
|
{
"resource": ""
}
|
q4581
|
Device.get_speaker_settings
|
train
|
async def get_speaker_settings(self) -> List[Setting]:
"""Return speaker settings."""
speaker_settings = await self.services["audio"]["getSpeakerSettings"]({})
return [Setting.make(**x) for x in speaker_settings]
|
python
|
{
"resource": ""
}
|
q4582
|
Device.set_speaker_settings
|
train
|
async def set_speaker_settings(self, target: str, value: str):
"""Set speaker settings."""
params = {"settings": [{"target": target, "value": value}]}
return await self.services["audio"]["setSpeakerSettings"](params)
|
python
|
{
"resource": ""
}
|
q4583
|
Device.listen_notifications
|
train
|
async def listen_notifications(self, fallback_callback=None):
"""Listen for notifications from the device forever.
Use :func:on_notification: to register what notifications to listen to.
"""
tasks = []
async def handle_notification(notification):
if type(notification) not in self.callbacks:
if not fallback_callback:
_LOGGER.debug("No callbacks for %s", notification)
# _LOGGER.debug("Existing callbacks for: %s" % self.callbacks)
else:
await fallback_callback(notification)
return
for cb in self.callbacks[type(notification)]:
await cb(notification)
for serv in self.services.values():
tasks.append(
asyncio.ensure_future(
serv.listen_all_notifications(handle_notification)
)
)
try:
print(await asyncio.gather(*tasks))
except Exception as ex:
# TODO: do a slightly restricted exception handling?
# Notify about disconnect
await handle_notification(ConnectChange(connected=False, exception=ex))
return
|
python
|
{
"resource": ""
}
|
q4584
|
Device.stop_listen_notifications
|
train
|
async def stop_listen_notifications(self):
"""Stop listening on notifications."""
_LOGGER.debug("Stopping listening for notifications..")
for serv in self.services.values():
await serv.stop_listen_notifications()
return True
|
python
|
{
"resource": ""
}
|
q4585
|
Device.get_notifications
|
train
|
async def get_notifications(self) -> List[Notification]:
"""Get available notifications, which can then be subscribed to.
Call :func:activate: to enable notifications, and :func:listen_notifications:
to loop forever for notifications.
:return: List of Notification objects
"""
notifications = []
for serv in self.services:
for notification in self.services[serv].notifications:
notifications.append(notification)
return notifications
|
python
|
{
"resource": ""
}
|
q4586
|
Device.raw_command
|
train
|
async def raw_command(self, service: str, method: str, params: Any):
"""Call an arbitrary method with given parameters.
This is useful for debugging and trying out commands before
implementing them properly.
:param service: Service, use list(self.services) to get a list of availables.
:param method: Method to call.
:param params: Parameters as a python object (e.g., dict, list)
:return: Raw JSON response from the device.
"""
_LOGGER.info("Calling %s.%s(%s)", service, method, params)
return await self.services[service][method](params)
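# Illustrative usage sketch (not part of the original source); the method
# name and parameters below are hypothetical and device-dependent:
#   await dev.raw_command('audio', 'setAudioVolume', {'volume': '20'})
# The parameters are passed through unmodified, so they must match what the
# device's API expects.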
|
python
|
{
"resource": ""
}
|
q4587
|
GroupControl.call
|
train
|
async def call(self, action, **kwargs):
"""Make an action call with given kwargs."""
act = self.service.action(action)
_LOGGER.info("Calling %s with %s", action, kwargs)
res = await act.async_call(**kwargs)
_LOGGER.info(" Result: %s" % res)
return res
|
python
|
{
"resource": ""
}
|
q4588
|
GroupControl.info
|
train
|
async def info(self):
"""Return device info."""
"""
{'MasterCapability': 9, 'TransportPort': 3975}
"""
act = self.service.action("X_GetDeviceInfo")
res = await act.async_call()
return res
|
python
|
{
"resource": ""
}
|
q4589
|
GroupControl.state
|
train
|
async def state(self) -> GroupState:
"""Return the current group state"""
act = self.service.action("X_GetState")
res = await act.async_call()
return GroupState.make(**res)
|
python
|
{
"resource": ""
}
|
q4590
|
GroupControl.get_group_memory
|
train
|
async def get_group_memory(self):
"""Return group memory."""
# Returns an XML with groupMemoryList
act = self.service.action("X_GetAllGroupMemory")
res = await act.async_call()
return res
|
python
|
{
"resource": ""
}
|
q4591
|
GroupControl.update_group_memory
|
train
|
async def update_group_memory(self, memory_id, mode, name, slaves, codectype=0x0040, bitrate=0x0003):
"""Update existing memory? Can be used to create new ones, too?"""
act = self.service.action("X_UpdateGroupMemory")
res = await act.async_call(MemoryID=memory_id,
GroupMode=mode,
GroupName=name,
SlaveList=slaves,
CodecType=codectype,
CodecBitrate=bitrate)
return res
|
python
|
{
"resource": ""
}
|
q4592
|
GroupControl.delete_group_memory
|
train
|
async def delete_group_memory(self, memory_id):
"""Delete group memory."""
act = self.service.action("X_DeleteGroupMemory")
res = await act.async_call(MemoryID=memory_id)
|
python
|
{
"resource": ""
}
|
q4593
|
GroupControl.get_codec
|
train
|
async def get_codec(self):
"""Get codec settings."""
act = self.service.action("X_GetCodec")
res = await act.async_call()
return res
|
python
|
{
"resource": ""
}
|
q4594
|
GroupControl.set_codec
|
train
|
async def set_codec(self, codectype=0x0040, bitrate=0x0003):
"""Set codec settings."""
act = self.service.action("X_SetCodec")
res = await act.async_call(CodecType=codectype, CodecBitrate=bitrate)
return res
|
python
|
{
"resource": ""
}
|
q4595
|
GroupControl.abort
|
train
|
async def abort(self):
"""Abort current group session."""
state = await self.state()
res = await self.call("X_Abort", MasterSessionID=state.MasterSessionID)
return res
|
python
|
{
"resource": ""
}
|
q4596
|
Service.fetch_signatures
|
train
|
async def fetch_signatures(endpoint, protocol, idgen):
"""Request available methods for the service."""
async with aiohttp.ClientSession() as session:
req = {
"method": "getMethodTypes",
"params": [''],
"version": "1.0",
"id": next(idgen),
}
if protocol == ProtocolType.WebSocket:
async with session.ws_connect(endpoint, timeout=2) as s:
await s.send_json(req)
res = await s.receive_json()
return res
else:
res = await session.post(endpoint, json=req)
json = await res.json()
return json
|
python
|
{
"resource": ""
}
|
q4597
|
Service.from_payload
|
train
|
async def from_payload(cls, payload, endpoint, idgen, debug, force_protocol=None):
"""Create Service object from a payload."""
service_name = payload["service"]
if "protocols" not in payload:
raise SongpalException(
"Unable to find protocols from payload: %s" % payload
)
protocols = payload["protocols"]
_LOGGER.debug("Available protocols for %s: %s", service_name, protocols)
if force_protocol and force_protocol.value in protocols:
protocol = force_protocol
elif "websocket:jsonizer" in protocols:
protocol = ProtocolType.WebSocket
elif "xhrpost:jsonizer" in protocols:
protocol = ProtocolType.XHRPost
else:
raise SongpalException(
"No known protocols for %s, got: %s" % (service_name, protocols)
)
_LOGGER.debug("Using protocol: %s" % protocol)
service_endpoint = "%s/%s" % (endpoint, service_name)
# creation here we want to pass the created service class to methods.
service = cls(service_name, service_endpoint, protocol, idgen, debug)
sigs = await cls.fetch_signatures(
service_endpoint, protocol, idgen
)
if debug > 1:
_LOGGER.debug("Signatures: %s", sigs)
if "error" in sigs:
_LOGGER.error("Got error when fetching sigs: %s", sigs["error"])
return None
methods = {}
for sig in sigs["results"]:
name = sig[0]
parsed_sig = MethodSignature.from_payload(*sig)
if name in methods:
_LOGGER.debug("Got duplicate signature for %s, existing was %s. Keeping the existing one",
parsed_sig, methods[name])
else:
methods[name] = Method(service, parsed_sig, debug)
service.methods = methods
if "notifications" in payload and "switchNotifications" in methods:
notifications = [
Notification(
service_endpoint, methods["switchNotifications"], notification
)
for notification in payload["notifications"]
]
service.notifications = notifications
_LOGGER.debug("Got notifications: %s" % notifications)
return service
|
python
|
{
"resource": ""
}
|
q4598
|
Service.wrap_notification
|
train
|
def wrap_notification(self, data):
"""Convert notification JSON to a notification class."""
if "method" in data:
method = data["method"]
params = data["params"]
change = params[0]
if method == "notifyPowerStatus":
return PowerChange.make(**change)
elif method == "notifyVolumeInformation":
return VolumeChange.make(**change)
elif method == "notifyPlayingContentInfo":
return ContentChange.make(**change)
elif method == "notifySettingsUpdate":
return SettingChange.make(**change)
elif method == "notifySWUpdateInfo":
return SoftwareUpdateChange.make(**change)
else:
_LOGGER.warning("Got unknown notification type: %s", method)
elif "result" in data:
result = data["result"][0]
if "enabled" in result and "enabled" in result:
return NotificationChange(**result)
else:
_LOGGER.warning("Unknown notification, returning raw: %s", data)
return data
|
python
|
{
"resource": ""
}
|
q4599
|
Service.listen_all_notifications
|
train
|
async def listen_all_notifications(self, callback):
"""Enable all exposed notifications.
:param callback: Callback to call when a notification is received.
"""
everything = [noti.asdict() for noti in self.notifications]
if len(everything) > 0:
await self._methods["switchNotifications"](
{"enabled": everything}, _consumer=callback
)
else:
_LOGGER.debug("No notifications available for %s", self.name)
|
python
|
{
"resource": ""
}
|