function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
sequence |
---|---|---|
def get_actions(self, run_config, controller):
  """Get actions from the UI, apply to controller, and return an ActionCmd.

  Polls the pygame event queue, translating keyboard/mouse input into
  SC2 API actions which are applied to `controller` immediately.

  Args:
    run_config: Used when saving a replay (F8).
    controller: The controller on which actions are executed.

  Returns:
    An `ActionCmd`: STEP to continue, RESTART or QUIT.
  """
  if not self._initialized:
    return ActionCmd.STEP

  for event in pygame.event.get():
    # Query the modifier bitmask once per event instead of three times.
    mods = pygame.key.get_mods()
    ctrl = mods & pygame.KMOD_CTRL
    shift = mods & pygame.KMOD_SHIFT
    alt = mods & pygame.KMOD_ALT
    if event.type == pygame.QUIT:
      return ActionCmd.QUIT
    elif event.type == pygame.KEYDOWN:
      if self._help:
        self._help = False  # Any key closes the help screen.
      elif event.key in (pygame.K_QUESTION, pygame.K_SLASH):
        self._help = True
      elif event.key == pygame.K_PAUSE:
        # Block here until unpaused; poll at 10Hz to avoid a hot busy-wait.
        pause = True
        while pause:
          time.sleep(0.1)
          for event2 in pygame.event.get():
            if event2.type == pygame.KEYDOWN:
              if event2.key in (pygame.K_PAUSE, pygame.K_ESCAPE):
                pause = False
              elif event2.key == pygame.K_F4:
                return ActionCmd.QUIT
              elif event2.key == pygame.K_F5:
                return ActionCmd.RESTART
      elif event.key == pygame.K_F4:
        return ActionCmd.QUIT
      elif event.key == pygame.K_F5:
        return ActionCmd.RESTART
      elif event.key == pygame.K_F9:  # Toggle rgb rendering.
        if self._rgb_screen_px and self._feature_screen_px:
          self._render_rgb = not self._render_rgb
          print("Rendering", self._render_rgb and "RGB" or "Feature Layers")
          self.init_window()
      elif event.key == pygame.K_F11:  # Toggle synchronous rendering.
        self._render_sync = not self._render_sync
        print("Rendering", self._render_sync and "Sync" or "Async")
      elif event.key == pygame.K_F12:
        self._raw_actions = not self._raw_actions
        print("Action space:", self._raw_actions and "Raw" or "Spatial")
      elif event.key == pygame.K_F10:  # Toggle player_relative layer.
        self._render_player_relative = not self._render_player_relative
      elif event.key == pygame.K_F8:  # Save a replay.
        self.save_replay(run_config, controller)
      elif event.key in (pygame.K_PLUS, pygame.K_EQUALS) and ctrl:
        self.zoom(1.1)  # zoom in
      elif event.key in (pygame.K_MINUS, pygame.K_UNDERSCORE) and ctrl:
        self.zoom(1 / 1.1)  # zoom out
      elif event.key in (pygame.K_PAGEUP, pygame.K_PAGEDOWN):
        if ctrl:
          # Ctrl+PgUp/PgDn adjusts the step multiplier (never below 1).
          if event.key == pygame.K_PAGEUP:
            self._step_mul += 1
          elif self._step_mul > 1:
            self._step_mul -= 1
          print("New step mul:", self._step_mul)
        else:
          # Plain PgUp/PgDn scales the max game speed by 1.25x.
          self._fps *= 1.25 if event.key == pygame.K_PAGEUP else 1 / 1.25
          print("New max game speed: %.1f" % self._fps)
      elif event.key == pygame.K_F1:
        if self._obs.observation.player_common.idle_worker_count > 0:
          controller.act(self.select_idle_worker(ctrl, shift))
      elif event.key == pygame.K_F2:
        if self._obs.observation.player_common.army_count > 0:
          controller.act(self.select_army(shift))
      elif event.key == pygame.K_F3:
        if self._obs.observation.player_common.warp_gate_count > 0:
          controller.act(self.select_warp_gates(shift))
        if self._obs.observation.player_common.larva_count > 0:
          controller.act(self.select_larva())
      elif event.key in self.cmd_group_keys:
        controller.act(self.control_group(self.cmd_group_keys[event.key],
                                          ctrl, shift, alt))
      elif event.key in self.camera_actions:
        if self._obs:
          pt = point.Point.build(self._obs.observation.raw_data.player.camera)
          pt += self.camera_actions[event.key]
          controller.act(self.camera_action_raw(pt))
          controller.observer_act(self.camera_action_observer_pt(pt))
      elif event.key == pygame.K_ESCAPE:
        controller.observer_act(self.camera_action_observer_player(
            self._obs.observation.player_common.player_id))
        if self._queued_action:
          self.clear_queued_action()
        else:
          cmds = self._abilities(lambda cmd: cmd.hotkey == "escape")  # Cancel
          for cmd in cmds:  # There could be multiple cancels.
            assert not cmd.requires_point
            controller.act(self.unit_action(cmd, None, shift))
      else:
        if not self._queued_action:
          # Accumulate multi-key hotkey sequences until they uniquely
          # identify an ability.
          key = pygame.key.name(event.key).lower()
          new_cmd = self._queued_hotkey + key
          cmds = self._abilities(lambda cmd, n=new_cmd: (  # pylint: disable=g-long-lambda
              cmd.hotkey != "escape" and cmd.hotkey.startswith(n)))
          if cmds:
            self._queued_hotkey = new_cmd
            if len(cmds) == 1:
              cmd = cmds[0]
              if cmd.hotkey == self._queued_hotkey:
                if cmd.requires_point:
                  self.clear_queued_action()
                  self._queued_action = cmd
                else:
                  controller.act(self.unit_action(cmd, None, shift))
    elif event.type == pygame.MOUSEBUTTONDOWN:
      mouse_pos = self.get_mouse_pos(event.pos)
      if event.button == MouseButtons.LEFT and mouse_pos:
        if self._queued_action:
          controller.act(self.unit_action(
              self._queued_action, mouse_pos, shift))
        elif mouse_pos.surf.surf_type & SurfType.MINIMAP:
          controller.act(self.camera_action(mouse_pos))
          controller.observer_act(self.camera_action_observer_pt(
              mouse_pos.world_pos))
        else:
          self._select_start = mouse_pos
      elif event.button == MouseButtons.RIGHT:
        if self._queued_action:
          self.clear_queued_action()
        # NOTE(review): mouse_pos may be None here (cursor outside any
        # surface); presumably unit_action tolerates that -- confirm.
        cmds = self._abilities(lambda cmd: cmd.name == "Smart")
        if cmds:
          controller.act(self.unit_action(cmds[0], mouse_pos, shift))
    elif event.type == pygame.MOUSEBUTTONUP:
      mouse_pos = self.get_mouse_pos(event.pos)
      if event.button == MouseButtons.LEFT and self._select_start:
        # Only select if the drag started and ended on the same screen surf.
        if (mouse_pos and mouse_pos.surf.surf_type & SurfType.SCREEN and
            mouse_pos.surf.surf_type == self._select_start.surf.surf_type):
          controller.act(self.select_action(
              self._select_start, mouse_pos, ctrl, shift))
        self._select_start = None
  return ActionCmd.STEP
def camera_action_raw(self, world_pos):
  """Return a `sc_pb.Action` that moves the camera to `world_pos`."""
  action_proto = sc_pb.Action()
  target = action_proto.action_raw.camera_move.center_world_space
  world_pos.assign_to(target)
  return action_proto
def camera_action_observer_player(self, player_id):
  """Return a `sc_pb.ObserverAction` that follows the given player's camera."""
  observer_action = sc_pb.ObserverAction()
  follow = observer_action.camera_follow_player
  follow.player_id = player_id
  return observer_action
def select_idle_worker(self, ctrl, shift):
  """Select an idle worker."""
  mod = sc_ui.ActionSelectIdleWorker
  # (ctrl, shift) -> selection mode.
  mode_table = {
      (True, True): mod.AddAll,
      (True, False): mod.All,
      (False, True): mod.Add,
      (False, False): mod.Set,
  }
  action = sc_pb.Action()
  action.action_ui.select_idle_worker.type = mode_table[
      (bool(ctrl), bool(shift))]
  return action
def select_warp_gates(self, shift):
  """Select all warp gates, optionally adding to the current selection."""
  warp_action = sc_pb.Action()
  warp_action.action_ui.select_warp_gates.selection_add = shift
  return warp_action
def control_group(self, control_group_id, ctrl, shift, alt):
  """Act on a control group, selecting, setting, etc."""
  mod = sc_ui.ActionControlGroup
  # (ctrl, shift, alt) -> control group action.
  dispatch = {
      (False, False, False): mod.Recall,
      (True, False, False): mod.Set,
      (False, True, False): mod.Append,
      (False, False, True): mod.SetAndSteal,
      (False, True, True): mod.AppendAndSteal,
  }
  combo = (bool(ctrl), bool(shift), bool(alt))
  if combo not in dispatch:
    return  # Unknown modifier combination.
  action = sc_pb.Action()
  group = action.action_ui.control_group
  group.action = dispatch[combo]
  group.control_group_index = control_group_id
  return action
def _abilities(self, fn=None):
  """Return the list of abilities filtered by `fn`."""
  # Keyed by ability_id so duplicates collapse to a single entry.
  by_id = {}
  for raw_cmd in self._obs.observation.abilities:
    ability = _Ability(raw_cmd, self._static_data.abilities)
    if not fn or fn(ability):
      by_id[ability.ability_id] = ability
  return list(by_id.values())
def _units_in_area(self, rect):
  """Return the list of our own units that intersect the rect."""
  my_id = self._obs.observation.player_common.player_id
  matches = []
  for unit, pos in self._visible_units():
    if rect.intersects_circle(pos, unit.radius) and unit.owner == my_id:
      matches.append(unit)
  return matches
def draw_units(self, surf):
  """Draw the units and buildings.

  For each visible unit inside the camera: the owner-colored body, damage
  shading, facing indicator, concentric status arcs (shield/energy/progress/
  buffs/upgrades), text labels, and order lines for selected units.
  """
  unit_dict = None  # Cache the units {tag: unit_proto} for orders.
  tau = 2 * math.pi  # Arc endpoints below are given as fractions of a circle.
  for u, p in self._visible_units():
    if self._camera.intersects_circle(p, u.radius):
      # 0 at full health, 1 when dead; `or 1` guards health_max == 0.
      fraction_damage = clamp((u.health_max - u.health) / (u.health_max or 1),
                              0, 1)
      if u.display_type == sc_raw.Placeholder:
        # Placeholders (planned buildings) are drawn dimmed.
        surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[u.owner] // 3, p,
                         u.radius)
      else:
        surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[u.owner], p, u.radius)
        if fraction_damage > 0:
          # Inner darker circle grows with the damage taken.
          surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[u.owner] // 2,
                           p, u.radius * fraction_damage)
      surf.draw_circle(colors.black, p, u.radius, thickness=1)  # Outline.
      if self._static_data.unit_stats[u.unit_type].movement_speed > 0:
        # Small white arc showing which way a mobile unit is facing.
        surf.draw_arc(colors.white, p, u.radius, u.facing - 0.1,
                      u.facing + 0.1, thickness=1)
      def draw_arc_ratio(color, world_loc, radius, start, end, thickness=1):
        # start/end are in [0, 1] fractions of a full circle.
        surf.draw_arc(color, world_loc, radius, start * tau, end * tau,
                      thickness)
      # Status arcs, nested inward at decreasing radii.
      if u.shield and u.shield_max:
        draw_arc_ratio(colors.blue, p, u.radius - 0.05, 0,
                       u.shield / u.shield_max)
      if u.energy and u.energy_max:
        draw_arc_ratio(colors.purple * 0.9, p, u.radius - 0.1, 0,
                       u.energy / u.energy_max)
      if 0 < u.build_progress < 1:
        draw_arc_ratio(colors.cyan, p, u.radius - 0.15, 0, u.build_progress)
      elif u.orders and 0 < u.orders[0].progress < 1:
        draw_arc_ratio(colors.cyan, p, u.radius - 0.15, 0,
                       u.orders[0].progress)
      if u.buff_duration_remain and u.buff_duration_max:
        draw_arc_ratio(colors.white, p, u.radius - 0.2, 0,
                       u.buff_duration_remain / u.buff_duration_max)
      # Three short fixed-position arcs encode the upgrade levels by color.
      if u.attack_upgrade_level:
        draw_arc_ratio(self.upgrade_colors[u.attack_upgrade_level], p,
                       u.radius - 0.25, 0.18, 0.22, thickness=3)
      if u.armor_upgrade_level:
        draw_arc_ratio(self.upgrade_colors[u.armor_upgrade_level], p,
                       u.radius - 0.25, 0.23, 0.27, thickness=3)
      if u.shield_upgrade_level:
        draw_arc_ratio(self.upgrade_colors[u.shield_upgrade_level], p,
                       u.radius - 0.25, 0.28, 0.32, thickness=3)
      def write_small(loc, s):
        surf.write_world(self._font_small, colors.white, loc, str(s))
      name = self.get_unit_name(
          surf, self._static_data.units.get(u.unit_type, "<none>"), u.radius)
      if name:
        write_small(p, name)
      if u.ideal_harvesters > 0:
        write_small(p + point.Point(0, 0.5),
                    "%s / %s" % (u.assigned_harvesters, u.ideal_harvesters))
      # At most one secondary label, written just above the unit.
      if u.mineral_contents > 0:
        write_small(p - point.Point(0, 0.5), u.mineral_contents)
      elif u.vespene_contents > 0:
        write_small(p - point.Point(0, 0.5), u.vespene_contents)
      elif u.display_type == sc_raw.Snapshot:
        write_small(p - point.Point(0, 0.5), "snapshot")
      elif u.display_type == sc_raw.Placeholder:
        write_small(p - point.Point(0, 0.5), "placeholder")
      elif u.is_hallucination:
        write_small(p - point.Point(0, 0.5), "hallucination")
      elif u.is_burrowed:
        write_small(p - point.Point(0, 0.5), "burrowed")
      elif u.cloak != sc_raw.NotCloaked:
        write_small(p - point.Point(0, 0.5), "cloaked")
      if u.is_selected:
        surf.draw_circle(colors.green, p, u.radius + 0.1, 1)
        # Draw the orders of selected units as a chain of lines, each order
        # starting where the previous one targets.
        start_point = p
        for o in u.orders:
          target_point = None
          if o.HasField("target_world_space_pos"):
            target_point = point.Point.build(o.target_world_space_pos)
          elif o.HasField("target_unit_tag"):
            if unit_dict is None:
              # Lazily build the tag -> unit map, shared across all units.
              unit_dict = {t.tag: t
                           for t in self._obs.observation.raw_data.units}
            target_unit = unit_dict.get(o.target_unit_tag)
            if target_unit:
              target_point = point.Point.build(target_unit.pos)
          if target_point:
            surf.draw_line(colors.cyan * 0.75, start_point, target_point)
            start_point = target_point
          else:
            # An order without a resolvable target breaks the chain.
            break
        for rally in u.rally_targets:
          surf.draw_line(colors.cyan * 0.75, p,
                         point.Point.build(rally.point))
def draw_effects(self, surf):
  """Draw the effects."""
  for effect in self._obs.observation.raw_data.effects:
    # Cycle three colors between adjacent rings.
    palette = [
        colors.effects[effect.effect_id],
        colors.effects[effect.effect_id],
        colors.PLAYER_ABSOLUTE_PALETTE[effect.owner],
    ]
    label = self.get_unit_name(
        surf, features.Effects(effect.effect_id).name, effect.radius)
    for raw_pos in effect.pos:
      center = point.Point.build(raw_pos)
      # pygame alpha transparency doesn't work, so just draw thin circles.
      for ring in range(1, int(effect.radius * 3)):
        surf.draw_circle(palette[ring % 3], center, ring / 3, thickness=2)
      if label:
        surf.write_world(self._font_small, colors.white, center, label)
def draw_selection(self, surf):
  """Draw the selection rectangle."""
  anchor = self._select_start  # Cache to avoid a race condition.
  if not anchor:
    return
  mouse_pos = self.get_mouse_pos()
  # Only draw when the drag started and ended on the same screen surface.
  if (mouse_pos and mouse_pos.surf.surf_type & SurfType.SCREEN and
      mouse_pos.surf.surf_type == anchor.surf.surf_type):
    drag_rect = point.Rect(anchor.world_pos, mouse_pos.world_pos)
    surf.draw_rect(colors.green, drag_rect, 1)
def draw_build_target(self, surf):
  """Draw the build target."""
  queued = self._queued_action
  if not queued:
    return
  radius = queued.footprint_radius
  if not radius:
    return
  mouse = self.get_mouse_pos()
  if not mouse:
    return

  def snap(value, to_half):
    # Snap to a half-tile center when `to_half` is truthy, else whole tile.
    return round(value - 0.5) + 0.5 if to_half else round(value)

  to_half = (radius * 2) % 2
  center = point.Point(snap(mouse.world_pos.x, to_half),
                       snap(mouse.world_pos.y, to_half))
  owner = self._obs.observation.player_common.player_id
  surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[owner], center, radius)
def draw_overlay(self, surf):
  """Draw the overlay describing resources, score, rates and alerts."""
  obs = self._obs.observation
  player = obs.player_common
  # Top-left: resources and supply.
  surf.write_screen(
      self._font_large, colors.green, (0.2, 0.2),
      "Minerals: %s, Vespene: %s, Food: %s / %s" % (
          player.minerals, player.vespene, player.food_used, player.food_cap))
  times, steps = zip(*self._game_times)
  sec = obs.game_loop // 22.4  # http://liquipedia.net/starcraft2/Game_Speed
  # Top-right: score, step counter, game-step rate and elapsed game time.
  # `(sum(times) or 1)` guards the division against a zero total.
  surf.write_screen(
      self._font_large, colors.green, (-0.2, 0.2),
      "Score: %s, Step: %s, %.1f/s, Time: %d:%02d" % (
          obs.score.score, obs.game_loop, sum(steps) / (sum(times) or 1),
          sec // 60, sec % 60),
      align="right")
  # Second row right: actions-per-minute and observation/render frame rates.
  surf.write_screen(
      self._font_large, colors.green * 0.8, (-0.2, 1.2),
      "APM: %d, EPM: %d, FPS: O:%.1f, R:%.1f" % (
          obs.score.score_details.current_apm,
          obs.score.score_details.current_effective_apm,
          len(times) / (sum(times) or 1),
          len(self._render_times) / (sum(self._render_times) or 1)),
      align="right")
  line = 3
  # sorted() snapshots the items, so deleting expired alerts inside the
  # loop doesn't invalidate the iteration.
  for alert, ts in sorted(self._alerts.items(), key=lambda item: item[1]):
    if time.time() < ts + 3:  # Show for 3 seconds.
      surf.write_screen(self._font_large, colors.red, (20, line), alert)
      line += 1
    else:
      del self._alerts[alert]
def draw_help(self, surf):
  """Draw the help dialog."""
  if not self._help:
    return

  def write(loc, text):
    surf.write_screen(self._font_large, colors.black, loc, text)

  surf.surf.fill(colors.white * 0.8)
  write((1, 1), "Shortcuts:")
  # Right-hand column position is based on the longest hotkey string.
  longest = max(len(hotkey) for hotkey, _ in self.shortcuts)
  row = 2
  for hotkey, description in self.shortcuts:
    write((2, row), hotkey)
    write((3 + longest * 0.7, row), description)
    row += 1
def draw_commands(self, surf):
  """Draw the list of upgrades and available commands."""
  line = itertools.count(2)  # Shared row counter; next(line) moves down one.

  def write(loc, text, color=colors.yellow):
    surf.write_screen(self._font_large, color, loc, text)

  def write_line(x, *args, **kwargs):
    write((x, next(line)), *args, **kwargs)

  action_count = len(self._obs.observation.abilities)
  if action_count > 0:
    write_line(0.2, "Available Actions:", colors.green)
    past_abilities = {act.ability
                      for act in self._past_actions if act.ability}
    for cmd in sorted(self._abilities(lambda c: c.name != "Smart"),
                      key=lambda c: c.name):
      # Color code: green = queued action, dim green = partial hotkey
      # match, red = recently issued, yellow = available.
      if self._queued_action and cmd == self._queued_action:
        color = colors.green
      elif self._queued_hotkey and cmd.hotkey.startswith(self._queued_hotkey):
        color = colors.green * 0.75
      elif cmd.ability_id in past_abilities:
        color = colors.red
      else:
        color = colors.yellow
      hotkey = cmd.hotkey[0:3]  # truncate "escape" -> "esc"
      y = next(line)
      write((1, y), hotkey, color)
      write((4, y), cmd.name, color)
    next(line)  # Blank row between the two sections.
  upgrade_count = len(self._obs.observation.raw_data.player.upgrade_ids)
  if upgrade_count > 0:
    write_line(0.2, "Upgrades: %s" % upgrade_count, colors.green)
    upgrades = [
        self._static_data.upgrades[upgrade_id].name
        for upgrade_id in self._obs.observation.raw_data.player.upgrade_ids]
    for name in sorted(upgrades):
      write_line(1, name)
def draw_panel(self, surf):
  """Draw the unit selection or build queue."""
  left = -14  # How far from the right border.
  line = itertools.count(3)  # Shared row counter; next(line) moves down one.

  def unit_name(unit_type):
    return self._static_data.units.get(unit_type, "<unknown>")

  def write(loc, text, color=colors.yellow):
    surf.write_screen(self._font_large, color, loc, text)

  def write_line(x, *args, **kwargs):
    write((left + x, next(line)), *args, **kwargs)

  def write_single(unit):
    """Write a description of a single selected unit."""
    write_line(1, unit_name(unit.unit_type), colors.cyan)
    write_line(1, "Health: %s / %s" % (unit.health, unit.max_health))
    if unit.max_shields:
      write_line(1, "Shields: %s / %s" % (unit.shields, unit.max_shields))
    if unit.max_energy:
      write_line(1, "Energy: %s / %s" % (unit.energy, unit.max_energy))
    if unit.build_progress > 0:
      write_line(1, "Progress: %d%%" % (unit.build_progress * 100))
    if unit.transport_slots_taken > 0:
      write_line(1, "Slots: %s" % unit.transport_slots_taken)

  def write_multi(units):
    """Write a description of multiple selected units, grouped by type."""
    counts = collections.defaultdict(int)
    for unit in units:
      counts[unit_name(unit.unit_type)] += 1
    for name, count in sorted(counts.items()):
      y = next(line)
      write((left + 1, y), count)
      write((left + 3, y), name)

  ui = self._obs.observation.ui_data
  if ui.groups:
    write_line(0, "Control Groups:", colors.green)
    for group in ui.groups:
      y = next(line)
      write((left + 1, y), "%s:" % group.control_group_index, colors.green)
      write((left + 3, y), "%s %s" % (group.count,
                                      unit_name(group.leader_unit_type)))
    next(line)  # Blank row between sections.
  # The ui panel is a oneof: exactly one of single/multi/cargo/production.
  if ui.HasField("single"):
    write_line(0, "Selection:", colors.green)
    write_single(ui.single.unit)
    if (ui.single.attack_upgrade_level or
        ui.single.armor_upgrade_level or
        ui.single.shield_upgrade_level):
      write_line(1, "Upgrades:")
      if ui.single.attack_upgrade_level:
        write_line(2, "Attack: %s" % ui.single.attack_upgrade_level)
      if ui.single.armor_upgrade_level:
        write_line(2, "Armor: %s" % ui.single.armor_upgrade_level)
      if ui.single.shield_upgrade_level:
        write_line(2, "Shield: %s" % ui.single.shield_upgrade_level)
    if ui.single.buffs:
      write_line(1, "Buffs:")
      for b in ui.single.buffs:
        write_line(2, buffs.Buffs(b).name)
  elif ui.HasField("multi"):
    write_line(0, "Selection:", colors.green)
    write_multi(ui.multi.units)
  elif ui.HasField("cargo"):
    write_line(0, "Selection:", colors.green)
    write_single(ui.cargo.unit)
    next(line)
    write_line(0, "Cargo:", colors.green)
    write_line(1, "Empty slots: %s" % ui.cargo.slots_available)
    write_multi(ui.cargo.passengers)
  elif ui.HasField("production"):
    write_line(0, "Selection:", colors.green)
    write_single(ui.production.unit)
    next(line)
    if ui.production.production_queue:
      write_line(0, "Production:", colors.green)
      for item in ui.production.production_queue:
        specific_data = self._static_data.abilities[item.ability_id]
        # Prefer the general ability over its specific remap.
        if specific_data.remaps_to_ability_id:
          general_data = self._static_data.abilities[
              specific_data.remaps_to_ability_id]
        else:
          general_data = specific_data
        s = (general_data.friendly_name or general_data.button_name or
             general_data.link_name)
        s = s.replace("Research ", "").replace("Train ", "")
        if item.build_progress > 0:
          s += ": %d%%" % (item.build_progress * 100)
        write_line(1, s)
    elif ui.production.build_queue:  # Handle old binaries, no research.
      write_line(0, "Build Queue:", colors.green)
      for unit in ui.production.build_queue:
        s = unit_name(unit.unit_type)
        if unit.build_progress > 0:
          s += ": %d%%" % (unit.build_progress * 100)
        write_line(1, s)
def draw_actions(self):
  """Draw the actions so that they can be inspected for accuracy."""
  now = time.time()
  for past in self._past_actions:
    if not past.pos or now >= past.deadline:
      continue
    # Shrink the marker as the action ages toward its deadline.
    remain = (past.deadline - now) / (past.deadline - past.time)
    if isinstance(past.pos, point.Point):
      self.all_surfs(_Surface.draw_circle, past.color, past.pos,
                     remain / 3, 1)
    else:
      # Fade with alpha would be nice, but doesn't seem to work.
      self.all_surfs(_Surface.draw_rect, past.color, past.pos, 1)
def prepare_actions(self, obs):
  """Keep a list of the past actions so they can be drawn."""
  now = time.time()
  # Drop actions whose display deadline has already passed.
  while self._past_actions and self._past_actions[0].deadline < now:
    self._past_actions.pop(0)

  def add_act(ability_id, color, pos, timeout=1):
    if ability_id:
      ability = self._static_data.abilities[ability_id]
      if ability.remaps_to_ability_id:  # Prefer general abilities.
        ability_id = ability.remaps_to_ability_id
    self._past_actions.append(
        PastAction(ability_id, color, pos, now, now + timeout))

  for act in obs.actions:
    # Raw actions: target is already in world coordinates (yellow).
    if (act.HasField("action_raw") and
        act.action_raw.HasField("unit_command") and
        act.action_raw.unit_command.HasField("target_world_space_pos")):
      pos = point.Point.build(
          act.action_raw.unit_command.target_world_space_pos)
      add_act(act.action_raw.unit_command.ability_id, colors.yellow, pos)
    # Feature layer actions: map screen/minimap pixels back to world
    # coordinates through the feature-layer transforms (cyan).
    if act.HasField("action_feature_layer"):
      act_fl = act.action_feature_layer
      if act_fl.HasField("unit_command"):
        if act_fl.unit_command.HasField("target_screen_coord"):
          pos = self._world_to_feature_screen_px.back_pt(
              point.Point.build(act_fl.unit_command.target_screen_coord))
          add_act(act_fl.unit_command.ability_id, colors.cyan, pos)
        elif act_fl.unit_command.HasField("target_minimap_coord"):
          pos = self._world_to_feature_minimap_px.back_pt(
              point.Point.build(act_fl.unit_command.target_minimap_coord))
          add_act(act_fl.unit_command.ability_id, colors.cyan, pos)
        else:
          # Untargeted command; recorded but not drawn (no position).
          add_act(act_fl.unit_command.ability_id, None, None)
      if (act_fl.HasField("unit_selection_point") and
          act_fl.unit_selection_point.HasField("selection_screen_coord")):
        pos = self._world_to_feature_screen_px.back_pt(point.Point.build(
            act_fl.unit_selection_point.selection_screen_coord))
        add_act(None, colors.cyan, pos)
      if act_fl.HasField("unit_selection_rect"):
        for r in act_fl.unit_selection_rect.selection_screen_coord:
          rect = point.Rect(
              self._world_to_feature_screen_px.back_pt(
                  point.Point.build(r.p0)),
              self._world_to_feature_screen_px.back_pt(
                  point.Point.build(r.p1)))
          add_act(None, colors.cyan, rect, 0.3)
    # RGB render actions: same structure, but uses the rgb transforms (red).
    if act.HasField("action_render"):
      act_rgb = act.action_render
      if act_rgb.HasField("unit_command"):
        if act_rgb.unit_command.HasField("target_screen_coord"):
          pos = self._world_to_rgb_screen_px.back_pt(
              point.Point.build(act_rgb.unit_command.target_screen_coord))
          add_act(act_rgb.unit_command.ability_id, colors.red, pos)
        elif act_rgb.unit_command.HasField("target_minimap_coord"):
          pos = self._world_to_rgb_minimap_px.back_pt(
              point.Point.build(act_rgb.unit_command.target_minimap_coord))
          add_act(act_rgb.unit_command.ability_id, colors.red, pos)
        else:
          add_act(act_rgb.unit_command.ability_id, None, None)
      if (act_rgb.HasField("unit_selection_point") and
          act_rgb.unit_selection_point.HasField("selection_screen_coord")):
        pos = self._world_to_rgb_screen_px.back_pt(point.Point.build(
            act_rgb.unit_selection_point.selection_screen_coord))
        add_act(None, colors.red, pos)
      if act_rgb.HasField("unit_selection_rect"):
        for r in act_rgb.unit_selection_rect.selection_screen_coord:
          rect = point.Rect(
              self._world_to_rgb_screen_px.back_pt(
                  point.Point.build(r.p0)),
              self._world_to_rgb_screen_px.back_pt(
                  point.Point.build(r.p1)))
          add_act(None, colors.red, rect, 0.3)
def draw_base_map(self, surf):
  """Draw the base map: terrain, creep, power and visibility composited."""
  hmap_feature = features.SCREEN_FEATURES.height_map
  hmap = hmap_feature.unpack(self._obs.observation)
  if not hmap.any():
    # An all-zero height map would render black; lift it to a visible level.
    hmap = hmap + 100  # pylint: disable=g-no-augmented-assignment
  hmap_color = hmap_feature.color(hmap)
  out = hmap_color * 0.6  # Dim the terrain so the overlays stand out.
  creep_feature = features.SCREEN_FEATURES.creep
  creep = creep_feature.unpack(self._obs.observation)
  creep_mask = creep > 0
  creep_color = creep_feature.color(creep)
  # Blend creep over the terrain (60% creep, 40% terrain).
  out[creep_mask, :] = (0.4 * out[creep_mask, :] +
                        0.6 * creep_color[creep_mask, :])
  power_feature = features.SCREEN_FEATURES.power
  power = power_feature.unpack(self._obs.observation)
  power_mask = power > 0
  power_color = power_feature.color(power)
  # Blend the power layer lightly (30%) so terrain stays readable.
  out[power_mask, :] = (0.7 * out[power_mask, :] +
                        0.3 * power_color[power_mask, :])
  if self._render_player_relative:
    # player_relative overwrites (not blends) wherever it is non-zero.
    player_rel_feature = features.SCREEN_FEATURES.player_relative
    player_rel = player_rel_feature.unpack(self._obs.observation)
    player_rel_mask = player_rel > 0
    player_rel_color = player_rel_feature.color(player_rel)
    out[player_rel_mask, :] = player_rel_color[player_rel_mask, :]
  visibility = features.SCREEN_FEATURES.visibility_map.unpack(
      self._obs.observation)
  # Darken by visibility value: index 0 -> 0.5, 1 -> 0.75, 2 -> unchanged
  # (presumably hidden / fogged / visible -- confirm against the feature).
  visibility_fade = np.array([[0.5] * 3, [0.75]*3, [1]*3])
  out *= visibility_fade[visibility]
  surf.blit_np_array(out)
def draw_mini_map(self, surf):
  """Draw the minimap."""
  if (self._render_rgb and self._obs.observation.HasField("render_data") and
      self._obs.observation.render_data.HasField("minimap")):
    # Draw the rendered version.
    surf.blit_np_array(features.Feature.unpack_rgb_image(
        self._obs.observation.render_data.minimap))
  else:  # Render it manually from feature layer data.
    hmap_feature = features.MINIMAP_FEATURES.height_map
    hmap = hmap_feature.unpack(self._obs.observation)
    if not hmap.any():
      # An all-zero height map would render black; lift it.
      hmap = hmap + 100  # pylint: disable=g-no-augmented-assignment
    hmap_color = hmap_feature.color(hmap)
    creep_feature = features.MINIMAP_FEATURES.creep
    creep = creep_feature.unpack(self._obs.observation)
    creep_mask = creep > 0
    creep_color = creep_feature.color(creep)
    if self._obs.observation.player_common.player_id in (0, 16):  # observer
      # If we're the observer, show the absolute since otherwise all player
      # units are friendly, making it pretty boring.
      player_feature = features.MINIMAP_FEATURES.player_id
    else:
      player_feature = features.MINIMAP_FEATURES.player_relative
    player_data = player_feature.unpack(self._obs.observation)
    player_mask = player_data > 0
    player_color = player_feature.color(player_data)
    visibility = features.MINIMAP_FEATURES.visibility_map.unpack(
        self._obs.observation)
    # Darken by visibility value: index 0 -> 0.5, 1 -> 0.75, 2 -> unchanged.
    visibility_fade = np.array([[0.5] * 3, [0.75]*3, [1]*3])
    # Compose and color the different layers.
    out = hmap_color * 0.6
    out[creep_mask, :] = (0.4 * out[creep_mask, :] +
                          0.6 * creep_color[creep_mask, :])
    out[player_mask, :] = player_color[player_mask, :]
    out *= visibility_fade[visibility]
    # Render the bit of the composited layers that actually correspond to the
    # map. This isn't all of it on non-square maps.
    shape = self._playable.diagonal.scale_max_size(
        self._feature_minimap_px).floor()
    surf.blit_np_array(out[:shape.y, :shape.x, :])
    surf.draw_rect(colors.white * 0.8, self._camera, 1)  # Camera
    # Sensor rings.
    for radar in self._obs.observation.raw_data.radar:
      surf.draw_circle(colors.white / 2, point.Point.build(radar.pos),
                       radar.radius, 1)
  # Mark the start locations during roughly the first 20 seconds of game
  # time (22.4 game loops per second).
  if self._obs.observation.game_loop < 22.4 * 20:
    for loc in self._game_info.start_raw.start_locations:
      surf.draw_circle(colors.red, point.Point.build(loc), 5, 1)
  pygame.draw.rect(surf.surf, colors.red, surf.surf.get_rect(), 1)  # Border
def draw_rendered_map(self, surf):
  """Draw the rendered pixels."""
  rgb = features.Feature.unpack_rgb_image(
      self._obs.observation.render_data.map)
  surf.blit_np_array(rgb)
def draw_feature_layer(self, surf, feature):
  """Draw a feature layer."""
  unpacked = feature.unpack(self._obs.observation)
  if unpacked is None:
    # Ignore layers that aren't in this version of SC2.
    surf.surf.fill(colors.black)
  else:
    surf.blit_np_array(feature.color(unpacked))
def draw_raw_layer(self, surf, from_obs, name, color):
  """Draw a raw layer."""
  # Dynamic layers come from the observation; static ones from game info.
  source = (self._obs.observation.raw_data.map_state if from_obs
            else self._game_info.start_raw)
  layer = features.Feature.unpack_layer(getattr(source, name))
  if layer is None:
    # Ignore layers that aren't in this version of SC2.
    surf.surf.fill(colors.black)
  else:
    surf.blit_np_array(color[layer])
def render(self, obs):
  """Push an observation onto the queue to be rendered."""
  if not self._initialized:
    return
  now = time.time()
  loop_delta = max(
      1, obs.observation.game_loop - self._obs.observation.game_loop)
  self._game_times.append((now - self._last_time, loop_delta))
  self._last_time = now
  self._last_game_loop = self._obs.observation.game_loop
  self._obs_queue.put(obs)
  if self._render_sync:
    # Wait until the render thread has consumed the observation.
    self._obs_queue.join()
def render_obs(self, obs):
  """Render a frame given an observation."""
  frame_start = time.time()
  self._obs = obs
  self.check_valid_queued_action()
  camera_center = point.Point.build(
      self._obs.observation.raw_data.player.camera)
  self._update_camera(camera_center)

  for surface in self._surfaces:
    # Render that surface.
    surface.draw(surface)

  mouse_pos = self.get_mouse_pos()
  if mouse_pos:
    # Draw a small mouse cursor.
    self.all_surfs(_Surface.draw_circle, colors.green,
                   mouse_pos.world_pos, 0.1)

  self.draw_actions()

  with sw("flip"):
    pygame.display.flip()

  self._render_times.append(time.time() - frame_start)
def calculate_iou(bbox1, bbox2):
  """Calculates the Intersection-Over-Union of two bounding boxes.

  IOU is a ratio for determining how much two bounding boxes match.

  Args:
    bbox1: The first bounding box (with left/top/right/bottom attributes).
    bbox2: The bounding box to compare with.

  Returns:
    The IOU as a float from 0.0 to 1.0 (1.0 being a perfect match.)
  """
  x_overlap = max(0,
                  min(bbox1.right, bbox2.right) - max(bbox1.left, bbox2.left))
  y_overlap = max(0,
                  min(bbox1.bottom, bbox2.bottom) - max(bbox1.top, bbox2.top))
  intersection = x_overlap * y_overlap
  area1 = (bbox1.right - bbox1.left) * (bbox1.bottom - bbox1.top)
  area2 = (bbox2.right - bbox2.left) * (bbox2.bottom - bbox2.top)
  union = area1 + area2 - intersection
  if union <= 0:
    # Both boxes are degenerate (zero area): avoid a ZeroDivisionError and
    # treat them as non-matching.
    return 0.0
  return intersection / union
def __init__(self, allowed_staleness=10, min_iou=0.6):
  """Constructor for MediaPipeTrackValidator.

  Args:
    allowed_staleness: Number of updates a track may linger for before it
      is determined to be stale.
    min_iou: Minimum IOU a detection box must have with a tracked box to
      be determined as an associated detection.
  """
  self._allowed_staleness = allowed_staleness
  self._min_iou = min_iou
  self._track_map = {}  # track_id -> TrackWrapper
def update_tracks(self, managed_tracks):
  """Updates tracks stored in the validator with new tracking data.

  Known track ids have their payload refreshed in place; unseen ids are
  registered as brand-new, fresh (age 0, staleness 0) entries.

  Args:
    managed_tracks: Tracks managed by mediapipe.
  """
  for incoming in managed_tracks:
    existing = self._track_map.get(incoming.track_id)
    if existing is not None:
      existing.track = incoming
    else:
      self._track_map[incoming.track_id] = TrackWrapper(
          track=incoming,
          age=0,
          staleness=0,
      )
def reset_tracks_with_detections(self, detections):
  """Resets the staleness of tracks if there are associated detections.

  Each detection is matched to the tracked box it overlaps best, provided
  the overlap exceeds the minimum IOU threshold.

  Args:
    detections: List of raw detections created from inferencing.
  """
  for detection in detections:
    best_iou = 0
    best_track = None
    for candidate in self._track_map.values():
      overlap = calculate_iou(detection.bbox, candidate.track.bbox)
      if overlap > best_iou and overlap > self._min_iou:
        best_iou = overlap
        best_track = candidate
    if best_track is not None:
      best_track.staleness = 0
def main(argv=sys.argv[1:]):
    """Build a workspace with tests enabled, build its tests, then run them.

    Returns:
        The return code of the last build-tool invocation (falsy on success).
    """
    parser = argparse.ArgumentParser(
        description='Invoke the build tool on a workspace while enabling and '
        'running the tests')
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced (if available)')
    add_argument_ros_version(parser)
    add_argument_build_tool(parser, required=True)
    # These two return argparse actions whose remainder-style values are
    # extracted manually below, before the regular parse.
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--parent-result-space', nargs='*',
        help='The paths of the parent result spaces')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
        'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
        'invocation')
    add_argument_require_gpu_support(parser)
    # Pull the remainder arguments out first so argparse does not consume
    # them, then graft them back onto the parsed namespace.
    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)
    ensure_workspace_exists(args.workspace_root)
    if args.clean_before:
        clean_workspace(args.workspace_root)
    parent_result_spaces = None
    if args.parent_result_space:
        parent_result_spaces = args.parent_result_space
    try:
        # Phase 1: build the workspace itself with testing enabled.
        with Scope('SUBSECTION', 'build workspace in isolation'):
            test_results_dir = os.path.join(
                args.workspace_root, 'test_results')
            cmake_args = ['-DBUILD_TESTING=1']
            if args.ros_version == 1:
                # ROS 1 (catkin) additionally needs explicit flags to enable
                # testing and to redirect test result output.
                cmake_args += [
                    '-DCATKIN_ENABLE_TESTING=1', '-DCATKIN_SKIP_TESTING=0',
                    '-DCATKIN_TEST_RESULTS_DIR=%s' % test_results_dir]
            additional_args = args.build_tool_args or []
            if args.build_tool == 'colcon':
                additional_args += ['--test-result-base', test_results_dir]
            env = dict(os.environ)
            # Serialize make unless the caller already set MAKEFLAGS.
            env.setdefault('MAKEFLAGS', '-j1')
            rc = call_build_tool(
                args.build_tool, args.rosdistro_name, args.workspace_root,
                cmake_clean_cache=True,
                cmake_args=cmake_args, args=additional_args,
                parent_result_spaces=parent_result_spaces, env=env)
        # Phase 2: build the test targets (only if phase 1 succeeded).
        if not rc:
            with Scope('SUBSECTION', 'build tests'):
                additional_args = args.build_tool_args or []
                if args.build_tool == 'colcon':
                    additional_args += ['--cmake-target-skip-unavailable']
                rc = call_build_tool(
                    args.build_tool, args.rosdistro_name, args.workspace_root,
                    cmake_args=cmake_args,
                    make_args=['tests'], args=additional_args,
                    parent_result_spaces=parent_result_spaces, env=env)
        # Phase 3: run the tests (only if phase 2 succeeded).
        if not rc:
            make_args = ['run_tests']
            additional_args = args.build_tool_args or []
            if args.build_tool == 'colcon':
                # colcon has a dedicated 'test' verb; no cmake/make args.
                cmake_args = None
                make_args = None
                additional_args = ['--test-result-base', test_results_dir]
            additional_args += args.build_tool_test_args or []
            # for workspaces with only plain cmake packages the setup files
            # generated by cmi won't implicitly source the underlays
            if parent_result_spaces is None:
                parent_result_spaces = ['/opt/ros/%s' % args.rosdistro_name]
            if args.build_tool == 'catkin_make_isolated':
                devel_space = os.path.join(
                    args.workspace_root, 'devel_isolated')
                parent_result_spaces.append(devel_space)
            # since catkin_make_isolated doesn't provide a custom
            # environment to run tests this needs to source the devel space
            # and force a CMake run to use the new environment
            with Scope('SUBSECTION', 'run tests'):
                rc = call_build_tool(
                    args.build_tool,
                    args.rosdistro_name, args.workspace_root,
                    cmake_args=cmake_args,
                    force_cmake=args.build_tool == 'catkin_make_isolated',
                    make_args=make_args, args=additional_args,
                    parent_result_spaces=parent_result_spaces, env=env,
                    colcon_verb='test')
    finally:
        if args.clean_after:
            clean_workspace(args.workspace_root)
    return rc
70,
86,
70,
43,
1412023694
] |
def config(self):
    """Remote configuration: bucket URL plus the (test) endpoint URL."""
    return {
        "url": self.url,
        "endpointurl": self.TEST_AWS_ENDPOINT_URL,
    }
11197,
1036,
11197,
597,
1488615393
] |
def should_test():
    """Decide whether the AWS test suite should run.

    An explicit DVC_TEST_AWS setting wins; otherwise fall back to requiring
    that both AWS credential environment variables are present.
    """
    explicit = env2bool("DVC_TEST_AWS", undefined=None)
    if explicit is not None:
        return explicit
    return bool(
        os.getenv("AWS_ACCESS_KEY_ID") and os.getenv("AWS_SECRET_ACCESS_KEY")
    )
11197,
1036,
11197,
597,
1488615393
] |
def _get_storagepath():
    # Unique per-invocation cache path under the shared test bucket.
    return "/".join(
        (TEST_AWS_REPO_BUCKET, "dvc_test_caches", str(uuid.uuid4()))
    )
11197,
1036,
11197,
597,
1488615393
] |
def get_url():
    # Full S3 URL for a fresh, unique test storage path.
    return "s3://{}".format(S3._get_storagepath())
11197,
1036,
11197,
597,
1488615393
] |
def _s3(self):
    # Imported lazily so merely importing this module never requires boto3.
    import boto3

    return boto3.client("s3", endpoint_url=self.config["endpointurl"])
11197,
1036,
11197,
597,
1488615393
] |
def is_dir(self):
    """A prefix counts as a directory iff at least one object lives under it."""
    prefix = (self / "").path
    listing = self._s3.list_objects(Bucket=self.bucket, Prefix=prefix)
    return bool(listing.get("Contents"))
11197,
1036,
11197,
597,
1488615393
] |
def mkdir(self, mode=0o777, parents=False, exist_ok=False):
    """No-op: S3 has no real directories.

    Only validates that callers use the sole supported argument combination
    (default mode, parents=True); exist_ok is accepted and ignored.
    """
    assert mode == 0o777
    assert parents
11197,
1036,
11197,
597,
1488615393
] |
def read_bytes(self):
    """Download and return the object's full contents as bytes."""
    response = self._s3.get_object(Bucket=self.bucket, Key=self.path)
    return response["Body"].read()
11197,
1036,
11197,
597,
1488615393
] |
def fs_path(self):
    """Bucket-qualified path with no leading slash, e.g. 'bucket/key'."""
    return "{}/{}".format(self.bucket, self.path.lstrip("/"))
11197,
1036,
11197,
597,
1488615393
] |
def s3_fake_creds_file(monkeypatch):
    # https://github.com/spulec/moto#other-caveats
    import pathlib

    creds_path = pathlib.Path("~").expanduser() / ".aws" / "credentials"
    creds_path.parent.mkdir(exist_ok=True)
    existed_before = creds_path.exists()
    if not existed_before:
        creds_path.touch()
    try:
        with monkeypatch.context() as ctx:
            for var in (
                "AWS_ACCESS_KEY_ID",
                "AWS_SECRET_ACCESS_KEY",
                "AWS_SECURITY_TOKEN",
                "AWS_SESSION_TOKEN",
            ):
                ctx.setenv(var, "testing")
            yield
    finally:
        # Only remove the credentials file if this fixture created it.
        if creds_path.exists() and not existed_before:
            creds_path.unlink()
11197,
1036,
11197,
597,
1488615393
] |
def s3_server(test_config, docker_compose, docker_services):
    """Waits for the moto S3 server container and returns its endpoint URL."""
    import requests

    test_config.requires("s3")
    moto_port = docker_services.port_for("motoserver", 5000)
    endpoint_url = TEST_AWS_ENDPOINT_URL.format(port=moto_port)

    def _server_up():
        try:
            return requests.get(endpoint_url).ok
        except requests.RequestException:
            return False

    docker_services.wait_until_responsive(
        timeout=60.0, pause=0.1, check=_server_up
    )
    S3.TEST_AWS_ENDPOINT_URL = endpoint_url
    return endpoint_url
11197,
1036,
11197,
597,
1488615393
] |
def s3(test_config, s3_server, s3_fake_creds_file):
    """Yields an S3 workspace backed by a freshly created test bucket."""
    test_config.requires("s3")
    remote = S3(S3.get_url())
    remote._s3.create_bucket(Bucket=TEST_AWS_REPO_BUCKET)
    yield remote
11197,
1036,
11197,
597,
1488615393
] |
def testRuns(self):
  """Smoke-tests train() on fake MNIST data at several configurations."""
  configs = (
      dict(batch_size=1, num_training_iters=10, validation_steps=5),
      dict(batch_size=2, num_training_iters=5, validation_steps=2),
      dict(batch_size=10, num_training_iters=1, validation_steps=1),
  )
  with mock.patch.object(dataset, 'load', new=fake_mnist_data):
    for cfg in configs:
      train.train(learning_rate=0.1, **cfg)
7391,
769,
7391,
106,
1570288154
] |
def gen_examples(num_examples):
  """Generates fake MNIST-shaped data.

  Args:
    num_examples: Number of examples (rows) to generate.

  Returns:
    A pair (x, y): x is float32 of shape (num_examples, 784) with random
    normal features; y is float32 of shape (num_examples, 10) one-hot labels
    with class 0 marked for every example.
  """
  # astype replaces the deprecated np.array(..., copy=False) conversion.
  x = np.random.randn(num_examples, 784).astype(np.float32)
  y = np.zeros((num_examples, 10), dtype=np.float32)
  # Bug fix: the original `y[:][0] = 1.` only filled row 0 (y[:] is a view
  # of y), leaving every other label row all-zero. Mark class 0 in all rows.
  y[:, 0] = 1.0
  return (x, y)
7391,
769,
7391,
106,
1570288154
] |
def catkin_success(args, env=None):
    """Run catkin_main(args) and report whether it exited successfully.

    The process environment is augmented with ``env`` for the duration of
    the call and fully restored afterwards.

    Args:
        args: Argument list forwarded to catkin_main.
        env: Optional mapping of extra environment variables (a mutable
            default argument was deliberately avoided here).

    Returns:
        True if catkin_main finished with exit code 0, False otherwise.
    """
    orig_environ = dict(os.environ)
    # Pre-initialized so a catkin_main that returns without raising
    # SystemExit (the original code assumed it always raises, and would hit
    # an UnboundLocalError otherwise) counts as success.
    ret = 0
    try:
        os.environ.update(env or {})
        catkin_main(args)
    except SystemExit as exc:
        ret = exc.code
        if ret != 0:
            import traceback
            traceback.print_exc()
    finally:
        # Restore by mutating os.environ in place; rebinding the name (as
        # the original did) leaves the real process environment — seen by
        # child processes — modified.
        os.environ.clear()
        os.environ.update(orig_environ)
    return ret == 0
149,
135,
149,
91,
1393292582
] |
def __init__(self, expected, expected_regex=None):
    # Exception type we expect (or None for "no exception"), plus an
    # optional regex the exception message must match.
    self.expected = expected
    self.expected_regex = expected_regex
149,
135,
149,
91,
1393292582
] |
def __exit__(self, exc_type, exc_value, tb):
    """Validate the exception (if any) raised inside the ``with`` block.

    Mirrors unittest's assertRaises context-manager protocol: returning
    True swallows the expected exception; anything unexpected propagates.
    """
    if self.expected is None:
        # No exception expected: succeed on a clean exit, otherwise
        # re-raise whatever came out of the block.
        if exc_type is None:
            return True
        else:
            raise
    if exc_type is None:
        # The block finished without raising, but we expected an exception.
        try:
            exc_name = self.expected.__name__
        except AttributeError:
            exc_name = str(self.expected)
        raise AssertionError("{0} not raised".format(exc_name))
    if not issubclass(exc_type, self.expected):
        # Wrong exception type: let it propagate unchanged.
        raise
    if self.expected_regex is None:
        return True
    # Additionally require the exception message to match the pattern.
    expected_regex = self.expected_regex
    expected_regex = re.compile(expected_regex)
    if not expected_regex.search(str(exc_value)):
        raise AssertionError("'{0}' does not match '{1}'".format(expected_regex.pattern, str(exc_value)))
    return True
149,
135,
149,
91,
1393292582
] |
def __enter__(self):
    """Swap sys.stdout/sys.stderr for in-memory buffers; return both."""
    self.original_stdout, self.original_stderr = sys.stdout, sys.stderr
    self.out, self.err = StringIO(), StringIO()
    sys.stdout, sys.stderr = self.out, self.err
    return self.out, self.err
149,
135,
149,
91,
1393292582
] |
def __init__(self, prefix=''):
    # Temporary-directory name prefix; deletion stays disabled until a
    # temp path is actually created.
    self.prefix = prefix
    self.delete = False
149,
135,
149,
91,
1393292582
] |
def __exit__(self, exc_type, exc_value, traceback):
    """Remove the temporary directory (when owned) and restore the cwd."""
    if self.delete and self.temp_path and os.path.exists(self.temp_path):
        # Typo fix: the message previously read "testind".
        print('Deleting temporary testing directory: %s' % self.temp_path)
        shutil.rmtree(self.temp_path)
    if self.original_cwd and os.path.exists(self.original_cwd):
        os.chdir(self.original_cwd)
149,
135,
149,
91,
1393292582
] |
def decorated(*args, **kwds):
    """Run the wrapped test inside a temporary directory.

    If the wrapped function declares a ``directory`` parameter and the
    caller did not supply one, the temporary path is injected for it.
    """
    with temporary_directory() as directory:
        # inspect.getargspec was removed in Python 3.11; getfullargspec is
        # the drop-in replacement (index 0 is still the arg-name list).
        from inspect import getfullargspec
        # If it takes directory of kwargs and kwds does already have
        # directory, inject it
        if 'directory' not in kwds and 'directory' in getfullargspec(f)[0]:
            kwds['directory'] = directory
        return f(*args, **kwds)
149,
135,
149,
91,
1393292582
] |
def run(args, **kwargs):
    """
    Call to Popen, returns (errcode, stdout, stderr)
    """
    print("run:", args)
    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
        cwd=kwargs.get('cwd', os.getcwd()))
    print("P==", proc.__dict__)
    stdout, stderr = proc.communicate()
    return (proc.returncode, stdout, stderr)
149,
135,
149,
91,
1393292582
] |
def assert_cmd_failure(cmd, **kwargs):
    """
    Asserts that running a command returns non-zero.
    returns: stdout
    """
    print(">>>", cmd, kwargs)
    (returncode, out, err) = run(cmd, withexitstatus=True, **kwargs)
    print("<<<", str(out))
    assert 0 != returncode, "cmd succeeded, but it should fail: %s result=%u\noutput=\n%s" % (cmd, returncode, out)
    return out
149,
135,
149,
91,
1393292582
] |
def test_configuration_subclass_inherits_items(self):
    """Items declared on a Schema base class must appear in subclass output,
    and flattened sub-schemas must merge their properties into the parent."""
    class BaseConfig(schema.Schema):
        base = schema.BooleanItem(default=True, required=True)
    class SubClassedConfig(BaseConfig):
        hungry = schema.BooleanItem(
            title="Hungry", description="Are you hungry?", required=True
        )
    # The subclass serialization contains both the inherited and the newly
    # declared item, in declaration order (base class items first).
    self.assertDictEqual(
        SubClassedConfig.serialize(),
        {
            "$schema": "http://json-schema.org/draft-04/schema#",
            "type": "object",
            "properties": {
                "base": {"default": True, "type": "boolean", "title": "base"},
                "hungry": {
                    "type": "boolean",
                    "description": "Are you hungry?",
                    "title": "Hungry",
                },
            },
            "required": ["base", "hungry"],
            "x-ordering": ["base", "hungry"],
            "additionalProperties": False,
        },
    )
    # A schema embedding SubClassedConfig with flatten=True merges the
    # flattened properties after its own.
    class MergedConfigClass(schema.Schema):
        thirsty = schema.BooleanItem(
            title="Thirsty", description="Are you thirsty?", required=True
        )
        merge_subclassed = SubClassedConfig(flatten=True)
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "type": "object",
        "properties": {
            "thirsty": {
                "type": "boolean",
                "description": "Are you thirsty?",
                "title": "Thirsty",
            },
            "base": {"default": True, "type": "boolean", "title": "base"},
            "hungry": {
                "type": "boolean",
                "description": "Are you hungry?",
                "title": "Hungry",
            },
        },
        "required": ["thirsty", "base", "hungry"],
        "x-ordering": ["thirsty", "base", "hungry"],
        "additionalProperties": False,
    }
    # NOTE(review): the subset/superset argument order is inverted between
    # these two assertDictContainsSubset calls — presumably intentional to
    # check property equality in both directions; verify if refactoring.
    self.assertDictContainsSubset(
        MergedConfigClass.serialize()["properties"], expected["properties"]
    )
    self.assertDictContainsSubset(expected, MergedConfigClass.serialize())
13089,
5388,
13089,
3074,
1298233016
] |
def test_optional_requirements_config(self):
    """anyOf requirements must serialize identically whether built from
    flattened sub-schemas, inline items, or by flattening a whole schema."""
    class BaseRequirements(schema.Schema):
        driver = schema.StringItem(default="digitalocean", format="hidden")
    class SSHKeyFileSchema(schema.Schema):
        ssh_key_file = schema.StringItem(
            title="SSH Private Key",
            description=(
                "The path to an SSH private key which will be used "
                "to authenticate on the deployed VMs"
            ),
        )
    class SSHKeyNamesSchema(schema.Schema):
        ssh_key_names = schema.StringItem(
            title="SSH Key Names",
            description=(
                "The names of an SSH key being managed on "
                "DigitalOcean account which will be used to "
                "authenticate on the deployed VMs"
            ),
        )
    # Variant 1: anyOf built from as_requirements_item() of sub-schemas,
    # with the sub-schemas themselves flattened in.
    class Requirements(BaseRequirements):
        title = "DigitalOcean"
        description = "DigitalOcean Cloud VM configuration requirements."
        personal_access_token = schema.StringItem(
            title="Personal Access Token",
            description=(
                "This is the API access token which can be generated "
                "under the API/Application on your account"
            ),
            required=True,
        )
        requirements_definition = schema.AnyOfItem(
            items=(
                SSHKeyFileSchema.as_requirements_item(),
                SSHKeyNamesSchema.as_requirements_item(),
            ),
        )(flatten=True)
        ssh_key_file = SSHKeyFileSchema(flatten=True)
        ssh_key_names = SSHKeyNamesSchema(flatten=True)
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "DigitalOcean",
        "description": "DigitalOcean Cloud VM configuration requirements.",
        "type": "object",
        "properties": {
            "driver": {
                "default": "digitalocean",
                "format": "hidden",
                "type": "string",
                "title": "driver",
            },
            "personal_access_token": {
                "type": "string",
                "description": (
                    "This is the API access token which can be "
                    "generated under the API/Application on your account"
                ),
                "title": "Personal Access Token",
            },
            "ssh_key_file": {
                "type": "string",
                "description": (
                    "The path to an SSH private key which will "
                    "be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Private Key",
            },
            "ssh_key_names": {
                "type": "string",
                "description": (
                    "The names of an SSH key being managed on DigitalOcean "
                    "account which will be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Key Names",
            },
        },
        "anyOf": [{"required": ["ssh_key_file"]}, {"required": ["ssh_key_names"]}],
        "required": ["personal_access_token"],
        "x-ordering": [
            "driver",
            "personal_access_token",
            "ssh_key_file",
            "ssh_key_names",
        ],
        "additionalProperties": False,
    }
    self.assertDictEqual(expected, Requirements.serialize())
    # Variant 2: same schema expressed with inline StringItems and explicit
    # RequirementsItem entries — must serialize to the same structure.
    class Requirements2(BaseRequirements):
        title = "DigitalOcean"
        description = "DigitalOcean Cloud VM configuration requirements."
        personal_access_token = schema.StringItem(
            title="Personal Access Token",
            description=(
                "This is the API access token which can be generated "
                "under the API/Application on your account"
            ),
            required=True,
        )
        ssh_key_file = schema.StringItem(
            title="SSH Private Key",
            description=(
                "The path to an SSH private key which will be used "
                "to authenticate on the deployed VMs"
            ),
        )
        ssh_key_names = schema.StringItem(
            title="SSH Key Names",
            description=(
                "The names of an SSH key being managed on "
                "DigitalOcean account which will be used to "
                "authenticate on the deployed VMs"
            ),
        )
        requirements_definition = schema.AnyOfItem(
            items=(
                schema.RequirementsItem(requirements=["ssh_key_file"]),
                schema.RequirementsItem(requirements=["ssh_key_names"]),
            ),
        )(flatten=True)
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "DigitalOcean",
        "description": "DigitalOcean Cloud VM configuration requirements.",
        "type": "object",
        "properties": {
            "driver": {
                "default": "digitalocean",
                "format": "hidden",
                "type": "string",
                "title": "driver",
            },
            "personal_access_token": {
                "type": "string",
                "description": (
                    "This is the API access token which can be "
                    "generated under the API/Application on your account"
                ),
                "title": "Personal Access Token",
            },
            "ssh_key_file": {
                "type": "string",
                "description": (
                    "The path to an SSH private key which will "
                    "be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Private Key",
            },
            "ssh_key_names": {
                "type": "string",
                "description": (
                    "The names of an SSH key being managed on DigitalOcean "
                    "account which will be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Key Names",
            },
        },
        "anyOf": [{"required": ["ssh_key_file"]}, {"required": ["ssh_key_names"]}],
        "required": ["personal_access_token"],
        "x-ordering": [
            "driver",
            "personal_access_token",
            "ssh_key_file",
            "ssh_key_names",
        ],
        "additionalProperties": False,
    }
    self.assertDictContainsSubset(expected, Requirements2.serialize())
    # Variant 3: flattening the whole Requirements schema into an otherwise
    # empty schema must preserve the merged structure.
    class Requirements3(schema.Schema):
        title = "DigitalOcean"
        description = "DigitalOcean Cloud VM configuration requirements."
        merge_reqs = Requirements(flatten=True)
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "DigitalOcean",
        "description": "DigitalOcean Cloud VM configuration requirements.",
        "type": "object",
        "properties": {
            "driver": {
                "default": "digitalocean",
                "format": "hidden",
                "type": "string",
                "title": "driver",
            },
            "personal_access_token": {
                "type": "string",
                "description": (
                    "This is the API access token which can be "
                    "generated under the API/Application on your account"
                ),
                "title": "Personal Access Token",
            },
            "ssh_key_file": {
                "type": "string",
                "description": (
                    "The path to an SSH private key which will "
                    "be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Private Key",
            },
            "ssh_key_names": {
                "type": "string",
                "description": (
                    "The names of an SSH key being managed on DigitalOcean "
                    "account which will be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Key Names",
            },
        },
        "anyOf": [{"required": ["ssh_key_file"]}, {"required": ["ssh_key_names"]}],
        "required": ["personal_access_token"],
        "x-ordering": [
            "driver",
            "personal_access_token",
            "ssh_key_file",
            "ssh_key_names",
        ],
        "additionalProperties": False,
    }
    self.assertDictContainsSubset(expected, Requirements3.serialize())
    # Variant 4: additional anyOf entries added on top of a flattened schema
    # must be appended to the inherited anyOf list, not replace it.
    class Requirements4(schema.Schema):
        title = "DigitalOcean"
        description = "DigitalOcean Cloud VM configuration requirements."
        merge_reqs = Requirements(flatten=True)
        ssh_key_file_2 = schema.StringItem(
            title="SSH Private Key",
            description=(
                "The path to an SSH private key which will be used "
                "to authenticate on the deployed VMs"
            ),
        )
        ssh_key_names_2 = schema.StringItem(
            title="SSH Key Names",
            description=(
                "The names of an SSH key being managed on "
                "DigitalOcean account which will be used to "
                "authenticate on the deployed VMs"
            ),
        )
        requirements_definition_2 = schema.AnyOfItem(
            items=(
                schema.RequirementsItem(requirements=["ssh_key_file_2"]),
                schema.RequirementsItem(requirements=["ssh_key_names_2"]),
            ),
        )(flatten=True)
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "DigitalOcean",
        "description": "DigitalOcean Cloud VM configuration requirements.",
        "type": "object",
        "properties": {
            "driver": {
                "default": "digitalocean",
                "format": "hidden",
                "type": "string",
                "title": "driver",
            },
            "personal_access_token": {
                "type": "string",
                "description": (
                    "This is the API access token which can be "
                    "generated under the API/Application on your account"
                ),
                "title": "Personal Access Token",
            },
            "ssh_key_file": {
                "type": "string",
                "description": (
                    "The path to an SSH private key which will "
                    "be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Private Key",
            },
            "ssh_key_names": {
                "type": "string",
                "description": (
                    "The names of an SSH key being managed on DigitalOcean "
                    "account which will be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Key Names",
            },
            "ssh_key_file_2": {
                "type": "string",
                "description": (
                    "The path to an SSH private key which will "
                    "be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Private Key",
            },
            "ssh_key_names_2": {
                "type": "string",
                "description": (
                    "The names of an SSH key being managed on DigitalOcean "
                    "account which will be used to authenticate on the deployed VMs"
                ),
                "title": "SSH Key Names",
            },
        },
        "anyOf": [
            {"required": ["ssh_key_file"]},
            {"required": ["ssh_key_names"]},
            {"required": ["ssh_key_file_2"]},
            {"required": ["ssh_key_names_2"]},
        ],
        "required": ["personal_access_token"],
        "x-ordering": [
            "driver",
            "personal_access_token",
            "ssh_key_file",
            "ssh_key_names",
            "ssh_key_file_2",
            "ssh_key_names_2",
        ],
        "additionalProperties": False,
    }
    self.assertDictContainsSubset(expected, Requirements4.serialize())
13089,
5388,
13089,
3074,
1298233016
] |
def test_optional_requirements_config_validation(self):
    """Validation against an anyOf-requirements schema must accept any
    combination that satisfies at least one branch, and reject none."""
    class BaseRequirements(schema.Schema):
        driver = schema.StringItem(default="digitalocean", format="hidden")
    class SSHKeyFileSchema(schema.Schema):
        ssh_key_file = schema.StringItem(
            title="SSH Private Key",
            description=(
                "The path to an SSH private key which will be used "
                "to authenticate on the deployed VMs"
            ),
        )
    class SSHKeyNamesSchema(schema.Schema):
        ssh_key_names = schema.StringItem(
            title="SSH Key Names",
            description=(
                "The names of an SSH key being managed on "
                "Digial Ocean account which will be used to "
                "authenticate on the deployed VMs"
            ),
        )
    class Requirements(BaseRequirements):
        title = "DigitalOcean"
        description = "DigitalOcean Cloud VM configuration requirements."
        personal_access_token = schema.StringItem(
            title="Personal Access Token",
            description=(
                "This is the API access token which can be generated "
                "under the API/Application on your account"
            ),
            required=True,
        )
        requirements_definition = schema.AnyOfItem(
            items=(
                SSHKeyFileSchema.as_requirements_item(),
                SSHKeyNamesSchema.as_requirements_item(),
            ),
        )(flatten=True)
        ssh_key_file = SSHKeyFileSchema(flatten=True)
        ssh_key_names = SSHKeyNamesSchema(flatten=True)
    # Both optional keys supplied: valid.
    try:
        jsonschema.validate(
            {
                "personal_access_token": "foo",
                "ssh_key_names": "bar",
                "ssh_key_file": "test",
            },
            Requirements.serialize(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Only ssh_key_names supplied: still valid (second anyOf branch).
    try:
        jsonschema.validate(
            {"personal_access_token": "foo", "ssh_key_names": "bar"},
            Requirements.serialize(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Only ssh_key_file supplied: still valid (first anyOf branch).
    try:
        jsonschema.validate(
            {"personal_access_token": "foo", "ssh_key_file": "test"},
            Requirements.serialize(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Neither optional key supplied: must be rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"personal_access_token": "foo"}, Requirements.serialize()
        )
    # The error message wording changed between jsonschema releases.
    if JSONSCHEMA_VERSION >= _LooseVersion("3.0.0"):
        self.assertIn(
            "'ssh_key_file' is a required property", excinfo.exception.message
        )
    else:
        self.assertIn(
            "is not valid under any of the given schemas", excinfo.exception.message
        )
13089,
5388,
13089,
3074,
1298233016
] |
def test_boolean_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.BooleanItem(title="Hungry", description="Are you hungry?")

    # A genuine boolean validates cleanly.
    try:
        jsonschema.validate({"item": False}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    # An integer is rejected even though it is truthy.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate({"item": 1}, TestConf.serialize())
    self.assertIn("is not of type", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_string_config_validation(self):
    """StringItem validation: type, length bounds, enum, and regex pattern."""
    class TestConf(schema.Schema):
        item = schema.StringItem(title="Foo", description="Foo Item")
    # Plain string with no constraints: valid.
    try:
        jsonschema.validate({"item": "the item"}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    class TestConf(schema.Schema):
        item = schema.StringItem(
            title="Foo", description="Foo Item", min_length=1, max_length=10
        )
    # Within the 1..10 length bounds: valid.
    try:
        jsonschema.validate({"item": "the item"}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Wrong type.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)
    # Exceeds max_length.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": "the item the item"}, TestConf.serialize())
    self.assertIn("is too long", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.StringItem(
            title="Foo", description="Foo Item", min_length=10, max_length=100
        )
    # Below min_length.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": "the item"}, TestConf.serialize())
    self.assertIn("is too short", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.StringItem(
            title="Foo", description="Foo Item", enum=("foo", "bar")
        )
    # Value inside the enum: valid.
    try:
        jsonschema.validate({"item": "foo"}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    class TestConf(schema.Schema):
        item = schema.StringItem(
            title="Foo", description="Foo Item", enum=("foo", "bar")
        )
    # Value outside the enum.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": "bin"}, TestConf.serialize())
    self.assertIn("is not one of", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.StringItem(
            title="Foo", description="Foo Item", pattern=r"^([\w_-]+)$"
        )
    # Pattern checks require a FormatChecker to be active.
    try:
        jsonschema.validate(
            {"item": "the-item"},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # The embedded space does not match the pattern.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": "the item"},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("does not match", excinfo.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_email_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.EMailItem(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # A well-formed address passes format validation.
    try:
        jsonschema.validate(
            {"item": "[email protected]"},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A plain string that is not an e-mail address is rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_ipv4_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.IPv4Item(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # A dotted-quad address passes format validation.
    try:
        jsonschema.validate(
            {"item": "127.0.0.1"},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A non-address string is rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_ipv6_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.IPv6Item(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # The loopback address passes format validation.
    try:
        jsonschema.validate(
            {"item": salt.utils.stringutils.to_str("::1")},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A non-address string is rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_hostname_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.HostnameItem(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # A plain hostname passes format validation.
    try:
        jsonschema.validate(
            {"item": "localhost"},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A bare digit string is rejected as a hostname.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_datetime_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.DateTimeItem(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # An ISO-8601 timestamp passes format validation.
    try:
        jsonschema.validate(
            {"item": "2015-07-01T18:05:27+01:00"},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A non-timestamp string is rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_uri_config(self):
    item = schema.UriItem(title="Foo", description="Foo Item")
    # The serialized form mirrors the item's own attributes.
    expected = {
        "type": "string",
        "title": item.title,
        "description": item.description,
        "format": item.format,
    }
    self.assertDictEqual(item.serialize(), expected)
13089,
5388,
13089,
3074,
1298233016
] |
def test_uri_config_validation(self):
    class TestConf(schema.Schema):
        item = schema.UriItem(title="Item", description="Item description")

    checker = jsonschema.FormatChecker()
    # A scheme-qualified URI passes format validation.
    try:
        jsonschema.validate(
            {"item": "ssh://localhost"},
            TestConf.serialize(),
            format_checker=checker,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A bare string without a scheme is rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
        jsonschema.validate(
            {"item": "3"},
            TestConf.serialize(),
            format_checker=checker,
        )
    self.assertIn("is not a", ctx.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_number_config_validation(self):
    """NumberItem validation: type, multiple_of, inclusive and exclusive
    min/max bounds, and enum membership."""
    class TestConf(schema.Schema):
        item = schema.NumberItem(title="How many dogs", description="Question")
    # A plain number: valid.
    try:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # A numeric string is not a number.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": "3"}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="How many dogs", description="Question", multiple_of=2.2
        )
    # Exact multiple of 2.2: valid.
    try:
        jsonschema.validate({"item": 4.4}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Not a multiple of 2.2.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 4}, TestConf.serialize())
    self.assertIn("is not a multiple of", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="Foo", description="Foo Item", minimum=1, maximum=10
        )
    # Within the inclusive 1..10 range: valid.
    try:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    # Above the maximum.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 11}, TestConf.serialize())
    self.assertIn("is greater than the maximum of", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="Foo", description="Foo Item", minimum=10, maximum=100
        )
    # Below the minimum.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is less than the minimum of", excinfo.exception.message)
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="How many dogs",
            description="Question",
            minimum=0,
            exclusive_minimum=True,
            maximum=10,
            exclusive_maximum=True,
        )
    # With exclusive bounds, the boundary values themselves are rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 0}, TestConf.serialize())
    self.assertIn(
        "is less than or equal to the minimum of", excinfo.exception.message
    )
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 10}, TestConf.serialize())
    self.assertIn(
        "is greater than or equal to the maximum of", excinfo.exception.message
    )
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="Foo", description="Foo Item", enum=(0, 2, 4, 6)
        )
    # Value inside the enum: valid.
    try:
        jsonschema.validate({"item": 4}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    class TestConf(schema.Schema):
        item = schema.NumberItem(
            title="Foo", description="Foo Item", enum=(0, 2, 4, 6)
        )
    # Value outside the enum.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is not one of", excinfo.exception.message)
13089,
5388,
13089,
3074,
1298233016
] |
def test_integer_config_validation(self):
    """Validate IntegerItem JSON-schema constraints.

    Covers: base type check, ``multiple_of``, inclusive ``minimum``/``maximum``,
    exclusive bounds, and ``enum`` membership.
    """
    class TestConf(schema.Schema):
        item = schema.IntegerItem(title="How many dogs", description="Question")

    try:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    # A float is not an integer.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3.1}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="How many dogs", description="Question", multiple_of=2
        )

    try:
        jsonschema.validate({"item": 4}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is not a multiple of", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="Foo", description="Foo Item", minimum=1, maximum=10
        )

    try:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 11}, TestConf.serialize())
    self.assertIn("is greater than the maximum of", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="Foo", description="Foo Item", minimum=10, maximum=100
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is less than the minimum of", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="How many dogs",
            description="Question",
            minimum=0,
            exclusive_minimum=True,
            maximum=10,
            exclusive_maximum=True,
        )

    # With exclusive bounds the boundary values themselves are rejected.
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 0}, TestConf.serialize())
    self.assertIn(
        "is less than or equal to the minimum of", excinfo.exception.message
    )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 10}, TestConf.serialize())
    self.assertIn(
        "is greater than or equal to the maximum of", excinfo.exception.message
    )

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="Foo", description="Foo Item", enum=(0, 2, 4, 6)
        )

    try:
        jsonschema.validate({"item": 4}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    class TestConf(schema.Schema):
        item = schema.IntegerItem(
            title="Foo", description="Foo Item", enum=(0, 2, 4, 6)
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 3}, TestConf.serialize())
    self.assertIn("is not one of", excinfo.exception.message)
def test_array_config_validation(self):
    """Validate ArrayItem JSON-schema constraints.

    Covers: item type checking, ``min_items``/``max_items``, ``uniqueItems``,
    positional (tuple) item schemas, nested arrays and item enums.
    """
    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Dog Names",
            description="Name your dogs",
            items=schema.StringItem(),
        )

    try:
        jsonschema.validate(
            {"item": ["Tobias", "Óscar"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": ["Tobias", "Óscar", 3]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Dog Names",
            description="Name your dogs",
            items=schema.StringItem(),
            min_items=1,
            max_items=2,
        )

    try:
        jsonschema.validate(
            {"item": ["Tobias", "Óscar"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": ["Tobias", "Óscar", "Pepe"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is too long", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": []},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is too short", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Dog Names",
            description="Name your dogs",
            items=schema.StringItem(),
            uniqueItems=True,
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": ["Tobias", "Tobias"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("has non-unique elements", excinfo.exception.message)

    # A tuple of item schemas validates positionally.
    class TestConf(schema.Schema):
        item = schema.ArrayItem(items=(schema.StringItem(), schema.IntegerItem()))

    try:
        jsonschema.validate(
            {"item": ["Óscar", 4]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": ["Tobias", "Óscar"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            items=schema.ArrayItem(
                items=(schema.StringItem(), schema.IntegerItem())
            )
        )

    try:
        jsonschema.validate(
            {"item": [["Tobias", 8], ["Óscar", 4]]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": [["Tobias", 8], ["Óscar", "4"]]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.ArrayItem(items=schema.StringItem(enum=["Tobias", "Óscar"]))

    try:
        jsonschema.validate(
            {"item": ["Óscar"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate(
            {"item": ["Tobias"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": ["Pepe"]},
            TestConf.serialize(),
            format_checker=jsonschema.FormatChecker(),
        )
    self.assertIn("is not one of", excinfo.exception.message)
def test_dict_config_validation(self):
    """Validate DictItem JSON-schema constraints.

    Covers: typed properties, ``pattern_properties``, ``additional_properties``
    (both boolean and schema forms), ``min_properties``/``max_properties``,
    and required properties declared inline or via a nested Schema.
    """
    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"sides": schema.IntegerItem()},
        )

    try:
        jsonschema.validate({"item": {"sides": 1}}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": {"sides": "1"}}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"color": schema.StringItem(enum=["red", "green", "blue"])},
            pattern_properties={"si.*": schema.IntegerItem()},
        )

    try:
        jsonschema.validate(
            {"item": {"sides": 1, "color": "red"}}, TestConf.serialize()
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": {"sides": "4", "color": "blue"}}, TestConf.serialize()
        )
    self.assertIn("is not of type", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"color": schema.StringItem(enum=["red", "green", "blue"])},
            pattern_properties={"si.*": schema.IntegerItem()},
            additional_properties=False,
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": {"color": "green", "sides": 4, "surfaces": 4}},
            TestConf.serialize(),
        )
    # jsonschema changed the wording of this error message in 2.6.0.
    if JSONSCHEMA_VERSION < _LooseVersion("2.6.0"):
        self.assertIn(
            "Additional properties are not allowed", excinfo.exception.message
        )
    else:
        self.assertIn(
            "'surfaces' does not match any of the regexes",
            excinfo.exception.message,
        )

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"color": schema.StringItem(enum=["red", "green", "blue"])},
            additional_properties=schema.OneOfItem(
                items=[schema.BooleanItem(), schema.IntegerItem()]
            ),
        )

    try:
        jsonschema.validate(
            {"item": {"sides": 1, "color": "red", "rugged_surface": False}},
            TestConf.serialize(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": {"sides": "4", "color": "blue"}}, TestConf.serialize()
        )
    # jsonschema >= 3.0 reports the best-match sub-error instead of a
    # generic oneOf failure.
    if JSONSCHEMA_VERSION >= _LooseVersion("3.0.0"):
        self.assertIn("'4'", excinfo.exception.message)
        self.assertIn("is not of type", excinfo.exception.message)
        self.assertIn("'boolean'", excinfo.exception.message)
    else:
        self.assertIn(
            "is not valid under any of the given schemas", excinfo.exception.message
        )

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"color": schema.StringItem(enum=["red", "green", "blue"])},
            additional_properties=schema.OneOfItem(
                items=[schema.BooleanItem(), schema.IntegerItem()]
            ),
            min_properties=2,
            max_properties=3,
        )

    try:
        jsonschema.validate(
            {"item": {"color": "red", "sides": 1}}, TestConf.serialize()
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate(
            {"item": {"sides": 1, "color": "red", "rugged_surface": False}},
            TestConf.serialize(),
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": {"color": "blue"}}, TestConf.serialize())
    self.assertIn("does not have enough properties", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {
                "item": {
                    "sides": 4,
                    "color": "blue",
                    "rugged_surface": False,
                    "opaque": True,
                }
            },
            TestConf.serialize(),
        )
    self.assertIn("has too many properties", excinfo.exception.message)

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties={"sides": schema.IntegerItem(required=True)},
            additional_properties=schema.OneOfItem(
                items=[schema.BooleanItem(), schema.StringItem()]
            ),
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": {"color": "blue", "rugged_surface": False, "opaque": True}},
            TestConf.serialize(),
        )
    self.assertIn("'sides' is a required property", excinfo.exception.message)

    # Required properties can also come from a nested Schema instance.
    class Props(schema.Schema):
        sides = schema.IntegerItem(required=True)

    class TestConf(schema.Schema):
        item = schema.DictItem(
            title="Poligon",
            description="Describe the Poligon",
            properties=Props(),
            additional_properties=schema.OneOfItem(
                items=[schema.BooleanItem(), schema.StringItem()]
            ),
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {"item": {"color": "blue", "rugged_surface": False, "opaque": True}},
            TestConf.serialize(),
        )
    self.assertIn("'sides' is a required property", excinfo.exception.message)
def test_oneof_config_validation(self):
    """Validate that OneOfItem accepts a value matching exactly one
    alternative and rejects values matching none."""
    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Hungry",
            description="Are you hungry?",
            items=schema.OneOfItem(
                items=(
                    schema.StringItem(title="Yes", enum=["yes"]),
                    schema.StringItem(title="No", enum=["no"]),
                )
            ),
        )

    try:
        jsonschema.validate({"item": ["no"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": ["maybe"]}, TestConf.serialize())
    # jsonschema >= 3.0 reports the best-match sub-error; older versions
    # emit a generic oneOf failure.
    if JSONSCHEMA_VERSION >= _LooseVersion("3.0.0"):
        self.assertIn("'maybe'", excinfo.exception.message)
        self.assertIn("is not one of", excinfo.exception.message)
        self.assertIn("'yes'", excinfo.exception.message)
    else:
        self.assertIn(
            "is not valid under any of the given schemas", excinfo.exception.message
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)
def test_anyof_config_validation(self):
    """Validate that AnyOfItem accepts values matching any alternative
    (strings or booleans here) and rejects values matching none."""
    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Hungry",
            description="Are you hungry?",
            items=schema.AnyOfItem(
                items=(
                    schema.StringItem(title="Yes", enum=["yes"]),
                    schema.StringItem(title="No", enum=["no"]),
                    schema.BooleanItem(),
                )
            ),
        )

    try:
        jsonschema.validate({"item": ["no"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate({"item": ["yes"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate({"item": [True]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate({"item": [False]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": ["maybe"]}, TestConf.serialize())
    # jsonschema >= 3.0 reports the best-match sub-error; older versions
    # emit a generic anyOf failure.
    if JSONSCHEMA_VERSION >= _LooseVersion("3.0.0"):
        self.assertIn("'maybe'", excinfo.exception.message)
        self.assertIn("is not one of", excinfo.exception.message)
        self.assertIn("'yes'", excinfo.exception.message)
    else:
        self.assertIn(
            "is not valid under any of the given schemas", excinfo.exception.message
        )

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)
def test_allof_config_validation(self):
    """Validate that AllOfItem requires every sub-schema to match
    (here: string length between 2 and 3 characters)."""
    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Hungry",
            description="Are you hungry?",
            items=schema.AllOfItem(
                items=(
                    schema.StringItem(min_length=2),
                    schema.StringItem(max_length=3),
                )
            ),
        )

    try:
        jsonschema.validate({"item": ["no"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate({"item": ["yes"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": ["maybe"]}, TestConf.serialize())
    self.assertIn("is too long", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": ["hmmmm"]}, TestConf.serialize())
    self.assertIn("is too long", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": 2}, TestConf.serialize())
    self.assertIn("is not of type", excinfo.exception.message)
def test_not_config_validation(self):
    """Validate that NotItem rejects values matching its inner schema
    (booleans) while anything else passes."""
    class TestConf(schema.Schema):
        item = schema.ArrayItem(
            title="Hungry",
            description="Are you hungry?",
            items=schema.NotItem(item=schema.BooleanItem()),
        )

    try:
        jsonschema.validate({"item": ["no"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
    try:
        jsonschema.validate({"item": ["yes"]}, TestConf.serialize())
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": [True]}, TestConf.serialize())
    self.assertIn("is not allowed for", excinfo.exception.message)

    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"item": [False]}, TestConf.serialize())
    self.assertIn("is not allowed for", excinfo.exception.message)
def test_config_name_override_class_attrname(self):
    """A Schema item's explicit ``name`` argument overrides the class
    attribute name in the serialized JSON schema."""
    class TestConf(schema.Schema):
        item = schema.BooleanItem(title="Hungry", description="Are you hungry?")

    class TestConf2(schema.Schema):
        # Attribute is ``a_name`` but the serialized key must be
        # ``another_name`` because of the explicit name override.
        a_name = TestConf(name="another_name")

    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "type": "object",
        "properties": {
            "another_name": {
                "id": (
                    "https://non-existing.saltstack.com/schemas/another_name.json#"
                ),
                "type": "object",
                "properties": {
                    "item": {
                        "type": "boolean",
                        "description": "Are you hungry?",
                        "title": "Hungry",
                    }
                },
                "x-ordering": ["item"],
                "additionalProperties": False,
            }
        },
        "x-ordering": ["another_name"],
        "additionalProperties": False,
    }
    self.assertDictEqual(TestConf2.serialize(), expected)
def test_complex_schema_item_serialize(self):
    """A complex schema item serializes as a JSON-schema ``$ref`` to its
    definition rather than inlining it."""
    obj = copy.deepcopy(self.obj)
    expected_serialized = {"$ref": "#/definitions/ComplexSchemaItem"}
    self.assertDictEqual(obj.serialize(), expected_serialized)
def test_complex_complex_schema_item_definition(self):
    """The definition of a nested complex item references the inner item's
    definition and lists its required properties."""
    complex_obj = copy.deepcopy(self.complex_obj)
    expected_def = {
        "type": "object",
        "title": "ComplexComplexSchemaItem",
        "properties": {
            "hungry": {
                "type": "boolean",
                "title": "Hungry",
                "description": "Are you hungry?",
            },
            "complex_item": {
                "type": "object",
                "$ref": "#/definitions/test_definition",
            },
        },
        "required": ["hungry"],
    }
    self.assertDictEqual(complex_obj.get_definition(), expected_def)
def test_one_of_complex_definition_schema(self):
    """A oneOf over a complex item serializes with a ``$ref`` into the
    shared ``definitions`` section."""
    # Round-trip through JSON/YAML to normalize container types before
    # comparing against a plain-dict expectation.
    serialized = salt.utils.yaml.safe_load(
        salt.utils.json.dumps(self.one_of_schema.serialize())
    )
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Test OneOf Complex Definitions Schema",
        "type": "object",
        "properties": {
            "one_of_item": {
                "oneOf": [
                    {"$ref": "#/definitions/ComplexSchemaItem"},
                    {"type": "string"},
                ]
            }
        },
        "x-ordering": ["one_of_item"],
        "additionalProperties": False,
        "definitions": {
            "ComplexSchemaItem": {
                "type": "object",
                "title": "ComplexSchemaItem",
                "properties": {
                    "thirsty": {
                        "type": "boolean",
                        "title": "Thirsty",
                        "description": "Are you thirsty?",
                    }
                },
            }
        },
    }
    self.assertDictEqual(serialized, expected)
def test_dict_complex_definition_schema(self):
    """A DictItem holding complex items serializes both its properties and
    its additionalProperties as ``$ref``s into ``definitions``."""
    # Round-trip through JSON/YAML to normalize container types before
    # comparing against a plain-dict expectation.
    serialized = salt.utils.yaml.safe_load(
        salt.utils.json.dumps(self.dict_schema.serialize())
    )
    expected = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Test Dict Complex Definitions Schema",
        "type": "object",
        "properties": {
            "dict_item": {
                "type": "object",
                "title": "dict_item",
                "required": ["complex_obj"],
                "properties": {
                    "complex_obj": {"$ref": "#/definitions/ComplexSchemaItem"}
                },
                "additionalProperties": {"$ref": "#/definitions/ComplexSchemaItem"},
            }
        },
        "x-ordering": ["dict_item"],
        "additionalProperties": False,
        "definitions": {
            "ComplexSchemaItem": {
                "type": "object",
                "title": "ComplexSchemaItem",
                "properties": {
                    "thirsty": {
                        "type": "boolean",
                        "title": "Thirsty",
                        "description": "Are you thirsty?",
                    }
                },
            }
        },
    }
    self.assertDictEqual(serialized, expected)
def test_complex_schema_item_thirsty_valid(self):
    """A boolean value for ``thirsty`` passes validation."""
    serialized = self.schema.serialize()
    try:
        jsonschema.validate({"complex_item": {"thirsty": True}}, serialized)
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
def test_complex_schema_item_thirsty_invalid(self):
    """A non-boolean value for ``thirsty`` fails with a type error."""
    serialized = self.schema.serialize()
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"complex_item": {"thirsty": "Foo"}}, serialized)
    expected = "'Foo' is not of type 'boolean'"
    self.assertIn(expected, excinfo.exception.message)
def test_complex_complex_schema_item_hungry_valid(self):
    """A boolean value for ``hungry`` passes validation."""
    serialized = self.complex_schema.serialize()
    try:
        jsonschema.validate({"complex_complex_item": {"hungry": True}}, serialized)
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
def test_both_complex_complex_schema_all_items_valid(self):
    """Valid values for both the outer and nested complex items pass."""
    serialized = self.complex_schema.serialize()
    try:
        jsonschema.validate(
            {
                "complex_complex_item": {
                    "hungry": True,
                    "complex_item": {"thirsty": True},
                }
            },
            serialized,
        )
    except jsonschema.exceptions.ValidationError as exc:
        self.fail("ValidationError raised: {}".format(exc))
def test_complex_complex_schema_item_hungry_invalid(self):
    """A non-boolean value for ``hungry`` fails with a type error."""
    serialized = self.complex_schema.serialize()
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate({"complex_complex_item": {"hungry": "Foo"}}, serialized)
    expected = "'Foo' is not of type 'boolean'"
    self.assertIn(expected, excinfo.exception.message)
def test_complex_complex_schema_item_inner_thirsty_invalid(self):
    """A non-boolean value for the nested ``thirsty`` property fails."""
    serialized = self.complex_schema.serialize()
    with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
        jsonschema.validate(
            {
                "complex_complex_item": {
                    "hungry": True,
                    "complex_item": {"thirsty": "Bar"},
                }
            },
            serialized,
        )
    expected = "'Bar' is not of type 'boolean'"
    self.assertIn(expected, excinfo.exception.message)
def main():
    """Entry point: build the controller HTTP server from environment
    settings and serve requests until interrupted."""
    settings = get_settings_from_env()
    server = server_factory(**settings)
    server.serve_forever()
def server_factory(visualization_server_image,
                   visualization_server_tag, frontend_image, frontend_tag,
                   disable_istio_sidecar, minio_access_key,
                   minio_secret_key, kfp_default_pipeline_root=None,
                   url="", controller_port=8080):
    """
    Returns an HTTPServer populated with Handler with customized settings.

    The handler implements a Metacontroller "sync" webhook: for every
    namespace labelled ``pipelines.kubeflow.org/enabled=true`` it returns the
    child resources (ConfigMaps, Deployments, Services, Istio resources and a
    minio Secret) that should exist in that namespace.
    """
    class Controller(BaseHTTPRequestHandler):
        def sync(self, parent, children):
            """Compute desired child resources and status for *parent*.

            parent is a namespace object; children maps resource kinds to
            the currently-observed child objects.
            """
            namespace = parent.get("metadata", {}).get("name")
            pipeline_enabled = parent.get("metadata", {}).get(
                "labels", {}).get("pipelines.kubeflow.org/enabled")

            # Namespaces without the opt-in label get no children.
            if pipeline_enabled != "true":
                return {"status": {}, "children": []}

            desired_configmap_count = 1
            desired_resources = []
            if kfp_default_pipeline_root:
                desired_configmap_count = 2
                desired_resources += [{
                    "apiVersion": "v1",
                    "kind": "ConfigMap",
                    "metadata": {
                        "name": "kfp-launcher",
                        "namespace": namespace,
                    },
                    "data": {
                        "defaultPipelineRoot": kfp_default_pipeline_root,
                    },
                }]

            # Compute status based on observed state.
            desired_status = {
                "kubeflow-pipelines-ready":
                    len(children["Secret.v1"]) == 1 and
                    len(children["ConfigMap.v1"]) == desired_configmap_count and
                    len(children["Deployment.apps/v1"]) == 2 and
                    len(children["Service.v1"]) == 2 and
                    len(children["DestinationRule.networking.istio.io/v1alpha3"]) == 1 and
                    len(children["AuthorizationPolicy.security.istio.io/v1beta1"]) == 1 and
                    "True" or "False"
            }

            # Generate the desired child object(s).
            desired_resources += [
                {
                    "apiVersion": "v1",
                    "kind": "ConfigMap",
                    "metadata": {
                        "name": "metadata-grpc-configmap",
                        "namespace": namespace,
                    },
                    "data": {
                        "METADATA_GRPC_SERVICE_HOST":
                            "metadata-grpc-service.kubeflow",
                        "METADATA_GRPC_SERVICE_PORT": "8080",
                    },
                },
                # Visualization server related manifests below
                {
                    "apiVersion": "apps/v1",
                    "kind": "Deployment",
                    "metadata": {
                        "labels": {
                            "app": "ml-pipeline-visualizationserver"
                        },
                        "name": "ml-pipeline-visualizationserver",
                        "namespace": namespace,
                    },
                    "spec": {
                        "selector": {
                            "matchLabels": {
                                "app": "ml-pipeline-visualizationserver"
                            },
                        },
                        "template": {
                            "metadata": {
                                "labels": {
                                    "app": "ml-pipeline-visualizationserver"
                                },
                                "annotations": disable_istio_sidecar and {
                                    "sidecar.istio.io/inject": "false"
                                } or {},
                            },
                            "spec": {
                                "containers": [{
                                    "image": f"{visualization_server_image}:{visualization_server_tag}",
                                    "imagePullPolicy":
                                        "IfNotPresent",
                                    "name":
                                        "ml-pipeline-visualizationserver",
                                    "ports": [{
                                        "containerPort": 8888
                                    }],
                                    "resources": {
                                        "requests": {
                                            "cpu": "50m",
                                            "memory": "200Mi"
                                        },
                                        "limits": {
                                            "cpu": "500m",
                                            "memory": "1Gi"
                                        },
                                    }
                                }],
                                "serviceAccountName":
                                    "default-editor",
                            },
                        },
                    },
                },
                {
                    "apiVersion": "networking.istio.io/v1alpha3",
                    "kind": "DestinationRule",
                    "metadata": {
                        "name": "ml-pipeline-visualizationserver",
                        "namespace": namespace,
                    },
                    "spec": {
                        "host": "ml-pipeline-visualizationserver",
                        "trafficPolicy": {
                            "tls": {
                                "mode": "ISTIO_MUTUAL"
                            }
                        }
                    }
                },
                {
                    "apiVersion": "security.istio.io/v1beta1",
                    "kind": "AuthorizationPolicy",
                    "metadata": {
                        "name": "ml-pipeline-visualizationserver",
                        "namespace": namespace,
                    },
                    "spec": {
                        "selector": {
                            "matchLabels": {
                                "app": "ml-pipeline-visualizationserver"
                            }
                        },
                        "rules": [{
                            "from": [{
                                "source": {
                                    "principals": ["cluster.local/ns/kubeflow/sa/ml-pipeline"]
                                }
                            }]
                        }]
                    }
                },
                {
                    "apiVersion": "v1",
                    "kind": "Service",
                    "metadata": {
                        "name": "ml-pipeline-visualizationserver",
                        "namespace": namespace,
                    },
                    "spec": {
                        "ports": [{
                            "name": "http",
                            "port": 8888,
                            "protocol": "TCP",
                            "targetPort": 8888,
                        }],
                        "selector": {
                            "app": "ml-pipeline-visualizationserver",
                        },
                    },
                },
                # Artifact fetcher related resources below.
                {
                    "apiVersion": "apps/v1",
                    "kind": "Deployment",
                    "metadata": {
                        "labels": {
                            "app": "ml-pipeline-ui-artifact"
                        },
                        "name": "ml-pipeline-ui-artifact",
                        "namespace": namespace,
                    },
                    "spec": {
                        "selector": {
                            "matchLabels": {
                                "app": "ml-pipeline-ui-artifact"
                            }
                        },
                        "template": {
                            "metadata": {
                                "labels": {
                                    "app": "ml-pipeline-ui-artifact"
                                },
                                "annotations": disable_istio_sidecar and {
                                    "sidecar.istio.io/inject": "false"
                                } or {},
                            },
                            "spec": {
                                "containers": [{
                                    "name":
                                        "ml-pipeline-ui-artifact",
                                    "image": f"{frontend_image}:{frontend_tag}",
                                    "imagePullPolicy":
                                        "IfNotPresent",
                                    "ports": [{
                                        "containerPort": 3000
                                    }],
                                    "env": [
                                        {
                                            "name": "MINIO_ACCESS_KEY",
                                            "valueFrom": {
                                                "secretKeyRef": {
                                                    "key": "accesskey",
                                                    "name": "mlpipeline-minio-artifact"
                                                }
                                            }
                                        },
                                        {
                                            "name": "MINIO_SECRET_KEY",
                                            "valueFrom": {
                                                "secretKeyRef": {
                                                    "key": "secretkey",
                                                    "name": "mlpipeline-minio-artifact"
                                                }
                                            }
                                        }
                                    ],
                                    "resources": {
                                        "requests": {
                                            "cpu": "10m",
                                            "memory": "70Mi"
                                        },
                                        "limits": {
                                            "cpu": "100m",
                                            "memory": "500Mi"
                                        },
                                    }
                                }],
                                "serviceAccountName":
                                    "default-editor"
                            }
                        }
                    }
                },
                {
                    "apiVersion": "v1",
                    "kind": "Service",
                    "metadata": {
                        "name": "ml-pipeline-ui-artifact",
                        "namespace": namespace,
                        "labels": {
                            "app": "ml-pipeline-ui-artifact"
                        }
                    },
                    "spec": {
                        "ports": [{
                            "name":
                                "http",  # name is required to let istio understand request protocol
                            "port": 80,
                            "protocol": "TCP",
                            "targetPort": 3000
                        }],
                        "selector": {
                            "app": "ml-pipeline-ui-artifact"
                        }
                    }
                },
            ]
            print('Received request:\n', json.dumps(parent, sort_keys=True))
            print('Desired resources except secrets:\n', json.dumps(desired_resources, sort_keys=True))
            # Moved after the print argument because this is sensitive data.
            desired_resources.append({
                "apiVersion": "v1",
                "kind": "Secret",
                "metadata": {
                    "name": "mlpipeline-minio-artifact",
                    "namespace": namespace,
                },
                "data": {
                    "accesskey": minio_access_key,
                    "secretkey": minio_secret_key,
                },
            })

            return {"status": desired_status, "children": desired_resources}

        def do_POST(self):
            # Serve the sync() function as a JSON webhook.
            observed = json.loads(
                self.rfile.read(int(self.headers.get("content-length"))))
            desired = self.sync(observed["parent"], observed["children"])

            self.send_response(200)
            self.send_header("Content-type", "application/json")
            self.end_headers()
            self.wfile.write(bytes(json.dumps(desired), 'utf-8'))

    return HTTPServer((url, int(controller_port)), Controller)
def _parse_time(t):
    """Convert a human-readable time expression into seconds from now.

    Uses the module-level ``calendar`` parser (presumably a
    ``parsedatetime.Calendar`` instance -- confirm against the module
    imports). Raises ValueError if *t* cannot be parsed as a datetime
    (parse code != 2).
    """
    global calendar  # NOTE(review): only read here; the global statement is redundant.
    parsed, code = calendar.parse(t)
    if code != 2:
        raise ValueError("Could not parse {}!".format(t))
    parsed = datetime.fromtimestamp(time.mktime(parsed))
    return (parsed - datetime.now()).total_seconds()
def is_lambda_function(obj):
    """Return True if *obj* is an anonymous (lambda) function.

    Checks both the runtime type and the ``<lambda>`` name, since ordinary
    functions share the same underlying type as lambdas.
    """
    return isinstance(obj, LambdaType) and obj.__name__ == "<lambda>"
def check_result_in_fail_condition(fail_condition, result):
    """Return True if *result* is a member of the *fail_condition* collection."""
    return result in fail_condition
def _get_failcondition_check(fail_condition):
if callable(fail_condition):
return fail_condition
elif isinstance(fail_condition, set):
return partial(check_result_in_fail_condition, fail_condition)
else:
return partial(check_result_is_fail_condition, fail_condition) | RedHatQE/wait_for | [
15,
14,
15,
1,
1440750364
] |
def wait_for_decorator(*args, **kwargs):
    """Wrapper for :py:func:`utils.wait.wait_for` that makes it nicer to write testing waits.

    It passes the function decorated to to ``wait_for``

    Example:

    .. code-block:: python

        @wait_for_decorator(num_sec=120)
        def my_waiting_func():
            return do_something()

    You can also pass it without parameters, then it uses ``wait_for``'s defaults:

    .. code-block:: python

        @wait_for_decorator
        def my_waiting_func():
            return do_something()

    Then the result of the waiting is stored in the variable named after the function.
    """
    if not kwargs and len(args) == 1 and callable(args[0]):
        # Bare usage: @wait_for_decorator with no arguments -- args[0] is the
        # decorated function itself, so wait on it immediately.
        return wait_for(args[0])
    else:
        # Parameterized usage: return a decorator that forwards the options.
        def g(f):
            return wait_for(f, *args, **kwargs)
        return g
def __init__(self, time_for_refresh=300, callback=None, *args, **kwargs):
    """Initialize the refresh timer and start it immediately.

    time_for_refresh: seconds between refreshes.
    callback: callable invoked when the interval elapses; defaults to
        :meth:`it_is_time`, which just flips the internal flag.
    Extra positional/keyword arguments are stored for later use by the
    timer machinery.
    """
    self.callback = callback or self.it_is_time
    self.time_for_refresh = time_for_refresh
    self.args = args
    self.kwargs = kwargs
    # Flag polled by callers to see whether a refresh is due.
    self._is_it_time = False
    # NOTE(review): start() comes from the (unseen) base class -- presumably
    # a threading.Timer-style API; confirm against the class definition.
    self.start()
def it_is_time(self):
    """Default timer callback: mark that the refresh interval has elapsed."""
    self._is_it_time = True
def __init__(self, method, url, **kwargs):
    """Test double: perform a real GET against *url* regardless of *method*.

    NOTE(review): *method* and **kwargs are intentionally ignored -- this
    stub always issues a plain GET; confirm that is adequate for the tests
    using it.
    """
    self.response = httpx.get(url)
def __enter__(self, *args, **kwargs):
    """Support use as a context manager; yields the object itself."""
    return self
def test_extension_from_mimetype(tempdir):
    """pull_http derives a file extension (and mimetype) from the response
    content type when the target path has no extension.

    NOTE(review): this test performs live HTTP requests to httpbin.org and
    will fail offline.
    """
    with working_directory(tempdir.path):
        # No path given: filename and extension come from the URL/response.
        files = pull_http({"url": "https://httpbin.org/get"})
        assert files["get.json"]["mimetype"] == "application/json"

        # Extension appended to the supplied path from the content type.
        files = pull_http({"url": "https://httpbin.org/image/png"}, path="image")
        assert files["image.png"]["mimetype"] == "image/png"

        files = pull_http({"url": "https://httpbin.org/html"}, path="content")
        assert files["content.html"]["mimetype"] == "text/html"

        # An unknown extension on the path is kept and yields no mimetype.
        files = pull_http({"url": "https://httpbin.org/html"}, path="foo.bar")
        assert files["foo.bar"]["mimetype"] is None
def test_encrypt_default_noop_secret_engine(self):
    """With the default noop engine, encrypt() just base64-encodes the
    payload with a ``noop$`` prefix; extra context kwargs are accepted."""
    self.assertEqual(encrypt(b"le temps des cerises", yolo=1, fomo=2),
                     "noop$bGUgdGVtcHMgZGVzIGNlcmlzZXM=")
def test_decrypt_default_noop_secret_engine(self):
    """With the default noop engine, decrypt() reverses the ``noop$`` +
    base64 encoding; extra context kwargs are accepted."""
    self.assertEqual(decrypt("noop$bGUgdGVtcHMgZGVzIGNlcmlzZXM=", yolo=1, fomo=2),
                     b"le temps des cerises")
def __init__(self):
    """Build the JavaScript API demo layout: a heading, a script input
    area pre-filled with an alert, and a button that runs the script."""
    super(JSApiExample, self).__init__()

    self._toBeUpdatedFromThread = None
    self._startThread = None
    self._running = Label('')

    self.setSpacing(True)

    javascript = Label("<h3>Run Native JavaScript</h3>",
            Label.CONTENT_XHTML)
    self.addComponent(javascript)

    script = TextArea()
    script.setWidth('100%')
    script.setRows(3)
    script.setValue('alert(\"Hello Muntjac\");')
    self.addComponent(script)

    self.addComponent(Button('Run script', RunListener(self, script)))
def __init__(self, component, script):
    """Remember the demo component and the script text area so the click
    handler can read and execute the script later."""
    self._component = component
    self._script = script
Subsets and Splits