repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool)
---|---|---|---|---|---|---|---|---|---|---
bingopodcast/bingos | bingo_emulator/fun_spot_63/game.py | 1 | 47949 | #!/usr/bin/python
import logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
import procgame.game, sys, os
import procgame.config
import random
import procgame.sound
import pygame
sys.path.insert(0,os.path.pardir)
import bingo_emulator.common.units as units
import bingo_emulator.common.functions as functions
from bingo_emulator.graphics import methods as graphics
from bingo_emulator.graphics.fun_spot_63 import *
class MulticardBingo(procgame.game.Mode):
def __init__(self, game):
super(MulticardBingo, self).__init__(game=game, priority=5)
self.holes = []
self.game.anti_cheat.engage(self.game)
self.startup()
self.game.sound.register_music('motor', "audio/six_card_motor.wav")
self.game.sound.register_music('search', "audio/six_card_search_old.wav")
self.game.sound.register_sound('add', "audio/six_card_add_card.wav")
self.game.sound.register_sound('tilt', "audio/tilt.wav")
self.game.sound.register_sound('step', "audio/step.wav")
self.game.sound.register_sound('eb_search', "audio/EB_Search.wav")
def sw_coin_active(self, sw):
self.game.tilt.disengage()
self.regular_play()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
if self.game.replays > 0 and self.game.selector.position < 6:
self.delay(name="play_replays", delay=0.2, handler=self.play_replays)
def play_replays(self):
if self.game.replays > 0 and self.game.selector.position < 6:
self.delay(name="play", delay=0, handler=self.regular_play)
self.delay(name="display", delay=0.2, handler=graphics.fun_spot_63.display, param=self)
self.delay(name="coin", delay=0.2, handler=self.play_replays)
def sw_startButton_active(self, sw):
if self.game.replays > 0 or self.game.switches.freeplay.is_active():
self.regular_play()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
if self.game.replays > 0 and self.game.selector.position < 6:
self.delay(name="play_replays", delay=0.2, handler=self.play_replays)
def sw_trough4_active_for_1s(self, sw):
if self.game.ball_count.position >= 4:
self.timeout_actions()
def timeout_actions(self):
if (self.game.timer.position < 39):
self.game.timer.step()
self.delay(name="timeout", delay=5.0, handler=self.timeout_actions)
else:
self.game.timer.step()
self.tilt_actions()
def sw_trough8_closed(self, sw):
if self.game.start.status == False:
self.game.ball_count.position -= 1
self.game.returned = True
self.check_lifter_status()
else:
self.check_lifter_status()
def sw_enter_active(self, sw):
if self.game.switches.left.is_active() and self.game.switches.right.is_active():
self.game.end_run_loop()
os.system("/home/nbaldridge/proc/bingo_emulator/start_game.sh fun_spot_63")
def check_shutter(self, start=0):
if start == 1:
if self.game.switches.smRunout.is_active():
if self.game.switches.shutter.is_active():
self.game.coils.shutter.disable()
else:
if self.game.switches.shutter.is_inactive():
if self.game.switches.smRunout.is_active():
self.game.coils.shutter.disable()
def regular_play(self):
self.holes = []
self.game.cu = not self.game.cu
self.cancel_delayed(name="search")
self.cancel_delayed(name="card1_replay_step_up")
self.cancel_delayed(name="card2_replay_step_up")
self.cancel_delayed(name="card3_replay_step_up")
self.cancel_delayed(name="card4_replay_step_up")
self.cancel_delayed(name="card5_replay_step_up")
self.cancel_delayed(name="card6_replay_step_up")
self.cancel_delayed(name="timeout")
self.game.search_index.disengage()
self.game.coils.counter.pulse()
self.game.returned = False
self.game.sound.stop('add')
self.game.sound.play('add')
if self.game.start.status == True:
self.game.selector.step()
if self.game.cu == 1:
self.game.spotting.step()
if self.game.switches.shutter.is_inactive():
self.game.coils.shutter.enable()
self.replay_step_down()
self.game.ball_count.reset()
self.check_lifter_status()
self.game.start.engage(self.game)
else:
self.game.card1_replay_counter.reset()
self.game.card2_replay_counter.reset()
self.game.card3_replay_counter.reset()
self.game.card4_replay_counter.reset()
self.game.card5_replay_counter.reset()
self.game.card6_replay_counter.reset()
self.game.timer.reset()
self.game.start.engage(self.game)
self.game.selector.reset()
self.game.ball_count.reset()
self.game.sound.play_music('motor', -1)
self.regular_play()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
self.game.tilt.disengage()
def check_lifter_status(self):
if self.game.tilt.status == False:
if self.game.switches.trough8.is_closed() and self.game.switches.trough5.is_open() and self.game.switches.trough4.is_open() and self.game.switches.trough3.is_closed() and self.game.switches.trough2.is_closed():
if self.game.switches.shooter.is_open():
self.game.coils.lifter.enable()
self.game.returned = False
else:
if self.game.start.status == False:
if self.game.switches.trough4.is_open():
if self.game.switches.shooter.is_open():
if self.game.switches.gate.is_closed():
self.game.coils.lifter.enable()
else:
if self.game.returned == True and self.game.ball_count.position == 4:
if self.game.switches.shooter.is_open():
self.game.coils.lifter.enable()
self.game.returned = False
def sw_smRunout_active_for_1ms(self, sw):
if self.game.start.status == True:
self.check_shutter(1)
else:
self.check_shutter()
def sw_trough1_closed(self, sw):
if self.game.switches.shooter.is_closed():
self.game.coils.lifter.disable()
def sw_ballLift_active_for_500ms(self, sw):
if self.game.tilt.status == False:
if self.game.switches.shooter.is_open():
if self.game.ball_count.position < 5:
self.game.coils.lifter.enable()
def sw_gate_inactive_for_1ms(self, sw):
self.game.start.disengage()
if self.game.switches.shutter.is_active():
self.game.coils.shutter.enable()
self.game.ball_count.step()
if self.game.ball_count.position == 4:
self.game.sound.play('tilt')
self.game.sound.play('tilt')
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
if self.game.ball_count.position <= 4:
self.check_lifter_status()
    # This is really nasty, but it is how we render graphics for each individual hole.
    # Numbers are added to (or removed from) a list. In this way, I can re-use the same
    # routine even for games with ball return functions, like Surf Club.
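    # Illustrative example (values taken from check_spotting below): with the
    # spotting stepper resting on position 4 and a ball already in hole 5,
    # self.holes ends up as [5, 20] -- the physical ball plus spotted number 20.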
def check_spotting(self):
if self.game.spotting.position == 0:
if 22 not in self.holes:
self.holes.append(22)
if self.game.spotting.position == 4:
if 20 not in self.holes:
self.holes.append(20)
elif self.game.spotting.position == 6:
if 20 not in self.holes:
self.holes.append(20)
elif self.game.spotting.position == 9:
if 15 not in self.holes:
self.holes.append(15)
elif self.game.spotting.position == 11:
if 22 not in self.holes:
self.holes.append(22)
elif self.game.spotting.position == 13:
if 17 not in self.holes:
self.holes.append(17)
elif self.game.spotting.position == 16:
if 20 not in self.holes:
self.holes.append(20)
elif self.game.spotting.position == 18:
if 21 not in self.holes:
self.holes.append(21)
elif self.game.spotting.position == 23:
if 16 not in self.holes:
self.holes.append(16)
elif self.game.spotting.position == 26:
if 21 not in self.holes:
self.holes.append(21)
elif self.game.spotting.position == 28:
if 17 not in self.holes:
self.holes.append(17)
elif self.game.spotting.position == 30:
if 16 not in self.holes:
self.holes.append(16)
elif self.game.spotting.position == 34:
if 15 not in self.holes:
self.holes.append(15)
elif self.game.spotting.position == 35:
if 22 not in self.holes:
self.holes.append(22)
elif self.game.spotting.position == 39:
if 22 not in self.holes:
self.holes.append(22)
elif self.game.spotting.position == 41:
if 21 not in self.holes:
self.holes.append(21)
elif self.game.spotting.position == 42:
if 17 not in self.holes:
self.holes.append(17)
elif self.game.spotting.position == 46:
if 21 not in self.holes:
self.holes.append(21)
elif self.game.spotting.position == 47:
if 15 not in self.holes:
self.holes.append(15)
elif self.game.spotting.position == 49:
if 16 not in self.holes:
self.holes.append(16)
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole1_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(1)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole2_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(2)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole3_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(3)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole4_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(4)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole5_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(5)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole6_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(6)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole7_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(7)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole8_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(8)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole9_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(9)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole10_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(10)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole11_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(11)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole12_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(12)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole13_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(13)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole14_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(14)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole15_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(15)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole16_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(16)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole17_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(17)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole18_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(18)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole19_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(19)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole20_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(20)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole21_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(21)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole22_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(22)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole23_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(23)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole24_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(24)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_hole25_active_for_40ms(self, sw):
if self.game.tilt.status == False and self.game.start.status == False:
self.holes.append(25)
if self.game.ball_count.position >= 4:
if self.game.search_index.status == False:
self.search()
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_replayReset_active(self, sw):
self.game.anti_cheat.disengage()
self.holes = []
graphics.fun_spot_63.display(self)
self.tilt_actions()
self.replay_step_down(self.game.replays)
def tilt_actions(self):
self.game.start.disengage()
self.cancel_delayed(name="replay_reset")
self.cancel_delayed(name="card1_replay_step_up")
self.cancel_delayed(name="card2_replay_step_up")
self.cancel_delayed(name="card3_replay_step_up")
self.cancel_delayed(name="card4_replay_step_up")
self.cancel_delayed(name="card5_replay_step_up")
self.cancel_delayed(name="card6_replay_step_up")
self.cancel_delayed(name="timeout")
self.game.search_index.disengage()
if self.game.ball_count.position == 0:
if self.game.switches.shutter.is_active():
self.game.coils.shutter.enable()
self.holes = []
self.game.selector.reset()
self.game.ball_count.reset()
        self.game.anti_cheat.engage(self.game)
self.game.tilt.engage(self.game)
self.game.sound.stop_music()
self.game.sound.play('tilt')
# displays "Tilt" on the backglass, you have to recoin.
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
def sw_tilt_active(self, sw):
if self.game.tilt.status == False:
self.tilt_actions()
def replay_step_down(self, number=0):
if number > 0:
if number > 1:
self.game.replays -= 1
self.game.coils.registerDown.pulse()
number -= 1
graphics.fun_spot_63.display(self)
self.delay(name="replay_reset", delay=0.13, handler=self.replay_step_down, param=number)
elif number == 1:
self.game.replays -= 1
self.game.coils.registerDown.pulse()
number -= 1
graphics.fun_spot_63.display(self)
self.cancel_delayed(name="replay_reset")
else:
if self.game.replays > 0:
self.game.replays -= 1
self.delay(name="display", delay=0.1, handler=graphics.fun_spot_63.display, param=self)
self.game.coils.registerDown.pulse()
def replay_step_up(self):
if self.game.replays < 200:
self.game.replays += 1
self.game.coils.registerUp.pulse()
graphics.fun_spot_63.display(self)
def search(self):
# The search workflow/logic will determine if you actually have a winner, but it is a bit tricky.
# if the ball is in a particular hole, the search relays need to click and/or clack, and
# when you have at least three going at once, it should latch on the search index and score.
# This scoring is tempered by the selector disc. You have to have the card enabled that you're
# winning on. This whole process will have to happen on a rotational basis. The search should really
# begin immediately upon the first ball landing in the hole.
# I suspect that the best, fastest way to complete the search is actually to reimplement the mechanical
# search activity. For each revolution of the search disc (which happens about every 5-7 seconds), the
# game will activate() each search relay for each 'hot' rivet on the search disc. This can be on a different
# wiper finger for each set of rivets on the search disc.
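        # Sketch of the loop below: iterations 0-50 read search disc 1 and
        # iterations 51-99 read search disc 2 (its positions are offset by 50
        # into the shared table in closed_search_relays), spinning the
        # corresponding disc once per iteration.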
self.game.sound.stop_music()
self.game.sound.play_music('search', -1)
if self.game.search_index.status == False:
for i in range(0, 100):
if i <= 50:
self.r = self.closed_search_relays(self.game.searchdisc.position)
self.game.searchdisc.spin()
if i >= 51:
self.r = self.closed_search_relays(self.game.searchdisc2.position + 50)
self.game.searchdisc2.spin()
self.wipers = self.r[0]
self.card = self.r[1]
self.super_line = self.r[2]
            # From here, I need to determine, based on the value of r, whether to latch the search index and score. For Fun Spot '63,
            # I need to determine the best winner on each card. To do this, I must compare the position of the replay counter before
            # determining the winner. Reminder that my replay counters are a 1:1 representation.
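            # Worked example using rivet row 1 of card 1: wipers == {5:1, 1:2,
            # 9:3, 25:4, 3:5}.  Balls in holes 5, 1 and 9 match relays
            # [1, 2, 3], count_seq() returns 3, and find_winner() is called
            # with a three-in-a-row on card 1.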
self.match = []
for key in self.wipers:
for number in self.holes:
if number == key:
self.match.append(self.wipers[key])
relays = sorted(set(self.match))
#TODO Play sound for each relay closure.
s = functions.count_seq(relays)
if self.game.selector.position >= self.card:
if s >= 3:
self.find_winner(s, self.card, self.super_line)
break
def find_winner(self, relays, card, super_line):
if self.game.search_index.status == False and self.game.replays < 200:
#Implemented per S/I card 20-100
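            # Award schedule per card: 3-in-line pays 4 replays (12 on the
            # Super Line), 4-in-line pays 20 (60), 5-in-line pays 100; each
            # card only steps its counter up to the target, so a level that
            # has already been paid is not paid again.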
if card == 1:
if relays == 3:
if self.super_line == 1:
if self.game.card1_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card1_replay_step_up(12 - self.game.card1_replay_counter.position)
else:
if self.game.card1_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card1_replay_step_up(4 - self.game.card1_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card1_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card1_replay_step_up(60 - self.game.card1_replay_counter.position)
else:
if self.game.card1_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card1_replay_step_up(20 - self.game.card1_replay_counter.position)
if relays == 5:
if self.game.card1_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card1_replay_step_up(100 - self.game.card1_replay_counter.position)
if card == 2:
if relays == 3:
if self.super_line == 1:
if self.game.card2_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card2_replay_step_up(12 - self.game.card2_replay_counter.position)
else:
if self.game.card2_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card2_replay_step_up(4 - self.game.card2_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card2_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card2_replay_step_up(60 - self.game.card2_replay_counter.position)
else:
if self.game.card2_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card2_replay_step_up(20 - self.game.card2_replay_counter.position)
if relays == 5:
if self.game.card2_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card2_replay_step_up(100 - self.game.card2_replay_counter.position)
if card == 3:
if relays == 3:
if self.super_line == 1:
if self.game.card3_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card3_replay_step_up(12 - self.game.card3_replay_counter.position)
else:
if self.game.card3_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card3_replay_step_up(4 - self.game.card3_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card3_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card3_replay_step_up(60 - self.game.card3_replay_counter.position)
else:
if self.game.card3_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card3_replay_step_up(20 - self.game.card3_replay_counter.position)
if relays == 5:
if self.game.card3_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card3_replay_step_up(100 - self.game.card3_replay_counter.position)
if card == 4:
if relays == 3:
if self.super_line == 1:
if self.game.card4_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card4_replay_step_up(12 - self.game.card4_replay_counter.position)
else:
if self.game.card4_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card4_replay_step_up(4 - self.game.card4_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card4_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card4_replay_step_up(60 - self.game.card4_replay_counter.position)
else:
if self.game.card4_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card4_replay_step_up(20 - self.game.card4_replay_counter.position)
if relays == 5:
if self.game.card4_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card4_replay_step_up(100 - self.game.card4_replay_counter.position)
if card == 5:
if relays == 3:
if self.super_line == 1:
if self.game.card5_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card5_replay_step_up(12 - self.game.card5_replay_counter.position)
else:
if self.game.card5_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card5_replay_step_up(4 - self.game.card5_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card5_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card5_replay_step_up(60 - self.game.card5_replay_counter.position)
else:
if self.game.card5_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card5_replay_step_up(20 - self.game.card5_replay_counter.position)
if relays == 5:
if self.game.card5_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card5_replay_step_up(100 - self.game.card5_replay_counter.position)
if card == 6:
if relays == 3:
if self.super_line == 1:
if self.game.card6_replay_counter.position < 12:
self.game.search_index.engage(self.game)
self.card6_replay_step_up(12 - self.game.card6_replay_counter.position)
else:
if self.game.card6_replay_counter.position < 4:
self.game.search_index.engage(self.game)
self.card6_replay_step_up(4 - self.game.card6_replay_counter.position)
if relays == 4:
if self.super_line == 1:
if self.game.card6_replay_counter.position < 60:
self.game.search_index.engage(self.game)
self.card6_replay_step_up(60 - self.game.card6_replay_counter.position)
else:
if self.game.card6_replay_counter.position < 20:
self.game.search_index.engage(self.game)
self.card6_replay_step_up(20 - self.game.card6_replay_counter.position)
if relays == 5:
if self.game.card6_replay_counter.position < 100:
self.game.search_index.engage(self.game)
self.card6_replay_step_up(100 - self.game.card6_replay_counter.position)
def card1_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card1_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card1_replay_step_up", delay=0.1, handler=self.card1_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card1_replay_step_up")
self.search()
def card2_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card2_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card2_replay_step_up", delay=0.1, handler=self.card2_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card2_replay_step_up")
self.search()
def card3_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card3_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card3_replay_step_up", delay=0.1, handler=self.card3_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card3_replay_step_up")
self.search()
def card4_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card4_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card4_replay_step_up", delay=0.1, handler=self.card4_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card4_replay_step_up")
self.search()
def card5_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card5_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card5_replay_step_up", delay=0.1, handler=self.card5_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card5_replay_step_up")
self.search()
def card6_replay_step_up(self, number):
self.game.sound.stop_music()
if number >= 1:
self.game.card6_replay_counter.step()
number -= 1
self.replay_step_up()
if self.game.replays == 200:
number = 0
self.delay(name="card6_replay_step_up", delay=0.1, handler=self.card6_replay_step_up, param=number)
else:
self.game.search_index.disengage()
self.cancel_delayed(name="card6_replay_step_up")
self.search()
def closed_search_relays(self, rivets):
# This function is critical, as it will determine which card is returned, etc. I need to check both the position of the
# replay counter for the card, as well as the selector unit to ensure that the card is selected. We will get a row back
# that has the numbers on the position which will return the search relay connected. When three out of the five relays
# are connected, we get a winner!
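        # For example, closed_search_relays(1) returns ({5:1, 1:2, 9:3, 25:4,
        # 3:5}, 1, 0): the first horizontal row of card 1, no Super Line.
        # Blank positions such as self.pos[13] through self.pos[17] return an
        # empty wiper dict and can never contribute a winner.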
self.pos = {}
# Card 1
self.pos[0] = {}
self.pos[1] = {5:1, 1:2, 9:3, 25:4, 3:5}
self.pos[2] = {8:1, 22:2, 10:3, 19:4, 7:5}
self.pos[3] = {6:1, 18:2, 16:3, 11:4, 17:5}
self.pos[4] = {24:1, 21:2, 14:3, 20:4, 13:5}
self.pos[5] = {12:1, 23:2, 2:3, 4:4, 15:5}
self.pos[6] = {5:1, 8:2, 6:3, 24:4, 12:5}
self.pos[7] = {1:1, 22:2, 18:3, 21:4, 23:5}
self.pos[8] = {9:1, 10:2, 16:3, 14:4, 2:5}
self.pos[9] = {25:1, 19:2, 11:3, 20:4, 4:5}
self.pos[10] = {3:1, 7:2, 17:3, 13:4, 15:5}
self.pos[11] = {5:1, 22:2, 16:3, 20:4, 15:5}
self.pos[12] = {3:1, 19:2, 16:3, 21:4, 12:5}
self.pos[13] = {}
self.pos[14] = {}
self.pos[15] = {}
self.pos[16] = {}
self.pos[17] = {}
        # There are five blank positions in between cards. Early games have fewer to search!
# Card 2
self.pos[18] = {9:1, 24:2, 16:3, 4:4, 6:5}
self.pos[19] = {13:1, 19:2, 14:3, 20:4, 25:5}
self.pos[20] = {2:1, 18:2, 15:3, 12:4, 17:5}
self.pos[21] = {1:1, 22:2, 11:3, 21:4, 8:5}
self.pos[22] = {10:1, 7:2, 5:3, 23:4, 3:5}
self.pos[23] = {9:1, 13:2, 2:3, 1:4, 10:5}
self.pos[24] = {24:1, 19:2, 18:3, 22:4, 7:5}
self.pos[25] = {16:1, 14:2, 15:3, 11:4, 5:5}
self.pos[26] = {4:1, 20:2, 12:3, 21:4, 23:5}
self.pos[27] = {6:1, 25:2, 17:3, 8:4, 3:5}
self.pos[28] = {9:1, 19:2, 15:3, 21:4, 3:5}
self.pos[29] = {6:1, 20:2, 15:3, 22:4, 10:5}
self.pos[30] = {}
self.pos[31] = {}
self.pos[32] = {}
self.pos[33] = {}
self.pos[34] = {}
# Another five blank positions. Can you believe it?
# Card 3
self.pos[35] = {3:1, 7:2, 10:3, 4:4, 9:5}
self.pos[36] = {24:1, 21:2, 18:3, 22:4, 8:5}
self.pos[37] = {15:1, 14:2, 17:3, 11:4, 2:5}
self.pos[38] = {13:1, 20:2, 12:3, 19:4, 23:5}
self.pos[39] = {6:1, 25:2, 16:3, 1:4, 5:5}
self.pos[40] = {3:1, 24:2, 15:3, 13:4, 6:5}
self.pos[41] = {7:1, 21:2, 14:3, 20:4, 25:5}
self.pos[42] = {10:1, 18:2, 17:3, 12:4, 16:5}
self.pos[43] = {4:1, 22:2, 11:3, 19:4, 1:5}
self.pos[44] = {9:1, 8:2, 2:3, 23:4, 5:5}
self.pos[45] = {3:1, 21:2, 17:3, 19:4, 5:5}
self.pos[46] = {9:1, 22:2, 17:3, 20:4, 6:5}
self.pos[47] = {}
self.pos[48] = {}
self.pos[49] = {}
self.pos[50] = {}
# Start of the second search disc modeled as part
# of the same array for simplicity. Parent function
# calls this subset.
# Card #4
self.pos[51] = {6:1, 7:2, 3:3, 24:4, 1:5}
self.pos[52] = {23:1, 14:2, 12:3, 18:4, 2:5}
self.pos[53] = {5:1, 19:2, 20:3, 16:4, 22:5}
self.pos[54] = {11:1, 17:2, 9:3, 15:4, 25:5}
self.pos[55] = {10:1, 13:2, 21:3, 4:4, 8:5}
self.pos[56] = {6:1, 23:2, 5:3, 11:4, 10:5}
self.pos[57] = {7:1, 14:2, 19:3, 17:4, 13:5}
self.pos[58] = {3:1, 12:2, 20:3, 9:4, 21:5}
self.pos[59] = {24:1, 18:2, 16:3, 15:4, 4:5}
self.pos[60] = {1:1, 2:2, 22:3, 25:4, 8:5}
self.pos[61] = {6:1, 14:2, 20:3, 15:4, 8:5}
self.pos[62] = {1:1, 18:2, 20:3, 17:4, 10:5}
self.pos[63] = {}
self.pos[64] = {}
self.pos[65] = {}
self.pos[66] = {}
self.pos[67] = {}
# Card #5
self.pos[68] = {8:1, 23:2, 10:3, 13:4, 4:5}
self.pos[69] = {2:1, 17:2, 16:3, 14:4, 24:5}
self.pos[70] = {20:1, 12:2, 22:3, 19:4, 5:5}
self.pos[71] = {25:1, 15:2, 9:3, 18:4, 11:5}
self.pos[72] = {1:1, 7:2, 21:3, 3:4, 6:5}
self.pos[73] = {8:1, 2:2, 20:3, 25:4, 1:5}
self.pos[74] = {23:1, 17:2, 12:3, 15:4, 7:5}
self.pos[75] = {10:1, 16:2, 22:3, 9:4, 21:5}
self.pos[76] = {13:1, 14:2, 19:3, 18:4, 3:5}
self.pos[77] = {4:1, 24:2, 5:3, 11:4, 6:5}
self.pos[78] = {8:1, 17:2, 22:3, 18:4, 6:5}
self.pos[79] = {4:1, 14:2, 22:3, 15:4, 1:5}
self.pos[80] = {}
self.pos[81] = {}
self.pos[82] = {}
self.pos[83] = {}
self.pos[84] = {}
# Card #6
self.pos[85] = {4:1, 6:2, 1:3, 23:4, 5:5}
self.pos[86] = {25:1, 15:2, 3:3, 17:4, 13:5}
self.pos[87] = {9:1, 19:2, 21:3, 12:4, 20:5}
self.pos[88] = {10:1, 18:2, 16:3, 14:4, 8:5}
self.pos[89] = {7:1, 24:2, 22:3, 2:4, 11:5}
self.pos[90] = {4:1, 25:2, 9:3, 10:4, 7:5}
self.pos[91] = {6:1, 15:2, 19:3, 18:4, 24:5}
self.pos[92] = {1:1, 3:2, 21:3, 16:4, 22:5}
self.pos[93] = {23:1, 17:2, 12:3, 14:4, 2:5}
self.pos[94] = {5:1, 13:2, 20:3, 8:4, 11:5}
self.pos[95] = {4:1, 15:2, 21:3, 14:4, 11:5}
self.pos[96] = {5:1, 17:2, 21:3, 18:4, 7:5}
self.pos[97] = {}
self.pos[98] = {}
self.pos[99] = {}
self.pos[100] = {}
super_line = 0
if rivets in range(0,18):
card = 1
if rivets in range(18,35):
card = 2
if rivets in range(35,50):
card = 3
if rivets in range(50,68):
card = 4
if rivets in range(68,85):
card = 5
if rivets in range(85,100):
card = 6
if rivets == 4:
super_line = 1
elif rivets == 21:
super_line = 1
elif rivets == 38:
super_line = 1
elif rivets == 54:
super_line = 1
elif rivets == 71:
super_line = 1
elif rivets == 88:
super_line = 1
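        # The Super Line rivets (4, 21, 38, 54, 71, 88) appear to correspond
        # to the fourth horizontal row of each of the six cards; those rows
        # score at the higher replay values in find_winner.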
return (self.pos[rivets], card, super_line)
# Define reset as the knock-off, anti-cheat relay disabled, and replay reset enabled. Motors turn while credits are knocked off.
# When meter reaches zero and the zero limit switch is hit, turn off motor sound and leave backglass gi on, but with tilt displayed.
def startup(self):
self.tilt_actions()
class FunSpot63(procgame.game.BasicGame):
""" Fun Spot '63 was an Ohio Dime Game without a replay button """
def __init__(self, machine_type):
super(FunSpot63, self).__init__(machine_type)
pygame.mixer.pre_init(44100,-16,2,512)
self.sound = procgame.sound.SoundController(self)
self.sound.set_volume(1.0)
# NOTE: trough_count only counts the number of switches present in the trough. It does _not_ count
# the number of balls present. In this game, there should be 8 balls.
self.trough_count = 6
# Subclass my units unique to this game - modifications must be made to set up mixers and steppers unique to the game
# NOTE: 'top' positions are indexed using a 0 index, so the top on a 24 position unit is actually 23.
self.searchdisc = units.Search("searchdisc", 49)
self.searchdisc2 = units.Search("searchdisc2", 49)
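        # A 'top' of 49 gives each search disc 50 positions (0-49); together
        # the two discs cover rivet positions 0-99 as consulted in
        # closed_search_relays.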
        #Search relays
self.s1 = units.Relay("s1")
self.s2 = units.Relay("s2")
self.s3 = units.Relay("s3")
self.s4 = units.Relay("s4")
self.s5 = units.Relay("s5")
self.search_index = units.Relay("search_index")
#Spotting disc in Lotta Fun actually keeps track of spotted numbers
self.spotting = units.Stepper("spotting", 49, "Lotta Fun", "continuous")
self.spotting.position = random.randint(0,49)
#Replay Counter
self.card1_replay_counter = units.Stepper("card1_replay_counter", 200)
self.card2_replay_counter = units.Stepper("card2_replay_counter", 200)
self.card3_replay_counter = units.Stepper("card3_replay_counter", 200)
self.card4_replay_counter = units.Stepper("card4_replay_counter", 200)
self.card5_replay_counter = units.Stepper("card5_replay_counter", 200)
self.card6_replay_counter = units.Stepper("card6_replay_counter", 200)
#Initialize stepper units used to keep track of features or timing.
self.selector = units.Stepper("selector", 6)
self.timer = units.Stepper("timer", 40)
self.ball_count = units.Stepper("ball_count", 7)
#Check for status of the replay register zero switch. If positive
#and machine is just powered on, this will zero out the replays.
self.replay_reset = units.Relay("replay_reset")
# Now, the control unit can be in one of two positions, essentially.
# This alternates by coin, and is used to portion the Spotted Numbers.
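        # regular_play() toggles cu on every coin/replay and only steps the
        # spotting unit when cu is truthy, so spotted numbers advance on
        # alternate plays.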
self.cu = 1
#When engage()d, light 6v circuit, and enable game features, scoring,
#etc. Disengage()d means that the machine is 'soft' tilted.
self.anti_cheat = units.Relay("anti_cheat")
#When engage()d, spin.
self.start = units.Relay("start")
#Tilt is separate from anti-cheat in that the trip will move the shutter
#when the game is tilted with 1st ball in the lane. Also prevents you
#from picking back up by killing the anti-cheat. Can be engaged by
#tilt bob, slam tilt switches, or timer at 39th step.
#Immediately kills motors.
self.tilt = units.Relay("tilt")
self.replays = 0
self.returned = False
def reset(self):
super(FunSpot63, self).reset()
self.logger = logging.getLogger('game')
self.load_config('bingo.yaml')
main_mode = MulticardBingo(self)
self.modes.add(main_mode)
game = FunSpot63(machine_type='pdb')
game.reset()
game.run_loop()
| gpl-3.0 | 1,453,764,986,287,668,500 | 46.380435 | 222 | 0.550981 | false |
mrakgr/futhark | examples/life/quadlife_alt.py | 1 | 55476 | import sys
import numpy as np
import ctypes as ct
import pyopencl as cl
import pyopencl.array
import time
import argparse
FUT_BLOCK_DIM = "16"
cl_group_size = np.int32(512)
synchronous = False
fut_opencl_src = """typedef char int8_t;
typedef short int16_t;
typedef int int32_t;
typedef long int64_t;
typedef uchar uint8_t;
typedef ushort uint16_t;
typedef uint uint32_t;
typedef ulong uint64_t;
static inline int8_t add8(int8_t x, int8_t y)
{
return x + y;
}
static inline int16_t add16(int16_t x, int16_t y)
{
return x + y;
}
static inline int32_t add32(int32_t x, int32_t y)
{
return x + y;
}
static inline int64_t add64(int64_t x, int64_t y)
{
return x + y;
}
static inline int8_t sub8(int8_t x, int8_t y)
{
return x - y;
}
static inline int16_t sub16(int16_t x, int16_t y)
{
return x - y;
}
static inline int32_t sub32(int32_t x, int32_t y)
{
return x - y;
}
static inline int64_t sub64(int64_t x, int64_t y)
{
return x - y;
}
static inline int8_t mul8(int8_t x, int8_t y)
{
return x * y;
}
static inline int16_t mul16(int16_t x, int16_t y)
{
return x * y;
}
static inline int32_t mul32(int32_t x, int32_t y)
{
return x * y;
}
static inline int64_t mul64(int64_t x, int64_t y)
{
return x * y;
}
static inline uint8_t udiv8(uint8_t x, uint8_t y)
{
return x / y;
}
static inline uint16_t udiv16(uint16_t x, uint16_t y)
{
return x / y;
}
static inline uint32_t udiv32(uint32_t x, uint32_t y)
{
return x / y;
}
static inline uint64_t udiv64(uint64_t x, uint64_t y)
{
return x / y;
}
static inline uint8_t umod8(uint8_t x, uint8_t y)
{
return x % y;
}
static inline uint16_t umod16(uint16_t x, uint16_t y)
{
return x % y;
}
static inline uint32_t umod32(uint32_t x, uint32_t y)
{
return x % y;
}
static inline uint64_t umod64(uint64_t x, uint64_t y)
{
return x % y;
}
static inline int8_t sdiv8(int8_t x, int8_t y)
{
int8_t q = x / y;
int8_t r = x % y;
return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0);
}
static inline int16_t sdiv16(int16_t x, int16_t y)
{
int16_t q = x / y;
int16_t r = x % y;
return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0);
}
static inline int32_t sdiv32(int32_t x, int32_t y)
{
int32_t q = x / y;
int32_t r = x % y;
return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0);
}
static inline int64_t sdiv64(int64_t x, int64_t y)
{
int64_t q = x / y;
int64_t r = x % y;
return q - ((r != 0 && r < 0 != y < 0) ? 1 : 0);
}
static inline int8_t smod8(int8_t x, int8_t y)
{
int8_t r = x % y;
return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y);
}
static inline int16_t smod16(int16_t x, int16_t y)
{
int16_t r = x % y;
return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y);
}
static inline int32_t smod32(int32_t x, int32_t y)
{
int32_t r = x % y;
return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y);
}
static inline int64_t smod64(int64_t x, int64_t y)
{
int64_t r = x % y;
return r + (r == 0 || (x > 0 && y > 0) || (x < 0 && y < 0) ? 0 : y);
}
static inline int8_t squot8(int8_t x, int8_t y)
{
return x / y;
}
static inline int16_t squot16(int16_t x, int16_t y)
{
return x / y;
}
static inline int32_t squot32(int32_t x, int32_t y)
{
return x / y;
}
static inline int64_t squot64(int64_t x, int64_t y)
{
return x / y;
}
static inline int8_t srem8(int8_t x, int8_t y)
{
return x % y;
}
static inline int16_t srem16(int16_t x, int16_t y)
{
return x % y;
}
static inline int32_t srem32(int32_t x, int32_t y)
{
return x % y;
}
static inline int64_t srem64(int64_t x, int64_t y)
{
return x % y;
}
static inline uint8_t shl8(uint8_t x, uint8_t y)
{
return x << y;
}
static inline uint16_t shl16(uint16_t x, uint16_t y)
{
return x << y;
}
static inline uint32_t shl32(uint32_t x, uint32_t y)
{
return x << y;
}
static inline uint64_t shl64(uint64_t x, uint64_t y)
{
return x << y;
}
static inline uint8_t lshr8(uint8_t x, uint8_t y)
{
return x >> y;
}
static inline uint16_t lshr16(uint16_t x, uint16_t y)
{
return x >> y;
}
static inline uint32_t lshr32(uint32_t x, uint32_t y)
{
return x >> y;
}
static inline uint64_t lshr64(uint64_t x, uint64_t y)
{
return x >> y;
}
static inline int8_t ashr8(int8_t x, int8_t y)
{
return x >> y;
}
static inline int16_t ashr16(int16_t x, int16_t y)
{
return x >> y;
}
static inline int32_t ashr32(int32_t x, int32_t y)
{
return x >> y;
}
static inline int64_t ashr64(int64_t x, int64_t y)
{
return x >> y;
}
static inline uint8_t and8(uint8_t x, uint8_t y)
{
return x & y;
}
static inline uint16_t and16(uint16_t x, uint16_t y)
{
return x & y;
}
static inline uint32_t and32(uint32_t x, uint32_t y)
{
return x & y;
}
static inline uint64_t and64(uint64_t x, uint64_t y)
{
return x & y;
}
static inline uint8_t or8(uint8_t x, uint8_t y)
{
return x | y;
}
static inline uint16_t or16(uint16_t x, uint16_t y)
{
return x | y;
}
static inline uint32_t or32(uint32_t x, uint32_t y)
{
return x | y;
}
static inline uint64_t or64(uint64_t x, uint64_t y)
{
return x | y;
}
static inline uint8_t xor8(uint8_t x, uint8_t y)
{
return x ^ y;
}
static inline uint16_t xor16(uint16_t x, uint16_t y)
{
return x ^ y;
}
static inline uint32_t xor32(uint32_t x, uint32_t y)
{
return x ^ y;
}
static inline uint64_t xor64(uint64_t x, uint64_t y)
{
return x ^ y;
}
static inline char ult8(uint8_t x, uint8_t y)
{
return x < y;
}
static inline char ult16(uint16_t x, uint16_t y)
{
return x < y;
}
static inline char ult32(uint32_t x, uint32_t y)
{
return x < y;
}
static inline char ult64(uint64_t x, uint64_t y)
{
return x < y;
}
static inline char ule8(uint8_t x, uint8_t y)
{
return x <= y;
}
static inline char ule16(uint16_t x, uint16_t y)
{
return x <= y;
}
static inline char ule32(uint32_t x, uint32_t y)
{
return x <= y;
}
static inline char ule64(uint64_t x, uint64_t y)
{
return x <= y;
}
static inline char slt8(int8_t x, int8_t y)
{
return x < y;
}
static inline char slt16(int16_t x, int16_t y)
{
return x < y;
}
static inline char slt32(int32_t x, int32_t y)
{
return x < y;
}
static inline char slt64(int64_t x, int64_t y)
{
return x < y;
}
static inline char sle8(int8_t x, int8_t y)
{
return x <= y;
}
static inline char sle16(int16_t x, int16_t y)
{
return x <= y;
}
static inline char sle32(int32_t x, int32_t y)
{
return x <= y;
}
static inline char sle64(int64_t x, int64_t y)
{
return x <= y;
}
static inline int8_t pow8(int8_t x, int8_t y)
{
int8_t res = 1, rem = y;
while (rem != 0) {
if (rem & 1)
res *= x;
rem >>= 1;
x *= x;
}
return res;
}
static inline int16_t pow16(int16_t x, int16_t y)
{
int16_t res = 1, rem = y;
while (rem != 0) {
if (rem & 1)
res *= x;
rem >>= 1;
x *= x;
}
return res;
}
static inline int32_t pow32(int32_t x, int32_t y)
{
int32_t res = 1, rem = y;
while (rem != 0) {
if (rem & 1)
res *= x;
rem >>= 1;
x *= x;
}
return res;
}
static inline int64_t pow64(int64_t x, int64_t y)
{
int64_t res = 1, rem = y;
while (rem != 0) {
if (rem & 1)
res *= x;
rem >>= 1;
x *= x;
}
return res;
}
static inline int8_t sext_i8_i8(int8_t x)
{
return x;
}
static inline int16_t sext_i8_i16(int8_t x)
{
return x;
}
static inline int32_t sext_i8_i32(int8_t x)
{
return x;
}
static inline int64_t sext_i8_i64(int8_t x)
{
return x;
}
static inline int8_t sext_i16_i8(int16_t x)
{
return x;
}
static inline int16_t sext_i16_i16(int16_t x)
{
return x;
}
static inline int32_t sext_i16_i32(int16_t x)
{
return x;
}
static inline int64_t sext_i16_i64(int16_t x)
{
return x;
}
static inline int8_t sext_i32_i8(int32_t x)
{
return x;
}
static inline int16_t sext_i32_i16(int32_t x)
{
return x;
}
static inline int32_t sext_i32_i32(int32_t x)
{
return x;
}
static inline int64_t sext_i32_i64(int32_t x)
{
return x;
}
static inline int8_t sext_i64_i8(int64_t x)
{
return x;
}
static inline int16_t sext_i64_i16(int64_t x)
{
return x;
}
static inline int32_t sext_i64_i32(int64_t x)
{
return x;
}
static inline int64_t sext_i64_i64(int64_t x)
{
return x;
}
static inline uint8_t zext_i8_i8(uint8_t x)
{
return x;
}
static inline uint16_t zext_i8_i16(uint8_t x)
{
return x;
}
static inline uint32_t zext_i8_i32(uint8_t x)
{
return x;
}
static inline uint64_t zext_i8_i64(uint8_t x)
{
return x;
}
static inline uint8_t zext_i16_i8(uint16_t x)
{
return x;
}
static inline uint16_t zext_i16_i16(uint16_t x)
{
return x;
}
static inline uint32_t zext_i16_i32(uint16_t x)
{
return x;
}
static inline uint64_t zext_i16_i64(uint16_t x)
{
return x;
}
static inline uint8_t zext_i32_i8(uint32_t x)
{
return x;
}
static inline uint16_t zext_i32_i16(uint32_t x)
{
return x;
}
static inline uint32_t zext_i32_i32(uint32_t x)
{
return x;
}
static inline uint64_t zext_i32_i64(uint32_t x)
{
return x;
}
static inline uint8_t zext_i64_i8(uint64_t x)
{
return x;
}
static inline uint16_t zext_i64_i16(uint64_t x)
{
return x;
}
static inline uint32_t zext_i64_i32(uint64_t x)
{
return x;
}
static inline uint64_t zext_i64_i64(uint64_t x)
{
return x;
}
static inline float fdiv32(float x, float y)
{
return x / y;
}
static inline float fadd32(float x, float y)
{
return x + y;
}
static inline float fsub32(float x, float y)
{
return x - y;
}
static inline float fmul32(float x, float y)
{
return x * y;
}
static inline float fpow32(float x, float y)
{
return pow(x, y);
}
static inline char cmplt32(float x, float y)
{
return x < y;
}
static inline char cmple32(float x, float y)
{
return x <= y;
}
static inline float sitofp_i8_f32(int8_t x)
{
return x;
}
static inline float sitofp_i16_f32(int16_t x)
{
return x;
}
static inline float sitofp_i32_f32(int32_t x)
{
return x;
}
static inline float sitofp_i64_f32(int64_t x)
{
return x;
}
static inline float uitofp_i8_f32(uint8_t x)
{
return x;
}
static inline float uitofp_i16_f32(uint16_t x)
{
return x;
}
static inline float uitofp_i32_f32(uint32_t x)
{
return x;
}
static inline float uitofp_i64_f32(uint64_t x)
{
return x;
}
static inline int8_t fptosi_f32_i8(float x)
{
return x;
}
static inline int16_t fptosi_f32_i16(float x)
{
return x;
}
static inline int32_t fptosi_f32_i32(float x)
{
return x;
}
static inline int64_t fptosi_f32_i64(float x)
{
return x;
}
static inline uint8_t fptoui_f32_i8(float x)
{
return x;
}
static inline uint16_t fptoui_f32_i16(float x)
{
return x;
}
static inline uint32_t fptoui_f32_i32(float x)
{
return x;
}
static inline uint64_t fptoui_f32_i64(float x)
{
return x;
}
__kernel void map_kernel_1022(int32_t m_880, __global
unsigned char *world_mem_1109, int32_t n_879,
__global unsigned char *mem_1112)
{
const uint kernel_thread_index_1022 = get_global_id(0);
if (kernel_thread_index_1022 >= n_879 * m_880)
return;
int32_t i_1023;
int32_t i_1024;
char b_1025;
// compute thread index
{
i_1023 = squot32(kernel_thread_index_1022, m_880);
i_1024 = kernel_thread_index_1022 - squot32(kernel_thread_index_1022,
m_880) * m_880;
}
// read kernel parameters
{
b_1025 = *(__global char *) &world_mem_1109[i_1023 * m_880 + i_1024];
}
int8_t res_1026;
if (b_1025) {
res_1026 = 1;
} else {
res_1026 = 0;
}
// write kernel result
{
*(__global int8_t *) &mem_1112[i_1023 * m_880 + i_1024] = res_1026;
}
}
__kernel void map_kernel_1176(int32_t m_880, __global unsigned char *mem_1114)
{
const uint global_thread_index_1176 = get_global_id(0);
if (global_thread_index_1176 >= m_880)
return;
int32_t i_1177;
// compute thread index
{
i_1177 = global_thread_index_1176;
}
// read kernel parameters
{ }
// write kernel result
{
*(__global int32_t *) &mem_1114[i_1177 * 4] = 0;
}
}
__kernel void map_kernel_1180(int32_t m_880, __global unsigned char *mem_1114,
int32_t n_879, __global unsigned char *mem_1117)
{
const uint global_thread_index_1180 = get_global_id(0);
if (global_thread_index_1180 >= n_879 * m_880)
return;
int32_t i_1181;
int32_t j_1182;
int32_t input_1183;
// compute thread index
{
i_1181 = squot32(global_thread_index_1180, m_880);
j_1182 = global_thread_index_1180 - squot32(global_thread_index_1180,
m_880) * m_880;
}
// read kernel parameters
{
input_1183 = *(__global int32_t *) &mem_1114[j_1182 * 4];
}
// write kernel result
{
*(__global int32_t *) &mem_1117[(i_1181 * m_880 + j_1182) * 4] =
input_1183;
}
}
__kernel void map_kernel_1048(int32_t n_889, int32_t m_890, __global
unsigned char *mem_1130, __global
unsigned char *all_history_mem_1119, __global
unsigned char *mem_1133, __global
unsigned char *mem_1137)
{
const uint kernel_thread_index_1048 = get_global_id(0);
if (kernel_thread_index_1048 >= n_889 * m_890)
return;
int32_t i_1049;
int32_t i_1050;
int32_t not_curried_1051;
// compute thread index
{
i_1049 = squot32(kernel_thread_index_1048, m_890);
i_1050 = kernel_thread_index_1048 - squot32(kernel_thread_index_1048,
m_890) * m_890;
}
// read kernel parameters
{
not_curried_1051 = *(__global int32_t *) &all_history_mem_1119[(i_1049 *
m_890 +
i_1050) *
4];
}
int32_t res_1052 = not_curried_1051 & 3;
int32_t arg_1053 = ashr32(not_curried_1051, 2);
char cond_1054 = slt32(255, arg_1053);
int32_t res_1055;
if (cond_1054) {
res_1055 = 255;
} else {
res_1055 = arg_1053;
}
int8_t y_1057 = sext_i32_i8(res_1055);
// write kernel result
{
*(__global int8_t *) &mem_1133[i_1049 * m_890 + i_1050] = y_1057;
for (int i_1188 = 0; i_1188 < 3; i_1188++) {
*(__global int8_t *) &mem_1137[3 * (m_890 * i_1049) + (m_890 *
i_1188 +
i_1050)] =
*(__global int8_t *) &mem_1130[3 * res_1052 + i_1188];
}
}
}
__kernel void map_kernel_1037(__global unsigned char *mem_1137, int32_t n_889,
__global unsigned char *mem_1133, int32_t m_890,
__global unsigned char *mem_1141)
{
const uint kernel_thread_index_1037 = get_global_id(0);
if (kernel_thread_index_1037 >= n_889 * m_890 * 3)
return;
int32_t i_1038;
int32_t i_1039;
int32_t i_1040;
int8_t y_1041;
int8_t binop_param_noncurried_1042;
// compute thread index
{
i_1038 = squot32(kernel_thread_index_1037, m_890 * 3);
i_1039 = squot32(kernel_thread_index_1037 -
squot32(kernel_thread_index_1037, m_890 * 3) * (m_890 *
3), 3);
i_1040 = kernel_thread_index_1037 - squot32(kernel_thread_index_1037,
m_890 * 3) * (m_890 * 3) -
squot32(kernel_thread_index_1037 - squot32(kernel_thread_index_1037,
m_890 * 3) * (m_890 * 3),
3) * 3;
}
// read kernel parameters
{
y_1041 = *(__global int8_t *) &mem_1133[i_1038 * m_890 + i_1039];
binop_param_noncurried_1042 = *(__global int8_t *) &mem_1137[i_1038 *
(3 *
m_890) +
i_1040 *
m_890 +
i_1039];
}
int8_t res_1043 = binop_param_noncurried_1042 - y_1041;
// write kernel result
{
*(__global int8_t *) &mem_1141[i_1038 * (m_890 * 3) + i_1039 * 3 +
i_1040] = res_1043;
}
}
__kernel void map_kernel_1100(int32_t n_910, __global unsigned char *mem_1149,
__global unsigned char *mem_1151)
{
const uint kernel_thread_index_1100 = get_global_id(0);
if (kernel_thread_index_1100 >= n_910)
return;
int32_t i_1101;
// compute thread index
{
i_1101 = kernel_thread_index_1100;
}
// read kernel parameters
{ }
int32_t x_1103 = i_1101 - 1;
int32_t res_1104 = smod32(x_1103, n_910);
int32_t x_1105 = i_1101 + 1;
int32_t res_1106 = smod32(x_1105, n_910);
// write kernel result
{
*(__global int32_t *) &mem_1149[i_1101 * 4] = res_1106;
*(__global int32_t *) &mem_1151[i_1101 * 4] = res_1104;
}
}
__kernel void map_kernel_1064(__global unsigned char *mem_1149, __global
unsigned char *world_mem_1153, int32_t n_910,
__global unsigned char *mem_1151, int32_t m_911,
__global unsigned char *mem_1147, __global
unsigned char *history_mem_1155, __global
unsigned char *mem_1158, __global
unsigned char *mem_1161)
{
const uint kernel_thread_index_1064 = get_global_id(0);
if (kernel_thread_index_1064 >= n_910 * m_911)
return;
int32_t i_1065;
int32_t i_1066;
int32_t res_1068;
int32_t res_1069;
int32_t x_1070;
// compute thread index
{
i_1065 = squot32(kernel_thread_index_1064, m_911);
i_1066 = kernel_thread_index_1064 - squot32(kernel_thread_index_1064,
m_911) * m_911;
}
// read kernel parameters
{
res_1068 = *(__global int32_t *) &mem_1149[i_1065 * 4];
res_1069 = *(__global int32_t *) &mem_1151[i_1065 * 4];
x_1070 = *(__global int32_t *) &history_mem_1155[(i_1065 * m_911 +
i_1066) * 4];
}
int32_t x_1072 = i_1066 + 1;
int32_t res_1073 = smod32(x_1072, m_911);
int32_t x_1074 = i_1066 - 1;
int32_t res_1075 = smod32(x_1074, m_911);
int8_t x_1076 = *(__global int8_t *) &world_mem_1153[res_1069 * m_911 +
i_1066];
int8_t y_1077 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 +
res_1075];
int8_t x_1078 = x_1076 + y_1077;
int8_t y_1079 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 +
i_1066];
int8_t x_1080 = x_1078 + y_1079;
int8_t y_1081 = *(__global int8_t *) &world_mem_1153[i_1065 * m_911 +
res_1073];
int8_t x_1082 = x_1080 + y_1081;
int8_t y_1083 = *(__global int8_t *) &world_mem_1153[res_1068 * m_911 +
i_1066];
int8_t res_1084 = x_1082 + y_1083;
int32_t i_1085 = sext_i8_i32(res_1084);
int8_t res_1086 = *(__global int8_t *) &mem_1147[i_1085];
int32_t res_1087 = x_1070 & 3;
int32_t arg_1088 = ashr32(x_1070, 2);
char cond_1089 = slt32(128, arg_1088);
int32_t res_1090;
if (cond_1089) {
res_1090 = 128;
} else {
res_1090 = arg_1088;
}
int8_t y_1091 = sext_i32_i8(res_1087);
char cond_1092 = res_1086 == y_1091;
int32_t x_1093 = res_1090 + 1;
int32_t x_1094 = x_1093 << 2;
int32_t y_1095 = sext_i8_i32(res_1086);
int32_t res_1096 = x_1094 | y_1095;
int32_t res_1097;
if (cond_1092) {
res_1097 = res_1096;
} else {
res_1097 = y_1095;
}
// write kernel result
{
*(__global int32_t *) &mem_1158[(i_1065 * m_911 + i_1066) * 4] =
res_1097;
*(__global int8_t *) &mem_1161[i_1065 * m_911 + i_1066] = res_1086;
}
}
"""
# Hacky parser/reader for values written in Futhark syntax. Used for
# reading stdin when compiling standalone programs with the Python
# code generator.
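# For example, read_array(sys.stdin, read_int, 2, np.int32) parses input such
# as [[1, 2], [3, 4]] into a 2x2 np.int32 array.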
lookahead_buffer = []
def reset_lookahead():
global lookahead_buffer
lookahead_buffer = []
def get_char(f):
global lookahead_buffer
if len(lookahead_buffer) == 0:
return f.read(1)
else:
c = lookahead_buffer[0]
lookahead_buffer = lookahead_buffer[1:]
return c
def unget_char(f, c):
global lookahead_buffer
lookahead_buffer = [c] + lookahead_buffer
def peek_char(f):
c = get_char(f)
if c:
unget_char(f, c)
return c
def skip_spaces(f):
c = get_char(f)
while c != None:
if c.isspace():
c = get_char(f)
elif c == '-':
# May be line comment.
if peek_char(f) == '-':
# Yes, line comment. Skip to end of line.
while (c != '\n' and c != None):
c = get_char(f)
else:
break
else:
break
if c:
unget_char(f, c)
def parse_specific_char(f, expected):
got = get_char(f)
if got != expected:
unget_char(f, got)
raise ValueError
return True
def parse_specific_string(f, s):
for c in s:
parse_specific_char(f, c)
return True
def optional(p, *args):
try:
return p(*args)
except ValueError:
return None
def sepBy(p, sep, *args):
elems = []
x = optional(p, *args)
if x != None:
elems += [x]
while optional(sep, *args) != None:
x = p(*args)
elems += [x]
return elems
def parse_int(f):
s = ''
c = get_char(f)
while c != None:
if c.isdigit():
s += c
c = get_char(f)
else:
unget_char(f, c)
break
optional(read_int_trailer, f)
return s
def parse_int_signed(f):
s = ''
c = get_char(f)
if c == '-' and peek_char(f).isdigit():
s = c + parse_int(f)
else:
unget_char(f, c)
s = parse_int(f)
return s
def read_int_trailer(f):
parse_specific_char(f, 'i')
while peek_char(f).isdigit():
get_char(f)
def read_comma(f):
skip_spaces(f)
parse_specific_char(f, ',')
return ','
def read_int(f):
skip_spaces(f)
return int(parse_int_signed(f))
def read_char(f):
skip_spaces(f)
parse_specific_char(f, '\'')
c = get_char(f)
parse_specific_char(f, '\'')
return c
def read_double(f):
skip_spaces(f)
c = get_char(f)
if (c == '-'):
sign = '-'
else:
unget_char(f,c)
sign = ''
bef = optional(parse_int, f)
if bef == None:
bef = '0'
parse_specific_char(f, '.')
aft = parse_int(f)
elif optional(parse_specific_char, f, '.'):
aft = parse_int(f)
else:
aft = '0'
if (optional(parse_specific_char, f, 'E') or
optional(parse_specific_char, f, 'e')):
expt = parse_int_signed(f)
else:
expt = '0'
optional(read_float_trailer, f)
return float(sign + bef + '.' + aft + 'E' + expt)
def read_float(f):
return read_double(f)
def read_float_trailer(f):
parse_specific_char(f, 'f')
while peek_char(f).isdigit():
get_char(f)
def read_bool(f):
skip_spaces(f)
if peek_char(f) == 'T':
parse_specific_string(f, 'True')
return True
elif peek_char(f) == 'F':
parse_specific_string(f, 'False')
return False
else:
raise ValueError
def read_array_elems(f, elem_reader):
skip_spaces(f)
parse_specific_char(f, '[')
xs = sepBy(elem_reader, read_comma, f)
skip_spaces(f)
parse_specific_char(f, ']')
return xs
def read_array_helper(f, elem_reader, rank):
def nested_row_reader(_):
return read_array_helper(f, elem_reader, rank-1)
if rank == 1:
row_reader = elem_reader
else:
row_reader = nested_row_reader
return read_array_elems(f, row_reader)
def expected_array_dims(l, rank):
if rank > 1:
n = len(l)
if n == 0:
elem = []
else:
elem = l[0]
return [n] + expected_array_dims(elem, rank-1)
else:
return [len(l)]
def verify_array_dims(l, dims):
if dims[0] != len(l):
raise ValueError
if len(dims) > 1:
for x in l:
verify_array_dims(x, dims[1:])
def read_double_signed(f):
skip_spaces(f)
c = get_char(f)
if c == '-' and peek_char(f).isdigit():
v = -1 * read_double(f)
else:
unget_char(f, c)
v = read_double(f)
return v
def read_array(f, elem_reader, rank, bt):
elems = read_array_helper(f, elem_reader, rank)
dims = expected_array_dims(elems, rank)
verify_array_dims(elems, dims)
return np.array(elems, dtype=bt)
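# Illustrative usage of the reader combinators above (commented out so the
# module's behaviour is unchanged; StringIO here is an assumption -- any
# object with a read(1) method will do):
#
#     from StringIO import StringIO  # io.StringIO on Python 3
#     reset_lookahead()
#     xs = read_array(StringIO("[1, 2, 3]"), read_int, 1, np.int32)
#     # xs is now np.array([1, 2, 3], dtype=np.int32)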
# Scalar functions.
import numpy as np
def signed(x):
if type(x) == np.uint8:
return np.int8(x)
elif type(x) == np.uint16:
return np.int16(x)
elif type(x) == np.uint32:
return np.int32(x)
else:
return np.int64(x)
def unsigned(x):
if type(x) == np.int8:
return np.uint8(x)
elif type(x) == np.int16:
return np.uint16(x)
elif type(x) == np.int32:
return np.uint32(x)
else:
return np.uint64(x)
def shlN(x,y):
return x << y
def ashrN(x,y):
return x >> y
def sdivN(x,y):
return x / y
def smodN(x,y):
return x % y
def udivN(x,y):
return signed(unsigned(x) / unsigned(y))
def umodN(x,y):
return signed(unsigned(x) % unsigned(y))
def squotN(x,y):
return np.int32(float(x) / float(y))
def sremN(x,y):
return np.fmod(x,y)
def powN(x,y):
return x ** y
def fpowN(x,y):
return x ** y
def sleN(x,y):
return x <= y
def sltN(x,y):
return x < y
def uleN(x,y):
return unsigned(x) <= unsigned(y)
def ultN(x,y):
return unsigned(x) < unsigned(y)
def lshr8(x,y):
return np.int8(np.uint8(x) >> np.uint8(y))
def lshr16(x,y):
return np.int16(np.uint16(x) >> np.uint16(y))
def lshr32(x,y):
return np.int32(np.uint32(x) >> np.uint32(y))
def lshr64(x,y):
return np.int64(np.uint64(x) >> np.uint64(y))
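# Commented-out sanity sketch (not part of the generated code): arithmetic
# shifts replicate the sign bit while logical shifts fill with zeros, so the
# two families only differ on negative inputs:
#
#     ashrN(np.int32(-8), 1)    # == np.int32(-4)
#     lshr32(np.int32(-8), 1)   # == np.int32(2147483644)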
def sext_T_i8(x):
return np.int8(x)
def sext_T_i16(x):
return np.int16(x)
def sext_T_i32(x):
return np.int32(x)
def sext_T_i64(x):
    return np.int64(x)
def zext_i8_i8(x):
return np.int8(np.uint8(x))
def zext_i8_i16(x):
return np.int16(np.uint8(x))
def zext_i8_i32(x):
return np.int32(np.uint8(x))
def zext_i8_i64(x):
return np.int64(np.uint8(x))
def zext_i16_i8(x):
return np.int8(np.uint16(x))
def zext_i16_i16(x):
return np.int16(np.uint16(x))
def zext_i16_i32(x):
return np.int32(np.uint16(x))
def zext_i16_i64(x):
return np.int64(np.uint16(x))
def zext_i32_i8(x):
return np.int8(np.uint32(x))
def zext_i32_i16(x):
return np.int16(np.uint32(x))
def zext_i32_i32(x):
return np.int32(np.uint32(x))
def zext_i32_i64(x):
return np.int64(np.uint32(x))
def zext_i64_i8(x):
return np.int8(np.uint64(x))
def zext_i64_i16(x):
return np.int16(np.uint64(x))
def zext_i64_i32(x):
return np.int32(np.uint64(x))
def zext_i64_i64(x):
return np.int64(np.uint64(x))
shl8 = shl16 = shl32 = shl64 = shlN
ashr8 = ashr16 = ashr32 = ashr64 = ashrN
sdiv8 = sdiv16 = sdiv32 = sdiv64 = sdivN
smod8 = smod16 = smod32 = smod64 = smodN
udiv8 = udiv16 = udiv32 = udiv64 = udivN
umod8 = umod16 = umod32 = umod64 = umodN
squot8 = squot16 = squot32 = squot64 = squotN
srem8 = srem16 = srem32 = srem64 = sremN
pow8 = pow16 = pow32 = pow64 = powN
fpow32 = fpow64 = fpowN
sle8 = sle16 = sle32 = sle64 = sleN
slt8 = slt16 = slt32 = slt64 = sltN
ule8 = ule16 = ule32 = ule64 = uleN
ult8 = ult16 = ult32 = ult64 = ultN
sext_i8_i8 = sext_i16_i8 = sext_i32_i8 = sext_i64_i8 = sext_T_i8
sext_i8_i16 = sext_i16_i16 = sext_i32_i16 = sext_i64_i16 = sext_T_i16
sext_i8_i32 = sext_i16_i32 = sext_i32_i32 = sext_i64_i32 = sext_T_i32
sext_i8_i64 = sext_i16_i64 = sext_i32_i64 = sext_i64_i64 = sext_T_i64
def ssignum(x):
return np.sign(x)
def usignum(x):
if x < 0:
return ssignum(-x)
else:
return ssignum(x)
def sitofp_T_f32(x):
return np.float32(x)
sitofp_i8_f32 = sitofp_i16_f32 = sitofp_i32_f32 = sitofp_i64_f32 = sitofp_T_f32
def sitofp_T_f64(x):
return np.float64(x)
sitofp_i8_f64 = sitofp_i16_f64 = sitofp_i32_f64 = sitofp_i64_f64 = sitofp_T_f64
def uitofp_T_f32(x):
return np.float32(unsigned(x))
uitofp_i8_f32 = uitofp_i16_f32 = uitofp_i32_f32 = uitofp_i64_f32 = uitofp_T_f32
def uitofp_T_f64(x):
return np.float64(unsigned(x))
uitofp_i8_f64 = uitofp_i16_f64 = uitofp_i32_f64 = uitofp_i64_f64 = uitofp_T_f64
def fptosi_T_i8(x):
return np.int8(np.trunc(x))
fptosi_f32_i8 = fptosi_f64_i8 = fptosi_T_i8
def fptosi_T_i16(x):
return np.int16(np.trunc(x))
fptosi_f32_i16 = fptosi_f64_i16 = fptosi_T_i16
def fptosi_T_i32(x):
return np.int32(np.trunc(x))
fptosi_f32_i32 = fptosi_f64_i32 = fptosi_T_i32
def fptosi_T_i64(x):
return np.int64(np.trunc(x))
fptosi_f32_i64 = fptosi_f64_i64 = fptosi_T_i64
def fptoui_T_i8(x):
return np.uint8(np.trunc(x))
fptoui_f32_i8 = fptoui_f64_i8 = fptoui_T_i8
def fptoui_T_i16(x):
return np.uint16(np.trunc(x))
fptoui_f32_i16 = fptoui_f64_i16 = fptoui_T_i16
def fptoui_T_i32(x):
return np.uint32(np.trunc(x))
fptoui_f32_i32 = fptoui_f64_i32 = fptoui_T_i32
def fptoui_T_i64(x):
return np.uint64(np.trunc(x))
fptoui_f32_i64 = fptoui_f64_i64 = fptoui_T_i64
def fpconv_f32_f64(x):
return np.float64(x)
def fpconv_f64_f32(x):
return np.float32(x)
def futhark_log64(x):
return np.float64(np.log(x))
def futhark_sqrt64(x):
return np.sqrt(x)
def futhark_exp64(x):
return np.exp(x)
def futhark_cos64(x):
return np.cos(x)
def futhark_sin64(x):
return np.sin(x)
def futhark_atan2_64(x, y):
return np.arctan2(x, y)
def futhark_isnan64(x):
return np.isnan(x)
def futhark_isinf64(x):
return np.isinf(x)
def futhark_log32(x):
return np.float32(np.log(x))
def futhark_sqrt32(x):
return np.float32(np.sqrt(x))
def futhark_exp32(x):
return np.exp(x)
def futhark_cos32(x):
return np.cos(x)
def futhark_sin32(x):
return np.sin(x)
def futhark_atan2_32(x, y):
return np.arctan2(x, y)
def futhark_isnan32(x):
return np.isnan(x)
def futhark_isinf32(x):
return np.isinf(x)
class quadlife_alt:
def __init__(self):
self.ctx = cl.create_some_context(interactive=False)
self.queue = cl.CommandQueue(self.ctx)
# XXX: Assuming just a single device here.
platform_name = self.ctx.get_info(cl.context_info.DEVICES)[0].platform.name
device_type = self.ctx.get_info(cl.context_info.DEVICES)[0].type
lockstep_width = 1
if ((platform_name == "NVIDIA CUDA") and (device_type == cl.device_type.GPU)):
lockstep_width = np.int32(32)
if ((platform_name == "AMD Accelerated Parallel Processing") and (device_type == cl.device_type.GPU)):
lockstep_width = np.int32(64)
if (len(fut_opencl_src) >= 0):
program = cl.Program(self.ctx, fut_opencl_src).build(["-DFUT_BLOCK_DIM={}".format(FUT_BLOCK_DIM), "-DLOCKSTEP_WIDTH={}".format(lockstep_width)])
self.map_kernel_1022_var = program.map_kernel_1022
self.map_kernel_1176_var = program.map_kernel_1176
self.map_kernel_1180_var = program.map_kernel_1180
self.map_kernel_1048_var = program.map_kernel_1048
self.map_kernel_1037_var = program.map_kernel_1037
self.map_kernel_1100_var = program.map_kernel_1100
self.map_kernel_1064_var = program.map_kernel_1064
def futhark_init(self, world_mem_size_1108, world_mem_1109, n_879, m_880):
nesting_size_1020 = (m_880 * n_879)
bytes_1110 = (n_879 * m_880)
mem_1112 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1110) if (bytes_1110 > np.int32(0)) else np.int32(1)))
group_size_1174 = np.int32(512)
num_groups_1175 = squot32((((n_879 * m_880) + group_size_1174) - np.int32(1)),
group_size_1174)
if ((np.int32(1) * (num_groups_1175 * group_size_1174)) != np.int32(0)):
self.map_kernel_1022_var.set_args(np.int32(m_880), world_mem_1109,
np.int32(n_879), mem_1112)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1022_var,
(long((num_groups_1175 * group_size_1174)),),
(long(group_size_1174),))
if synchronous:
self.queue.finish()
bytes_1113 = (np.int32(4) * m_880)
mem_1114 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1113) if (bytes_1113 > np.int32(0)) else np.int32(1)))
group_size_1178 = np.int32(512)
num_groups_1179 = squot32(((m_880 + group_size_1178) - np.int32(1)),
group_size_1178)
if ((np.int32(1) * (num_groups_1179 * group_size_1178)) != np.int32(0)):
self.map_kernel_1176_var.set_args(np.int32(m_880), mem_1114)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1176_var,
(long((num_groups_1179 * group_size_1178)),),
(long(group_size_1178),))
if synchronous:
self.queue.finish()
x_1116 = (np.int32(4) * n_879)
bytes_1115 = (x_1116 * m_880)
mem_1117 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1115) if (bytes_1115 > np.int32(0)) else np.int32(1)))
group_size_1184 = np.int32(512)
num_groups_1185 = squot32((((n_879 * m_880) + group_size_1184) - np.int32(1)),
group_size_1184)
if ((np.int32(1) * (num_groups_1185 * group_size_1184)) != np.int32(0)):
self.map_kernel_1180_var.set_args(np.int32(m_880), mem_1114,
np.int32(n_879), mem_1117)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1180_var,
(long((num_groups_1185 * group_size_1184)),),
(long(group_size_1184),))
if synchronous:
self.queue.finish()
out_mem_1170 = mem_1112
out_memsize_1171 = bytes_1110
out_mem_1172 = mem_1117
out_memsize_1173 = bytes_1115
return (out_memsize_1171, out_mem_1170, out_memsize_1173, out_mem_1172)
def futhark_render_frame(self, all_history_mem_size_1118,
all_history_mem_1119, n_889, m_890):
mem_1121 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(0)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(1)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1121, np.array(np.int8(-1),
dtype=ct.c_int8),
device_offset=long(np.int32(2)), is_blocking=synchronous)
mem_1123 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(0)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(-1),
dtype=ct.c_int8),
device_offset=long(np.int32(1)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1123, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(2)), is_blocking=synchronous)
mem_1125 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(-1),
dtype=ct.c_int8),
device_offset=long(np.int32(0)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(1)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1125, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(2)), is_blocking=synchronous)
mem_1127 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(3)) if (np.int32(3) > np.int32(0)) else np.int32(1)))
cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(-1),
dtype=ct.c_int8),
device_offset=long(np.int32(0)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(-1),
dtype=ct.c_int8),
device_offset=long(np.int32(1)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1127, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(2)), is_blocking=synchronous)
mem_1130 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(12)) if (np.int32(12) > np.int32(0)) else np.int32(1)))
if ((np.int32(3) * np.int32(1)) != np.int32(0)):
cl.enqueue_copy(self.queue, mem_1130, mem_1121,
dest_offset=long(np.int32(0)),
src_offset=long(np.int32(0)),
byte_count=long((np.int32(3) * np.int32(1))))
if synchronous:
self.queue.finish()
if ((np.int32(3) * np.int32(1)) != np.int32(0)):
cl.enqueue_copy(self.queue, mem_1130, mem_1123,
dest_offset=long(np.int32(3)),
src_offset=long(np.int32(0)),
byte_count=long((np.int32(3) * np.int32(1))))
if synchronous:
self.queue.finish()
if ((np.int32(3) * np.int32(1)) != np.int32(0)):
cl.enqueue_copy(self.queue, mem_1130, mem_1125,
dest_offset=long((np.int32(3) * np.int32(2))),
src_offset=long(np.int32(0)),
byte_count=long((np.int32(3) * np.int32(1))))
if synchronous:
self.queue.finish()
if ((np.int32(3) * np.int32(1)) != np.int32(0)):
cl.enqueue_copy(self.queue, mem_1130, mem_1127,
dest_offset=long((np.int32(3) * np.int32(3))),
src_offset=long(np.int32(0)),
byte_count=long((np.int32(3) * np.int32(1))))
if synchronous:
self.queue.finish()
nesting_size_1046 = (m_890 * n_889)
bytes_1131 = (n_889 * m_890)
mem_1133 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1131) if (bytes_1131 > np.int32(0)) else np.int32(1)))
x_1136 = (n_889 * np.int32(3))
bytes_1134 = (x_1136 * m_890)
mem_1137 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1134) if (bytes_1134 > np.int32(0)) else np.int32(1)))
group_size_1189 = np.int32(512)
num_groups_1190 = squot32((((n_889 * m_890) + group_size_1189) - np.int32(1)),
group_size_1189)
if ((np.int32(1) * (num_groups_1190 * group_size_1189)) != np.int32(0)):
self.map_kernel_1048_var.set_args(np.int32(n_889), np.int32(m_890),
mem_1130, all_history_mem_1119,
mem_1133, mem_1137)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1048_var,
(long((num_groups_1190 * group_size_1189)),),
(long(group_size_1189),))
if synchronous:
self.queue.finish()
nesting_size_1033 = (np.int32(3) * m_890)
nesting_size_1035 = (nesting_size_1033 * n_889)
bytes_1138 = (bytes_1131 * np.int32(3))
mem_1141 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1138) if (bytes_1138 > np.int32(0)) else np.int32(1)))
group_size_1191 = np.int32(512)
num_groups_1192 = squot32(((((n_889 * m_890) * np.int32(3)) + group_size_1191) - np.int32(1)),
group_size_1191)
if ((np.int32(1) * (num_groups_1192 * group_size_1191)) != np.int32(0)):
self.map_kernel_1037_var.set_args(mem_1137, np.int32(n_889), mem_1133,
np.int32(m_890), mem_1141)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1037_var,
(long((num_groups_1192 * group_size_1191)),),
(long(group_size_1191),))
if synchronous:
self.queue.finish()
out_mem_1186 = mem_1141
out_memsize_1187 = bytes_1138
return (out_memsize_1187, out_mem_1186)
def futhark_steps(self, world_mem_size_1142, history_mem_size_1144,
world_mem_1143, history_mem_1145, n_910, m_911, steps_914):
mem_1147 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(np.int32(16)) if (np.int32(16) > np.int32(0)) else np.int32(1)))
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(0)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8),
device_offset=long(np.int32(1)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8),
device_offset=long(np.int32(2)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(3)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(0), dtype=ct.c_int8),
device_offset=long(np.int32(4)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8),
device_offset=long(np.int32(5)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8),
device_offset=long(np.int32(6)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(1), dtype=ct.c_int8),
device_offset=long(np.int32(7)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8),
device_offset=long(np.int32(8)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8),
device_offset=long(np.int32(9)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8),
device_offset=long(np.int32(10)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8),
device_offset=long(np.int32(11)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8),
device_offset=long(np.int32(12)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8),
device_offset=long(np.int32(13)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(2), dtype=ct.c_int8),
device_offset=long(np.int32(14)), is_blocking=synchronous)
cl.enqueue_copy(self.queue, mem_1147, np.array(np.int8(3), dtype=ct.c_int8),
device_offset=long(np.int32(15)), is_blocking=synchronous)
bytes_1148 = (np.int32(4) * n_910)
mem_1149 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1148) if (bytes_1148 > np.int32(0)) else np.int32(1)))
mem_1151 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1148) if (bytes_1148 > np.int32(0)) else np.int32(1)))
group_size_1197 = np.int32(512)
num_groups_1198 = squot32(((n_910 + group_size_1197) - np.int32(1)),
group_size_1197)
if ((np.int32(1) * (num_groups_1198 * group_size_1197)) != np.int32(0)):
self.map_kernel_1100_var.set_args(np.int32(n_910), mem_1149, mem_1151)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1100_var,
(long((num_groups_1198 * group_size_1197)),),
(long(group_size_1197),))
if synchronous:
self.queue.finish()
nesting_size_1062 = (m_911 * n_910)
bytes_1156 = (bytes_1148 * m_911)
bytes_1159 = (n_910 * m_911)
double_buffer_mem_1166 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1159) if (bytes_1159 > np.int32(0)) else np.int32(1)))
double_buffer_mem_1167 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1156) if (bytes_1156 > np.int32(0)) else np.int32(1)))
mem_1158 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1156) if (bytes_1156 > np.int32(0)) else np.int32(1)))
mem_1161 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(bytes_1159) if (bytes_1159 > np.int32(0)) else np.int32(1)))
world_mem_size_1152 = world_mem_size_1142
history_mem_size_1154 = history_mem_size_1144
world_mem_1153 = world_mem_1143
history_mem_1155 = history_mem_1145
i_920 = np.int32(0)
one_1208 = np.int32(1)
for counter_1207 in range(steps_914):
group_size_1205 = np.int32(512)
num_groups_1206 = squot32((((n_910 * m_911) + group_size_1205) - np.int32(1)),
group_size_1205)
if ((np.int32(1) * (num_groups_1206 * group_size_1205)) != np.int32(0)):
self.map_kernel_1064_var.set_args(mem_1149, world_mem_1153,
np.int32(n_910), mem_1151,
np.int32(m_911), mem_1147,
history_mem_1155, mem_1158, mem_1161)
cl.enqueue_nd_range_kernel(self.queue, self.map_kernel_1064_var,
(long((num_groups_1206 * group_size_1205)),),
(long(group_size_1205),))
if synchronous:
self.queue.finish()
if (((n_910 * m_911) * np.int32(1)) != np.int32(0)):
cl.enqueue_copy(self.queue, double_buffer_mem_1166, mem_1161,
dest_offset=long(np.int32(0)),
src_offset=long(np.int32(0)),
byte_count=long(((n_910 * m_911) * np.int32(1))))
if synchronous:
self.queue.finish()
if (((n_910 * m_911) * np.int32(4)) != np.int32(0)):
cl.enqueue_copy(self.queue, double_buffer_mem_1167, mem_1158,
dest_offset=long(np.int32(0)),
src_offset=long(np.int32(0)),
byte_count=long(((n_910 * m_911) * np.int32(4))))
if synchronous:
self.queue.finish()
world_mem_size_tmp_1199 = bytes_1159
history_mem_size_tmp_1200 = bytes_1156
world_mem_tmp_1201 = double_buffer_mem_1166
history_mem_tmp_1202 = double_buffer_mem_1167
world_mem_size_1152 = world_mem_size_tmp_1199
history_mem_size_1154 = history_mem_size_tmp_1200
world_mem_1153 = world_mem_tmp_1201
history_mem_1155 = history_mem_tmp_1202
i_920 += one_1208
world_mem_1163 = world_mem_1153
world_mem_size_1162 = world_mem_size_1152
history_mem_1165 = history_mem_1155
history_mem_size_1164 = history_mem_size_1154
out_mem_1193 = world_mem_1163
out_memsize_1194 = world_mem_size_1162
out_mem_1195 = history_mem_1165
out_memsize_1196 = history_mem_size_1164
return (out_memsize_1194, out_mem_1193, out_memsize_1196, out_mem_1195)
def init(self, world_mem_1109_ext):
n_879 = np.int32(world_mem_1109_ext.shape[np.int32(0)])
m_880 = np.int32(world_mem_1109_ext.shape[np.int32(1)])
world_mem_size_1108 = np.int32(world_mem_1109_ext.nbytes)
if (type(world_mem_1109_ext) == cl.array.Array):
world_mem_1109 = world_mem_1109_ext.data
else:
world_mem_1109 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(world_mem_size_1108) if (world_mem_size_1108 > np.int32(0)) else np.int32(1)))
if (world_mem_size_1108 != np.int32(0)):
cl.enqueue_copy(self.queue, world_mem_1109, world_mem_1109_ext,
is_blocking=synchronous)
(out_memsize_1171, out_mem_1170, out_memsize_1173,
out_mem_1172) = self.futhark_init(world_mem_size_1108, world_mem_1109,
n_879, m_880)
return (cl.array.Array(self.queue, (n_879, m_880), ct.c_int8,
data=out_mem_1170), cl.array.Array(self.queue,
(n_879, m_880),
ct.c_int32,
data=out_mem_1172))
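    # Illustrative end-to-end usage (commented out; assumes a working PyOpenCL
    # setup and the module-level `synchronous` flag defined by the enclosing
    # generated program):
    #
    #     q = quadlife_alt()
    #     world, history = q.init(np.zeros((64, 64), dtype=np.int8))
    #     world, history = q.steps(world, history, 10)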
def render_frame(self, all_history_mem_1119_ext):
n_889 = np.int32(all_history_mem_1119_ext.shape[np.int32(0)])
m_890 = np.int32(all_history_mem_1119_ext.shape[np.int32(1)])
all_history_mem_size_1118 = np.int32(all_history_mem_1119_ext.nbytes)
if (type(all_history_mem_1119_ext) == cl.array.Array):
all_history_mem_1119 = all_history_mem_1119_ext.data
else:
all_history_mem_1119 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(all_history_mem_size_1118) if (all_history_mem_size_1118 > np.int32(0)) else np.int32(1)))
if (all_history_mem_size_1118 != np.int32(0)):
cl.enqueue_copy(self.queue, all_history_mem_1119,
all_history_mem_1119_ext, is_blocking=synchronous)
(out_memsize_1187,
out_mem_1186) = self.futhark_render_frame(all_history_mem_size_1118,
all_history_mem_1119, n_889,
m_890)
return cl.array.Array(self.queue, (n_889, m_890, np.int32(3)), ct.c_int8,
data=out_mem_1186)
def steps(self, world_mem_1143_ext, history_mem_1145_ext, steps_914_ext):
n_910 = np.int32(world_mem_1143_ext.shape[np.int32(0)])
m_911 = np.int32(world_mem_1143_ext.shape[np.int32(1)])
world_mem_size_1142 = np.int32(world_mem_1143_ext.nbytes)
if (type(world_mem_1143_ext) == cl.array.Array):
world_mem_1143 = world_mem_1143_ext.data
else:
world_mem_1143 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(world_mem_size_1142) if (world_mem_size_1142 > np.int32(0)) else np.int32(1)))
if (world_mem_size_1142 != np.int32(0)):
cl.enqueue_copy(self.queue, world_mem_1143, world_mem_1143_ext,
is_blocking=synchronous)
n_910 = np.int32(history_mem_1145_ext.shape[np.int32(0)])
m_911 = np.int32(history_mem_1145_ext.shape[np.int32(1)])
history_mem_size_1144 = np.int32(history_mem_1145_ext.nbytes)
if (type(history_mem_1145_ext) == cl.array.Array):
history_mem_1145 = history_mem_1145_ext.data
else:
history_mem_1145 = cl.Buffer(self.ctx, cl.mem_flags.READ_WRITE,
long(long(history_mem_size_1144) if (history_mem_size_1144 > np.int32(0)) else np.int32(1)))
if (history_mem_size_1144 != np.int32(0)):
cl.enqueue_copy(self.queue, history_mem_1145, history_mem_1145_ext,
is_blocking=synchronous)
steps_914 = np.int32(steps_914_ext)
(out_memsize_1194, out_mem_1193, out_memsize_1196,
out_mem_1195) = self.futhark_steps(world_mem_size_1142,
history_mem_size_1144, world_mem_1143,
history_mem_1145, n_910, m_911,
steps_914)
return (cl.array.Array(self.queue, (n_910, m_911), ct.c_int8,
data=out_mem_1193), cl.array.Array(self.queue,
(n_910, m_911),
ct.c_int32,
data=out_mem_1195)) | bsd-3-clause | 4,486,038,976,242,857,500 | 29.420159 | 150 | 0.537404 | false |
bschollnick/downloader | downloader3.py | 1 | 7816 | """
Downloader, mass file downloader.
"""
import common
import logging, logging.handlers
import os
import os.path
import plistlib
import stat
import sys
import time
from optparse import OptionParser
from yapsy.PluginManager import PluginManager
SCRIPT_FILENAME = os.path.abspath(sys.argv[0])
SCRIPT_DIRECTORY = os.sep.join(os.path.split(SCRIPT_FILENAME)[0:-1])
def initialize_parser():
"""
Initialize the parser, and set the basic parser options
"""
parser = OptionParser(usage="usage: %prog [options] filename",
version="%prog 1.0")
parser.add_option("-u", "--url",
action="store",
dest="url_to_fetch",
default="",
help="URL to fetch")
parser.add_option("-t", "--target",
action="store",
dest="download_folder",
default="",
help="Download Folder to use",)
parser.add_option("-l", "--log",
action="store",
dest="log_folder",
default="",
help="The Log Folder to use",)
parser.add_option("--details",
action="store_true",
dest="details",
default=False,
help="Report details on downloads",)
parser.add_option("--silent",
action="store_true",
dest="silent",
default=False,
help="Absolutely no feedback on downloading",)
parser.add_option("--dllimit",
action="store",
dest="downloadlimit",
default=0,
type="int",
help="Maximum # of Files to download before quitting",)
parser.add_option("--skiplimit",
action="store",
dest="skiplimit",
default=0,
type="int",
help="Maximum # of Files to skip before quitting",)
parser.add_option("--start",
action="store",
dest="startingplace",
default=0,
type="int",
help="The Offset to start at",)
parser.add_option("--csv",
action="store",
dest="csv_file",
default="",
help="CSV File containing sources")
return parser
def plugin_parser_adds(parser, plug):
"""
Call the parser options from the plugin(s),
to allow the plugins to install
options into the parser.
"""
if hasattr(plug, "parser_options"):
parser = plug.parser_options(parser)
def parse_commandline(parser):
"""
Process the parser and return the options to the main.
"""
options = parser.parse_args()[0]
return options
def make_weblocation_file(filename,
url):
"""
Make the weblocation file, to allow easy "one click" access
to the gallery, etc, that originated the content.
"""
if not os.path.exists(filename):
try:
output_file = open(filename, "w")
plist = dict(URL=url)
plistlib.writePlist(plist, output_file)
output_file.close()
except IOError:
pass
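# Illustrative result (hypothetical URL): plistlib serialises the dict into a
# small XML plist that the Mac Finder treats as a clickable link, roughly:
#   <plist version="1.0">
#     <dict><key>URL</key><string>http://example.com/gallery</string></dict>
#   </plist>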
def make_script_file(options):
"""
Make the shellscript file, to help automate redownload of the content.
"""
try:
script_name = "update_capture.command"
if not os.path.exists(options.download_folder +
os.sep + script_name):
update_script = open(options.download_folder +
os.sep + script_name,
"w")
update_script.write("python %s " % SCRIPT_FILENAME)
for x_arg in sys.argv[1:]:
update_script.write('"%s"' % x_arg + " ")
update_script.close()
os.chmod(options.download_folder + os.sep + script_name,
511 | stat.S_IEXEC)
except IOError:
pass
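# Illustrative (hypothetical invocation): running
#   python downloader3.py --url http://example.com/gallery --target ./dl
# leaves an executable ./dl/update_capture.command containing roughly:
#   python /path/to/downloader3.py "--url" "http://example.com/gallery" "--target" "./dl"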
def process_commandline():
"""
Process the command line options
"""
parser = initialize_parser()
manager = PluginManager()
manager.setPluginPlaces([SCRIPT_DIRECTORY + os.sep + "plugins"])
manager.collectPlugins()
plugin_names = {}
    # Loop round the plugins, registering each and adding its parser options.
for plug in manager.getAllPlugins():
plugin_names[plug.name.lower().strip()] = plug
# plugin name contains pointer to module
plugin_parser_adds(parser, plug.plugin_object)
options = parse_commandline(parser)
    if not options.silent:
        print options
if options.url_to_fetch == "":
print "Please supply an URL to process."
return None
if options.download_folder == "":
print "Please supply an download folder."
return None
if options.log_folder == "":
options.log_folder = "~/logs"
options.download_folder = os.path.abspath(options.download_folder)
options.download_folder = common.clean_filename(\
unicode(options.download_folder))
if not options.download_folder.strip().endswith(os.sep):
options.download_folder = options.download_folder + os.sep
return (options, plugin_names)
def main():
"""
The main function. TaDa!
"""
log = logging.getLogger('Downloader')
log.setLevel(logging.INFO)
console_h = logging.StreamHandler(sys.stdout)
console_formatter = logging.Formatter('%(message)s')
console_h.setFormatter(console_formatter)
log.addHandler(console_h)
s_options, plugin_names = process_commandline()
logdir = os.path.abspath(os.path.join(\
os.path.expanduser(s_options.log_folder)))
print "Logging to ", logdir
if not os.path.exists(logdir):
os.makedirs(logdir)
logfilename = os.path.abspath(os.path.join(logdir, "downloader.log"))
print "Log file name: ", logfilename
file_h = logging.handlers.RotatingFileHandler(logfilename,
maxBytes=(50000),
backupCount=7)
file_format = logging.Formatter(\
"%(asctime)s - %(name)s - %(levelname)s - %(message)s")
file_h.setFormatter(file_format)
log.addHandler(file_h)
#
# Make Root download folder
#
if os.path.exists(s_options.download_folder) != True:
os.makedirs(s_options.download_folder)
for x_key in plugin_names.keys():
if getattr(s_options, x_key):
plugin = plugin_names[x_key].plugin_object
print "Using Plugin - %s" % x_key
start_time = time.time()
if not s_options.silent:
log.info("Downloading to: %s", s_options.download_folder)
make_weblocation_file(s_options.download_folder +
os.sep + "downloaded_site.webloc",
s_options.url_to_fetch)
results = plugin.download(s_options)
            elapsed = int((time.time() - start_time) * 100) / 100.0
            if results != None:
                (total_downloaded, total_skipped, total_errors) = results
                print
                print
                log.info("Total Downloaded Files - %s", total_downloaded)
                log.info("Total Skipped Files    - %s", total_skipped)
                log.info("Total Errors           - %s", total_errors)
                log.info("Elapsed Time (Seconds) - %f", elapsed)
                log.info("Elapsed Time (Min)     - %f", (elapsed / 60.0))
                if total_downloaded != 0:
                    sys.exit(1)
                else:
                    sys.exit(0)
if __name__ == "__main__":
main()
| mpl-2.0 | 2,206,838,923,007,199,500 | 30.643725 | 77 | 0.546827 | false |
alfred82santa/tarrabmeCheckerGtk | src/tarrabme_checker_gtk/dialogs.py | 1 | 3341 | from gi.repository import Gtk, Gio
from .actuators import NeoPixelsActuator
__author__ = 'alfred'
class PreferencesDialog(Gtk.Dialog):
def __init__(self, settings):
Gtk.Dialog.__init__(self, "Preferences", None,
Gtk.DialogFlags.USE_HEADER_BAR | Gtk.DialogFlags.MODAL,
[], use_header_bar=True)
self.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
button = self.add_button(Gtk.STOCK_OK, Gtk.ResponseType.OK)
button.get_style_context().add_class('suggested-action')
self.settings = settings
self.set_default_size(490, 350)
self.builder = Gtk.Builder.new_from_resource('/org/me/tarrab/Checker/tarrabme-preferences.ui')
scrolled_window = Gtk.ScrolledWindow()
scrolled_window.set_hexpand(True)
scrolled_window.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
scrolled_window.add(self.builder.get_object('PerferencesView'))
entry = self.builder.get_object('basepath_entry')
self.settings.bind('baseurl', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('login_endpoint_entry')
self.settings.bind('login-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
combo = self.builder.get_object('login_method_combo')
self.settings.bind('login-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('logout_endpoint_entry')
self.settings.bind('logout-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
combo = self.builder.get_object('logout_method_combo')
self.settings.bind('logout-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('attempt_endpoint_entry')
self.settings.bind('attempt-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
combo = self.builder.get_object('attempt_method_combo')
self.settings.bind('attempt-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('attempt_list_endpoint_entry')
self.settings.bind('attempt-list-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
combo = self.builder.get_object('attempt_list_method_combo')
self.settings.bind('attempt-list-method', combo, 'active_id', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('account_entry')
self.settings.bind('account-path', entry, 'text', Gio.SettingsBindFlags.DEFAULT)
adjustment = self.builder.get_object('windows_adjustment')
self.settings.bind('window-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)
adjustment = self.builder.get_object('columns_adjustment')
self.settings.bind('column-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)
adjustment = self.builder.get_object('rows_adjustment')
self.settings.bind('row-count', adjustment, 'value', Gio.SettingsBindFlags.DEFAULT)
entry = self.builder.get_object('neopixels_endpoint')
self.settings.bind(NeoPixelsActuator.ENDPOINT_SETTING_NAME, entry,
'text', Gio.SettingsBindFlags.DEFAULT)
self.get_content_area().pack_start(scrolled_window, True, True, 0)
self.show_all()
| gpl-2.0 | 3,221,447,033,717,781,500 | 46.056338 | 102 | 0.677641 | false |
khosrow/metpx | sundew/unittests/unittest_senderAm.py | 1 | 1082 | # -*- coding: iso-8859-1 -*-
#############################################################################################
# Name: unittest_senderAm.py
# Author: Jun Hu
# Date: 2012-04-30
# Description: test cases for senderAm class
#############################################################################################
import sys,os,unittest
sys.path.insert(1, '../sundew/lib/')
os.environ['PXROOT']="."
from Logger import Logger
from Client import Client
from CacheManager import CacheManager
from senderAm import senderAm
class unittest_Template(unittest.TestCase):
def setUp(self,logFile='log/Template.log'):
self.logger = Logger(logFile, 'DEBUG', 'Sub')
self.logger = self.logger.getLogger()
def test_Template(self):
self.assertEqual(None, None)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(unittest_Template))
return suite
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(unittest_Template)
unittest.TextTestRunner(verbosity=2).run(suite)
| gpl-2.0 | 7,791,971,487,421,272,000 | 30.823529 | 93 | 0.575786 | false |
praekelt/django-ultracache | ultracache/tests/utils.py | 1 | 1072 | from collections import OrderedDict
class DummyProxy(dict):
def make_key(self, path, headers=None):
key = path
if headers is not None:
key += str(frozenset(sorted(headers.items())))
return key
def cache(self, request, value):
headers = {k[5:].replace("_", "-").lower(): v for \
k, v in request.META.items() if k.startswith("HTTP_")}
key = self.make_key(request.get_full_path(), headers)
self[key] = value
def is_cached(self, path, headers=None):
# The test framework sends an empty cookie with each request. Avoid
# copy pasta in the individual tests and just add that header here.
if headers is None:
headers = {u"cookie": u""}
key = self.make_key(path, headers)
return key in self
def purge(self, path, headers=None):
key = self.make_key(path, headers)
if key in self:
del self[key]
dummy_proxy = DummyProxy()
def dummy_purger(path, headers=None):
dummy_proxy.purge(path, headers=headers)
| bsd-3-clause | 4,270,799,919,882,227,000 | 29.628571 | 75 | 0.602612 | false |
ContextLab/hypertools | hypertools/tools/format_data.py | 1 | 7323 | import warnings
import numpy as np
import six
from .._externals.ppca import PPCA
from .._shared.helpers import get_type
def format_data(x, vectorizer='CountVectorizer',
semantic='LatentDirichletAllocation', corpus='wiki', ppca=True, text_align='hyper'):
"""
Formats data into a list of numpy arrays
This function is useful to identify rows of your array that contain missing
data or nans. The returned indices can be used to remove the rows with
missing data, or label the missing data points that are interpolated
using PPCA.
Parameters
----------
x : numpy array, dataframe, string or (mixed) list
The data to convert
vectorizer : str, dict, class or class instance
The vectorizer to use. Built-in options are 'CountVectorizer' or
'TfidfVectorizer'. To change default parameters, set to a dictionary
e.g. {'model' : 'CountVectorizer', 'params' : {'max_features' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.feature_extraction.text
for details. You can also specify your own vectorizer model as a class,
or class instance. With either option, the class must have a
fit_transform method (see here: http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to vectorizer_params. If
a class instance, no parameters can be passed.
semantic : str, dict, class or class instance
Text model to use to transform text data. Built-in options are
'LatentDirichletAllocation' or 'NMF' (default: LDA). To change default
parameters, set to a dictionary e.g. {'model' : 'NMF', 'params' :
{'n_components' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.decomposition
for details on the two model options. You can also specify your own
text model as a class, or class instance. With either option, the class
must have a fit_transform method (see here:
http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to text_params. If
a class instance, no parameters can be passed.
corpus : list (or list of lists) of text samples or 'wiki', 'nips', 'sotus'.
Text to use to fit the semantic model (optional). If set to 'wiki', 'nips'
or 'sotus' and the default semantic and vectorizer models are used, a
pretrained model will be loaded which can save a lot of time.
ppca : bool
Performs PPCA to fill in missing values (default: True)
text_align : str
Alignment algorithm to use when both text and numerical data are passed.
If numerical arrays have the same shape, and the text data contains the
same number of samples, the text and numerical data are automatically
aligned to a common space. Example use case: an array of movie frames
(frames by pixels) and text descriptions of the frame. In this case,
the movie and text will be automatically aligned to the same space
(default: hyperalignment).
Returns
----------
data : list of numpy arrays
A list of formatted arrays
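    Examples
    ----------
    A minimal illustrative call (hypothetical arrays, not a doctest):

        import numpy as np
        arrs = format_data([np.random.rand(10, 3), np.random.rand(10, 3)])
        # arrs is a list of two (10, 3) numpy arrays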
"""
# not sure why i needed to import here, but its the only way I could get it to work
from .df2mat import df2mat
from .text2mat import text2mat
from ..datageometry import DataGeometry
# if x is not a list, make it one
if type(x) is not list:
x = [x]
if all([isinstance(xi, six.string_types) for xi in x]):
x = [x]
# check data type for each element in list
dtypes = list(map(get_type, x))
# handle text data:
if any(map(lambda x: x in ['list_str', 'str', 'arr_str'], dtypes)):
# default text args
text_args = {
'vectorizer' : vectorizer,
'semantic' : semantic,
'corpus' : corpus
}
# filter text data
text_data = []
for i,j in zip(x, dtypes):
if j in ['list_str', 'str', 'arr_str']:
text_data.append(np.array(i).reshape(-1, 1))
# convert text to numerical matrices
text_data = text2mat(text_data, **text_args)
# replace the text data with transformed data
processed_x = []
textidx=0
for i, dtype in enumerate(dtypes):
if dtype in ['list_str', 'str', 'arr_str']:
processed_x.append(text_data[textidx])
textidx+=1
elif dtype == 'df':
processed_x.append(df2mat(x[i]))
elif dtype == 'geo':
text_args = {
'vectorizer' : vectorizer,
'semantic' : semantic,
'corpus' : corpus
}
for j in format_data(x[i].get_data(), **text_args):
processed_x.append(j)
else:
processed_x.append(x[i])
# reshape anything that is 1d
if any([i.ndim<=1 for i in processed_x]):
processed_x = [np.reshape(i,(i.shape[0],1)) if i.ndim==1 else i for i in processed_x]
contains_text = any([dtype in ['list_str', 'str', 'arr_str'] for dtype in dtypes])
contains_num = any([dtype in ['list_num', 'array', 'df', 'arr_num'] for dtype in dtypes])
# if there are any nans in any of the lists, use ppca
if ppca is True:
if contains_num:
num_data = []
for i,j in zip(processed_x, dtypes):
if j in ['list_num', 'array', 'df', 'arr_num']:
num_data.append(i)
if np.isnan(np.vstack(num_data)).any():
warnings.warn('Missing data: Inexact solution computed with PPCA (see https://github.com/allentran/pca-magic for details)')
num_data = fill_missing(num_data)
x_temp = []
for dtype in dtypes:
if dtype in ['list_str', 'str', 'arr_str']:
x_temp.append(text_data.pop(0))
elif dtype in ['list_num', 'array', 'df', 'arr_num']:
x_temp.append(num_data.pop(0))
processed_x = x_temp
# if input data contains both text and numerical data
if contains_num and contains_text:
# and if they have the same number of samples
if np.unique(np.array([i.shape[0] for i, j in zip(processed_x, dtypes)])).shape[0] == 1:
from .align import align as aligner
# align the data
warnings.warn('Numerical and text data with same number of '
'samples detected. Aligning data to a common space.')
processed_x = aligner(processed_x, align=text_align, format_data=False)
return processed_x
def fill_missing(x):
# ppca if missing data
m = PPCA()
m.fit(data=np.vstack(x))
x_pca = m.transform()
# if the whole row is missing, return nans
    all_missing = [idx for idx, a in enumerate(np.vstack(x)) if np.all(np.isnan(a))]
if len(all_missing)>0:
for i in all_missing:
x_pca[i, :] = np.nan
# get the original lists back
if len(x)>1:
x_split = np.cumsum([i.shape[0] for i in x][:-1])
return list(np.split(x_pca, x_split, axis=0))
else:
return [x_pca]
| mit | -6,226,596,756,234,407,000 | 38.583784 | 139 | 0.604534 | false |
RetroMelon/PatchWords | patchwords_project/patchwords/queries.py | 1 | 3242 | # The queries.py file contains a bunch of relatively complex database
# queries that probably shouldn't take place inside the views.
from patchwords.models import Category, Story, Paragraph, User, Favourite
#gets a list of the most-favourited stories, sliced from start to end
def getTopStories(start=0, end=5, category=None):
#getting the stories and zipping them with their favourites
if not category:
stories = Story.objects.all()
else:
stories = Story.objects.filter(category=category)
stories_ordered = zip(map(lambda x : x.favourites, stories), stories)
if not stories_ordered:
return []
#sorting the stories
stories_ordered.sort()
stories_ordered.reverse()
#unzipping the stories
stories_ordered = zip(*(stories_ordered))[1]
return stories_ordered[start:end]
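# Illustrative: getTopStories(0, 5) yields the five most-favourited Story
# objects overall; passing a Category restricts the ranking to that category.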
def getTopCategories(quantity=20):
    #pairing each category with its total story count.
cats = Category.objects.all()
cats_with_story_count = map(lambda x: (x.total_stories, x), cats)
cats_with_story_count.sort()
cats_with_story_count.reverse()
#unzipping the categories
cats_with_story_count = zip(*(cats_with_story_count))[1]
#returning the top 20
return cats_with_story_count[:quantity]
def _sortParagraphs(paragraphs):
    if not paragraphs:
        return []
    #sorting by likes (most liked first), breaking ties by creation date
    def comparator(x, y):
        #if we have the same likes we should compare dates instead
        if x.likes == y.likes:
            if x.created_datetime < y.created_datetime:
                return -1
            elif x.created_datetime > y.created_datetime:
                return 1
            return 0
        else:
            return y.likes - x.likes
    return sorted(paragraphs, cmp=comparator)
#a public wrapper around _getMostPopularSubtree
def getMostPopularSubtree(paragraph):
return _getMostPopularSubtree([paragraph,])
#given a paragraph list, this returns a list of lists of paragraphs.
#it assumes that the first paragraph in the list is the most popular
def _getMostPopularSubtree(paragraphs):
#getting the most popular paragraph's children
child_paragraphs = Paragraph.objects.filter(parent=paragraphs[0])
#sorting all of the children
child_paragraphs = _sortParagraphs(child_paragraphs)
#print "child paragraphs: \n\n", child_paragraphs
#adding the children to the list of things to return
return_list = [child_paragraphs,]
    #if the children list is not empty, then we extend the list with the most popular subtree
if child_paragraphs:
most_pop = _getMostPopularSubtree(child_paragraphs)
if most_pop and most_pop[0]:
return_list.extend(most_pop)
return return_list
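# Illustrative (hypothetical data): if root's most-liked child is b and b's
# most-liked child is c (a leaf), getMostPopularSubtree(root) returns
# [[b, ...b's siblings], [c, ...c's siblings]] -- one like-sorted sibling
# list per level, always following the most popular branch.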
def get_favourited_stories(request,username):
user = User.objects.get(username=username)
favourites = list(Favourite.objects.filter(user=user))
stories = []
for favourite in favourites:
stories += [favourite.story]
return stories
| mit | 3,769,124,615,543,503,400 | 30.784314 | 92 | 0.692474 | false |
Collisionc/sickbeard_mp4_automator | delugePostProcess.py | 1 | 5295 | #!/usr/bin/env python
import os
import sys
import shutil
from autoprocess import autoProcessTV, autoProcessMovie, autoProcessTVSR, sonarr, radarr
from readSettings import ReadSettings
from mkvtomp4 import MkvtoMp4
from deluge_client import DelugeRPCClient
import logging
from logging.config import fileConfig
logpath = '/var/log/sickbeard_mp4_automator'
if os.name == 'nt':
logpath = os.path.dirname(sys.argv[0])
elif not os.path.isdir(logpath):
try:
os.mkdir(logpath)
except:
logpath = os.path.dirname(sys.argv[0])
configPath = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'logging.ini')).replace("\\", "\\\\")
logPath = os.path.abspath(os.path.join(logpath, 'index.log')).replace("\\", "\\\\")
fileConfig(configPath, defaults={'logfilename': logPath})
log = logging.getLogger("delugePostProcess")
log.info("Deluge post processing started.")
settings = ReadSettings(os.path.dirname(sys.argv[0]), "autoProcess.ini")
categories = [settings.deluge['sb'], settings.deluge['cp'], settings.deluge['sonarr'], settings.deluge['radarr'], settings.deluge['sr'], settings.deluge['bypass']]
remove = settings.deluge['remove']
if len(sys.argv) < 4:
log.error("Not enough command line parameters present, are you launching this from deluge?")
sys.exit()
path = str(sys.argv[3])
torrent_name = str(sys.argv[2])
torrent_id = str(sys.argv[1])
delete_dir = None
log.debug("Path: %s." % path)
log.debug("Torrent: %s." % torrent_name)
log.debug("Hash: %s." % torrent_id)
client = DelugeRPCClient(host=settings.deluge['host'], port=int(settings.deluge['port']), username=settings.deluge['user'], password=settings.deluge['pass'])
client.connect()
if client.connected:
log.info("Successfully connected to Deluge")
else:
log.error("Failed to connect to Deluge")
sys.exit()
torrent_data = client.call('core.get_torrent_status', torrent_id, ['files', 'label'])
try:
torrent_files = torrent_data[b'files']
category = torrent_data[b'label'].lower().decode()
except:
torrent_files = torrent_data['files']
category = torrent_data['label'].lower()
files = []
log.info("List of files in torrent:")
for contents in torrent_files:
try:
files.append(contents[b'path'].decode())
log.debug(contents[b'path'].decode())
except:
files.append(contents['path'])
log.info(contents['path'])
if category.lower() not in categories:
log.error("No valid category detected.")
sys.exit()
if len(categories) != len(set(categories)):
log.error("Duplicate category detected. Category names must be unique.")
sys.exit()
if settings.deluge['convert']:
# Check for custom Deluge output_dir
if settings.deluge['output_dir']:
settings.output_dir = settings.deluge['output_dir']
log.info("Overriding output_dir to %s." % settings.deluge['output_dir'])
# Perform conversion.
settings.delete = False
if not settings.output_dir:
suffix = "convert"
settings.output_dir = os.path.join(path, ("%s-%s" % (torrent_name, suffix)))
if not os.path.exists(settings.output_dir):
os.mkdir(settings.output_dir)
delete_dir = settings.output_dir
converter = MkvtoMp4(settings)
for filename in files:
inputfile = os.path.join(path, filename)
if MkvtoMp4(settings).validSource(inputfile):
log.info("Converting file %s at location %s." % (inputfile, settings.output_dir))
try:
output = converter.process(inputfile)
except:
log.info("Error converting file %s." % inputfile)
path = converter.output_dir
else:
suffix = "copy"
newpath = os.path.join(path, ("%s-%s" % (torrent_name, suffix)))
if not os.path.exists(newpath):
os.mkdir(newpath)
for filename in files:
inputfile = os.path.join(path, filename)
log.info("Copying file %s to %s." % (inputfile, newpath))
shutil.copy(inputfile, newpath)
path = newpath
delete_dir = newpath
# Send to Sickbeard
if (category == categories[0]):
log.info("Passing %s directory to Sickbeard." % path)
autoProcessTV.processEpisode(path, settings)
# Send to CouchPotato
elif (category == categories[1]):
log.info("Passing %s directory to Couch Potato." % path)
autoProcessMovie.process(path, settings, torrent_name)
# Send to Sonarr
elif (category == categories[2]):
log.info("Passing %s directory to Sonarr." % path)
sonarr.processEpisode(path, settings)
elif (category == categories[3]):
log.info("Passing %s directory to Radarr." % path)
radarr.processMovie(path, settings)
elif (category == categories[4]):
log.info("Passing %s directory to Sickrage." % path)
autoProcessTVSR.processEpisode(path, settings)
elif (category == categories[5]):
log.info("Bypassing any further processing as per category.")
if delete_dir:
if os.path.exists(delete_dir):
try:
os.rmdir(delete_dir)
log.debug("Successfully removed tempoary directory %s." % delete_dir)
except:
log.exception("Unable to delete temporary directory.")
if remove:
try:
client.call('core.remove_torrent', torrent_id, True)
except:
log.exception("Unable to remove torrent from deluge.")
| mit | 6,938,189,214,140,944,000 | 34.066225 | 163 | 0.671388 | false |
jay-lau/magnum | magnum/tests/unit/common/test_clients.py | 1 | 15343 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client as barbicanclient
from glanceclient import client as glanceclient
from heatclient import client as heatclient
import mock
from neutronclient.v2_0 import client as neutronclient
from novaclient import client as novaclient
from oslo_config import cfg
from magnum.common import clients
from magnum.common import exception
from magnum.tests import base
class ClientsTest(base.BaseTestCase):
def setUp(self):
super(ClientsTest, self).setUp()
cfg.CONF.set_override('auth_uri', 'http://server.test:5000/v2.0',
group='keystone_authtoken')
cfg.CONF.import_opt('api_version', 'magnum.common.clients',
group='nova_client')
cfg.CONF.import_opt('api_version', 'magnum.common.clients',
group='heat_client')
cfg.CONF.import_opt('api_version', 'magnum.common.clients',
group='glance_client')
@mock.patch.object(clients.OpenStackClients, 'keystone')
def test_url_for(self, mock_keystone):
obj = clients.OpenStackClients(None)
obj.url_for(service_type='fake_service', interface='fake_endpoint')
mock_endpoint = mock_keystone.return_value.session.get_endpoint
mock_endpoint.assert_called_once_with(service_type='fake_service',
interface='fake_endpoint')
@mock.patch.object(clients.OpenStackClients, 'keystone')
def test_magnum_url(self, mock_keystone):
fake_region = 'fake_region'
fake_endpoint = 'fake_endpoint'
cfg.CONF.set_override('region_name', fake_region,
group='magnum_client')
cfg.CONF.set_override('endpoint_type', fake_endpoint,
group='magnum_client')
obj = clients.OpenStackClients(None)
obj.magnum_url()
mock_endpoint = mock_keystone.return_value.session.get_endpoint
mock_endpoint.assert_called_once_with(region_name=fake_region,
service_type='container',
interface=fake_endpoint)
@mock.patch.object(heatclient, 'Client')
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def _test_clients_heat(self, expected_region_name, mock_auth, mock_url,
mock_call):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._heat = None
obj.heat()
mock_call.assert_called_once_with(
cfg.CONF.heat_client.api_version,
endpoint='url_from_keystone', username=None,
cert_file=None, token='3bcc3d3a03f44e3d8377f9247b0ad155',
auth_url='keystone_url', ca_file=None, key_file=None,
password=None, insecure=False)
mock_url.assert_called_once_with(service_type='orchestration',
interface='publicURL',
region_name=expected_region_name)
def test_clients_heat(self):
self._test_clients_heat(None)
def test_clients_heat_region(self):
cfg.CONF.set_override('region_name', 'myregion', group='heat_client')
self._test_clients_heat('myregion')
def test_clients_heat_noauth(self):
con = mock.MagicMock()
con.auth_token = None
con.auth_token_info = None
auth_url = mock.PropertyMock(name="auth_url",
return_value="keystone_url")
type(con).auth_url = auth_url
con.get_url_for = mock.Mock(name="get_url_for")
con.get_url_for.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._heat = None
self.assertRaises(exception.AuthorizationFailure, obj.heat)
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def test_clients_heat_cached(self, mock_auth, mock_url):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._heat = None
heat = obj.heat()
heat_cached = obj.heat()
self.assertEqual(heat, heat_cached)
@mock.patch.object(glanceclient, 'Client')
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def _test_clients_glance(self, expected_region_name, mock_auth, mock_url,
mock_call):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._glance = None
obj.glance()
mock_call.assert_called_once_with(
cfg.CONF.glance_client.api_version,
endpoint='url_from_keystone', username=None,
token='3bcc3d3a03f44e3d8377f9247b0ad155',
auth_url='keystone_url',
password=None)
mock_url.assert_called_once_with(service_type='image',
interface='publicURL',
region_name=expected_region_name)
def test_clients_glance(self):
self._test_clients_glance(None)
def test_clients_glance_region(self):
cfg.CONF.set_override('region_name', 'myregion', group='glance_client')
self._test_clients_glance('myregion')
def test_clients_glance_noauth(self):
con = mock.MagicMock()
con.auth_token = None
con.auth_token_info = None
auth_url = mock.PropertyMock(name="auth_url",
return_value="keystone_url")
type(con).auth_url = auth_url
con.get_url_for = mock.Mock(name="get_url_for")
con.get_url_for.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._glance = None
self.assertRaises(exception.AuthorizationFailure, obj.glance)
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def test_clients_glance_cached(self, mock_auth, mock_url):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._glance = None
glance = obj.glance()
glance_cached = obj.glance()
self.assertEqual(glance, glance_cached)
@mock.patch.object(clients.OpenStackClients, 'keystone')
@mock.patch.object(barbicanclient, 'Client')
@mock.patch.object(clients.OpenStackClients, 'url_for')
def _test_clients_barbican(self, expected_region_name, mock_url,
mock_call, mock_keystone):
con = mock.MagicMock()
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
keystone = mock.MagicMock()
keystone.session = mock.MagicMock()
mock_keystone.return_value = keystone
obj = clients.OpenStackClients(con)
obj._barbican = None
obj.barbican()
mock_call.assert_called_once_with(
endpoint='url_from_keystone',
session=keystone.session)
mock_keystone.assert_called_once_with()
mock_url.assert_called_once_with(service_type='key-manager',
interface='publicURL',
region_name=expected_region_name)
def test_clients_barbican(self):
self._test_clients_barbican(None)
def test_clients_barbican_region(self):
cfg.CONF.set_override('region_name', 'myregion',
group='barbican_client')
self._test_clients_barbican('myregion')
def test_clients_barbican_noauth(self):
con = mock.MagicMock()
con.auth_token = None
con.auth_token_info = None
auth_url = mock.PropertyMock(name="auth_url",
return_value="keystone_url")
type(con).auth_url = auth_url
con.get_url_for = mock.Mock(name="get_url_for")
con.get_url_for.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._barbican = None
self.assertRaises(exception.AuthorizationFailure, obj.barbican)
@mock.patch.object(clients.OpenStackClients, 'keystone')
@mock.patch.object(clients.OpenStackClients, 'url_for')
def test_clients_barbican_cached(self, mock_url, mock_keystone):
con = mock.MagicMock()
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
keystone = mock.MagicMock()
keystone.session = mock.MagicMock()
mock_keystone.return_value = keystone
obj = clients.OpenStackClients(con)
obj._barbican = None
barbican = obj.barbican()
barbican_cached = obj.barbican()
self.assertEqual(barbican, barbican_cached)
@mock.patch.object(novaclient, 'Client')
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def _test_clients_nova(self, expected_region_name, mock_auth, mock_url,
mock_call):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._nova = None
obj.nova()
mock_call.assert_called_once_with(cfg.CONF.nova_client.api_version,
auth_token=con.auth_token)
mock_url.assert_called_once_with(service_type='compute',
interface='publicURL',
region_name=expected_region_name)
def test_clients_nova(self):
self._test_clients_nova(None)
def test_clients_nova_region(self):
cfg.CONF.set_override('region_name', 'myregion', group='nova_client')
self._test_clients_nova('myregion')
def test_clients_nova_noauth(self):
con = mock.MagicMock()
con.auth_token = None
con.auth_token_info = None
auth_url = mock.PropertyMock(name="auth_url",
return_value="keystone_url")
type(con).auth_url = auth_url
con.get_url_for = mock.Mock(name="get_url_for")
con.get_url_for.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._nova = None
self.assertRaises(exception.AuthorizationFailure, obj.nova)
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def test_clients_nova_cached(self, mock_auth, mock_url):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._nova = None
nova = obj.nova()
nova_cached = obj.nova()
self.assertEqual(nova, nova_cached)
@mock.patch.object(neutronclient, 'Client')
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def _test_clients_neutron(self, expected_region_name, mock_auth, mock_url,
mock_call):
fake_endpoint_type = 'fake_endpoint_type'
cfg.CONF.set_override('endpoint_type', fake_endpoint_type,
group='neutron_client')
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._neutron = None
obj.neutron()
mock_call.assert_called_once_with(
endpoint_url='url_from_keystone',
endpoint_type=fake_endpoint_type,
auth_url='keystone_url',
token='3bcc3d3a03f44e3d8377f9247b0ad155')
mock_url.assert_called_once_with(service_type='network',
interface=fake_endpoint_type,
region_name=expected_region_name)
def test_clients_neutron(self):
self._test_clients_neutron(None)
def test_clients_neutron_region(self):
cfg.CONF.set_override('region_name', 'myregion',
group='neutron_client')
self._test_clients_neutron('myregion')
def test_clients_neutron_noauth(self):
con = mock.MagicMock()
con.auth_token = None
con.auth_token_info = None
auth_url = mock.PropertyMock(name="auth_url",
return_value="keystone_url")
type(con).auth_url = auth_url
con.get_url_for = mock.Mock(name="get_url_for")
con.get_url_for.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._neutron = None
self.assertRaises(exception.AuthorizationFailure, obj.neutron)
@mock.patch.object(clients.OpenStackClients, 'url_for')
@mock.patch.object(clients.OpenStackClients, 'auth_url')
def test_clients_neutron_cached(self, mock_auth, mock_url):
mock_auth.__get__ = mock.Mock(return_value="keystone_url")
con = mock.MagicMock()
con.auth_token = "3bcc3d3a03f44e3d8377f9247b0ad155"
con.auth_url = "keystone_url"
mock_url.return_value = "url_from_keystone"
obj = clients.OpenStackClients(con)
obj._neutron = None
neutron = obj.neutron()
neutron_cached = obj.neutron()
self.assertEqual(neutron, neutron_cached)
| apache-2.0 | 3,733,136,881,680,137,000 | 42.962751 | 79 | 0.612527 | false |
FreeOpcUa/python-opcua | opcua/common/events.py | 1 | 8172 | import copy
from opcua import ua
import opcua
from opcua.ua.uaerrors import UaError
from opcua.common import ua_utils
class Event(object):
"""
OPC UA Event object.
    This class is inherited by the common event objects such as BaseEvent,
    the other auto-generated standard events, and custom events.
    Events are used to trigger notifications on the server side and are
    sent to clients for every event the server emits.
    Developer Warning:
    On the server side the data type of every attribute must be known, so
    add properties using the add_property method!!!
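    Example (illustrative sketch of adding a property and serializing it
    to variants):
        ev = Event()
        ev.add_property("Severity", 500, ua.VariantType.UInt16)
        fields = ev.get_event_props_as_fields_dict()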
"""
def __init__(self, emitting_node=ua.ObjectIds.Server):
self.server_handle = None
self.select_clauses = None
self.event_fields = None
self.data_types = {}
if isinstance(emitting_node, ua.NodeId):
self.emitting_node = emitting_node
else:
self.emitting_node = ua.NodeId(emitting_node)
# save current attributes
self.internal_properties = list(self.__dict__.keys())[:] + ["internal_properties"]
def __str__(self):
return "{0}({1})".format(
self.__class__.__name__,
[str(k) + ":" + str(v) for k, v in self.__dict__.items() if k not in self.internal_properties])
__repr__ = __str__
def add_property(self, name, val, datatype):
"""
        Add a property to the event and store its data type
"""
setattr(self, name, val)
self.data_types[name] = datatype
def get_event_props_as_fields_dict(self):
"""
convert all properties of the Event class to a dict of variants
"""
field_vars = {}
for key, value in vars(self).items():
if not key.startswith("__") and key not in self.internal_properties:
field_vars[key] = ua.Variant(value, self.data_types[key])
return field_vars
@staticmethod
def from_field_dict(fields):
"""
Create an Event object from a dict of name and variants
"""
ev = Event()
for k, v in fields.items():
ev.add_property(k, v.Value, v.VariantType)
return ev
def to_event_fields_using_subscription_fields(self, select_clauses):
"""
Using a new select_clauses and the original select_clauses
used during subscription, return a field list
"""
fields = []
for sattr in select_clauses:
for idx, o_sattr in enumerate(self.select_clauses):
if sattr.BrowsePath == o_sattr.BrowsePath and sattr.AttributeId == o_sattr.AttributeId:
fields.append(self.event_fields[idx])
break
return fields
def to_event_fields(self, select_clauses):
"""
return a field list using a select clause and the object properties
"""
none_field = ua.Variant(None, ua.VariantType.Null)
fields = []
for sattr in select_clauses:
if not sattr.BrowsePath:
name = ua.AttributeIds(sattr.AttributeId).name
else:
name = sattr.BrowsePath[0].Name
try:
val = getattr(self, name)
except AttributeError:
field = none_field
else:
field = ua.Variant(copy.deepcopy(val), self.data_types[name])
fields.append(field)
return fields
@staticmethod
def from_event_fields(select_clauses, fields):
"""
Instantiate an Event object from a select_clauses and fields
"""
ev = Event()
ev.select_clauses = select_clauses
ev.event_fields = fields
for idx, sattr in enumerate(select_clauses):
if len(sattr.BrowsePath) == 0:
name = sattr.AttributeId.name
else:
name = sattr.BrowsePath[0].Name
ev.add_property(name, fields[idx].Value, fields[idx].VariantType)
return ev
def get_filter_from_event_type(eventtypes):
evfilter = ua.EventFilter()
evfilter.SelectClauses = select_clauses_from_evtype(eventtypes)
evfilter.WhereClause = where_clause_from_evtype(eventtypes)
return evfilter
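# Illustrative sketch (not part of the module): using the filter with a
# client-side subscription. ``client`` and ``handler`` are assumed to be a
# connected Client and an object implementing ``event_notification(event)``;
# the ``evfilter`` keyword of ``subscribe_events`` is an assumption here.
#     evtype = client.get_node(ua.ObjectIds.BaseEventType)
#     evfilter = get_filter_from_event_type([evtype])
#     sub = client.create_subscription(100, handler)
#     handle = sub.subscribe_events(evfilter=evfilter)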
def select_clauses_from_evtype(evtypes):
clauses = []
selected_paths = []
for evtype in evtypes:
for prop in get_event_properties_from_type_node(evtype):
if prop.get_browse_name() not in selected_paths:
op = ua.SimpleAttributeOperand()
op.AttributeId = ua.AttributeIds.Value
op.BrowsePath = [prop.get_browse_name()]
clauses.append(op)
selected_paths.append(prop.get_browse_name())
return clauses
def where_clause_from_evtype(evtypes):
cf = ua.ContentFilter()
el = ua.ContentFilterElement()
    # operands can be ElementOperand, LiteralOperand, AttributeOperand, SimpleAttributeOperand
    # Create a clause where the generated event's EventType property
    # must be a subtype of one of the event types in the evtypes argument
# the first operand is the attribute event type
op = ua.SimpleAttributeOperand()
# op.TypeDefinitionId = evtype.nodeid
op.BrowsePath.append(ua.QualifiedName("EventType", 0))
op.AttributeId = ua.AttributeIds.Value
el.FilterOperands.append(op)
# now create a list of all subtypes we want to accept
subtypes = []
for evtype in evtypes:
subtypes += [st.nodeid for st in ua_utils.get_node_subtypes(evtype)]
subtypes = list(set(subtypes)) # remove duplicates
for subtypeid in subtypes:
op = ua.LiteralOperand()
op.Value = ua.Variant(subtypeid)
el.FilterOperands.append(op)
el.FilterOperator = ua.FilterOperator.InList
cf.Elements.append(el)
return cf
def get_event_properties_from_type_node(node):
properties = []
curr_node = node
while True:
properties.extend(curr_node.get_properties())
if curr_node.nodeid.Identifier == ua.ObjectIds.BaseEventType:
break
parents = curr_node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
if len(parents) != 1: # Something went wrong
return None
curr_node = parents[0]
return properties
def get_event_obj_from_type_node(node):
"""
return an Event object from an event type node
"""
if node.nodeid.Identifier in opcua.common.event_objects.IMPLEMENTED_EVENTS.keys():
return opcua.common.event_objects.IMPLEMENTED_EVENTS[node.nodeid.Identifier]()
else:
parent_identifier, parent_eventtype = _find_parent_eventtype(node)
class CustomEvent(parent_eventtype):
def __init__(self):
parent_eventtype.__init__(self)
self.EventType = node.nodeid
curr_node = node
while curr_node.nodeid.Identifier != parent_identifier:
for prop in curr_node.get_properties():
name = prop.get_browse_name().Name
val = prop.get_data_value()
self.add_property(name, val.Value.Value, val.Value.VariantType)
parents = curr_node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
if len(parents) != 1: # Something went wrong
                        raise UaError("Parent of event type could not be found")
curr_node = parents[0]
self._freeze = True
return CustomEvent()
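# Illustrative sketch (server side, hypothetical ids): resolving an event
# object for a custom event type node before triggering it:
#     etype_node = server.get_node(my_event_type_nodeid)
#     ev = get_event_obj_from_type_node(etype_node)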
def _find_parent_eventtype(node):
"""
"""
parents = node.get_referenced_nodes(refs=ua.ObjectIds.HasSubtype, direction=ua.BrowseDirection.Inverse, includesubtypes=True)
if len(parents) != 1: # Something went wrong
        raise UaError("Parent of event type could not be found")
if parents[0].nodeid.Identifier in opcua.common.event_objects.IMPLEMENTED_EVENTS.keys():
return parents[0].nodeid.Identifier, opcua.common.event_objects.IMPLEMENTED_EVENTS[parents[0].nodeid.Identifier]
else:
return _find_parent_eventtype(parents[0])
| lgpl-3.0 | -8,488,097,905,301,647,000 | 34.376623 | 150 | 0.618453 | false |
birdland/dlkit-doc | dlkit/services/grading.py | 1 | 224535 | # -*- coding: utf-8 -*-
"""Grading Open Service Interface Definitions
grading version 3.0.0
The Grading OSID defines a service to apply grades or ratings.
Grade Systems
The grade system sessions provide the means to retrieve and manage
``GradeSystem`` definitions. A ``GradeSystem`` is a fixed set of
``Grades`` . ``GradeSystems`` may also take the form of a numerical
score as well as a rating based on some system. ``GradeEntries`` belong
to a single ``GradebookColumn``.
Gradebook Columns
A ``Gradebook`` is represented by a series of ``GradebookColumns``. A
``GradebookColumn`` represents something to be graded and is joined to
a single ``GradeSystem``. A ``GradebookColumn`` may be constrained to a
single grader.
Grade Entries
A ``GradebookColumn`` is comprised of a series of ``GradeEntry``
elements. A ``GradebookColumn`` may represent "Assignment 3" while a
``GradeEntry`` may represent the assignment turned in by a particular
student.
A ``Grade`` can be applied to a ``GradeEntry`` that relates the entry to
a grader and a key ``Resource``. In the case of a class gradebook, the
key resource represents the student. If there are multiple graders for
the same key resource, each grader gets their own ``GradebookColumn``.
Gradebooks may also be used to capture ratings about other objects. In
the case where people vote for their favorite assets, the key resource
represents the ``Asset`` .
``GradebookColumns`` may have a ``GradebookColumnSummary`` entry for
summary results and statistics across all ``GradeEntries`` in the
column.
Gradebook Cataloging
``GradebookColumns`` are organized into ``Gradebooks``. ``Gradebooks``
also provide for a federated hierarchy of ``GradebookColumns``. Simple
reordering of ``GradebookColumns`` can be performed by moving the
``GradebookColumn`` relative to another. The relative positioning may
reference two ``GradebookColumns`` through the federation.
Sub Packages
The Grading OSID includes several subpackages. The Grading Transform
OSID provides a means of translating one ``GradeSystem`` to another. The
Grading Calculation OSID defines derived ``GradebookColumns``. The
Grading Batch OSID manages ``GradeSystems,`` ``GradeEntries,``
``Gradebooks,`` and ``GradebookColumns`` in bulk.
"""
from ..osid import managers as osid_managers
from ..osid import sessions as osid_sessions
from ..osid import objects as osid_objects
from ..osid import markers as osid_markers
from ..osid import records as osid_records
from ..osid import queries as osid_queries
from ..osid import searches as osid_searches
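# Illustrative sketch (not part of the service definition): a typical flow
# through the sessions documented below, assuming ``mgr`` is an instantiated
# ``GradingManager`` bound to a default gradebook; the ``display_name``
# assignment on the form is an assumption.
#     form = mgr.get_gradebook_column_form_for_create([])
#     form.display_name = 'Assignment 3'
#     column = mgr.create_gradebook_column(form)
#     for col in mgr.get_gradebook_columns():
#         print(col)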
class GradingProfile(osid_managers.OsidProfile):
"""The ``GradingProfile`` describes the interoperability among grading services."""
def __init__(self):
self._provider_manager = None
def supports_grade_system_lookup(self):
"""Tests if a grade system lookup service is supported.
:return: true if grade system lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_grade_system_query(self):
"""Tests if a grade system query service is supported.
:return: ``true`` if grade system query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_grade_system_admin(self):
"""Tests if a grade system administrative service is supported.
:return: ``true`` if grade system admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_grade_entry_lookup(self):
"""Tests if a grade entry lookup service is supported.
:return: true if grade entry lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_grade_entry_query(self):
"""Tests if a grade entry query service is supported.
:return: true if grade entry query is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_grade_entry_admin(self):
"""Tests if a grade entry administrative service is supported.
:return: ``true`` if grade entry admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_gradebook_column_lookup(self):
"""Tests if a gradebook column lookup service is supported.
:return: true if gradebook column lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_gradebook_column_query(self):
"""Tests if a gradebook column query service is supported.
:return: ``true`` if grade system query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_gradebook_column_admin(self):
"""Tests if a gradebook column administrative service is supported.
:return: ``true`` if gradebook column admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_gradebook_lookup(self):
"""Tests if a gradebook lookup service is supported.
:return: ``true`` if gradebook lookup is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def supports_gradebook_admin(self):
"""Tests if a gradebook administrative service is supported.
:return: ``true`` if gradebook admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_record_types(self):
"""Gets the supported ``Grade`` record types.
:return: a list containing the supported ``Grade`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
grade_record_types = property(fget=get_grade_record_types)
def get_grade_system_record_types(self):
"""Gets the supported ``GradeSystem`` record types.
:return: a list containing the supported ``GradeSystem`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
grade_system_record_types = property(fget=get_grade_system_record_types)
def get_grade_system_search_record_types(self):
"""Gets the supported ``GradeSystem`` search record types.
:return: a list containing the supported ``GradeSystem`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
grade_system_search_record_types = property(fget=get_grade_system_search_record_types)
def get_grade_entry_record_types(self):
"""Gets the supported ``GradeEntry`` record types.
:return: a list containing the supported ``GradeEntry`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
grade_entry_record_types = property(fget=get_grade_entry_record_types)
def get_grade_entry_search_record_types(self):
"""Gets the supported ``GradeEntry`` search record types.
:return: a list containing the supported ``GradeEntry`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
grade_entry_search_record_types = property(fget=get_grade_entry_search_record_types)
def get_gradebook_column_record_types(self):
"""Gets the supported ``GradebookColumn`` record types.
:return: a list containing the supported ``GradebookColumn`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
gradebook_column_record_types = property(fget=get_gradebook_column_record_types)
def get_gradebook_column_search_record_types(self):
"""Gets the supported gradebook column search record types.
:return: a list containing the supported ``GradebookColumn`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
gradebook_column_search_record_types = property(fget=get_gradebook_column_search_record_types)
def get_gradebook_column_summary_record_types(self):
"""Gets the supported ``GradebookColumnSummary`` record types.
:return: a list containing the supported ``GradebookColumnSummary`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
gradebook_column_summary_record_types = property(fget=get_gradebook_column_summary_record_types)
def get_gradebook_record_types(self):
"""Gets the supported ``Gradebook`` record types.
:return: a list containing the supported ``Gradebook`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
gradebook_record_types = property(fget=get_gradebook_record_types)
def get_gradebook_search_record_types(self):
"""Gets the supported gradebook search record types.
:return: a list containing the supported ``Gradebook`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
gradebook_search_record_types = property(fget=get_gradebook_search_record_types)
##
# The following methods are from osid.grading.GradebookColumnLookupSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_lookup_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_column_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_column_view(self):
"""A complete view of the ``GradebookColumn`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column(self, gradebook_column_id):
"""Gets the ``GradebookColumn`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``GradebookColumn`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``GradebookColumn`` and
retained for compatibility.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumn``
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
def get_gradebook_columns_by_ids(self, gradebook_column_ids):
"""Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the gradebook
columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible gradeboook columns may be omitted from the list.
:param gradebook_column_ids: the list of ``Ids`` to retrieve
:type gradebook_column_ids: ``osid.id.IdList``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` and include any additional columns with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
"""Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_record_type: a gradebook column record type
:type gradebook_column_record_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns(self):
"""Gets all gradebook columns.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:return: a ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
gradebook_columns = property(fget=get_gradebook_columns)
def supports_summary(self):
"""Tests if a summary entry is available.
:return: ``true`` if a summary entry is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_summary(self, gradebook_column_id):
"""Gets the ``GradebookColumnSummary`` for summary results.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column summary
:rtype: ``osid.grading.GradebookColumnSummary``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``supports_summary()`` is ``false``
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnSummary
##
# The following methods are from osid.grading.GradebookColumnQuerySession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_search_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
:return: ``false`` if search methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column_query(self):
"""Gets a gradebook column query.
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumnQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnQuery
gradebook_column_query = property(fget=get_gradebook_column_query)
def get_gradebook_columns_by_query(self, gradebook_column_query):
"""Gets a list of gradebook columns matching the given query.
:param gradebook_column_query: the gradebook column query
:type gradebook_column_query: ``osid.grading.GradebookColumnQuery``
:return: the returned ``GradebookColumnList``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
##
# The following methods are from osid.grading.GradebookColumnAdminSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_create_gradebook_columns(self):
"""Tests if this user can create gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a gradebook
column will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer create
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types):
"""Tests if this user can create a single ``GradebookColumn`` using the desired record types.
While ``GradingManager.getGradebookColumnRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``GradebookColumn``. Providing an empty array tests if a
``GradebookColumn`` can be created with no records.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_create(self, gradebook_column_record_types):
"""Gets the gradebook column form for creating new gradebook columns.
A new form should be requested for each create transaction.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def create_gradebook_column(self, gradebook_column_form):
"""Creates a new ``GradebookColumn``.
:param gradebook_column_form: the form for this ``GradebookColumn``
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:return: the new ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumn``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
def can_update_gradebook_columns(self):
"""Tests if this user can update gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer update operations to an unauthorized user.
:return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_update(self, gradebook_column_id):
"""Gets the gradebook column form for updating an existing gradebook column.
A new gradebook column form should be requested for each update
transaction.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def update_gradebook_column(self, gradebook_column_form):
"""Updates an existing gradebook column.
:param gradebook_column_form: the form containing the elements to be updated
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def sequence_gradebook_columns(self, gradebook_column_ids):
"""Resequences the gradebook columns.
:param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
:type gradebook_column_ids: ``osid.id.IdList``
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
"""Moves a gradebook column in front of another.
:param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type front_gradebook_column_id: ``osid.id.Id``
:param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
"""Copies gradebook column entries from one column to another.
If the target grade column grade system differs from the source,
the grades in the entries are transformed to the new grade
system.
:param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type source_gradebook_column_id: ``osid.id.Id``
:param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_gradebook_columns(self):
"""Tests if this user can delete gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer delete operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook_column(self, gradebook_column_id):
"""Deletes the ``GradebookColumn`` identified by the given ``Id``.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
:type gradebook_column_id: ``osid.id.Id``
:raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_column_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook_column(self, gradebook_column_id, alias_id):
"""Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.
The primary ``Id`` of the ``GradebookColumn`` is determined by
the provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another gradebook column,
it is reassigned to the given gradebook column ``Id``.
:param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
##
# The following methods are from osid.grading.GradebookLookupSession
def can_lookup_gradebooks(self):
"""Tests if this user can perform ``Gradebook`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_view(self):
"""A complete view of the ``Gradebook`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook(self, gradebook_id):
"""Gets the ``Gradebook`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Gradebook`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``Gradebook`` and retained
        for compatibility.
:param gradebook_id: ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook
:rtype: ``osid.grading.Gradebook``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
def get_gradebooks_by_ids(self, gradebook_ids):
"""Gets a ``GradebookList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
gradebooks specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Gradebook`` objects may be omitted from the list
and may present the elements in any order including returning a
unique set.
:param gradebook_ids: the list of ``Ids`` to retrieve
:type gradebook_ids: ``osid.id.IdList``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
:raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` and include any additional gradebooks with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_record_type(self, gradebook_record_type):
"""Gets a ``GradebookList`` containing the given gradebook record ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_record_type: a gradebook record type
:type gradebook_record_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_provider(self, resource_id):
"""Gets a ``GradebookList`` for the given provider ````.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks(self):
"""Gets all ``Gradebooks``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:return: a ``GradebookList``
:rtype: ``osid.grading.GradebookList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
gradebooks = property(fget=get_gradebooks)
##
# The following methods are from osid.grading.GradebookAdminSession
def can_create_gradebooks(self):
"""Tests if this user can create ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
:return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_with_record_types(self, gradebook_record_types):
"""Tests if this user can create a single ``Gradebook`` using the desired record types.
While ``GradingManager.getGradebookRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``Gradebook``.
Providing an empty array tests if a ``Gradebook`` can be created
with no records.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_create(self, gradebook_record_types):
"""Gets the gradebook form for creating new gradebooks.
A new form should be requested for each create transaction.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def create_gradebook(self, gradebook_form):
"""Creates a new ``Gradebook``.
:param gradebook_form: the form for this ``Gradebook``
:type gradebook_form: ``osid.grading.GradebookForm``
:return: the new ``Gradebook``
:rtype: ``osid.grading.Gradebook``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
def can_update_gradebooks(self):
"""Tests if this user can update ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
:return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_update(self, gradebook_id):
"""Gets the gradebook form for updating an existing gradebook.
A new gradebook form should be requested for each update
transaction.
:param gradebook_id: the ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NotFound`` -- ``gradebook_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def update_gradebook(self, gradebook_form):
"""Updates an existing gradebook.
:param gradebook_form: the form containing the elements to be updated
:type gradebook_form: ``osid.grading.GradebookForm``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_gradebooks(self):
"""Tests if this user can delete gradebooks.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
:return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook(self, gradebook_id):
"""Deletes a ``Gradebook``.
:param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
:type gradebook_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook(self, gradebook_id, alias_id):
"""Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Gradebook`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id`` . If the alias is a pointer to another gradebook, it is
reassigned to the given gradebook ``Id``.
:param gradebook_id: the ``Id`` of a ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
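# Illustrative sketch (not part of the abstract API): a typical admin
# create/update lifecycle against a hypothetical concrete implementation of
# the session methods above. The ``admin_session`` variable and the
# ``display_name``/``description``/``ident`` attributes are assumptions for
# illustration only.
#
#     if admin_session.can_create_gradebooks():
#         form = admin_session.get_gradebook_form_for_create([])
#         form.display_name = 'Fall Term'                 # assumed form attribute
#         gradebook = admin_session.create_gradebook(form)
#
#     if admin_session.can_update_gradebooks():
#         form = admin_session.get_gradebook_form_for_update(gradebook.ident)
#         form.description = 'Grades for the fall term'   # assumed form attribute
#         admin_session.update_gradebook(form)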
class GradingManager(osid_managers.OsidManager, osid_sessions.OsidSession, GradingProfile):
"""The grading manager provides access to grading sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``GradeSystemLookupSession:`` a session to look up grades and
grade systems
    * ``GradeSystemQuerySession:`` a session to query grade systems
* ``GradeSystemSearchSession:`` a session to search grade systems
* ``GradeSystemAdminSession:`` a session to manage grade systems
* ``GradeSystemNotificationSession`` a session for subscribing to
new or deleted grades or grade systems
* ``GradeSystemGradebookSession:`` a session for retrieving grade
system to gradebook mappings
* ``GradeSystemGradebookAssignmentSession:`` a session for
managing grade system to gradebook mappings
* ``GradeSystemSmartGradebookSession:`` a session for managing
smart gradebooks of grade systems
* ``GradeEntryLookupSession:`` a session to look up grade entries
    * ``GradeEntryQuerySession:`` a session to query grade entries
    * ``GradeEntrySearchSession:`` a session to search grade entries
    * ``GradeEntryAdminSession:`` a session to create, modify and
      delete grade entries
    * ``GradeEntryNotificationSession:`` a session to receive messages
      pertaining to grade entry changes
    * ``GradebookColumnLookupSession:`` a session to look up gradebook
      columns
    * ``GradebookColumnQuerySession:`` a session to query gradebook
      columns
* ``GradebookColumnSearchSession:`` a session to search gradebook
columns
* ``GradebookColumnAdminSession:`` a session to manage gradebook
columns
* ``GradebookColumnNotificationSession`` a session for subscribing
to new or deleted gradebook columns
* ``GradebookColumnGradebookSession:`` a session for retrieving
gradebook column to gradebook mappings
* ``GradebookColumnGradebookAssignmentSession:`` a session for
managing gradebook column to gradebook mappings
* ``GradebookColumnSmartGradebookSession:`` a session for managing
smart gradebooks of gradebook columns
* ``GradebookLookupSession:`` a session to lookup gradebooks
* ``GradebookQuerySession:`` a session to query gradebooks
* ``GradebookSearchSession`` : a session to search gradebooks
* ``GradebookAdminSession`` : a session to create, modify and
delete gradebooks
* ``GradebookNotificationSession`` : a session to receive messages
pertaining to gradebook changes
* ``GradebookHierarchySession:`` a session to traverse the
gradebook hierarchy
* ``GradebookHierarchyDesignSession:`` a session to manage the
gradebook hierarchy
"""
def __init__(self, proxy=None):
self._runtime = None
self._provider_manager = None
self._provider_sessions = dict()
self._session_management = AUTOMATIC
self._gradebook_view = DEFAULT
# This is to initialize self._proxy
osid.OsidSession.__init__(self, proxy)
self._sub_package_provider_managers = dict()
def _set_gradebook_view(self, session):
"""Sets the underlying gradebook view to match current view"""
if self._gradebook_view == COMPARATIVE:
try:
session.use_comparative_gradebook_view()
except AttributeError:
pass
else:
try:
session.use_plenary_gradebook_view()
except AttributeError:
pass
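    # Note: the AttributeError fallbacks above intentionally make the view
    # switch a no-op for provider sessions that do not expose gradebook view
    # methods, so the manager-level view state can be applied uniformly to
    # every newly instantiated session.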
def _get_provider_session(self, session_name, proxy=None):
"""Gets the session for the provider"""
agent_key = self._get_agent_key(proxy)
if session_name in self._provider_sessions[agent_key]:
return self._provider_sessions[agent_key][session_name]
else:
session = self._instantiate_session('get_' + session_name, self._proxy)
self._set_gradebook_view(session)
if self._session_management != DISABLED:
self._provider_sessions[agent_key][session_name] = session
return session
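    # Note: sessions are cached per agent key, so repeated lookups of the same
    # session name return the already-instantiated provider session unless
    # session management has been disabled via disable_session_management().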
def _get_sub_package_provider_manager(self, sub_package_name):
if sub_package_name in self._sub_package_provider_managers:
return self._sub_package_provider_managers[sub_package_name]
config = self._runtime.get_configuration()
parameter_id = Id('parameter:{0}ProviderImpl@dlkit_service'.format(sub_package_name))
provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
if self._proxy is None:
# need to add version argument
sub_package = self._runtime.get_manager(sub_package_name.upper(), provider_impl)
else:
# need to add version argument
sub_package = self._runtime.get_proxy_manager(sub_package_name.upper(), provider_impl)
self._sub_package_provider_managers[sub_package_name] = sub_package
return sub_package
def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None):
"""Gets the session from a sub-package"""
agent_key = self._get_agent_key(proxy)
if session_name in self._provider_sessions[agent_key]:
return self._provider_sessions[agent_key][session_name]
else:
manager = self._get_sub_package_provider_manager(sub_package)
            session = self._instantiate_session('get_' + session_name + '_for_gradebook',
                                                proxy=self._proxy,
                                                manager=manager)
            self._set_gradebook_view(session)
if self._session_management != DISABLED:
self._provider_sessions[agent_key][session_name] = session
return session
def _instantiate_session(self, method_name, proxy=None, *args, **kwargs):
"""Instantiates a provider session"""
session_class = getattr(self._provider_manager, method_name)
if proxy is None:
try:
                return session_class(gradebook_id=self._catalog_id, *args, **kwargs)
except AttributeError:
return session_class(*args, **kwargs)
else:
try:
                return session_class(gradebook_id=self._catalog_id, proxy=proxy, *args, **kwargs)
except AttributeError:
return session_class(proxy=proxy, *args, **kwargs)
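    # Note: the AttributeError fallback above is triggered by the
    # ``self._catalog_id`` attribute lookup itself -- plain managers carry no
    # catalog id, so the session is instantiated without one. A call with an
    # unexpected keyword argument would raise TypeError instead and propagate.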
def initialize(self, runtime):
"""OSID Manager initialize"""
from .primitives import Id
if self._runtime is not None:
raise IllegalState('Manager has already been initialized')
self._runtime = runtime
config = runtime.get_configuration()
parameter_id = Id('parameter:gradingProviderImpl@dlkit_service')
provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
if self._proxy is None:
# need to add version argument
self._provider_manager = runtime.get_manager('GRADING', provider_impl)
else:
# need to add version argument
self._provider_manager = runtime.get_proxy_manager('GRADING', provider_impl)
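    # Illustrative sketch (assumes a dlkit-style runtime object supplied by
    # the hosting application; not part of this interface):
    #
    #     manager = GradingManager()
    #     manager.initialize(runtime)
    #     for column in manager.get_gradebook_columns():
    #         print(column)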
def close_sessions(self):
"""Close all sessions, unless session management is set to MANDATORY"""
if self._session_management != MANDATORY:
self._provider_sessions = dict()
def use_automatic_session_management(self):
"""Session state will be saved unless closed by consumers"""
self._session_management = AUTOMATIC
def use_mandatory_session_management(self):
"""Session state will be saved and can not be closed by consumers"""
self._session_management = MANDATORY
def disable_session_management(self):
"""Session state will never be saved"""
self._session_management = DISABLED
self.close_sessions()
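    # Illustrative sketch of the three session-management policies (the
    # ``manager`` variable is assumed):
    #
    #     manager.use_automatic_session_management()  # cache until closed
    #     manager.use_mandatory_session_management()  # cache; close_sessions() is a no-op
    #     manager.disable_session_management()        # never cache; drops existing cache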
def get_grading_batch_manager(self):
"""Gets the ``GradingBatchManager``.
:return: a ``GradingBatchManager``
:rtype: ``osid.grading.batch.GradingBatchManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_batch() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_batch()`` is true.*
"""
return # osid.grading.batch.GradingBatchManager
grading_batch_manager = property(fget=get_grading_batch_manager)
def get_grading_calculation_manager(self):
"""Gets the ``GradingCalculationManager``.
:return: a ``GradingCalculationManager``
:rtype: ``osid.grading.calculation.GradingCalculationManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_calculation() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_calculation()`` is true.*
"""
return # osid.grading.calculation.GradingCalculationManager
grading_calculation_manager = property(fget=get_grading_calculation_manager)
def get_grading_transform_manager(self):
"""Gets the ``GradingTransformManager``.
:return: a ``GradingTransformManager``
:rtype: ``osid.grading.transform.GradingTransformManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_transform() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_transform()`` is true.*
"""
return # osid.grading.transform.GradingTransformManager
grading_transform_manager = property(fget=get_grading_transform_manager)
##
# The following methods are from osid.grading.GradebookColumnLookupSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_lookup_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_column_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_column_view(self):
"""A complete view of the ``GradebookColumn`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column(self, gradebook_column_id):
"""Gets the ``GradebookColumn`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``GradebookColumn`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``GradebookColumn`` and
retained for compatibility.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumn``
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
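    # Illustrative sketch (``session`` and ``column_id`` are assumed values):
    #
    #     if session.can_lookup_gradebook_columns():
    #         session.use_comparative_gradebook_column_view()  # tolerate aliased Ids
    #         column = session.get_gradebook_column(column_id)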
def get_gradebook_columns_by_ids(self, gradebook_column_ids):
"""Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the gradebook
columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible gradebook columns may be omitted from the list.
:param gradebook_column_ids: the list of ``Ids`` to retrieve
:type gradebook_column_ids: ``osid.id.IdList``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` and include any additional columns with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
"""Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_record_type: a gradebook column record type
:type gradebook_column_record_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns(self):
"""Gets all gradebook columns.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:return: a ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
gradebook_columns = property(fget=get_gradebook_columns)
def supports_summary(self):
"""Tests if a summary entry is available.
:return: ``true`` if a summary entry is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_summary(self, gradebook_column_id):
"""Gets the ``GradebookColumnSummary`` for summary results.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column summary
:rtype: ``osid.grading.GradebookColumnSummary``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unimplemented`` -- ``supports_summary()`` is ``false``
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnSummary
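    # Illustrative sketch: guard summary retrieval on availability first
    # (``session`` and ``column_id`` are assumed values):
    #
    #     if session.supports_summary():
    #         summary = session.get_gradebook_column_summary(column_id)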
##
# The following methods are from osid.grading.GradebookColumnQuerySession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_search_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
:return: ``false`` if search methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column_query(self):
"""Gets a gradebook column query.
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumnQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnQuery
gradebook_column_query = property(fget=get_gradebook_column_query)
def get_gradebook_columns_by_query(self, gradebook_column_query):
"""Gets a list of gradebook columns matching the given query.
:param gradebook_column_query: the gradebook column query
:type gradebook_column_query: ``osid.grading.GradebookColumnQuery``
:return: the returned ``GradebookColumnList``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
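    # Illustrative sketch of a query round trip. ``match_display_name`` comes
    # from the inherited OsidObjectQuery interface; ``session`` and
    # ``string_match_type`` are assumed values:
    #
    #     query = session.get_gradebook_column_query()
    #     query.match_display_name('Midterm', string_match_type, True)
    #     for column in session.get_gradebook_columns_by_query(query):
    #         print(column)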
##
# The following methods are from osid.grading.GradebookColumnAdminSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_create_gradebook_columns(self):
"""Tests if this user can create gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a gradebook
column will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer create
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types):
"""Tests if this user can create a single ``GradebookColumn`` using the desired record types.
While ``GradingManager.getGradebookColumnRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``GradebookColumn``. Providing an empty array tests if a
``GradebookColumn`` can be created with no records.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_create(self, gradebook_column_record_types):
"""Gets the gradebook column form for creating new gradebook columns.
A new form should be requested for each create transaction.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def create_gradebook_column(self, gradebook_column_form):
"""Creates a new ``GradebookColumn``.
:param gradebook_column_form: the form for this ``GradebookColumn``
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:return: the new ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumn``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
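    # Illustrative sketch of the create flow (``session`` is assumed; the
    # ``display_name`` form attribute is illustrative):
    #
    #     if session.can_create_gradebook_column_with_record_types([]):
    #         form = session.get_gradebook_column_form_for_create([])
    #         form.display_name = 'Quiz 1'
    #         column = session.create_gradebook_column(form)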
def can_update_gradebook_columns(self):
"""Tests if this user can update gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer update operations to an unauthorized user.
:return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_update(self, gradebook_column_id):
"""Gets the gradebook column form for updating an existing gradebook column.
A new gradebook column form should be requested for each update
transaction.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def update_gradebook_column(self, gradebook_column_form):
"""Updates an existing gradebook column.
:param gradebook_column_form: the form containing the elements to be updated
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def sequence_gradebook_columns(self, gradebook_column_ids):
"""Resequences the gradebook columns.
:param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
:type gradebook_column_ids: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``gradebook_column_id_list`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
"""Moves a gradebook column in front of another.
:param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type front_gradebook_column_id: ``osid.id.Id``
:param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
"""Copies gradebook column entries from one column to another.
If the target grade column grade system differs from the source,
the grades in the entries are transformed to the new grade
system.
:param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type source_gradebook_column_id: ``osid.id.Id``
:param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
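    # Illustrative sketch (``session`` and the column ids are assumed):
    #
    #     session.copy_gradebook_column_entries(midterm_id, final_id)
    #     # entries are converted if the two columns use different grade systems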
def can_delete_gradebook_columns(self):
"""Tests if this user can delete gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer delete operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook_column(self, gradebook_column_id):
"""Deletes the ``GradebookColumn`` identified by the given ``Id``.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
:type gradebook_column_id: ``osid.id.Id``
:raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_column_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook_column(self, gradebook_column_id, alias_id):
"""Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.
The primary ``Id`` of the ``GradebookColumn`` is determined by
the provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another gradebook column,
it is reassigned to the given gradebook column ``Id``.
:param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
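    # Illustrative sketch (``session`` and the ids are assumed):
    #
    #     if session.can_manage_gradebook_column_aliases():
    #         session.alias_gradebook_column(column_id, legacy_id)
    #         # lookups by legacy_id now resolve to the column aliased above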
##
# The following methods are from osid.grading.GradebookLookupSession
def can_lookup_gradebooks(self):
"""Tests if this user can perform ``Gradebook`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_view(self):
"""A complete view of the ``Gradebook`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook(self, gradebook_id):
"""Gets the ``Gradebook`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Gradebook`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``Gradebook`` and retained
        for compatibility.
:param gradebook_id: ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook
:rtype: ``osid.grading.Gradebook``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
def get_gradebooks_by_ids(self, gradebook_ids):
"""Gets a ``GradebookList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
gradebooks specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Gradebook`` objects may be omitted from the list
and may present the elements in any order including returning a
unique set.
:param gradebook_ids: the list of ``Ids`` to retrieve
:type gradebook_ids: ``osid.id.IdList``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
:raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` and include any additional gradebooks with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_record_type(self, gradebook_record_type):
"""Gets a ``GradebookList`` containing the given gradebook record ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_record_type: a gradebook record type
:type gradebook_record_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_provider(self, resource_id):
"""Gets a ``GradebookList`` for the given provider ````.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks(self):
"""Gets all ``Gradebooks``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:return: a ``GradebookList``
:rtype: ``osid.grading.GradebookList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
gradebooks = property(fget=get_gradebooks)
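    # Illustrative sketch of a filtered lookup (``session`` and
    # ``wanted_genus_type`` are assumed values):
    #
    #     for gradebook in session.get_gradebooks_by_genus_type(wanted_genus_type):
    #         print(gradebook)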
##
# The following methods are from osid.grading.GradebookAdminSession
def can_create_gradebooks(self):
"""Tests if this user can create ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
:return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_with_record_types(self, gradebook_record_types):
"""Tests if this user can create a single ``Gradebook`` using the desired record types.
While ``GradingManager.getGradebookRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``Gradebook``.
Providing an empty array tests if a ``Gradebook`` can be created
with no records.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_create(self, gradebook_record_types):
"""Gets the gradebook form for creating new gradebooks.
A new form should be requested for each create transaction.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def create_gradebook(self, gradebook_form):
"""Creates a new ``Gradebook``.
:param gradebook_form: the form for this ``Gradebook``
:type gradebook_form: ``osid.grading.GradebookForm``
:return: the new ``Gradebook``
:rtype: ``osid.grading.Gradebook``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
def can_update_gradebooks(self):
"""Tests if this user can update ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
:return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_update(self, gradebook_id):
"""Gets the gradebook form for updating an existing gradebook.
A new gradebook form should be requested for each update
transaction.
:param gradebook_id: the ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NotFound`` -- ``gradebook_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def update_gradebook(self, gradebook_form):
"""Updates an existing gradebook.
:param gradebook_form: the form containing the elements to be updated
:type gradebook_form: ``osid.grading.GradebookForm``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_gradebooks(self):
"""Tests if this user can delete gradebooks.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
:return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook(self, gradebook_id):
"""Deletes a ``Gradebook``.
:param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
:type gradebook_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook(self, gradebook_id, alias_id):
"""Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Gradebook`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id`` . If the alias is a pointer to another gradebook, it is
reassigned to the given gradebook ``Id``.
:param gradebook_id: the ``Id`` of a ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class GradingProxyManager(osid_managers.OsidProxyManager, GradingProfile):
"""The grading manager provides access to grading sessions and provides interoperability tests for various aspects of this service.
Methods in this manager accept a ``Proxy`` for passing information
    from server environments. The sessions included in this manager are:
* ``GradeSystemLookupSession:`` a session to look up grades and
grade systems
    * ``GradeSystemQuerySession:`` a session to query grade systems
* ``GradeSystemSearchSession:`` a session to search grade systems
* ``GradeSystemAdminSession:`` a session to manage grade systems
* ``GradeSystemNotificationSession`` a session for subscribing to
new or deleted grades or grade systems
* ``GradeSystemGradebookSession:`` a session for retrieving grade
system to gradebook mappings
* ``GradeSystemGradebookAssignmentSession:`` a session for
managing grade system to gradebook mappings
* ``GradeSystemSmartGradebookSession:`` a session for managing
smart gradebooks of grade systems
* ``GradeEntryLookupSession:`` a session to look up grade entries
    * ``GradeEntryQuerySession:`` a session to query grade entries
    * ``GradeEntrySearchSession:`` a session to search grade entries
    * ``GradeEntryAdminSession:`` a session to create, modify and
      delete grade entries
    * ``GradeEntryNotificationSession:`` a session to receive messages
      pertaining to grade entry changes
    * ``GradebookColumnLookupSession:`` a session to look up gradebook
      columns
    * ``GradebookColumnQuerySession:`` a session to query gradebook
      columns
* ``GradebookColumnSearchSession:`` a session to search gradebook
columns
* ``GradebookColumnAdminSession:`` a session to manage gradebook
columns
* ``GradebookColumnDerivationSession:`` a session to manage
derived gradebook columns
* ``GradebookColumnNotificationSession`` a session for subscribing
to new or deleted gradebook columns
* ``GradebookColumnGradebookSession:`` a session for retrieving
gradebook column to gradebook mappings
* ``GradebookColumnGradebookAssignmentSession:`` a session for
managing gradebook column to gradebook mappings
* ``GradebookColumnSmartGradebookSession:`` a session for managing
smart gradebooks of gradebook columns
* ``GradebookLookupSession:`` a session to lookup gradebooks
* ``GradebookQuerySession:`` a session to query gradebooks
* ``GradebookSearchSession`` : a session to search gradebooks
* ``GradebookAdminSession`` : a session to create, modify and
delete gradebooks
* ``GradebookNotificationSession`` : a session to receive messages
pertaining to gradebook changes
* ``GradebookHierarchySession:`` a session to traverse the
gradebook hierarchy
* ``GradebookHierarchyDesignSession:`` a session to manage the
gradebook hierarchy
"""
def get_grading_batch_proxy_manager(self):
"""Gets the ``GradingBatchProxyManager``.
:return: a ``GradingBatchProxyManager``
:rtype: ``osid.grading.batch.GradingBatchProxyManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_batch() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_batch()`` is true.*
"""
return # osid.grading.batch.GradingBatchProxyManager
grading_batch_proxy_manager = property(fget=get_grading_batch_proxy_manager)
def get_grading_calculation_proxy_manager(self):
"""Gets the ``GradingCalculationProxyManager``.
:return: a ``GradingCalculationProxyManager``
:rtype: ``osid.grading.calculation.GradingCalculationProxyManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_calculation() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_calculation()`` is true.*
"""
return # osid.grading.calculation.GradingCalculationProxyManager
grading_calculation_proxy_manager = property(fget=get_grading_calculation_proxy_manager)
def get_grading_transform_proxy_manager(self):
"""Gets the ``GradingTransformProxyManager``.
:return: a ``GradingTransformManager``
:rtype: ``osid.grading.transform.GradingTransformProxyManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_grading_transform() is false``
*compliance: optional -- This method must be implemented if
``supports_grading_transform()`` is true.*
"""
return # osid.grading.transform.GradingTransformProxyManager
grading_transform_proxy_manager = property(fget=get_grading_transform_proxy_manager)
##
# The following methods are from osid.grading.GradebookColumnLookupSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_lookup_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_column_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_column_view(self):
"""A complete view of the ``GradebookColumn`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column(self, gradebook_column_id):
"""Gets the ``GradebookColumn`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``GradebookColumn`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``GradebookColumn`` and
retained for compatibility.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumn``
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
def get_gradebook_columns_by_ids(self, gradebook_column_ids):
"""Gets a ``GradebookColumnList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the gradebook
columns specified in the ``Id`` list, in the order of the list,
        including duplicates, or an error results if an ``Id`` in the
        supplied list is not found or inaccessible. Otherwise,
        inaccessible gradebook columns may be omitted from the list.
:param gradebook_column_ids: the list of ``Ids`` to retrieve
:type gradebook_column_ids: ``osid.id.IdList``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
        :raise: ``NotFound`` -- an ``Id`` was not found
        :raise: ``NullArgument`` -- ``gradebook_column_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` which does not include gradebook columns of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_parent_genus_type(self, gradebook_column_genus_type):
"""Gets a ``GradebookColumnList`` corresponding to the given gradebook column genus ``Type`` and include any additional columns with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_genus_type: a gradebook column genus type
:type gradebook_column_genus_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns_by_record_type(self, gradebook_column_record_type):
"""Gets a ``GradebookColumnList`` containing the given gradebook column record ``Type``.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:param gradebook_column_record_type: a gradebook column record type
:type gradebook_column_record_type: ``osid.type.Type``
:return: the returned ``GradebookColumn`` list
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
def get_gradebook_columns(self):
"""Gets all gradebook columns.
In plenary mode, the returned list contains all known gradebook
columns or an error results. Otherwise, the returned list may
contain only those gradebook columns that are accessible through
this session.
:return: a ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
gradebook_columns = property(fget=get_gradebook_columns)
def supports_summary(self):
"""Tests if a summary entry is available.
:return: ``true`` if a summary entry is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_summary(self, gradebook_column_id):
"""Gets the ``GradebookColumnSummary`` for summary results.
:param gradebook_column_id: ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column summary
:rtype: ``osid.grading.GradebookColumnSummary``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unimplemented`` -- ``has_summary()`` is ``false``
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnSummary
##
# The following methods are from osid.grading.GradebookColumnQuerySession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_search_gradebook_columns(self):
"""Tests if this user can perform ``GradebookColumn`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
:return: ``false`` if search methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include gradebook columns in gradebooks
which are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_gradebook_column_query(self):
"""Gets a gradebook column query.
:return: the gradebook column
:rtype: ``osid.grading.GradebookColumnQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnQuery
gradebook_column_query = property(fget=get_gradebook_column_query)
def get_gradebook_columns_by_query(self, gradebook_column_query):
"""Gets a list of gradebook columns matching the given query.
:param gradebook_column_query: the gradebook column query
:type gradebook_column_query: ``osid.grading.GradebookColumnQuery``
:return: the returned ``GradebookColumnList``
:rtype: ``osid.grading.GradebookColumnList``
:raise: ``NullArgument`` -- ``gradebook_column_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnList
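    # Sketch of the query flow, assuming the generic ``OsidQuery`` string
    # matcher ``match_display_name`` and a ``wildcard_type`` Type object
    # obtained elsewhere; neither is defined by this stub:
    #
    #     query = session.get_gradebook_column_query()
    #     query.match_display_name('Midterm*', wildcard_type, True)
    #     matches = session.get_gradebook_columns_by_query(query)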
##
# The following methods are from osid.grading.GradebookColumnAdminSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_create_gradebook_columns(self):
"""Tests if this user can create gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a gradebook
column will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer create
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_column_with_record_types(self, gradebook_column_record_types):
"""Tests if this user can create a single ``GradebookColumn`` using the desired record types.
While ``GradingManager.getGradebookColumnRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``GradebookColumn``. Providing an empty array tests if a
``GradebookColumn`` can be created with no records.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: ``true`` if ``GradebookColumn`` creation using the specified record ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_create(self, gradebook_column_record_types):
"""Gets the gradebook column form for creating new gradebook columns.
A new form should be requested for each create transaction.
:param gradebook_column_record_types: array of gradebook column record types
:type gradebook_column_record_types: ``osid.type.Type[]``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NullArgument`` -- ``gradebook_column_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def create_gradebook_column(self, gradebook_column_form):
"""Creates a new ``GradebookColumn``.
:param gradebook_column_form: the form for this ``GradebookColumn``
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:return: the new ``GradebookColumn``
:rtype: ``osid.grading.GradebookColumn``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumn
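    # Typical create transaction, sketched with an empty record-type list
    # (requests a form with no records); the setters follow the standard
    # ``OsidObjectForm`` interface:
    #
    #     form = session.get_gradebook_column_form_for_create([])
    #     form.set_display_name('Final Exam')
    #     form.set_description('Cumulative final exam scores')
    #     column = session.create_gradebook_column(form)
    #
    # Reusing ``form`` for a second create raises ``IllegalState``.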
def can_update_gradebook_columns(self):
"""Tests if this user can update gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer update operations to an unauthorized user.
:return: ``false`` if gradebook column modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_column_form_for_update(self, gradebook_column_id):
"""Gets the gradebook column form for updating an existing gradebook column.
A new gradebook column form should be requested for each update
transaction.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:return: the gradebook column form
:rtype: ``osid.grading.GradebookColumnForm``
:raise: ``NotFound`` -- ``gradebook_column_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookColumnForm
def update_gradebook_column(self, gradebook_column_form):
"""Updates an existing gradebook column.
:param gradebook_column_form: the form containing the elements to be updated
:type gradebook_column_form: ``osid.grading.GradebookColumnForm``
:raise: ``IllegalState`` -- ``gradebook_column_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_column_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_column_form`` did not originate from ``get_gradebook_column_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def sequence_gradebook_columns(self, gradebook_column_ids):
"""Resequences the gradebook columns.
:param gradebook_column_ids: the ``Ids`` of the ``GradebookColumns``
:type gradebook_column_ids: ``osid.id.IdList``
:raise: ``NullArgument`` -- ``gradebook_column_id_list`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def move_gradebook_column(self, front_gradebook_column_id, back_gradebook_column_id):
"""Moves a gradebook column in front of another.
:param front_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type front_gradebook_column_id: ``osid.id.Id``
:param back_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type back_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``front_gradebook_column_id`` or ``back_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def copy_gradebook_column_entries(self, source_gradebook_column_id, target_gradebook_column_id):
"""Copies gradebook column entries from one column to another.
If the target grade column grade system differs from the source,
the grades in the entries are transformed to the new grade
system.
:param source_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type source_gradebook_column_id: ``osid.id.Id``
:param target_gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type target_gradebook_column_id: ``osid.id.Id``
        :raise: ``NotFound`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is not found
        :raise: ``NullArgument`` -- ``source_gradebook_column_id`` or ``target_gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_gradebook_columns(self):
"""Tests if this user can delete gradebook columns.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``GradebookColumn`` will result in a ``PermissionDenied``. This
is intended as a hint to an application that may opt not to
offer delete operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook_column(self, gradebook_column_id):
"""Deletes the ``GradebookColumn`` identified by the given ``Id``.
:param gradebook_column_id: the ``Id`` of the ``GradebookColumn`` to delete
:type gradebook_column_id: ``osid.id.Id``
:raise: ``NotFound`` -- a ``GradebookColumn`` was not found identified by the given ``Id``
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_column_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``GradebookColumns``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``GradebookColumn`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook_column(self, gradebook_column_id, alias_id):
"""Adds an ``Id`` to a ``GradebookColumn`` for the purpose of creating compatibility.
The primary ``Id`` of the ``GradebookColumn`` is determined by
the provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another gradebook column,
it is reassigned to the given gradebook column ``Id``.
:param gradebook_column_id: the ``Id`` of a ``GradebookColumn``
:type gradebook_column_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_column_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
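    # Aliasing sketch: ``legacy_id`` is a hypothetical pre-migration ``Id``.
    # After this call the column resolves under either identifier, and an
    # alias previously pointing at a different column is reassigned:
    #
    #     session.alias_gradebook_column(column.get_id(), legacy_id)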
##
# The following methods are from osid.grading.GradebookLookupSession
def can_lookup_gradebooks(self):
"""Tests if this user can perform ``Gradebook`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_gradebook_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_gradebook_view(self):
"""A complete view of the ``Gradebook`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
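    # Sketch of the precision/interoperability trade-off these two views
    # control (``ids`` is an assumed ``osid.id.IdList``):
    #
    #     session.use_comparative_gradebook_view()
    #     some = session.get_gradebooks_by_ids(ids)    # may omit misses
    #     session.use_plenary_gradebook_view()
    #     exact = session.get_gradebooks_by_ids(ids)   # NotFound on any miss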
def get_gradebook(self, gradebook_id):
"""Gets the ``Gradebook`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Gradebook`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``Gradebook`` and retained
        for compatibility.
:param gradebook_id: ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook
:rtype: ``osid.grading.Gradebook``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
def get_gradebooks_by_ids(self, gradebook_ids):
"""Gets a ``GradebookList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
gradebooks specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Gradebook`` objects may be omitted from the list
and may present the elements in any order including returning a
unique set.
:param gradebook_ids: the list of ``Ids`` to retrieve
:type gradebook_ids: ``osid.id.IdList``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
        :raise: ``NotFound`` -- an ``Id`` was not found
:raise: ``NullArgument`` -- ``gradebook_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` which does not include gradebooks of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_parent_genus_type(self, gradebook_genus_type):
"""Gets a ``GradebookList`` corresponding to the given gradebook genus ``Type`` and include any additional gradebooks with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_genus_type: a gradebook genus type
:type gradebook_genus_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_record_type(self, gradebook_record_type):
"""Gets a ``GradebookList`` containing the given gradebook record ``Type``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param gradebook_record_type: a gradebook record type
:type gradebook_record_type: ``osid.type.Type``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks_by_provider(self, resource_id):
"""Gets a ``GradebookList`` for the given provider ````.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``Gradebook`` list
:rtype: ``osid.grading.GradebookList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
def get_gradebooks(self):
"""Gets all ``Gradebooks``.
In plenary mode, the returned list contains all known gradebooks
or an error results. Otherwise, the returned list may contain
only those gradebooks that are accessible through this session.
:return: a ``GradebookList``
:rtype: ``osid.grading.GradebookList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookList
gradebooks = property(fget=get_gradebooks)
##
# The following methods are from osid.grading.GradebookAdminSession
def can_create_gradebooks(self):
"""Tests if this user can create ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
:return: ``false`` if ``Gradebook`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_gradebook_with_record_types(self, gradebook_record_types):
"""Tests if this user can create a single ``Gradebook`` using the desired record types.
While ``GradingManager.getGradebookRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``Gradebook``.
Providing an empty array tests if a ``Gradebook`` can be created
with no records.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: ``true`` if ``Gradebook`` creation using the specified ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_create(self, gradebook_record_types):
"""Gets the gradebook form for creating new gradebooks.
A new form should be requested for each create transaction.
:param gradebook_record_types: array of gradebook record types
:type gradebook_record_types: ``osid.type.Type[]``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NullArgument`` -- ``gradebook_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def create_gradebook(self, gradebook_form):
"""Creates a new ``Gradebook``.
:param gradebook_form: the form for this ``Gradebook``
:type gradebook_form: ``osid.grading.GradebookForm``
:return: the new ``Gradebook``
:rtype: ``osid.grading.Gradebook``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
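    # The gradebook create transaction mirrors the gradebook column flow:
    # request a fresh form, populate it, submit it once:
    #
    #     form = session.get_gradebook_form_for_create([])
    #     form.set_display_name('Fall Term')
    #     gradebook = session.create_gradebook(form)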
def can_update_gradebooks(self):
"""Tests if this user can update ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
:return: ``false`` if ``Gradebook`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_gradebook_form_for_update(self, gradebook_id):
"""Gets the gradebook form for updating an existing gradebook.
A new gradebook form should be requested for each update
transaction.
:param gradebook_id: the ``Id`` of the ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:return: the gradebook form
:rtype: ``osid.grading.GradebookForm``
:raise: ``NotFound`` -- ``gradebook_id`` is not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradebookForm
def update_gradebook(self, gradebook_form):
"""Updates an existing gradebook.
:param gradebook_form: the form containing the elements to be updated
:type gradebook_form: ``osid.grading.GradebookForm``
:raise: ``IllegalState`` -- ``gradebook_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``gradebook_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``gradebook_form`` did not originate from ``get_gradebook_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_gradebooks(self):
"""Tests if this user can delete gradebooks.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Gradebook`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
:return: ``false`` if ``Gradebook`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_gradebook(self, gradebook_id):
"""Deletes a ``Gradebook``.
:param gradebook_id: the ``Id`` of the ``Gradebook`` to remove
:type gradebook_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_gradebook_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Gradebooks``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``Gradebook`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_gradebook(self, gradebook_id, alias_id):
"""Adds an ``Id`` to a ``Gradebook`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Gradebook`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
        ``Id``. If the alias is a pointer to another gradebook, it is
        reassigned to the given gradebook ``Id``.
:param gradebook_id: the ``Id`` of a ``Gradebook``
:type gradebook_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``gradebook_id`` not found
:raise: ``NullArgument`` -- ``gradebook_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class Gradebook(osid_objects.OsidCatalog, osid_sessions.OsidSession):
"""A gradebook defines a collection of grade entries."""
# WILL THIS EVER BE CALLED DIRECTLY - OUTSIDE OF A MANAGER?
def __init__(self, provider_manager, catalog, runtime, proxy, **kwargs):
self._provider_manager = provider_manager
self._catalog = catalog
self._runtime = runtime
osid.OsidObject.__init__(self, self._catalog) # This is to initialize self._object
osid.OsidSession.__init__(self, proxy) # This is to initialize self._proxy
self._catalog_id = catalog.get_id()
self._provider_sessions = kwargs
self._session_management = AUTOMATIC
self._gradebook_view = DEFAULT
self._object_views = dict()
self._operable_views = dict()
self._containable_views = dict()
def _set_gradebook_view(self, session):
"""Sets the underlying gradebook view to match current view"""
if self._gradebook_view == FEDERATED:
try:
session.use_federated_gradebook_view()
except AttributeError:
pass
else:
try:
session.use_isolated_gradebook_view()
except AttributeError:
pass
def _set_object_view(self, session):
"""Sets the underlying object views to match current view"""
for obj_name in self._object_views:
if self._object_views[obj_name] == PLENARY:
try:
getattr(session, 'use_plenary_' + obj_name + '_view')()
except AttributeError:
pass
else:
try:
getattr(session, 'use_comparative_' + obj_name + '_view')()
except AttributeError:
pass
def _set_operable_view(self, session):
"""Sets the underlying operable views to match current view"""
for obj_name in self._operable_views:
if self._operable_views[obj_name] == ACTIVE:
try:
getattr(session, 'use_active_' + obj_name + '_view')()
except AttributeError:
pass
else:
try:
getattr(session, 'use_any_status_' + obj_name + '_view')()
except AttributeError:
pass
def _set_containable_view(self, session):
"""Sets the underlying containable views to match current view"""
for obj_name in self._containable_views:
if self._containable_views[obj_name] == SEQUESTERED:
try:
getattr(session, 'use_sequestered_' + obj_name + '_view')()
except AttributeError:
pass
else:
try:
getattr(session, 'use_unsequestered_' + obj_name + '_view')()
except AttributeError:
pass
    def _get_provider_session(self, session_name):
        """Returns the requested provider session.
        Instantiates a new one if the named session is not already known.
        """
        # Provider sessions are cached per authenticated agent.
        agent_key = self._get_agent_key()
        if session_name in self._provider_sessions[agent_key]:
            return self._provider_sessions[agent_key][session_name]
        else:
            # Resolve the catalog-scoped factory on the provider manager,
            # e.g. 'grade_system_lookup_session' becomes
            # get_grade_system_lookup_session_for_gradebook().
            session_class = getattr(self._provider_manager, 'get_' + session_name + '_for_gradebook')
            if self._proxy is None:
                session = session_class(self._catalog.get_id())
            else:
                session = session_class(self._catalog.get_id(), self._proxy)
            # Propagate the current view settings onto the fresh session.
            self._set_gradebook_view(session)
            self._set_object_view(session)
            self._set_operable_view(session)
            self._set_containable_view(session)
            # Cache for reuse unless session management is disabled.
            if self._session_management != DISABLED:
                self._provider_sessions[agent_key][session_name] = session
            return session
def get_gradebook_id(self):
"""Gets the Id of this gradebook."""
return self._catalog_id
def get_gradebook(self):
"""Strange little method to assure conformance for inherited Sessions."""
return self
def get_objective_hierarchy_id(self):
"""WHAT am I doing here?"""
return self._catalog_id
def get_objective_hierarchy(self):
"""WHAT am I doing here?"""
return self
def __getattr__(self, name):
if '_catalog' in self.__dict__:
try:
return self._catalog[name]
except AttributeError:
pass
        raise AttributeError(name)
def close_sessions(self):
"""Close all sessions currently being managed by this Manager to save memory."""
if self._session_management != MANDATORY:
self._provider_sessions = dict()
else:
raise IllegalState()
def use_automatic_session_management(self):
"""Session state will be saved until closed by consumers."""
self._session_management = AUTOMATIC
def use_mandatory_session_management(self):
"""Session state will always be saved and can not be closed by consumers."""
self._session_management = MANDATORY
def disable_session_management(self):
"""Session state will never be saved."""
self._session_management = DISABLED
self.close_sessions()
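    # Hedged sketch of the three session-management modes; ``book`` is a
    # ``Gradebook`` instance and AUTOMATIC/MANDATORY/DISABLED are the
    # module-level flags referenced above:
    #
    #     book.use_automatic_session_management()   # cached; consumer may close
    #     book.close_sessions()                     # permitted; cache cleared
    #     book.use_mandatory_session_management()
    #     # book.close_sessions() would now raise IllegalState()
    #     book.disable_session_management()         # stop caching, close now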
def get_gradebook_record(self, gradebook_record_type):
"""Gets the gradebook record corresponding to the given ``Gradebook`` record ``Type``.
This method is used to retrieve an object implementing the
requested record. The ``gradebook_record_type`` may be the
``Type`` returned in ``get_record_types()`` or any of its
parents in a ``Type`` hierarchy where
``has_record_type(gradebook_record_type)`` is ``true`` .
:param gradebook_record_type: a gradebook record type
:type gradebook_record_type: ``osid.type.Type``
:return: the gradebook record
:rtype: ``osid.grading.records.GradebookRecord``
:raise: ``NullArgument`` -- ``gradebook_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(gradebook_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.records.GradebookRecord
##
# The following methods are from osid.grading.GradeSystemLookupSession
def get_gradebook_id(self):
"""Gets the ``GradeSystem`` ``Id`` associated with this session.
:return: the ``GradeSystem Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_lookup_grade_systems(self):
"""Tests if this user can perform ``GradeSystem`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_grade_system_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_grade_system_view(self):
"""A complete view of the ``GradeSystem`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include grade entries in gradebooks which
are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_grade_system(self, grade_system_id):
"""Gets the ``GradeSystem`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``GradeSystem`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``GradeSystem`` and retained
for compatibility.
:param grade_system_id: ``Id`` of the ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:return: the grade system
:rtype: ``osid.grading.GradeSystem``
:raise: ``NotFound`` -- ``grade_system_id`` not found
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystem
def get_grade_system_by_grade(self, grade_id):
"""Gets the ``GradeSystem`` by a ``Grade`` ``Id``.
:param grade_id: ``Id`` of a ``Grade``
:type grade_id: ``osid.id.Id``
:return: the grade system
:rtype: ``osid.grading.GradeSystem``
:raise: ``NotFound`` -- ``grade_id`` not found
:raise: ``NullArgument`` -- ``grade_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystem
def get_grade_systems_by_ids(self, grade_system_ids):
"""Gets a ``GradeSystemList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the systems
specified in the ``Id`` list, in the order of the list,
including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible ``GradeSystems`` may be omitted from the list and
may present the elements in any order including returning a
unique set.
:param grade_system_ids: the list of ``Ids`` to retrieve
:type grade_system_ids: ``osid.id.IdList``
:return: the returned ``GradeSystem`` list
:rtype: ``osid.grading.GradeSystemList``
        :raise: ``NotFound`` -- an ``Id`` was not found
:raise: ``NullArgument`` -- ``grade_system_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
def get_grade_systems_by_genus_type(self, grade_system_genus_type):
"""Gets a ``GradeSystemList`` corresponding to the given grade system genus ``Type`` which does not include systems of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known systems or
an error results. Otherwise, the returned list may contain only
those systems that are accessible through this session.
:param grade_system_genus_type: a grade system genus type
:type grade_system_genus_type: ``osid.type.Type``
:return: the returned ``GradeSystem`` list
:rtype: ``osid.grading.GradeSystemList``
:raise: ``NullArgument`` -- ``grade_system_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
def get_grade_systems_by_parent_genus_type(self, grade_system_genus_type):
"""Gets a ``GradeSystemList`` corresponding to the given grade system genus ``Type`` and include any additional systems with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known systems or
an error results. Otherwise, the returned list may contain only
those systems that are accessible through this session.
:param grade_system_genus_type: a grade system genus type
:type grade_system_genus_type: ``osid.type.Type``
:return: the returned ``GradeSystem`` list
:rtype: ``osid.grading.GradeSystemList``
:raise: ``NullArgument`` -- ``grade_system_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
def get_grade_systems_by_record_type(self, grade_system_record_type):
"""Gets a ``GradeSystemList`` containing the given grade record ``Type``.
In plenary mode, the returned list contains all known systems or
an error results. Otherwise, the returned list may contain only
those systems that are accessible through this session.
:param grade_system_record_type: a grade system record type
:type grade_system_record_type: ``osid.type.Type``
:return: the returned ``GradeSystem`` list
:rtype: ``osid.grading.GradeSystemList``
        :raise: ``NullArgument`` -- ``grade_system_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
def get_grade_systems(self):
"""Gets all ``GradeSystems``.
In plenary mode, the returned list contains all known grade
systems or an error results. Otherwise, the returned list may
contain only those grade systems that are accessible through
this session.
:return: a ``GradeSystemList``
:rtype: ``osid.grading.GradeSystemList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
grade_systems = property(fget=get_grade_systems)
##
# The following methods are from osid.grading.GradeSystemQuerySession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_search_grade_systems(self):
"""Tests if this user can perform ``GradeSystem`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
:return: ``false`` if search methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include grades in gradebooks which are
children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_grade_system_query(self):
"""Gets a grade system query.
:return: a grade system query
:rtype: ``osid.grading.GradeSystemQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemQuery
grade_system_query = property(fget=get_grade_system_query)
def get_grade_systems_by_query(self, grade_system_query):
"""Gets a list of ``GradeSystem`` objects matching the given grade system query.
:param grade_system_query: the grade system query
:type grade_system_query: ``osid.grading.GradeSystemQuery``
:return: the returned ``GradeSystemList``
:rtype: ``osid.grading.GradeSystemList``
:raise: ``NullArgument`` -- ``grade_system_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_system_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemList
##
# The following methods are from osid.grading.GradeSystemAdminSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_create_grade_systems(self):
"""Tests if this user can create ``GradeSystems``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``GradeSystem`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
:return: ``false`` if ``GradeSystem`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_grade_system_with_record_types(self, grade_system_record_types):
"""Tests if this user can create a single ``GradeSystem`` using the desired record types.
While ``GradingManager.getGradeSystemRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``GradeSystem``.
Providing an empty array tests if a ``GradeSystem`` can be
created with no records.
:param grade_system_record_types: array of grade system types
:type grade_system_record_types: ``osid.type.Type[]``
:return: ``true`` if ``GradeSystem`` creation using the specified ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_system_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_system_form_for_create(self, grade_system_record_types):
"""Gets the grade system form for creating new grade systems.
A new form should be requested for each create transaction.
:param grade_system_record_types: array of grade system types
:type grade_system_record_types: ``osid.type.Type[]``
:return: the grade system form
:rtype: ``osid.grading.GradeSystemForm``
:raise: ``NullArgument`` -- ``grade_system_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemForm
def create_grade_system(self, grade_system_form):
"""Creates a new ``GradeSystem``.
:param grade_system_form: the form for this ``GradeSystem``
:type grade_system_form: ``osid.grading.GradeSystemForm``
:return: the new ``GradeSystem``
:rtype: ``osid.grading.GradeSystem``
:raise: ``IllegalState`` -- ``grade_system_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``grade_system_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_system_form`` did not originate from ``get_grade_system_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystem
def can_update_grade_systems(self):
"""Tests if this user can update ``GradeSystems``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``GradeSystem`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
:return: ``false`` if ``GradeSystem`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_system_form_for_update(self, grade_system_id):
"""Gets the grade system form for updating an existing grade system.
A new grade system form should be requested for each update
transaction.
:param grade_system_id: the ``Id`` of the ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:return: the grade system form
:rtype: ``osid.grading.GradeSystemForm``
:raise: ``NotFound`` -- ``grade_system_id`` is not found
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeSystemForm
def update_grade_system(self, grade_system_form):
"""Updates an existing grade system.
:param grade_system_form: the form containing the elements to be updated
:type grade_system_form: ``osid.grading.GradeSystemForm``
:raise: ``IllegalState`` -- ``grade_system_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``grade_system_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_system_form`` did not originate from ``get_grade_system_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_grade_systems(self):
"""Tests if this user can delete grade systems.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``GradeSystem`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
:return: ``false`` if ``GradeSystem`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_grade_system(self, grade_system_id):
"""Deletes a ``GradeSystem``.
:param grade_system_id: the ``Id`` of the ``GradeSystem`` to remove
:type grade_system_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``grade_system_id`` not found
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_grade_system_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``GradeSystems``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``GradeSystem`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_grade_system(self, grade_system_id, alias_id):
"""Adds an ``Id`` to a ``GradeSystem`` for the purpose of creating compatibility.
The primary ``Id`` of the ``GradeSystem`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another grade system, it is
reassigned to the given grade system ``Id``.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``grade_system_id`` not found
:raise: ``NullArgument`` -- ``grade_system_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_create_grades(self, grade_system_id):
"""Tests if this user can create ``Grade`` s for a ``GradeSystem``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
        ``Grade`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:return: ``false`` if ``Grade`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_grade_with_record_types(self, grade_system_id, grade_record_types):
"""Tests if this user can create a single ``Grade`` using the desired record types.
While ``GradingManager.getGradeRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Grade``.
Providing an empty array tests if a ``Grade`` can be created
with no records.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
        :param grade_record_types: array of grade record types
:type grade_record_types: ``osid.type.Type[]``
:return: ``true`` if ``Grade`` creation using the specified ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_system_id`` or ``grade_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_form_for_create(self, grade_system_id, grade_record_types):
"""Gets the grade form for creating new grades.
A new form should be requested for each create transaction.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
        :param grade_record_types: array of grade record types
:type grade_record_types: ``osid.type.Type[]``
:return: the grade form
:rtype: ``osid.grading.GradeForm``
:raise: ``NotFound`` -- ``grade_system_id`` is not found
:raise: ``NullArgument`` -- ``grade_system_id`` or ``grade_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeForm
def create_grade(self, grade_form):
"""Creates a new ``Grade``.
:param grade_form: the form for this ``Grade``
:type grade_form: ``osid.grading.GradeForm``
:return: the new ``Grade``
:rtype: ``osid.grading.Grade``
:raise: ``IllegalState`` -- ``grade_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``grade_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_form`` did not originate from ``get_grade_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Grade
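    # Typical create workflow (a sketch, not part of the spec text above;
    # ``session`` and ``grade_system_id`` are illustrative placeholders):
    #
    #     form = session.get_grade_form_for_create(grade_system_id, [])
    #     # ... populate the form's elements ...
    #     grade = session.create_grade(form)
    #
    # Each form may be used in exactly one create transaction.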
def can_update_grades(self, grade_system_id):
"""Tests if this user can update ``Grades``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a ``Grade``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may not wish to offer update
operations to unauthorized users.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:return: ``false`` if ``Grade`` modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_form_for_update(self, grade_id):
"""Gets the grade form for updating an existing grade.
A new grade form should be requested for each update
transaction.
:param grade_id: the ``Id`` of the ``Grade``
:type grade_id: ``osid.id.Id``
:return: the grade form
:rtype: ``osid.grading.GradeForm``
:raise: ``NotFound`` -- ``grade_id`` is not found
:raise: ``NullArgument`` -- ``grade_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeForm
def update_grade(self, grade_form):
"""Updates an existing grade.
:param grade_form: the form containing the elements to be updated
:type grade_form: ``osid.grading.GradeForm``
:raise: ``IllegalState`` -- ``grade_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``grade_id`` or ``grade_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_form`` did not originate from ``get_grade_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_grades(self, grade_system_id):
"""Tests if this user can delete grades.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a ``Grade``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may not wish to offer delete
operations to unauthorized users.
:param grade_system_id: the ``Id`` of a ``GradeSystem``
:type grade_system_id: ``osid.id.Id``
:return: ``false`` if ``Grade`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_system_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_grade(self, grade_id):
"""Deletes a ``Grade``.
:param grade_id: the ``Id`` of the ``Grade`` to remove
:type grade_id: ``osid.id.Id``
:raise: ``NotFound`` -- ``grade_id`` not found
:raise: ``NullArgument`` -- ``grade_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_grade_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Grades``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``Grade`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_grade(self, grade_id, alias_id):
"""Adds an ``Id`` to a ``Grade`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Grade`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another grade, it is
reassigned to the given grade ``Id``.
:param grade_id: the ``Id`` of a ``Grade``
:type grade_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``grade_id`` not found
:raise: ``NullArgument`` -- ``grade_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
##
# The following methods are from osid.grading.GradeEntryLookupSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_lookup_grade_entries(self):
"""Tests if this user can perform ``GradeEntry`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
:return: ``false`` if lookup methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_comparative_grade_entry_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_plenary_grade_entry_view(self):
"""A complete view of the ``GradeEntry`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include grade entries in gradebooks which
are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_effective_grade_entry_view(self):
"""Only grade entries whose effective dates are current are returned by methods in this session.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_any_effective_grade_entry_view(self):
"""All grade entries of any effective dates are returned by methods in this session.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_grade_entry(self, grade_entry_id):
"""Gets the ``GradeEntry`` specified by its ``Id``.
:param grade_entry_id: ``Id`` of the ``GradeEntry``
:type grade_entry_id: ``osid.id.Id``
:return: the grade entry
:rtype: ``osid.grading.GradeEntry``
:raise: ``NotFound`` -- ``grade_entry_id`` not found
:raise: ``NullArgument`` -- ``grade_entry_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntry
def get_grade_entries_by_ids(self, grade_entry_ids):
"""Gets a ``GradeEntryList`` corresponding to the given ``IdList``.
:param grade_entry_ids: the list of ``Ids`` to retrieve
:type grade_entry_ids: ``osid.id.IdList``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
        :raise: ``NotFound`` -- an ``Id`` was not found
:raise: ``NullArgument`` -- ``grade_entry_ids`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_by_genus_type(self, grade_entry_genus_type):
"""Gets a ``GradeEntryList`` corresponding to the given grade entry genus ``Type`` which does not include grade entries of genus types derived from the specified ``Type``.
:param grade_entry_genus_type: a grade entry genus type
:type grade_entry_genus_type: ``osid.type.Type``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``grade_entry_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_by_parent_genus_type(self, grade_entry_genus_type):
"""Gets a ``GradeEntryList`` corresponding to the given grade entry genus ``Type`` and include any additional grade entry with genus types derived from the specified ``Type``.
:param grade_entry_genus_type: a grade entry genus type
:type grade_entry_genus_type: ``osid.type.Type``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``grade_entry_genus_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_by_record_type(self, grade_entry_record_type):
"""Gets a ``GradeEntryList`` containing the given grade entry record ``Type``.
:param grade_entry_record_type: a grade entry record type
:type grade_entry_record_type: ``osid.type.Type``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``grade_entry_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_on_date(self, from_, to):
"""Gets a ``GradeEntryList`` effective during the entire given date range inclusive but not confined to the date range.
:param from: start of date range
:type from: ``osid.calendaring.DateTime``
:param to: end of date range
:type to: ``osid.calendaring.DateTime``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
:raise: ``NullArgument`` -- ``from or to`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_gradebook_column(self, gradebook_column_id):
"""Gets a ``GradeEntryList`` for the gradebook column.
:param gradebook_column_id: a gradebook column ``Id``
:type gradebook_column_id: ``osid.id.Id``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``gradebook_column_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_gradebook_column_on_date(self, gradebook_column_id, from_, to):
"""Gets a ``GradeEntryList`` for the given gradebook column and effective during the entire given date range inclusive but not confined to the date range.
:param gradebook_column_id: a gradebook column ``Id``
:type gradebook_column_id: ``osid.id.Id``
:param from: start of date range
:type from: ``osid.calendaring.DateTime``
:param to: end of date range
:type to: ``osid.calendaring.DateTime``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
:raise: ``NullArgument`` -- ``gradebook_column_id, from, or to`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_resource(self, resource_id):
"""Gets a ``GradeEntryList`` for the given key key resource.
:param resource_id: a key resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_resource_on_date(self, resource_id, from_, to):
"""Gets a ``GradeEntryList`` for the given key resource and effective during the entire given date range inclusive but not confined to the date range.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:param from: start of date range
:type from: ``osid.calendaring.DateTime``
:param to: end of date range
:type to: ``osid.calendaring.DateTime``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
:raise: ``NullArgument`` -- ``resource_id, from, or to`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_gradebook_column_and_resource(self, gradebook_column_id, resource_id):
"""Gets a ``GradeEntryList`` for the gradebook column and key resource.
:param gradebook_column_id: a gradebook column ``Id``
:type gradebook_column_id: ``osid.id.Id``
:param resource_id: a key resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``gradebook_column_id`` or ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_for_gradebook_column_and_resource_on_date(self, gradebook_column_id, resource_id, from_, to):
"""Gets a ``GradeEntryList`` for the given gradebook column, resource, and effective during the entire given date range inclusive but not confined to the date range.
:param gradebook_column_id: a gradebook column ``Id``
:type gradebook_column_id: ``osid.id.Id``
:param resource_id: a key resource ``Id``
:type resource_id: ``osid.id.Id``
:param from: start of date range
:type from: ``osid.calendaring.DateTime``
:param to: end of date range
:type to: ``osid.calendaring.DateTime``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
:raise: ``NullArgument`` -- ``gradebook_column_id, resource, from, or to`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries_by_grader(self, resource_id):
"""Gets a ``GradeEntryList`` for the given grader.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:return: the returned ``GradeEntry`` list
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
def get_grade_entries(self):
"""Gets all grade entries.
:return: a ``GradeEntryList``
:rtype: ``osid.grading.GradeEntryList``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
grade_entries = property(fget=get_grade_entries)
##
# The following methods are from osid.grading.GradeEntryQuerySession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_search_grade_entries(self):
"""Tests if this user can perform ``GradeEntry`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
:return: ``false`` if search methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def use_federated_gradebook_view(self):
"""Federates the view for methods in this session.
A federated view will include grade entries in gradebooks which
are children of this gradebook in the gradebook hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def use_isolated_gradebook_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this gradebook only.
        *compliance: mandatory -- This method must be implemented.*
"""
pass
def get_grade_entry_query(self):
"""Gets a grade entry query.
:return: the grade entry query
:rtype: ``osid.grading.GradeEntryQuery``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryQuery
grade_entry_query = property(fget=get_grade_entry_query)
def get_grade_entries_by_query(self, grade_entry_query):
"""Gets a list of entries matching the given grade entry query.
:param grade_entry_query: the grade entry query
:type grade_entry_query: ``osid.grading.GradeEntryQuery``
:return: the returned ``GradeEntryList``
:rtype: ``osid.grading.GradeEntryList``
:raise: ``NullArgument`` -- ``grade_entry_query`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_entry_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryList
##
# The following methods are from osid.grading.GradeEntryAdminSession
def get_gradebook_id(self):
"""Gets the ``Gradebook`` ``Id`` associated with this session.
:return: the ``Gradebook Id`` associated with this session
:rtype: ``osid.id.Id``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.id.Id
gradebook_id = property(fget=get_gradebook_id)
def get_gradebook(self):
"""Gets the ``Gradebook`` associated with this session.
:return: the ``Gradebook`` associated with this session
:rtype: ``osid.grading.Gradebook``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
gradebook = property(fget=get_gradebook)
def can_create_grade_entries(self):
"""Tests if this user can create grade entries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a grade
entry will result in a ``PermissionDenied``. This is intended as
a hint to an application that may opt not to offer create
operations to an unauthorized user.
:return: ``false`` if ``GradeEntry`` creation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def can_create_grade_entry_with_record_types(self, grade_entry_record_types):
"""Tests if this user can create a single ``GradeEntry`` using the desired record types.
While ``GradingManager.getGradeEntryRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``GradeEntry``.
Providing an empty array tests if a ``GradeEntry`` can be
created with no records.
:param grade_entry_record_types: array of grade entry record types
:type grade_entry_record_types: ``osid.type.Type[]``
:return: ``true`` if ``GradeEntry`` creation using the specified record ``Types`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``grade_entry_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_entry_form_for_create(self, gradebook_column_id, resource_id, grade_entry_record_types):
"""Gets the grade entry form for creating new grade entries.
A new form should be requested for each create transaction.
:param gradebook_column_id: the gradebook column
:type gradebook_column_id: ``osid.id.Id``
:param resource_id: the key resource
:type resource_id: ``osid.id.Id``
:param grade_entry_record_types: array of grade entry record types
:type grade_entry_record_types: ``osid.type.Type[]``
:return: the grade entry form
:rtype: ``osid.grading.GradeEntryForm``
:raise: ``NotFound`` -- ``gradebook_column_id or resource_id`` not found
:raise: ``NullArgument`` -- ``gradebook_column_id, resource_id,`` or ``grade_entry_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryForm
def create_grade_entry(self, grade_entry_form):
"""Creates a new ``GradeEntry``.
:param grade_entry_form: the form for this ``GradeEntry``
:type grade_entry_form: ``osid.grading.GradeEntryForm``
:return: the new ``GradeEntry``
:rtype: ``osid.grading.GradeEntry``
:raise: ``IllegalState`` -- ``grade_entry_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``grade_entry_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntry
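    # Sketch of the grade-entry create flow (identifiers are illustrative
    # placeholders; the form getter above takes a column, a key resource
    # and the desired record types):
    #
    #     form = session.get_grade_entry_form_for_create(
    #         gradebook_column_id, resource_id, [])
    #     # ... populate the form's elements ...
    #     entry = session.create_grade_entry(form)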
    def can_override_calculated_grade_entries(self):
        """Tests if this user can override grade entries calculated from another.
        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known overriding a
        calculated grade entry will result in a ``PermissionDenied``.
        This is intended as a hint to an application that may opt not to
        offer override operations to an unauthorized user.
:return: ``false`` if ``GradeEntry`` override is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_entry_form_for_override(self, grade_entry_id, grade_entry_record_types):
"""Gets the grade entry form for overriding calculated grade entries.
A new form should be requested for each create transaction.
:param grade_entry_id: the ``Id`` of the grade entry to be overridden
:type grade_entry_id: ``osid.id.Id``
:param grade_entry_record_types: array of grade entry record types
:type grade_entry_record_types: ``osid.type.Type[]``
:return: the grade entry form
:rtype: ``osid.grading.GradeEntryForm``
:raise: ``AlreadyExists`` -- ``grade_entry_id`` is already overridden
:raise: ``NotFound`` -- ``grade_entry_id`` not found or ``grade_entry_id`` is not a calculated entry
:raise: ``NullArgument`` -- ``grade_entry_id`` or ``grade_entry_record_types`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- unable to get form for requested record types
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryForm
def override_calculated_grade_entry(self, grade_entry_form):
"""Creates a new overriding ``GradeEntry``.
:param grade_entry_form: the form for this ``GradeEntry``
:type grade_entry_form: ``osid.grading.GradeEntryForm``
:return: the new ``GradeEntry``
:rtype: ``osid.grading.GradeEntry``
:raise: ``IllegalState`` -- ``grade_entry_form`` already used in a create transaction
:raise: ``InvalidArgument`` -- one or more of the form elements is invalid
:raise: ``NullArgument`` -- ``grade_entry_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_override()``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntry
def can_update_grade_entries(self):
"""Tests if this user can update grade entries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``GradeEntry`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
update operations to an unauthorized user.
:return: ``false`` if grade entry modification is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_grade_entry_form_for_update(self, grade_entry_id):
"""Gets the grade entry form for updating an existing entry.
A new grade entry form should be requested for each update
transaction.
:param grade_entry_id: the ``Id`` of the ``GradeEntry``
:type grade_entry_id: ``osid.id.Id``
:return: the grade entry form
:rtype: ``osid.grading.GradeEntryForm``
:raise: ``NotFound`` -- ``grade_entry_id`` is not found
:raise: ``NullArgument`` -- ``grade_entry_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.GradeEntryForm
def update_grade_entry(self, grade_entry_form):
"""Updates an existing grade entry.
:param grade_entry_form: the form containing the elements to be updated
:type grade_entry_form: ``osid.grading.GradeEntryForm``
:raise: ``IllegalState`` -- ``grade_entry_form`` already used in an update transaction
:raise: ``InvalidArgument`` -- the form contains an invalid value
:raise: ``NullArgument`` -- ``grade_entry_form`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
:raise: ``Unsupported`` -- ``grade_entry_form`` did not originate from ``get_grade_entry_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_delete_grade_entries(self):
"""Tests if this user can delete grade entries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``GradeEntry`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
delete operations to an unauthorized user.
:return: ``false`` if ``GradeEntry`` deletion is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def delete_grade_entry(self, grade_entry_id):
"""Deletes the ``GradeEntry`` identified by the given ``Id``.
:param grade_entry_id: the ``Id`` of the ``GradeEntry`` to delete
:type grade_entry_id: ``osid.id.Id``
        :raise: ``NotFound`` -- no ``GradeEntry`` identified by the given ``Id`` was found
:raise: ``NullArgument`` -- ``grade_entry_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
def can_manage_grade_entry_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``GradeEntries``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
:return: ``false`` if ``GradeEntry`` aliasing is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def alias_grade_entry(self, grade_entry_id, alias_id):
"""Adds an ``Id`` to a ``GradeEntry`` for the purpose of creating compatibility.
The primary ``Id`` of the ``GradeEntry`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another grade entry, it is
reassigned to the given grade entry ``Id``.
:param grade_entry_id: the ``Id`` of a ``GradeEntry``
:type grade_entry_id: ``osid.id.Id``
:param alias_id: the alias ``Id``
:type alias_id: ``osid.id.Id``
:raise: ``AlreadyExists`` -- ``alias_id`` is already assigned
:raise: ``NotFound`` -- ``grade_entry_id`` not found
:raise: ``NullArgument`` -- ``grade_entry_id`` or ``alias_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class GradebookList(osid_objects.OsidList):
"""Like all ``OsidLists,`` ``GradebookList`` provides a means for accessing ``Gradebook`` elements sequentially either one at a time or many at a time.
Examples: while (gl.hasNext()) { Gradebook gradebook =
gl.getNextGradebook(); }
or
while (gl.hasNext()) {
Gradebook[] gradebooks = gl.getNextGradebooks(gl.available());
}
"""
def get_next_gradebook(self):
"""Gets the next ``Gradebook`` in this list.
:return: the next ``Gradebook`` in this list. The ``has_next()`` method should be used to test that a next ``Gradebook`` is available before calling this method.
:rtype: ``osid.grading.Gradebook``
:raise: ``IllegalState`` -- no more elements available in this list
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
next_gradebook = property(fget=get_next_gradebook)
def get_next_gradebooks(self, n):
"""Gets the next set of ``Gradebook`` elements in this list which must be less than or equal to the return from ``available()``.
:param n: the number of ``Gradebook`` elements requested which must be less than or equal to ``available()``
:type n: ``cardinal``
        :return: an array of ``Gradebook`` elements. The length of the array is less than or equal to the number specified.
:rtype: ``osid.grading.Gradebook``
:raise: ``IllegalState`` -- no more elements available in this list
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.grading.Gradebook
| mit | 3,892,039,066,676,046,000 | 38.593546 | 193 | 0.644349 | false |
eckardm/archivematica | src/MCPClient/lib/clientScripts/archivematicaCreateMETSMetadataCSV.py | 1 | 4618 | #!/usr/bin/env python2
#
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <[email protected]>
# @version svn: $Id$
#/src/dashboard/src/main/models.py
import collections
import csv
import os
import sys
import traceback
# archivematicaCommon
import archivematicaFunctions
from custom_handlers import get_script_logger
from sharedVariablesAcrossModules import sharedVariablesAcrossModules
def parseMetadata(SIPPath):
"""
Parse all metadata.csv files in SIPPath.
Looking for metadata.csvs in metadata/ and
objects/metadata/transfers/<transfer name>/metadata/
See parseMetadataCSV for details on parsing.
:param SIPPath: Path to the SIP
:return: {<filename>: OrderedDict(key: [values]) }
"""
all_metadata = {}
metadata_csvs = archivematicaFunctions.find_metadata_files(SIPPath, 'metadata.csv')
for metadataCSVFilePath in metadata_csvs:
try:
csv_metadata = parseMetadataCSV(metadataCSVFilePath)
except Exception:
print >>sys.stderr, "error parsing: ", metadataCSVFilePath
traceback.print_exc(file=sys.stderr)
sharedVariablesAcrossModules.globalErrorCount += 1
continue
# Provide warning if this file already has differing metadata
# Not using all_metadata.update(csv_metadata) because of that
for entry, values in csv_metadata.iteritems():
if entry in all_metadata and all_metadata[entry] != values:
print >> sys.stderr, 'Metadata for', entry, 'being updated. Old:', all_metadata[entry], 'New:', values
existing = all_metadata.get(entry, collections.OrderedDict())
existing.update(values)
all_metadata[entry] = existing
return all_metadata
def parseMetadataCSV(metadataCSVFilePath):
"""
Parses the metadata.csv into a dict with entries for each file.
Each file's entry is an OrderedDict containing the column header and a list of values for each column.
Example CSV:
Filename,dc.title,dc.type,dc.type,Other metadata
objects/foo.jpg,Foo,Photograph,Still Image,Taken on a sunny day
objects/bar/,Bar,Photograph,Still Image,All taken on a rainy day
Produces:
{
'objects/foo.jpg': OrderedDict(dc.title=[Foo], dc.type=[Photograph, Still Image], Other metadata=[Taken on a sunny day])
        'objects/bar': OrderedDict(dc.title=[Bar], dc.type=[Photograph, Still Image], Other metadata=[All taken on a rainy day])
}
:param metadataCSVFilePath: Path to the metadata CSV to parse
:return: {<filename>: OrderedDict(<metadata name>: [<metadata value>]) }
"""
metadata = {}
# use universal newline mode to support unusual newlines, like \r
with open(metadataCSVFilePath, 'rbU') as f:
reader = csv.reader(f)
# Parse first row as header
header = reader.next()
# Strip filename column, strip whitespace from header values
header = [h.strip() for h in header[1:]]
# Parse data
for row in reader:
if not row:
continue
entry_name = row[0]
if entry_name.endswith("/"):
entry_name = entry_name[:-1]
# Strip file/dir name from values
row = row[1:]
values = archivematicaFunctions.OrderedListsDict(zip(header, row))
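            # OrderedListsDict groups values under repeated headers, so a
            # column name appearing twice (e.g. dc.type in the docstring
            # example) yields a single key mapped to a list of both values.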
if entry_name in metadata and metadata[entry_name] != values:
print >> sys.stderr, 'Metadata for', entry_name, 'being overwritten. Old:', metadata[entry_name], 'New:', values
metadata[entry_name] = values
return collections.OrderedDict(metadata) # Return a normal OrderedDict
if __name__ == '__main__':
logger = get_script_logger("archivematica.mcp.client.createMETSMetadataCSV")
parseMetadata(sys.argv[1])
| agpl-3.0 | 1,642,396,789,617,826,600 | 37.165289 | 128 | 0.683629 | false |
cyphactor/lifecyclemanager | extra/plugins/userlog/userlog/userlog.py | 1 | 7748 | import posixpath
from trac.core import *
from trac.config import *
from trac.config import BoolOption
from trac.web.chrome import ITemplateProvider, \
add_stylesheet
from trac.web.main import IRequestHandler
from trac.wiki import wiki_to_html, wiki_to_oneliner
from trac.mimeview import Mimeview, is_binary
from trac.util import escape, Markup
from trac.util.html import html  # element builder used for html.PRE() below
from trac.util.datefmt import format_datetime, pretty_timedelta
from trac.util.text import unicode_urlencode, shorten_line, CRLF
from trac.versioncontrol.diff import get_diff_options, unified_diff
from trac.versioncontrol import Node, Changeset
import re
class UserLogModule(Component):
implements(IRequestHandler, ITemplateProvider)
wiki_format_messages = BoolOption('changeset', 'wiki_format_messages',
'true',
"""Whether wiki formatting should be applied to changeset messages.
If this option is disabled, changeset messages will be rendered as
pre-formatted text.""")
# IRequestHandler methods
def match_request(self, req):
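        # Matches "/userlog" (user falls back to '/') as well as
        # "/userlog/<name>...", capturing the user name in group(1).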
match = re.match(r'/userlog(?:/(\w+).*|$)', req.path_info)
if match:
req.args['user'] = match.group(1) or '/'
return True
def process_request(self, req):
user = req.args.get('user')
        sort = req.args.get('sort', 'ASC')
        if sort not in ('ASC', 'DESC'):
            sort = 'ASC'  # only the two literal directions are ever interpolated
db = self.env.get_db_cnx()
changesets = self._get_userlog(req, db, user, sort)
toc_links = []
for rev, _, _, _ in changesets:
toc_links.append({'anchor': rev,
'title': 'Revision %s' % rev})
changeset_ranges = self._get_changeset_ranges(changesets)
changeset_links = []
for start, end in changeset_ranges:
if start != end:
title = 'Changeset [%s:%s]' % (start, end)
else:
title = 'Changeset [%s]' % start
link = req.href.changeset(old=start, old_path='/',
new=end, new_path='/')
changeset_links.append({'href': link,
'title': title})
req.hdf['user'] = user
req.hdf['changesets'] = changesets
req.hdf['toc_links'] = toc_links
req.hdf['changeset_links'] = changeset_links
add_stylesheet(req, 'common/css/wiki.css')
add_stylesheet(req, 'userlog/css/userlog.css')
return 'userlog.cs', None
def _get_userlog(self, req, db, user, sort):
mimeview = Mimeview(self.env)
repos = self.env.get_repository()
diff_options = get_diff_options(req)
cursor = db.cursor()
cursor.execute("SELECT rev, time, message FROM revision "
"WHERE author='%s' ORDER BY time %s" % (user, sort))
# Have to sort by time because rev is a text field
# and sorts lexicographically rather than numerically
changesets = []
for rev, time, message in cursor:
if self.wiki_format_messages:
message = wiki_to_html(message, self.env, req,
escape_newlines=True)
else:
message = html.PRE(message)
prev = repos.get_node('/', rev).get_previous()
if prev:
prev_rev = prev[1]
else:
prev_rev = rev
diffs = []
changes = repos.get_changes(old_path='/', old_rev=prev_rev,
new_path='/', new_rev=rev)
for old_node, new_node, kind, change in changes:
if kind == Node.DIRECTORY:
if change == Changeset.ADD:
diffs.append(('%s added' % new_node.path, ''))
elif change == Changeset.DELETE:
diffs.append(('%s deleted' % old_node.path, ''))
continue
new_content = old_content = ''
new_node_info = old_node_info = ('','')
if old_node:
old_content = old_node.get_content().read()
if is_binary(old_content):
continue
old_node_info = (old_node.path, old_node.rev)
old_content = mimeview.to_unicode(old_content,
old_node.content_type)
if new_node:
new_content = new_node.get_content().read()
if is_binary(new_content):
continue
new_node_info = (new_node.path, new_node.rev)
new_path = new_node.path
new_content = mimeview.to_unicode(new_content,
new_node.content_type)
else:
old_node_path = repos.normalize_path(old_node.path)
diff_old_path = repos.normalize_path('/')
new_path = posixpath.join('/', old_node_path[len(diff_old_path)+1:])
if old_content != new_content:
context = 3
options = diff_options[1]
for option in options:
if option.startswith('-U'):
context = int(option[2:])
break
if not old_node_info[0]:
old_node_info = new_node_info # support for 'A'dd changes
diff = 'Index: ' + new_path + CRLF
diff += '=' * 67 + CRLF
diff += '--- %s (revision %s)' % old_node_info + CRLF
diff += '+++ %s (revision %s)' % new_node_info + CRLF
for line in unified_diff(old_content.splitlines(),
new_content.splitlines(), context,
ignore_blank_lines='-B' in options,
ignore_case='-i' in options,
ignore_space_changes='-b' in options):
diff += line + CRLF
if change == Changeset.ADD:
diffs.append(('%s added' % (new_node.path,), diff))
elif change == Changeset.DELETE:
diffs.append(('%s deleted' % (old_node.path,), diff))
else:
diffs.append(('%s edited' % (new_node.path,), diff))
changesets.append((int(rev), format_datetime(time), message, diffs))
return changesets
def _get_changeset_ranges(self, changesets):
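        # Collapses consecutive revision numbers into (start, end) pairs,
        # e.g. revisions 1, 2, 3, 7, 8, 10 -> [(1, 3), (7, 8), (10, 10)].
        # Assumes the changesets arrive in ascending revision order;
        # descending input would silently drop non-adjacent revisions.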
ranges = [] # will be a list of pairs: (start, end)
for rev, _, _, _ in changesets:
# if rev is more than two greater than last max
# or list is empty
if ranges == [] or rev > (ranges[-1][1] + 1):
# create a new tuple
ranges.append((rev, rev))
# else if rev is greater (by one) than last max
elif rev == (ranges[-1][1] + 1):
ranges[-1] = (ranges[-1][0], rev)
return ranges
# ITemplateProvider methods
def get_templates_dirs(self):
"""Return a list of directories containing the provided
ClearSilver templates.
"""
from pkg_resources import resource_filename
return [resource_filename(__name__, 'templates')]
def get_htdocs_dirs(self):
from pkg_resources import resource_filename
return [('userlog', resource_filename(__name__, 'htdocs'))]
| gpl-3.0 | -8,103,132,112,731,082,000 | 43.786127 | 88 | 0.495225 | false |
geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi_cache.py | 6 | 1090 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState
from telemetry.page import page as page_module
from telemetry import story
class LoFiPageCache(page_module.Page):
"""
A test page for the chrome proxy Lo-Fi cache tests.
Checks that LoFi placeholder images are not loaded from cache on page reloads
when LoFi mode is disabled or data reduction proxy is disabled.
"""
def __init__(self, url, page_set):
super(LoFiPageCache, self).__init__(url=url, page_set=page_set,
shared_page_state_class=ChromeProxySharedPageState)
class LoFiCacheStorySet(story.StorySet):
""" Chrome proxy test sites """
def __init__(self):
super(LoFiCacheStorySet, self).__init__()
urls_list = [
'http://check.googlezip.net/cacheable/test.html',
'http://check.googlezip.net/cacheable/test.html',
]
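    # The same URL is listed twice on purpose: the second visit revisits
    # the page and exercises the reload/cache behavior described in
    # LoFiPageCache's docstring.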
for url in urls_list:
self.AddStory(LoFiPageCache(url, self))
| gpl-3.0 | 3,508,047,452,543,410,700 | 31.058824 | 79 | 0.721101 | false |
pcm17/tensorflow | tensorflow/contrib/distributions/python/ops/poisson.py | 1 | 5426 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Poisson distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distributions.python.ops import distribution
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
__all__ = [
"Poisson",
]
_poisson_sample_note = """
Note that the input value must be a non-negative floating point tensor with
dtype `dtype` and whose shape can be broadcast with `self.rate`. `x` is only
legal if it is non-negative and its components are equal to integer values.
"""
class Poisson(distribution.Distribution):
"""Poisson distribution.
The Poisson distribution is parameterized by an event `rate` parameter.
#### Mathematical Details
The probability mass function (pmf) is,
```none
pmf(k; lambda, k >= 0) = (lambda^k / k!) / Z
Z = exp(lambda).
```
where `rate = lambda` and `Z` is the normalizing constant.
"""
def __init__(self,
rate,
validate_args=False,
allow_nan_stats=True,
name="Poisson"):
"""Initialize a batch of Poisson distributions.
Args:
rate: Floating point tensor, the rate parameter of the
distribution(s). `rate` must be positive.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = locals()
with ops.name_scope(name, values=[rate]) as ns:
with ops.control_dependencies([check_ops.assert_positive(rate)] if
validate_args else []):
self._rate = array_ops.identity(rate, name="rate")
super(Poisson, self).__init__(
dtype=self._rate.dtype,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[self._rate],
name=ns)
@property
def rate(self):
"""Rate parameter."""
return self._rate
def _batch_shape_tensor(self):
return array_ops.shape(self.rate)
def _batch_shape(self):
return self.rate.get_shape()
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
@distribution_util.AppendDocstring(_poisson_sample_note)
def _log_prob(self, x):
return self._log_unnormalized_prob(x) - self._log_normalization()
@distribution_util.AppendDocstring(_poisson_sample_note)
def _prob(self, x):
return math_ops.exp(self._log_prob(x))
@distribution_util.AppendDocstring(_poisson_sample_note)
def _log_cdf(self, x):
return math_ops.log(self.cdf(x))
@distribution_util.AppendDocstring(_poisson_sample_note)
def _cdf(self, x):
if self.validate_args:
# We set `check_integer=False` since the CDF is defined on whole real
# line.
x = distribution_util.embed_check_nonnegative_discrete(
x, check_integer=False)
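    # The Poisson CDF equals the regularized upper incomplete gamma
    # function: P(X <= x) = Q(floor(x) + 1, rate), which igammac computes.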
return math_ops.igammac(math_ops.floor(x + 1), self.rate)
def _log_normalization(self):
return self.rate
def _log_unnormalized_prob(self, x):
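    # Unnormalized log-pmf: x * log(rate) - log(x!), with log(x!) computed
    # as lgamma(x + 1).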
if self.validate_args:
x = distribution_util.embed_check_nonnegative_discrete(
x, check_integer=True)
return x * math_ops.log(self.rate) - math_ops.lgamma(x + 1)
def _mean(self):
return array_ops.identity(self.rate)
def _variance(self):
return array_ops.identity(self.rate)
@distribution_util.AppendDocstring(
"""Note: when `rate` is an integer, there are actually two modes: `rate`
and `rate - 1`. In this case we return the larger, i.e., `rate`.""")
def _mode(self):
return math_ops.floor(self.rate)
def _sample_n(self, n, seed=None):
return random_ops.random_poisson(
self.rate, [n], dtype=self.dtype, seed=seed)
| apache-2.0 | 8,151,053,767,586,295,000 | 33.56051 | 80 | 0.679506 | false |
harlequin/sickbeard | sickbeard/tv.py | 1 | 60124 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os.path
import datetime
import threading
import re
import glob
import sickbeard
import xml.etree.cElementTree as etree
from name_parser.parser import NameParser, InvalidNameException
from lib.tvdb_api import tvdb_api, tvdb_exceptions
from sickbeard import db
from sickbeard import helpers, exceptions, logger
from sickbeard.exceptions import ex
from sickbeard import tvrage
from sickbeard import config
from sickbeard import image_cache
from sickbeard import postProcessor
from sickbeard import encodingKludge as ek
from common import Quality, Overview
from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, UNKNOWN
class TVShow(object):
def __init__ (self, tvdbid, lang=""):
self.tvdbid = tvdbid
self._location = ""
self.name = ""
self.tvrid = 0
self.tvrname = ""
self.network = ""
self.genre = ""
self.runtime = 0
self.quality = int(sickbeard.QUALITY_DEFAULT)
self.seasonfolders = int(sickbeard.SEASON_FOLDERS_DEFAULT)
self.status = ""
self.airs = ""
self.startyear = 0
self.paused = 0
self.air_by_date = 0
self.lang = lang
self.lock = threading.Lock()
self._isDirGood = False
self.episodes = {}
otherShow = helpers.findCertainShow(sickbeard.showList, self.tvdbid)
if otherShow != None:
raise exceptions.MultipleShowObjectsException("Can't create a show if it already exists")
self.loadFromDB()
self.saveToDB()
def _getLocation(self):
if ek.ek(os.path.isdir, self._location):
return self._location
else:
raise exceptions.ShowDirNotFoundException("Show folder doesn't exist, you shouldn't be using it")
def _setLocation(self, newLocation):
logger.log(u"Setter sets location to " + newLocation, logger.DEBUG)
if ek.ek(os.path.isdir, newLocation):
self._location = newLocation
self._isDirGood = True
else:
raise exceptions.NoNFOException("Invalid folder for the show!")
location = property(_getLocation, _setLocation)
# delete references to anything that's not in the internal lists
def flushEpisodes(self):
for curSeason in self.episodes:
for curEp in self.episodes[curSeason]:
myEp = self.episodes[curSeason][curEp]
self.episodes[curSeason][curEp] = None
del myEp
def getEpisode(self, season, episode, file=None, noCreate=False):
#return TVEpisode(self, season, episode)
if not season in self.episodes:
self.episodes[season] = {}
ep = None
if not episode in self.episodes[season] or self.episodes[season][episode] == None:
if noCreate:
return None
logger.log(str(self.tvdbid) + ": An object for episode " + str(season) + "x" + str(episode) + " didn't exist in the cache, trying to create it", logger.DEBUG)
if file != None:
ep = TVEpisode(self, season, episode, file)
else:
ep = TVEpisode(self, season, episode)
if ep != None:
self.episodes[season][episode] = ep
return self.episodes[season][episode]
def writeShowNFO(self):
result = False
if not ek.ek(os.path.isdir, self._location):
logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
return False
for cur_provider in sickbeard.metadata_provider_dict.values():
result = cur_provider.create_show_metadata(self) or result
return result
def writeMetadata(self):
if not ek.ek(os.path.isdir, self._location):
logger.log(str(self.tvdbid) + u": Show dir doesn't exist, skipping NFO generation")
return
self.getImages()
self.writeShowNFO()
self.writeEpisodeNFOs()
def writeEpisodeNFOs (self):
if not ek.ek(os.path.isdir, self._location):
logger.log(str(self.tvdbid) + ": Show dir doesn't exist, skipping NFO generation")
return
logger.log(str(self.tvdbid) + ": Writing NFOs for all episodes")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
for epResult in sqlResults:
logger.log(str(self.tvdbid) + ": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(epResult["episode"]), logger.DEBUG)
curEp = self.getEpisode(epResult["season"], epResult["episode"])
curEp.createMetaFiles()
# find all media files in the show folder and create episodes for as many as possible
def loadEpisodesFromDir (self):
if not ek.ek(os.path.isdir, self._location):
logger.log(str(self.tvdbid) + ": Show dir doesn't exist, not loading episodes from disk")
return
logger.log(str(self.tvdbid) + ": Loading all episodes from the show directory " + self._location)
# get file list
mediaFiles = helpers.listMediaFiles(self._location)
# create TVEpisodes from each media file (if possible)
for mediaFile in mediaFiles:
curEpisode = None
logger.log(str(self.tvdbid) + ": Creating episode from " + mediaFile, logger.DEBUG)
try:
curEpisode = self.makeEpFromFile(os.path.join(self._location, mediaFile))
except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e:
logger.log(u"Episode "+mediaFile+" returned an exception: "+ex(e), logger.ERROR)
except exceptions.EpisodeDeletedException:
logger.log(u"The episode deleted itself when I tried making an object for it", logger.DEBUG)
# store the reference in the show
if curEpisode != None:
curEpisode.saveToDB()
def loadEpisodesFromDB(self):
logger.log(u"Loading all episodes from the DB")
myDB = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ?"
sqlResults = myDB.select(sql, [self.tvdbid])
scannedEps = {}
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if self.lang:
ltvdb_api_parms['language'] = self.lang
t = tvdb_api.Tvdb(**ltvdb_api_parms)
cachedShow = t[self.tvdbid]
cachedSeasons = {}
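        # cache each season's TVDB record the first time it is seen so the
        # loop below does not re-fetch it for every episode row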
for curResult in sqlResults:
deleteEp = False
curSeason = int(curResult["season"])
curEpisode = int(curResult["episode"])
if curSeason not in cachedSeasons:
try:
cachedSeasons[curSeason] = cachedShow[curSeason]
except tvdb_exceptions.tvdb_seasonnotfound, e:
logger.log(u"Error when trying to load the episode from TVDB: "+e.message, logger.WARNING)
deleteEp = True
if not curSeason in scannedEps:
scannedEps[curSeason] = {}
logger.log(u"Loading episode "+str(curSeason)+"x"+str(curEpisode)+" from the DB", logger.DEBUG)
try:
curEp = self.getEpisode(curSeason, curEpisode)
# if we found out that the ep is no longer on TVDB then delete it from our database too
if deleteEp:
curEp.deleteEpisode()
curEp.loadFromDB(curSeason, curEpisode)
curEp.loadFromTVDB(tvapi=t, cachedSeason=cachedSeasons[curSeason])
scannedEps[curSeason][curEpisode] = True
except exceptions.EpisodeDeletedException:
logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG)
continue
return scannedEps
def loadEpisodesFromTVDB(self, cache=True):
# There's gotta be a better way of doing this but we don't wanna
# change the cache value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if not cache:
ltvdb_api_parms['cache'] = 'recache'
if self.lang:
ltvdb_api_parms['language'] = self.lang
try:
t = tvdb_api.Tvdb(**ltvdb_api_parms)
showObj = t[self.tvdbid]
except tvdb_exceptions.tvdb_error:
logger.log(u"TVDB timed out, unable to update episodes from TVDB", logger.ERROR)
return None
logger.log(str(self.tvdbid) + ": Loading all episodes from theTVDB...")
scannedEps = {}
for season in showObj:
scannedEps[season] = {}
for episode in showObj[season]:
# We need examples of what episode 0 actually means before deciding whether to keep such entries
if episode == 0:
continue
try:
#ep = TVEpisode(self, season, episode)
ep = self.getEpisode(season, episode)
except exceptions.EpisodeNotFoundException:
logger.log(str(self.tvdbid) + ": TVDB object for " + str(season) + "x" + str(episode) + " is incomplete, skipping this episode")
continue
else:
try:
ep.loadFromTVDB(tvapi=t)
except exceptions.EpisodeDeletedException:
logger.log(u"The episode was deleted, skipping the rest of the load")
continue
with ep.lock:
logger.log(str(self.tvdbid) + ": Loading info from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
ep.loadFromTVDB(season, episode, tvapi=t)
if ep.dirty:
ep.saveToDB()
scannedEps[season][episode] = True
return scannedEps
def setTVRID(self, force=False):
if self.tvrid != 0 and not force:
logger.log(u"No need to get the TVRage ID, it's already populated", logger.DEBUG)
return
logger.log(u"Attempting to retrieve the TVRage ID", logger.DEBUG)
try:
# load the tvrage object, it will set the ID in its constructor if possible
tvrage.TVRage(self)
self.saveToDB()
except exceptions.TVRageException, e:
logger.log(u"Couldn't get TVRage ID because we're unable to sync TVDB and TVRage: "+ex(e), logger.DEBUG)
return
def getImages(self, fanart=None, poster=None):
poster_result = fanart_result = season_thumb_result = False
for cur_provider in sickbeard.metadata_provider_dict.values():
logger.log("Running season folders for "+cur_provider.name, logger.DEBUG)
poster_result = cur_provider.create_poster(self) or poster_result
fanart_result = cur_provider.create_fanart(self) or fanart_result
season_thumb_result = cur_provider.create_season_thumbs(self) or season_thumb_result
return poster_result or fanart_result or season_thumb_result
def loadLatestFromTVRage(self):
try:
# load the tvrage object
tvr = tvrage.TVRage(self)
newEp = tvr.findLatestEp()
if newEp != None:
logger.log(u"TVRage gave us an episode object - saving it for now", logger.DEBUG)
newEp.saveToDB()
# make an episode out of it
except exceptions.TVRageException, e:
logger.log(u"Unable to add TVRage info: " + ex(e), logger.WARNING)
# make a TVEpisode object from a media file
def makeEpFromFile(self, file):
if not ek.ek(os.path.isfile, file):
logger.log(str(self.tvdbid) + ": That isn't even a real file dude... " + file)
return None
logger.log(str(self.tvdbid) + ": Creating episode object from " + file, logger.DEBUG)
try:
myParser = NameParser()
parse_result = myParser.parse(file)
except InvalidNameException:
logger.log(u"Unable to parse the filename "+file+" into a valid episode", logger.ERROR)
return None
if len(parse_result.episode_numbers) == 0 and not parse_result.air_by_date:
logger.log("parse_result: "+str(parse_result))
logger.log(u"No episode number found in "+file+", ignoring it", logger.ERROR)
return None
# for now lets assume that any episode in the show dir belongs to that show
season = parse_result.season_number if parse_result.season_number != None else 1
episodes = parse_result.episode_numbers
rootEp = None
# if we have an air-by-date show then get the real season/episode numbers
if parse_result.air_by_date:
try:
# There's gotta be a better way of doing this but we don't wanna
# change the cache value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if self.lang:
ltvdb_api_parms['language'] = self.lang
t = tvdb_api.Tvdb(**ltvdb_api_parms)
epObj = t[self.tvdbid].airedOn(parse_result.air_date)[0]
season = int(epObj["seasonnumber"])
episodes = [int(epObj["episodenumber"])]
except tvdb_exceptions.tvdb_episodenotfound:
logger.log(u"Unable to find episode with date "+str(episodes[0])+" for show "+self.name+", skipping", logger.WARNING)
return None
except tvdb_exceptions.tvdb_error, e:
logger.log(u"Unable to contact TVDB: "+ex(e), logger.WARNING)
return None
for curEpNum in episodes:
episode = int(curEpNum)
logger.log(str(self.tvdbid) + ": " + file + " parsed to " + self.name + " " + str(season) + "x" + str(episode), logger.DEBUG)
checkQualityAgain = False
curEp = self.getEpisode(season, episode)
if curEp == None:
try:
curEp = self.getEpisode(season, episode, file)
except exceptions.EpisodeNotFoundException:
logger.log(str(self.tvdbid) + ": Unable to figure out what this file is, skipping", logger.ERROR)
continue
else:
# if there is a new file associated with this ep then re-check the quality
if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file):
logger.log(u"The old episode had a different file associated with it, I will re-check the quality based on the new filename "+file, logger.DEBUG)
checkQualityAgain = True
with curEp.lock:
curEp.location = file
curEp.checkForMetaFiles()
if rootEp == None:
rootEp = curEp
else:
rootEp.relatedEps.append(curEp)
# if they replace a file on me I'll make some attempt at re-checking the quality
if checkQualityAgain:
newQuality = Quality.nameQuality(file)
logger.log(u"Since this file has been renamed, I checked "+file+" and found quality "+Quality.qualityStrings[newQuality], logger.DEBUG)
if newQuality != Quality.UNKNOWN:
curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality)
elif sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]:
oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status)
newQuality = Quality.nameQuality(file)
if newQuality == Quality.UNKNOWN:
newQuality = Quality.assumeQuality(file)
newStatus = None
# if it was snatched and now exists then set the status correctly
if oldStatus == SNATCHED and oldQuality <= newQuality:
logger.log(u"STATUS: this ep used to be snatched with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
newStatus = DOWNLOADED
# if it was snatched proper and we found a higher quality one then allow the status change
elif oldStatus == SNATCHED_PROPER and oldQuality < newQuality:
logger.log(u"STATUS: this ep used to be snatched proper with quality "+Quality.qualityStrings[oldQuality]+" but a file exists with quality "+Quality.qualityStrings[newQuality]+" so I'm setting the status to DOWNLOADED", logger.DEBUG)
newStatus = DOWNLOADED
elif oldStatus not in (SNATCHED, SNATCHED_PROPER):
newStatus = DOWNLOADED
if newStatus != None:
with curEp.lock:
logger.log(u"STATUS: we have an associated file, so setting the status from "+str(curEp.status)+" to DOWNLOADED/" + str(Quality.statusFromName(file)), logger.DEBUG)
curEp.status = Quality.compositeStatus(newStatus, newQuality)
with curEp.lock:
curEp.saveToDB()
# creating metafiles on the root should be good enough
if rootEp != None:
with rootEp.lock:
rootEp.createMetaFiles()
return rootEp
def loadFromDB(self, skipNFO=False):
logger.log(str(self.tvdbid) + ": Loading show info from database")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
if len(sqlResults) > 1:
raise exceptions.MultipleDBShowsException()
elif len(sqlResults) == 0:
logger.log(str(self.tvdbid) + ": Unable to find the show in the database")
return
else:
if self.name == "":
self.name = sqlResults[0]["show_name"]
self.tvrname = sqlResults[0]["tvr_name"]
if self.network == "":
self.network = sqlResults[0]["network"]
if self.genre == "":
self.genre = sqlResults[0]["genre"]
self.runtime = sqlResults[0]["runtime"]
self.status = sqlResults[0]["status"]
if self.status == None:
self.status = ""
self.airs = sqlResults[0]["airs"]
if self.airs == None:
self.airs = ""
self.startyear = sqlResults[0]["startyear"]
if self.startyear == None:
self.startyear = 0
self.air_by_date = sqlResults[0]["air_by_date"]
if self.air_by_date == None:
self.air_by_date = 0
self.quality = int(sqlResults[0]["quality"])
self.seasonfolders = int(sqlResults[0]["seasonfolders"])
self.paused = int(sqlResults[0]["paused"])
self._location = sqlResults[0]["location"]
if self.tvrid == 0:
self.tvrid = int(sqlResults[0]["tvr_id"])
if self.lang == "":
self.lang = sqlResults[0]["lang"]
def loadFromTVDB(self, cache=True, tvapi=None, cachedSeason=None):
logger.log(str(self.tvdbid) + ": Loading show info from theTVDB")
# There's gotta be a better way of doing this but we don't wanna
# change the cache value elsewhere
if tvapi is None:
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if not cache:
ltvdb_api_parms['cache'] = 'recache'
if self.lang:
ltvdb_api_parms['language'] = self.lang
t = tvdb_api.Tvdb(**ltvdb_api_parms)
else:
t = tvapi
myEp = t[self.tvdbid]
self.name = myEp["seriesname"]
self.genre = myEp['genre']
self.network = myEp['network']
if myEp["airs_dayofweek"] != None and myEp["airs_time"] != None:
self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]
if myEp["firstaired"] != None and myEp["firstaired"]:
self.startyear = int(myEp["firstaired"].split('-')[0])
if self.airs == None:
self.airs = ""
if myEp["status"] != None:
self.status = myEp["status"]
if self.status == None:
self.status = ""
self.saveToDB()
def loadNFO (self):
if not os.path.isdir(self._location):
logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't load NFO")
raise exceptions.NoNFOException("The show dir doesn't exist, no NFO could be loaded")
logger.log(str(self.tvdbid) + ": Loading show info from NFO")
xmlFile = os.path.join(self._location, "tvshow.nfo")
try:
xmlFileObj = open(xmlFile, 'r')
showXML = etree.ElementTree(file = xmlFileObj)
if showXML.findtext('title') == None or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
raise exceptions.NoNFOException("Invalid info in tvshow.nfo (missing name or id):" \
+ str(showXML.findtext('title')) + " " \
+ str(showXML.findtext('tvdbid')) + " " \
+ str(showXML.findtext('id')))
self.name = showXML.findtext('title')
if showXML.findtext('tvdbid') != None:
self.tvdbid = int(showXML.findtext('tvdbid'))
elif showXML.findtext('id'):
self.tvdbid = int(showXML.findtext('id'))
else:
raise exceptions.NoNFOException("Empty <id> or <tvdbid> field in NFO")
except (exceptions.NoNFOException, SyntaxError, ValueError), e:
logger.log(u"There was an error parsing your existing tvshow.nfo file: " + ex(e), logger.ERROR)
logger.log(u"Attempting to rename it to tvshow.nfo.old", logger.DEBUG)
try:
xmlFileObj.close()
ek.ek(os.rename, xmlFile, xmlFile + ".old")
except Exception, e:
logger.log(u"Failed to rename your tvshow.nfo file - you need to delete it or fix it: " + ex(e), logger.ERROR)
raise exceptions.NoNFOException("Invalid info in tvshow.nfo")
if showXML.findtext('studio') != None:
self.network = showXML.findtext('studio')
if self.network == None and showXML.findtext('network') != None:
self.network = ""
if showXML.findtext('genre') != None:
self.genre = showXML.findtext('genre')
else:
self.genre = ""
# TODO: need to validate the input, I'm assuming it's good until then
def nextEpisode(self):
logger.log(str(self.tvdbid) + ": Finding the episode which airs next", logger.DEBUG)
myDB = db.DBConnection()
innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status = ? ORDER BY airdate ASC LIMIT 1"
innerParams = [self.tvdbid, datetime.date.today().toordinal(), UNAIRED]
query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status = ?"
params = [self.tvdbid, datetime.date.today().toordinal()] + innerParams + [UNAIRED]
sqlResults = myDB.select(query, params)
if sqlResults == None or len(sqlResults) == 0:
logger.log(str(self.tvdbid) + ": No episode found... need to implement tvrage and also show status", logger.DEBUG)
return []
else:
logger.log(str(self.tvdbid) + ": Found episode " + str(sqlResults[0]["season"]) + "x" + str(sqlResults[0]["episode"]), logger.DEBUG)
foundEps = []
for sqlEp in sqlResults:
curEp = self.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
foundEps.append(curEp)
return foundEps
# if we didn't get an episode then try getting one from tvrage
# load tvrage info
# extract NextEpisode info
# verify that we don't have it in the DB somehow (ep mismatch)
def deleteShow(self):
myDB = db.DBConnection()
myDB.action("DELETE FROM tv_episodes WHERE showid = ?", [self.tvdbid])
myDB.action("DELETE FROM tv_shows WHERE tvdb_id = ?", [self.tvdbid])
# remove self from show list
sickbeard.showList = [x for x in sickbeard.showList if x.tvdbid != self.tvdbid]
# clear the cache
image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.tvdbid)+'.*')):
logger.log(u"Deleting cache file "+cache_file)
os.remove(cache_file)
def populateCache(self):
cache_inst = image_cache.ImageCache()
logger.log(u"Checking & filling cache for show "+self.name)
cache_inst.fill_cache(self)
def refreshDir(self):
# make sure the show dir is where we think it is
if not ek.ek(os.path.isdir, self._location):
return False
# load from dir
self.loadEpisodesFromDir()
# run through all locations from DB, check that they exist
logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
for ep in sqlResults:
curLoc = os.path.normpath(ep["location"])
season = int(ep["season"])
episode = int(ep["episode"])
try:
curEp = self.getEpisode(season, episode)
except exceptions.EpisodeDeletedException:
logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG)
continue
# if the path doesn't exist or if it's not in our show dir
if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith(os.path.normpath(self.location)):
with curEp.lock:
# if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
if curEp.location and curEp.status in Quality.DOWNLOADED:
logger.log(str(self.tvdbid) + ": Location for " + str(season) + "x" + str(episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG)
curEp.status = IGNORED
curEp.location = ''
curEp.hasnfo = False
curEp.hastbn = False
curEp.saveToDB()
def fixEpisodeNames(self):
if not os.path.isdir(self._location):
logger.log(str(self.tvdbid) + ": Show dir doesn't exist, can't rename episodes")
return
# load episodes from my folder
self.loadEpisodesFromDir()
logger.log(str(self.tvdbid) + ": Loading all episodes with a location from the database")
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.tvdbid])
# build list of locations
fileLocations = {}
for epResult in sqlResults:
goodLoc = os.path.normpath(epResult["location"])
goodSeason = int(epResult["season"])
goodEpisode = int(epResult["episode"])
if fileLocations.has_key(goodLoc):
fileLocations[goodLoc].append((goodSeason, goodEpisode))
else:
fileLocations[goodLoc] = [(goodSeason, goodEpisode)]
logger.log(u"File results: " + str(fileLocations), logger.DEBUG)
for curLocation in fileLocations:
epList = fileLocations[curLocation]
# get the root episode and add all related episodes to it
rootEp = None
for myEp in epList:
curEp = self.getEpisode(myEp[0], myEp[1])
if rootEp == None:
rootEp = curEp
rootEp.relatedEps = []
else:
rootEp.relatedEps.append(curEp)
goodName = rootEp.prettyName()
actualName = os.path.splitext(os.path.basename(curLocation))
if goodName == actualName[0]:
logger.log(str(self.tvdbid) + ": File " + rootEp.location + " is already named correctly, skipping", logger.DEBUG)
continue
with rootEp.lock:
result = helpers.rename_file(rootEp.location, rootEp.prettyName())
if result != False:
rootEp.location = result
for relEp in rootEp.relatedEps:
relEp.location = result
fileList = postProcessor.PostProcessor(curLocation)._list_associated_files(curLocation)
logger.log(u"Files associated to "+curLocation+": "+str(fileList), logger.DEBUG)
for file in fileList:
result = helpers.rename_file(file, rootEp.prettyName())
if result == False:
logger.log(str(self.tvdbid) + ": Unable to rename file "+file, logger.ERROR)
for curEp in [rootEp]+rootEp.relatedEps:
curEp.checkForMetaFiles()
with rootEp.lock:
rootEp.saveToDB()
for relEp in rootEp.relatedEps:
relEp.saveToDB()
def saveToDB(self):
logger.log(str(self.tvdbid) + ": Saving show info to database", logger.DEBUG)
myDB = db.DBConnection()
controlValueDict = {"tvdb_id": self.tvdbid}
newValueDict = {"show_name": self.name,
"tvr_id": self.tvrid,
"location": self._location,
"network": self.network,
"genre": self.genre,
"runtime": self.runtime,
"quality": self.quality,
"airs": self.airs,
"status": self.status,
"seasonfolders": self.seasonfolders,
"paused": self.paused,
"air_by_date": self.air_by_date,
"startyear": self.startyear,
"tvr_name": self.tvrname,
"lang": self.lang
}
myDB.upsert("tv_shows", newValueDict, controlValueDict)
def __str__(self):
toReturn = ""
toReturn += "name: " + self.name + "\n"
toReturn += "location: " + self._location + "\n"
toReturn += "tvdbid: " + str(self.tvdbid) + "\n"
if self.network != None:
toReturn += "network: " + self.network + "\n"
if self.airs != None:
toReturn += "airs: " + self.airs + "\n"
if self.status != None:
toReturn += "status: " + self.status + "\n"
toReturn += "startyear: " + str(self.startyear) + "\n"
toReturn += "genre: " + self.genre + "\n"
toReturn += "runtime: " + str(self.runtime) + "\n"
toReturn += "quality: " + str(self.quality) + "\n"
return toReturn
def wantEpisode(self, season, episode, quality, manualSearch=False):
logger.log(u"Checking if we want episode "+str(season)+"x"+str(episode)+" at quality "+Quality.qualityStrings[quality], logger.DEBUG)
# if the quality isn't one we want under any circumstances then just say no
anyQualities, bestQualities = Quality.splitQuality(self.quality)
logger.log(u"any,best = "+str(anyQualities)+" "+str(bestQualities)+" and we are "+str(quality), logger.DEBUG)
if quality not in anyQualities + bestQualities:
logger.log(u"I know for sure I don't want this episode, saying no", logger.DEBUG)
return False
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.tvdbid, season, episode])
if not sqlResults or not len(sqlResults):
logger.log(u"Unable to find the episode", logger.DEBUG)
return False
epStatus = int(sqlResults[0]["status"])
logger.log(u"current episode status: "+str(epStatus), logger.DEBUG)
# if we know we don't want it then just say no
if epStatus in (SKIPPED, IGNORED, ARCHIVED) and not manualSearch:
logger.log(u"Ep is skipped, not bothering", logger.DEBUG)
return False
# if it's one of these then we want it as long as it's in our allowed initial qualities
if quality in anyQualities + bestQualities:
if epStatus in (WANTED, UNAIRED, SKIPPED):
logger.log(u"Ep is wanted/unaired/skipped, definitely get it", logger.DEBUG)
return True
elif manualSearch:
logger.log(u"Usually I would ignore this ep but because you forced the search I'm overriding the default and allowing the quality", logger.DEBUG)
return True
else:
logger.log(u"This quality looks like something we might want but I don't know for sure yet", logger.DEBUG)
curStatus, curQuality = Quality.splitCompositeStatus(epStatus)
# if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have
if curStatus in Quality.SNATCHED + Quality.DOWNLOADED and quality in bestQualities and quality > curQuality:
logger.log(u"We already have this ep but the new one is better quality, saying yes", logger.DEBUG)
return True
logger.log(u"None of the conditions were met so I'm just saying no", logger.DEBUG)
return False
def getOverview(self, epStatus):
if epStatus == WANTED:
return Overview.WANTED
elif epStatus in (UNAIRED, UNKNOWN):
return Overview.UNAIRED
elif epStatus in (SKIPPED, IGNORED):
return Overview.SKIPPED
elif epStatus == ARCHIVED:
return Overview.GOOD
elif epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER:
anyQualities, bestQualities = Quality.splitQuality(self.quality) #@UnusedVariable
if bestQualities:
maxBestQuality = max(bestQualities)
else:
maxBestQuality = None
epStatus, curQuality = Quality.splitCompositeStatus(epStatus)
# if they don't want re-downloads then we call it good if they have anything
if maxBestQuality == None:
return Overview.GOOD
# if they have one but it's not the best they want then mark it as qual
elif curQuality < maxBestQuality:
return Overview.QUAL
# if it's >= maxBestQuality then it's good
else:
return Overview.GOOD
def dirty_setter(attr_name):
def wrapper(self, val):
if getattr(self, attr_name) != val:
setattr(self, attr_name, val)
self.dirty = True
return wrapper
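# dirty_setter() builds property setters that flag the object as dirty
# whenever a field actually changes; TVEpisode.saveToDB() checks this flag
# so that unchanged records are not rewritten to the database.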
class TVEpisode(object):
def __init__(self, show, season, episode, file=""):
self._name = ""
self._season = season
self._episode = episode
self._description = ""
self._airdate = datetime.date.fromordinal(1)
self._hasnfo = False
self._hastbn = False
self._status = UNKNOWN
self._tvdbid = 0
self._language = "en"
# setting any of the above sets the dirty flag
self.dirty = True
self.show = show
self._location = file
self.lock = threading.Lock()
self.specifyEpisode(self.season, self.episode)
self.relatedEps = []
self.checkForMetaFiles()
name = property(lambda self: self._name, dirty_setter("_name"))
season = property(lambda self: self._season, dirty_setter("_season"))
episode = property(lambda self: self._episode, dirty_setter("_episode"))
description = property(lambda self: self._description, dirty_setter("_description"))
airdate = property(lambda self: self._airdate, dirty_setter("_airdate"))
hasnfo = property(lambda self: self._hasnfo, dirty_setter("_hasnfo"))
hastbn = property(lambda self: self._hastbn, dirty_setter("_hastbn"))
status = property(lambda self: self._status, dirty_setter("_status"))
tvdbid = property(lambda self: self._tvdbid, dirty_setter("_tvdbid"))
location = property(lambda self: self._location, dirty_setter("_location"))
language = property(lambda self: self._language, dirty_setter("_language"))
def checkForMetaFiles(self):
oldhasnfo = self.hasnfo
oldhastbn = self.hastbn
cur_nfo = False
cur_tbn = False
# check for nfo and tbn
if ek.ek(os.path.isfile, self.location):
for cur_provider in sickbeard.metadata_provider_dict.values():
if cur_provider.episode_metadata:
new_result = cur_provider._has_episode_metadata(self)
else:
new_result = False
cur_nfo = new_result or cur_nfo
if cur_provider.episode_thumbnails:
new_result = cur_provider._has_episode_thumb(self)
else:
new_result = False
cur_tbn = new_result or cur_tbn
self.hasnfo = cur_nfo
self.hastbn = cur_tbn
# if either setting has changed return true, if not return false
return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn
def specifyEpisode(self, season, episode):
sqlResult = self.loadFromDB(season, episode)
# only load from NFO if we didn't load from DB
if ek.ek(os.path.isfile, self.location) and self.name == "":
try:
self.loadFromNFO(self.location)
except exceptions.NoNFOException:
logger.log(str(self.show.tvdbid) + ": There was an error loading the NFO for episode " + str(season) + "x" + str(episode), logger.ERROR)
pass
# if we tried loading it from NFO and didn't find the NFO, use TVDB
if self.hasnfo == False:
try:
result = self.loadFromTVDB(season, episode)
except exceptions.EpisodeDeletedException:
result = False
# if we failed TVDB, NFO *and* SQL then fail
if result == False and not sqlResult:
raise exceptions.EpisodeNotFoundException("Couldn't find episode " + str(season) + "x" + str(episode))
# don't update if not needed
if self.dirty:
self.saveToDB()
def loadFromDB(self, season, episode):
logger.log(str(self.show.tvdbid) + ": Loading episode details from DB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [self.show.tvdbid, season, episode])
if len(sqlResults) > 1:
raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.")
elif len(sqlResults) == 0:
logger.log(str(self.show.tvdbid) + ": Episode " + str(self.season) + "x" + str(self.episode) + " not found in the database", logger.DEBUG)
return False
else:
#NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
if sqlResults[0]["name"] != None:
self.name = sqlResults[0]["name"]
self.season = season
self.episode = episode
self.description = sqlResults[0]["description"]
if self.description == None:
self.description = ""
self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"]))
#logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
self.status = int(sqlResults[0]["status"])
# don't overwrite my location
if sqlResults[0]["location"] != "" and sqlResults[0]["location"] != None:
self.location = os.path.normpath(sqlResults[0]["location"])
self.tvdbid = int(sqlResults[0]["tvdbid"])
self.language = sqlResults[0]["lang"]
self.dirty = False
return True
def loadFromTVDB(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None):
if season == None:
season = self.season
if episode == None:
episode = self.episode
logger.log(str(self.show.tvdbid) + ": Loading episode details from theTVDB for episode " + str(season) + "x" + str(episode), logger.DEBUG)
tvdb_lang = self.show.lang
try:
if cachedSeason is None:
if tvapi is None:
# There's gotta be a better way of doing this but we don't wanna
# change the cache value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if not cache:
ltvdb_api_parms['cache'] = 'recache'
if tvdb_lang:
ltvdb_api_parms['language'] = tvdb_lang
t = tvdb_api.Tvdb(**ltvdb_api_parms)
else:
t = tvapi
myEp = t[self.show.tvdbid][season][episode]
else:
myEp = cachedSeason[episode]
except (tvdb_exceptions.tvdb_error, IOError), e:
logger.log(u"TVDB threw up an error: "+ex(e), logger.DEBUG)
# if the episode is already valid just log it, if not throw it up
if self.name:
logger.log(u"TVDB timed out but we have enough info from other sources, allowing the error", logger.DEBUG)
return
else:
logger.log(u"TVDB timed out, unable to create the episode", logger.ERROR)
return False
except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
logger.log(u"Unable to find the episode on tvdb... has it been removed? Should I delete from db?", logger.DEBUG)
# if I'm no longer on TVDB but I once was then delete myself from the DB
if self.tvdbid != -1:
self.deleteEpisode()
return
if not myEp["firstaired"]:
myEp["firstaired"] = str(datetime.date.fromordinal(1))
if myEp["episodename"] == None or myEp["episodename"] == "":
logger.log(u"This episode ("+self.show.name+" - "+str(season)+"x"+str(episode)+") has no name on TVDB")
# if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
if self.tvdbid != -1:
self.deleteEpisode()
return False
#NAMEIT logger.log(u"BBBBBBBB from " + str(self.season)+"x"+str(self.episode) + " -" +self.name+" to "+myEp["episodename"])
self.name = myEp["episodename"]
self.season = season
self.episode = episode
tmp_description = myEp["overview"]
if tmp_description == None:
self.description = ""
else:
self.description = tmp_description
rawAirdate = [int(x) for x in myEp["firstaired"].split("-")]
try:
self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
except ValueError:
logger.log(u"Malformed air date retrieved from TVDB ("+self.show.name+" - "+str(season)+"x"+str(episode)+")", logger.ERROR)
# if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
if self.tvdbid != -1:
self.deleteEpisode()
return False
#early conversion to int so that episode doesn't get marked dirty
self.tvdbid = int(myEp["id"])
if not ek.ek(os.path.isdir, self.show._location):
logger.log(u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")
return
logger.log(str(self.show.tvdbid) + ": Setting status for " + str(season) + "x" + str(episode) + " based on status " + str(self.status) + " and existence of " + self.location, logger.DEBUG)
if not ek.ek(os.path.isfile, self.location):
# if we don't have the file
if self.airdate >= datetime.date.today() and self.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER:
# and it hasn't aired yet set the status to UNAIRED
logger.log(u"Episode airs in the future, changing status from " + str(self.status) + " to " + str(UNAIRED), logger.DEBUG)
self.status = UNAIRED
# if there's no airdate then set it to skipped (and respect ignored)
elif self.airdate == datetime.date.fromordinal(1):
if self.status == IGNORED:
logger.log(u"Episode has no air date, but it's already marked as ignored", logger.DEBUG)
else:
logger.log(u"Episode has no air date, automatically marking it skipped", logger.DEBUG)
self.status = SKIPPED
# if we don't have the file and the airdate is in the past
else:
if self.status == UNAIRED:
self.status = WANTED
# if we somehow are still UNKNOWN then just skip it
elif self.status == UNKNOWN:
self.status = SKIPPED
else:
logger.log(u"Not touching status because we have no ep file, the airdate is in the past, and the status is "+str(self.status), logger.DEBUG)
# if we have a media file then it's downloaded
elif sickbeard.helpers.isMediaFile(self.location):
# leave propers alone, you have to either post-process them or manually change them back
if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]:
logger.log(u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
self.status = Quality.statusFromName(self.location)
# shouldn't get here probably
else:
logger.log(u"6 Status changes from " + str(self.status) + " to " + str(UNKNOWN), logger.DEBUG)
self.status = UNKNOWN
# hasnfo, hastbn, status?
def loadFromNFO(self, location):
if not os.path.isdir(self.show._location):
logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try loading the episode NFO")
return
logger.log(str(self.show.tvdbid) + ": Loading episode details from the NFO file associated with " + location, logger.DEBUG)
self.location = location
if self.location != "":
if self.status == UNKNOWN:
if sickbeard.helpers.isMediaFile(self.location):
logger.log(u"7 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)), logger.DEBUG)
self.status = Quality.statusFromName(self.location)
nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo")
logger.log(str(self.show.tvdbid) + ": Using NFO name " + nfoFile, logger.DEBUG)
if ek.ek(os.path.isfile, nfoFile):
try:
showXML = etree.ElementTree(file = nfoFile)
except (SyntaxError, ValueError), e:
logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e), logger.ERROR) #TODO: figure out what's wrong and fix it
try:
ek.ek(os.rename, nfoFile, nfoFile + ".old")
except Exception, e:
logger.log(u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e), logger.ERROR)
raise exceptions.NoNFOException("Error in NFO format")
for epDetails in showXML.getiterator('episodedetails'):
if epDetails.findtext('season') == None or int(epDetails.findtext('season')) != self.season or \
epDetails.findtext('episode') == None or int(epDetails.findtext('episode')) != self.episode:
logger.log(str(self.show.tvdbid) + ": NFO has an <episodedetails> block for a different episode - wanted " + str(self.season) + "x" + str(self.episode) + " but got " + str(epDetails.findtext('season')) + "x" + str(epDetails.findtext('episode')), logger.DEBUG)
continue
if epDetails.findtext('title') == None or epDetails.findtext('aired') == None:
raise exceptions.NoNFOException("Error in NFO format (missing episode title or airdate)")
self.name = epDetails.findtext('title')
self.episode = int(epDetails.findtext('episode'))
self.season = int(epDetails.findtext('season'))
self.description = epDetails.findtext('plot')
if self.description == None:
self.description = ""
if epDetails.findtext('aired'):
rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")]
self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
else:
self.airdate = datetime.date.fromordinal(1)
self.hasnfo = True
else:
self.hasnfo = False
if ek.ek(os.path.isfile, sickbeard.helpers.replaceExtension(nfoFile, "tbn")):
self.hastbn = True
else:
self.hastbn = False
def __str__ (self):
toReturn = ""
toReturn += str(self.show.name) + " - " + str(self.season) + "x" + str(self.episode) + " - " + str(self.name) + "\n"
toReturn += "location: " + str(self.location) + "\n"
toReturn += "description: " + str(self.description) + "\n"
toReturn += "airdate: " + str(self.airdate.toordinal()) + " (" + str(self.airdate) + ")\n"
toReturn += "hasnfo: " + str(self.hasnfo) + "\n"
toReturn += "hastbn: " + str(self.hastbn) + "\n"
toReturn += "status: " + str(self.status) + "\n"
toReturn += "language: " + str(self.language) + "\n"
return toReturn
def createMetaFiles(self, force=False):
if not os.path.isdir(self.show._location):
logger.log(str(self.show.tvdbid) + ": The show dir is missing, not bothering to try to create metadata")
return
self.createNFO(force)
self.createThumbnail(force)
if self.checkForMetaFiles():
self.saveToDB()
def createNFO(self, force=False):
result = False
for cur_provider in sickbeard.metadata_provider_dict.values():
result = cur_provider.create_episode_metadata(self) or result
return result
def createThumbnail(self, force=False):
result = False
for cur_provider in sickbeard.metadata_provider_dict.values():
result = cur_provider.create_episode_thumb(self) or result
return result
def deleteEpisode(self):
logger.log(u"Deleting "+self.show.name+" "+str(self.season)+"x"+str(self.episode)+" from the DB", logger.DEBUG)
# remove myself from the show dictionary
if self.show.getEpisode(self.season, self.episode, noCreate=True) == self:
logger.log(u"Removing myself from my show's list", logger.DEBUG)
del self.show.episodes[self.season][self.episode]
# delete myself from the DB
logger.log(u"Deleting myself from the database", logger.DEBUG)
myDB = db.DBConnection()
sql = "DELETE FROM tv_episodes WHERE showid="+str(self.show.tvdbid)+" AND season="+str(self.season)+" AND episode="+str(self.episode)
myDB.action(sql)
raise exceptions.EpisodeDeletedException()
def saveToDB(self, forceSave=False):
if not self.dirty and not forceSave:
logger.log(str(self.show.tvdbid) + ": Not saving episode to db - record is not dirty", logger.DEBUG)
return
logger.log(str(self.show.tvdbid) + ": Saving episode details to database", logger.DEBUG)
logger.log(u"STATUS IS " + str(self.status), logger.DEBUG)
myDB = db.DBConnection()
newValueDict = {"tvdbid": self.tvdbid,
"name": self.name,
"description": self.description,
"airdate": self.airdate.toordinal(),
"hasnfo": self.hasnfo,
"hastbn": self.hastbn,
"status": self.status,
"location": self.location,
"lang": self.language}
controlValueDict = {"showid": self.show.tvdbid,
"season": self.season,
"episode": self.episode}
# use a custom update/insert method to get the data into the DB
myDB.upsert("tv_episodes", newValueDict, controlValueDict)
def fullPath (self):
if self.location == None or self.location == "":
return None
else:
return os.path.join(self.show.location, self.location)
def getOverview(self):
return self.show.getOverview(self.status)
def prettyName (self, naming_show_name=None, naming_ep_type=None, naming_multi_ep_type=None,
naming_ep_name=None, naming_sep_type=None, naming_use_periods=None, naming_quality=None):
regex = "(.*) \(\d\)"
goodEpString = ''
self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode)
if len(self.relatedEps) == 0:
goodName = self.name
elif len(self.relatedEps) > 1:
goodName = ''
else:
singleName = True
curGoodName = None
for curName in [self.name]+[x.name for x in self.relatedEps]:
match = re.match(regex, curName)
if not match:
singleName = False
break
if curGoodName == None:
curGoodName = match.group(1)
elif curGoodName != match.group(1):
singleName = False
break
if singleName:
goodName = curGoodName
else:
goodName = self.name
for relEp in self.relatedEps:
goodName += " & " + relEp.name
if naming_show_name == None:
naming_show_name = sickbeard.NAMING_SHOW_NAME
if naming_ep_name == None:
naming_ep_name = sickbeard.NAMING_EP_NAME
if naming_ep_type == None:
naming_ep_type = sickbeard.NAMING_EP_TYPE
if naming_multi_ep_type == None:
naming_multi_ep_type = sickbeard.NAMING_MULTI_EP_TYPE
if naming_sep_type == None:
naming_sep_type = sickbeard.NAMING_SEP_TYPE
if naming_use_periods == None:
naming_use_periods = sickbeard.NAMING_USE_PERIODS
if naming_quality == None:
naming_quality = sickbeard.NAMING_QUALITY
if self.show.air_by_date and sickbeard.NAMING_DATES:
try:
goodEpString = self.airdate.strftime("%Y.%m.%d")
except ValueError:
pass
# if we didn't set it to the air-by-date value use the season/ep
if not goodEpString:
goodEpString = config.naming_ep_type[naming_ep_type] % {'seasonnumber': self.season, 'episodenumber': self.episode}
for relEp in self.relatedEps:
goodEpString += config.naming_multi_ep_type[naming_multi_ep_type][naming_ep_type] % {'seasonnumber': relEp.season, 'episodenumber': relEp.episode}
if goodName != '':
goodName = config.naming_sep_type[naming_sep_type] + goodName
finalName = ""
if naming_show_name:
finalName += self.show.name + config.naming_sep_type[naming_sep_type]
finalName += goodEpString
if naming_ep_name:
finalName += goodName
if naming_quality:
epStatus, epQual = Quality.splitCompositeStatus(self.status) #@UnusedVariable
if epQual != Quality.NONE:
finalName += config.naming_sep_type[naming_sep_type] + Quality.qualityStrings[epQual]
if naming_use_periods:
finalName = re.sub("\s+", ".", finalName)
return finalName
| gpl-3.0 | 129,994,857,721,770,370 | 39.379215 | 283 | 0.566795 | false |
hufsm/tu_gen2_libsigrokdecode | decoders/usb_power_delivery/pd.py | 1 | 20599 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2015 Google, Inc
## Copyright (C) 2018 davidanger <[email protected]>
## Copyright (C) 2018 Peter Hazenberg <[email protected]>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
import sigrokdecode as srd
import struct
import zlib # for crc32
# BMC encoding with a 600kHz datarate
UI_US = 1000000/600000.0
# Threshold to discriminate half-1 from 0 in Binary Mark Conding
THRESHOLD_US = (UI_US + 2 * UI_US) / 2
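# In this decoder's units one UI is ~1.67 us: a '1' toggles mid-bit (two
# edge-to-edge gaps of one UI each) while a '0' holds its level for two UI,
# so any gap above 1.5 UI (~2.5 us) is decoded as a '0'.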
# Control Message type
CTRL_TYPES = {
0: 'reserved',
1: 'GOOD CRC',
2: 'GOTO MIN',
3: 'ACCEPT',
4: 'REJECT',
5: 'PING',
6: 'PS RDY',
7: 'GET SOURCE CAP',
8: 'GET SINK CAP',
9: 'DR SWAP',
10: 'PR SWAP',
11: 'VCONN SWAP',
12: 'WAIT',
13: 'SOFT RESET',
14: 'reserved',
15: 'reserved',
16: 'Not Supported',
17: 'Get_Source_Cap_Extended',
18: 'Get_Status',
19: 'FR_Swap',
20: 'Get_PPS_Status',
21: 'Get_Country_Codes',
}
# Data message type
DATA_TYPES = {
1: 'SOURCE CAP',
2: 'REQUEST',
3: 'BIST',
4: 'SINK CAP',
5: 'Battery_Status',
6: 'Alert',
7: 'Get_Country_Info',
15: 'VDM'
}
# 4b5b encoding of the symbols
DEC4B5B = [
0x10, # Error 00000
0x10, # Error 00001
0x10, # Error 00010
0x10, # Error 00011
0x10, # Error 00100
0x10, # Error 00101
0x13, # Sync-3 00110
0x14, # RST-1 00111
0x10, # Error 01000
0x01, # 1 = 0001 01001
0x04, # 4 = 0100 01010
0x05, # 5 = 0101 01011
0x10, # Error 01100
0x16, # EOP 01101
0x06, # 6 = 0110 01110
0x07, # 7 = 0111 01111
0x10, # Error 10000
0x12, # Sync-2 10001
0x08, # 8 = 1000 10010
0x09, # 9 = 1001 10011
0x02, # 2 = 0010 10100
0x03, # 3 = 0011 10101
0x0A, # A = 1010 10110
0x0B, # B = 1011 10111
0x11, # Sync-1 11000
0x15, # RST-2 11001
0x0C, # C = 1100 11010
0x0D, # D = 1101 11011
0x0E, # E = 1110 11100
0x0F, # F = 1111 11101
0x00, # 0 = 0000 11110
0x10, # Error 11111
]
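# The table index is the received 5-bit code, assembled LSB-first in
# get_sym() below: entries 0x00-0x0F are data nibbles, 0x10 marks an
# invalid code, and 0x11-0x16 are the K-codes (SYNC-1/2/3, RST-1/2, EOP).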
SYM_ERR = 0x10
SYNC1 = 0x11
SYNC2 = 0x12
SYNC3 = 0x13
RST1 = 0x14
RST2 = 0x15
EOP = 0x16
SYNC_CODES = [SYNC1, SYNC2, SYNC3]
HRST_CODES = [RST1, RST1, RST1, RST2]
SOP_SEQUENCES = [
(SYNC1, SYNC1, SYNC1, SYNC2),
(SYNC1, SYNC1, SYNC3, SYNC3),
(SYNC1, SYNC3, SYNC1, SYNC3),
(SYNC1, RST2, RST2, SYNC3),
(SYNC1, RST2, SYNC3, SYNC2),
(RST1, SYNC1, RST1, SYNC3),
(RST1, RST1, RST1, RST2),
]
START_OF_PACKETS = {
SOP_SEQUENCES[0]: 'SOP',
SOP_SEQUENCES[1]: "SOP'",
SOP_SEQUENCES[2]: 'SOP"',
SOP_SEQUENCES[3]: "SOP' Debug",
SOP_SEQUENCES[4]: 'SOP" Debug',
SOP_SEQUENCES[5]: 'Cable Reset',
SOP_SEQUENCES[6]: 'Hard Reset',
}
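# The four K-codes following the preamble identify the packet type; a
# sequence still matches if any 3 of its 4 symbols are correct (see
# find_corrupted_sop() below).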
SYM_NAME = [
['0x0', '0'],
['0x1', '1'],
['0x2', '2'],
['0x3', '3'],
['0x4', '4'],
['0x5', '5'],
['0x6', '6'],
['0x7', '7'],
['0x8', '8'],
['0x9', '9'],
['0xA', 'A'],
['0xB', 'B'],
['0xC', 'C'],
['0xD', 'D'],
['0xE', 'E'],
['0xF', 'F'],
['ERROR', 'X'],
['SYNC-1', 'S1'],
['SYNC-2', 'S2'],
['SYNC-3', 'S3'],
['RST-1', 'R1'],
['RST-2', 'R2'],
['EOP', '#'],
]
RDO_FLAGS = {
(1 << 23): 'unchunked',
(1 << 24): 'no_suspend',
(1 << 25): 'comm_cap',
(1 << 26): 'cap_mismatch',
(1 << 27): 'give_back'
}
BIST_MODES = {
0: 'Receiver',
1: 'Transmit',
2: 'Counters',
3: 'Carrier 0',
4: 'Carrier 1',
5: 'Carrier 2',
6: 'Carrier 3',
7: 'Eye',
}
VDM_CMDS = {
1: 'Disc Ident',
2: 'Disc SVID',
3: 'Disc Mode',
4: 'Enter Mode',
5: 'Exit Mode',
6: 'Attention',
# 16..31: SVID Specific Commands
# DisplayPort Commands
16: 'DP Status',
17: 'DP Configure',
}
VDM_ACK = ['REQ', 'ACK', 'NAK', 'BSY']
class SamplerateError(Exception):
pass
class Decoder(srd.Decoder):
api_version = 3
id = 'usb_power_delivery'
name = 'USB PD'
longname = 'USB Power Delivery'
desc = 'USB Power Delivery protocol.'
license = 'gplv2+'
inputs = ['logic']
outputs = ['usb_pd']
channels = (
{'id': 'cc1', 'name': 'CC1', 'desc': 'Configuration Channel 1'},
)
optional_channels = (
{'id': 'cc2', 'name': 'CC2', 'desc': 'Configuration Channel 2'},
)
options = (
{'id': 'fulltext', 'desc': 'Full text decoding of packets',
'default': 'no', 'values': ('yes', 'no')},
)
annotations = (
('type', 'Packet Type'),
('preamble', 'Preamble'),
('sop', 'Start of Packet'),
('header', 'Header'),
('data', 'Data'),
('crc', 'Checksum'),
('eop', 'End Of Packet'),
('sym', '4b5b symbols'),
('warnings', 'Warnings'),
('src', 'Source Message'),
('snk', 'Sink Message'),
('payload', 'Payload'),
('text', 'Plain text'),
)
annotation_rows = (
('4b5b', 'Symbols', (7,)),
('phase', 'Parts', (1, 2, 3, 4, 5, 6)),
('payload', 'Payload', (11,)),
('type', 'Type', (0, 9, 10)),
('warnings', 'Warnings', (8,)),
('text', 'Full text', (12,)),
)
binary = (
('raw-data', 'RAW binary data'),
)
stored_pdos = {}
def get_request(self, rdo):
pos = (rdo >> 28) & 7
op_ma = ((rdo >> 10) & 0x3ff) * 0.01
max_ma = (rdo & 0x3ff) * 0.01
mark = self.cap_mark[pos]
if mark == 3:
op_v = ((rdo >> 9) & 0x7ff) * 0.02
op_a = (rdo & 0x3f) * 0.05
t_settings = '%gV %gA' % (op_v, op_a)
elif mark == 2:
op_w = ((rdo >> 10) & 0x3ff) * 0.25
mp_w = (rdo & 0x3ff) * 0.25
t_settings = '%gW (operating)' % op_w
else:
op_a = ((rdo >> 10) & 0x3ff) * 0.01
max_a = (rdo & 0x3ff) * 0.01
t_settings = '%gA (operating) / %gA (max)' % (op_a, max_a)
t_flags = ''
for f in sorted(RDO_FLAGS.keys(), reverse = True):
if rdo & f:
t_flags += ' [' + RDO_FLAGS[f] + ']'
if pos in self.stored_pdos.keys():
t_pdo = '#%d: %s' % (pos, self.stored_pdos[pos])
else:
t_pdo = '#%d' % (pos)
return '(PDO %s) %s%s' % (t_pdo, t_settings, t_flags)
def get_source_sink_cap(self, pdo, idx, source):
t1 = (pdo >> 30) & 3
self.cap_mark[idx] = t1
flags = {}
if t1 == 0:
t_name = 'Fixed'
if source:
flags = {
(1 << 29): 'dual_role_power',
(1 << 28): 'suspend',
(1 << 27): 'unconstrained',
(1 << 26): 'comm_cap',
(1 << 25): 'dual_role_data',
(1 << 24): 'unchunked',
}
else: # Sink
flags = {
(1 << 29): 'dual_role_power',
(1 << 28): 'high_capability',
(1 << 27): 'unconstrained',
(1 << 26): 'comm_cap',
(1 << 25): 'dual_role_data',
(0b01 << 23): 'fr_swap default power',
(0b10 << 23): 'fr_swap 1.5 A',
(0b11 << 23): 'fr_swap 3.0 A',
}
mv = ((pdo >> 10) & 0x3ff) * 0.05
ma = ((pdo >> 0) & 0x3ff) * 0.01
p = '%gV %gA (%gW)' % (mv, ma, mv*ma)
self.stored_pdos[idx] = '%s %gV' % (t_name, mv)
elif t1 == 1:
t_name = 'Battery'
flags = {} # No flags defined for Battery PDO in PD 3.0 spec
minv = ((pdo >> 10) & 0x3ff) * 0.05
maxv = ((pdo >> 20) & 0x3ff) * 0.05
mw = ((pdo >> 0) & 0x3ff) * 0.25
p = '%g/%gV %gW' % (minv, maxv, mw)
self.stored_pdos[idx] = '%s %g/%gV' % (t_name, minv, maxv)
elif t1 == 2:
t_name = 'Variable'
flags = {} # No flags defined for Variable PDO in PD 3.0 spec
minv = ((pdo >> 10) & 0x3ff) * 0.05
maxv = ((pdo >> 20) & 0x3ff) * 0.05
ma = ((pdo >> 0) & 0x3ff) * 0.01
p = '%g/%gV %gA' % (minv, maxv, ma)
self.stored_pdos[idx] = '%s %g/%gV' % (t_name, minv, maxv)
elif t1 == 3:
t2 = (pdo >> 28) & 3
if t2 == 0:
t_name = 'Programmable|PPS'
flags = {
(1 << 29): 'power_limited',
}
minv = ((pdo >> 8) & 0xff) * 0.1
maxv = ((pdo >> 17) & 0xff) * 0.1
ma = ((pdo >> 0) & 0xff) * 0.05
p = '%g/%gV %gA' % (minv, maxv, ma)
if (pdo >> 27) & 0x1:
p += ' [limited]'
self.stored_pdos[idx] = '%s %g/%gV' % (t_name, minv, maxv)
else:
t_name = 'Reserved APDO: '+bin(t2)
p = '[raw: %s]' % (bin(pdo))
self.stored_pdos[idx] = '%s %s' % (t_name, p)
t_flags = ''
for f in sorted(flags.keys(), reverse = True):
if pdo & f:
t_flags += ' [' + flags[f] + ']'
return '[%s] %s%s' % (t_name, p, t_flags)
def get_vdm(self, idx, data):
if idx == 0: # VDM header
vid = data >> 16
struct = data & (1 << 15)
txt = 'VDM'
if struct: # Structured VDM
cmd = data & 0x1f
src = data & (1 << 5)
ack = (data >> 6) & 3
pos = (data >> 8) & 7
ver = (data >> 13) & 3
txt = VDM_ACK[ack] + ' '
txt += VDM_CMDS[cmd] if cmd in VDM_CMDS else 'cmd?'
txt += ' pos %d' % (pos) if pos else ' '
else: # Unstructured VDM
txt = 'unstruct [%04x]' % (data & 0x7fff)
txt += ' SVID:%04x' % (vid)
else: # VDM payload
txt = 'VDO:%08x' % (data)
return txt
def get_bist(self, idx, data):
mode = data >> 28
counter = data & 0xffff
mode_name = BIST_MODES[mode] if mode in BIST_MODES else 'INVALID'
if mode == 2:
mode_name = 'Counter[= %d]' % (counter)
# TODO: Check all 0 bits are 0 / emit warnings.
return 'mode %s' % (mode_name) if idx == 0 else 'invalid BRO'
def putpayload(self, s0, s1, idx):
t = self.head_type()
txt = '['+str(idx+1)+'] '
if t == 2:
txt += self.get_request(self.data[idx])
elif t == 1 or t == 4:
txt += self.get_source_sink_cap(self.data[idx], idx+1, t==1)
elif t == 15:
txt += self.get_vdm(idx, self.data[idx])
elif t == 3:
txt += self.get_bist(idx, self.data[idx])
self.putx(s0, s1, [11, [txt, txt]])
self.text += ' - ' + txt
def puthead(self):
ann_type = 9 if self.head_power_role() else 10
role = 'SRC' if self.head_power_role() else 'SNK'
if self.head_data_role() != self.head_power_role():
role += '/DFP' if self.head_data_role() else '/UFP'
t = self.head_type()
if self.head_count() == 0:
shortm = CTRL_TYPES[t]
else:
shortm = DATA_TYPES[t] if t in DATA_TYPES else 'DAT???'
longm = '(r{:d}) {:s}[{:d}]: {:s}'.format(self.head_rev(), role, self.head_id(), shortm)
self.putx(0, -1, [ann_type, [longm, shortm]])
self.text += longm
def head_id(self):
return (self.head >> 9) & 7
def head_power_role(self):
return (self.head >> 8) & 1
def head_data_role(self):
return (self.head >> 5) & 1
def head_rev(self):
return ((self.head >> 6) & 3) + 1
def head_type(self):
return self.head & 0xF
def head_count(self):
return (self.head >> 12) & 7
def putx(self, s0, s1, data):
self.put(self.edges[s0], self.edges[s1], self.out_ann, data)
def putwarn(self, longm, shortm):
self.putx(0, -1, [8, [longm, shortm]])
def compute_crc32(self):
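# USB PD protects each frame with the standard CRC-32 (the same
# polynomial zlib uses), computed over the little-endian 16-bit header
# followed by the 32-bit data objects.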
bdata = struct.pack('<H'+'I'*len(self.data), self.head & 0xffff,
*tuple([d & 0xffffffff for d in self.data]))
return zlib.crc32(bdata)
def rec_sym(self, i, sym):
self.putx(i, i+5, [7, SYM_NAME[sym]])
def get_sym(self, i, rec=True):
v = (self.bits[i] | (self.bits[i+1] << 1) | (self.bits[i+2] << 2) |
(self.bits[i+3] << 3) | (self.bits[i+4] << 4))
sym = DEC4B5B[v]
if rec:
self.rec_sym(i, sym)
return sym
def get_short(self):
i = self.idx
# Check it's not a truncated packet.
if len(self.bits) - i <= 20:
self.putwarn('Truncated', '!')
return 0x0BAD
k = [self.get_sym(i), self.get_sym(i+5),
self.get_sym(i+10), self.get_sym(i+15)]
# TODO: Check bad symbols.
val = k[0] | (k[1] << 4) | (k[2] << 8) | (k[3] << 12)
self.idx += 20
return val
def get_word(self):
lo = self.get_short()
hi = self.get_short()
return lo | (hi << 16)
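# The low 16 bits arrive first, i.e. 32-bit objects are little-endian
# on the wire.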
def find_corrupted_sop(self, k):
# Start-of-packet sequences are valid even if only 3 of their 4 symbols
# are correct.
for seq in SOP_SEQUENCES:
if [k[i] == seq[i] for i in range(len(k))].count(True) >= 3:
return START_OF_PACKETS[seq]
return None
def scan_eop(self):
for i in range(len(self.bits) - 19):
k = (self.get_sym(i, rec=False), self.get_sym(i+5, rec=False),
self.get_sym(i+10, rec=False), self.get_sym(i+15, rec=False))
sym = START_OF_PACKETS.get(k, None)
if not sym:
sym = self.find_corrupted_sop(k)
# We have an interesting symbol sequence.
if sym:
# Annotate the preamble.
self.putx(0, i, [1, ['Preamble', '...']])
# Annotate each symbol.
self.rec_sym(i, k[0])
self.rec_sym(i+5, k[1])
self.rec_sym(i+10, k[2])
self.rec_sym(i+15, k[3])
if sym == 'Hard Reset':
self.text += 'HRST'
return -1 # Hard reset
elif sym == 'Cable Reset':
self.text += 'CRST'
return -1 # Cable reset
else:
self.putx(i, i+20, [2, [sym, 'S']])
return i+20
self.putx(0, len(self.bits), [1, ['Junk???', 'XXX']])
self.text += 'Junk???'
self.putwarn('No start of packet found', 'XXX')
return -1 # No Start Of Packet
def __init__(self):
self.reset()
def reset(self):
self.samplerate = None
self.idx = 0
self.packet_seq = 0
self.previous = 0
self.startsample = None
self.bits = []
self.edges = []
self.bad = []
self.half_one = False
self.start_one = 0
self.stored_pdos = {}
self.cap_mark = [0, 0, 0, 0, 0, 0, 0, 0]
def metadata(self, key, value):
if key == srd.SRD_CONF_SAMPLERATE:
self.samplerate = value
# A '0' spans 2 UI; a gap larger than 1.5x that is definitely wrong.
self.maxbit = self.us2samples(3 * UI_US)
# Duration threshold between half 1 and 0.
self.threshold = self.us2samples(THRESHOLD_US)
def start(self):
self.out_ann = self.register(srd.OUTPUT_ANN)
self.out_binary = self.register(srd.OUTPUT_BINARY)
self.out_bitrate = self.register(
srd.OUTPUT_META,
meta=(int, 'Bitrate', 'Bitrate during the packet')
)
def us2samples(self, us):
return int(us * self.samplerate / 1000000)
def decode_packet(self):
self.data = []
self.idx = 0
self.text = ''
if len(self.edges) < 50:
return # Not a real PD packet
self.packet_seq += 1
tstamp = float(self.startsample) / self.samplerate
self.text += '#%-4d (%8.6fms): ' % (self.packet_seq, tstamp*1000)
self.idx = self.scan_eop()
if self.idx < 0:
# Full text trace of the issue.
self.putx(0, self.idx, [12, [self.text, '...']])
return # No real packet: ABORT.
# Packet header
self.head = self.get_short()
self.putx(self.idx-20, self.idx, [3, ['H:%04x' % (self.head), 'HD']])
self.puthead()
# Decode data payload
for i in range(self.head_count()):
self.data.append(self.get_word())
self.putx(self.idx-40, self.idx,
[4, ['[%d]%08x' % (i, self.data[i]), 'D%d' % (i)]])
self.putpayload(self.idx-40, self.idx, i)
# CRC check
self.crc = self.get_word()
ccrc = self.compute_crc32()
if self.crc != ccrc:
self.putwarn('Bad CRC %08x != %08x' % (self.crc, ccrc), 'CRC!')
self.putx(self.idx-40, self.idx, [5, ['CRC:%08x' % (self.crc), 'CRC']])
# End of Packet
if len(self.bits) >= self.idx + 5 and self.get_sym(self.idx) == EOP:
self.putx(self.idx, self.idx + 5, [6, ['EOP', 'E']])
self.idx += 5
else:
self.putwarn('No EOP', 'EOP!')
# Full text trace
if self.options['fulltext'] == 'yes':
self.putx(0, self.idx, [12, [self.text, '...']])
# Meta data for bitrate
ss, es = self.edges[0], self.edges[-1]
bitrate = self.samplerate*len(self.bits) / float(es - ss)
self.put(ss, es, self.out_bitrate, int(bitrate))
# Raw binary data (BMC decoded)
self.put(ss, es, self.out_binary, [0, bytes(self.bits)])
def decode(self):
if not self.samplerate:
raise SamplerateError('Cannot decode without samplerate.')
while True:
pins = self.wait([{0: 'e'}, {1: 'e'}, {'skip': int(self.samplerate/1e3)}])
# First sample of the packet, just record the start date.
if not self.startsample:
self.startsample = self.samplenum
self.previous = self.samplenum
continue
diff = self.samplenum - self.previous
# Large idle: use it as the end of packet.
if diff > self.maxbit:
# The last edge of the packet.
self.edges.append(self.previous)
# Export the packet.
self.decode_packet()
# Reset for next packet.
self.startsample = self.samplenum
self.bits = []
self.edges = []
self.bad = []
self.half_one = False
self.start_one = 0
else: # Add the bit to the packet.
is_zero = diff > self.threshold
if is_zero and not self.half_one:
self.bits.append(0)
self.edges.append(self.previous)
elif not is_zero and self.half_one:
self.bits.append(1)
self.edges.append(self.start_one)
self.half_one = False
elif not is_zero and not self.half_one:
self.half_one = True
self.start_one = self.previous
else: # Invalid BMC sequence
self.bad.append((self.start_one, self.previous))
# TODO: Try to recover.
self.bits.append(0)
self.edges.append(self.previous)
self.half_one = False
self.previous = self.samplenum
| gpl-3.0 | -1,047,846,842,259,668,100 | 31.286834 | 96 | 0.468032 | false |
chromium/chromium | tools/grit/grit/format/rc.py | 7 | 18109 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Support for formatting an RC file for compilation.
'''
from __future__ import print_function
import os
import re
from functools import partial
import six
from grit import util
from grit.node import misc
def Format(root, lang='en', output_dir='.'):
from grit.node import empty, include, message, structure
yield _FormatHeader(root, lang, output_dir)
for item in root.ActiveDescendants():
if isinstance(item, empty.MessagesNode):
# Write one STRINGTABLE per <messages> container.
# This is hacky: it iterates over the children twice.
yield 'STRINGTABLE\nBEGIN\n'
for subitem in item.ActiveDescendants():
if isinstance(subitem, message.MessageNode):
with subitem:
yield FormatMessage(subitem, lang)
yield 'END\n\n'
elif isinstance(item, include.IncludeNode):
with item:
yield FormatInclude(item, lang, output_dir)
elif isinstance(item, structure.StructureNode):
with item:
yield FormatStructure(item, lang, output_dir)
'''
This dictionary defines the language charset pair lookup table, which is used
for replacing the GRIT expand variables for language info in the Product
Version resource. The key is the language ISO code, and the value is the
language and character-set pair, a hexadecimal string formed by
concatenating the language and character-set identifiers. The first 4
digits of the value are the hex value of the LCID; the remaining 4 digits
are the hex value of the character-set id (code page) of the language.
LCID resource: http://msdn.microsoft.com/en-us/library/ms776294.aspx
Codepage resource: http://www.science.co.il/language/locale-codes.asp
We have defined three GRIT expand_variables to be used in the version resource
file to set the language info. Here is an example how they should be used in
the VS_VERSION_INFO section of the resource file to allow GRIT to localize
the language info correctly according to product locale.
VS_VERSION_INFO VERSIONINFO
...
BEGIN
BLOCK "StringFileInfo"
BEGIN
BLOCK "[GRITVERLANGCHARSETHEX]"
BEGIN
...
END
END
BLOCK "VarFileInfo"
BEGIN
VALUE "Translation", [GRITVERLANGID], [GRITVERCHARSETID]
END
END
'''
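# A minimal sketch (not part of this module) of how those three expand
# variables could be derived from the tables below for a given locale;
# the actual substitution happens elsewhere in GRIT:
#
#   pair = _LANGUAGE_CHARSET_PAIR.get(lang, _LANGUAGE_CHARSET_PAIR['en'])
#   expansions = {
#       'GRITVERLANGCHARSETHEX': pair,          # e.g. '040904b0' for 'en'
#       'GRITVERLANGID': '0x' + pair[0:4],      # LCID, e.g. '0x0409'
#       'GRITVERCHARSETID': '0x' + pair[4:8],   # code page, e.g. '0x04b0'
#   }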
_LANGUAGE_CHARSET_PAIR = {
# Language neutral LCID, unicode(1200) code page.
'neutral' : '000004b0',
# LANG_USER_DEFAULT LCID, unicode(1200) code page.
'userdefault' : '040004b0',
'ar' : '040104e8',
'fi' : '040b04e4',
'ko' : '041203b5',
'es' : '0c0a04e4',
'bg' : '040204e3',
# No codepage for filipino, use unicode(1200).
'fil' : '046404e4',
'fr' : '040c04e4',
'lv' : '042604e9',
'sv' : '041d04e4',
'ca' : '040304e4',
'de' : '040704e4',
'lt' : '042704e9',
# Do not use! This is only around for backwards
# compatibility and will be removed - use fil instead
'tl' : '0c0004b0',
'zh-CN' : '080403a8',
'zh-TW' : '040403b6',
'zh-HK' : '0c0403b6',
'el' : '040804e5',
'no' : '001404e4',
'nb' : '041404e4',
'nn' : '081404e4',
'th' : '041e036a',
'he' : '040d04e7',
'iw' : '040d04e7',
'pl' : '041504e2',
'tr' : '041f04e6',
'hr' : '041a04e4',
# No codepage for Hindi, use unicode(1200).
'hi' : '043904b0',
'pt-PT' : '081604e4',
'pt-BR' : '041604e4',
'uk' : '042204e3',
'cs' : '040504e2',
'hu' : '040e04e2',
'ro' : '041804e2',
# No codepage for Urdu, use unicode(1200).
'ur' : '042004b0',
'da' : '040604e4',
'is' : '040f04e4',
'ru' : '041904e3',
'vi' : '042a04ea',
'nl' : '041304e4',
'id' : '042104e4',
'sr' : '081a04e2',
'en-GB' : '0809040e',
'it' : '041004e4',
'sk' : '041b04e2',
'et' : '042504e9',
'ja' : '041103a4',
'sl' : '042404e2',
'en' : '040904b0',
# LCID for Mexico; Windows does not support L.A. LCID.
'es-419' : '080a04e4',
# No codepage for Bengali, use unicode(1200).
'bn' : '044504b0',
'fa' : '042904e8',
# No codepage for Gujarati, use unicode(1200).
'gu' : '044704b0',
# No codepage for Kannada, use unicode(1200).
'kn' : '044b04b0',
# Malay (Malaysia) [ms-MY]
'ms' : '043e04e4',
# No codepage for Malayalam, use unicode(1200).
'ml' : '044c04b0',
# No codepage for Marathi, use unicode(1200).
'mr' : '044e04b0',
# No codepage for Oriya , use unicode(1200).
'or' : '044804b0',
# No codepage for Tamil, use unicode(1200).
'ta' : '044904b0',
# No codepage for Telugu, use unicode(1200).
'te' : '044a04b0',
# No codepage for Amharic, use unicode(1200). >= Vista.
'am' : '045e04b0',
'sw' : '044104e4',
'af' : '043604e4',
'eu' : '042d04e4',
'fr-CA' : '0c0c04e4',
'gl' : '045604e4',
# No codepage for Zulu, use unicode(1200).
'zu' : '043504b0',
# Pseudolocales
'ar-XB' : '040d04e7',
'en-XA' : '040904b0',
}
# Language ID resource: http://msdn.microsoft.com/en-us/library/ms776294.aspx
#
# There is no appropriate sublang for Spanish (Latin America) [es-419], so we
# use Mexico. SUBLANG_DEFAULT would incorrectly map to Spain. Unlike other
# Latin American countries, Mexican Spanish is supported by VERSIONINFO:
# http://msdn.microsoft.com/en-us/library/aa381058.aspx
_LANGUAGE_DIRECTIVE_PAIR = {
'neutral' : 'LANG_NEUTRAL, SUBLANG_NEUTRAL',
'userdefault' : 'LANG_NEUTRAL, SUBLANG_DEFAULT',
'ar' : 'LANG_ARABIC, SUBLANG_DEFAULT',
'fi' : 'LANG_FINNISH, SUBLANG_DEFAULT',
'ko' : 'LANG_KOREAN, SUBLANG_KOREAN',
'es' : 'LANG_SPANISH, SUBLANG_SPANISH_MODERN',
'bg' : 'LANG_BULGARIAN, SUBLANG_DEFAULT',
# LANG_FILIPINO (100) not in VC 7 winnt.h.
'fil' : '100, SUBLANG_DEFAULT',
'fr' : 'LANG_FRENCH, SUBLANG_FRENCH',
'lv' : 'LANG_LATVIAN, SUBLANG_DEFAULT',
'sv' : 'LANG_SWEDISH, SUBLANG_SWEDISH',
'ca' : 'LANG_CATALAN, SUBLANG_DEFAULT',
'de' : 'LANG_GERMAN, SUBLANG_GERMAN',
'lt' : 'LANG_LITHUANIAN, SUBLANG_LITHUANIAN',
# Do not use! See above.
'tl' : 'LANG_NEUTRAL, SUBLANG_DEFAULT',
'zh-CN' : 'LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED',
'zh-TW' : 'LANG_CHINESE, SUBLANG_CHINESE_TRADITIONAL',
'zh-HK' : 'LANG_CHINESE, SUBLANG_CHINESE_HONGKONG',
'el' : 'LANG_GREEK, SUBLANG_DEFAULT',
'no' : 'LANG_NORWEGIAN, SUBLANG_DEFAULT',
'nb' : 'LANG_NORWEGIAN, SUBLANG_NORWEGIAN_BOKMAL',
'nn' : 'LANG_NORWEGIAN, SUBLANG_NORWEGIAN_NYNORSK',
'th' : 'LANG_THAI, SUBLANG_DEFAULT',
'he' : 'LANG_HEBREW, SUBLANG_DEFAULT',
'iw' : 'LANG_HEBREW, SUBLANG_DEFAULT',
'pl' : 'LANG_POLISH, SUBLANG_DEFAULT',
'tr' : 'LANG_TURKISH, SUBLANG_DEFAULT',
'hr' : 'LANG_CROATIAN, SUBLANG_DEFAULT',
'hi' : 'LANG_HINDI, SUBLANG_DEFAULT',
'pt-PT' : 'LANG_PORTUGUESE, SUBLANG_PORTUGUESE',
'pt-BR' : 'LANG_PORTUGUESE, SUBLANG_DEFAULT',
'uk' : 'LANG_UKRAINIAN, SUBLANG_DEFAULT',
'cs' : 'LANG_CZECH, SUBLANG_DEFAULT',
'hu' : 'LANG_HUNGARIAN, SUBLANG_DEFAULT',
'ro' : 'LANG_ROMANIAN, SUBLANG_DEFAULT',
'ur' : 'LANG_URDU, SUBLANG_DEFAULT',
'da' : 'LANG_DANISH, SUBLANG_DEFAULT',
'is' : 'LANG_ICELANDIC, SUBLANG_DEFAULT',
'ru' : 'LANG_RUSSIAN, SUBLANG_DEFAULT',
'vi' : 'LANG_VIETNAMESE, SUBLANG_DEFAULT',
'nl' : 'LANG_DUTCH, SUBLANG_DEFAULT',
'id' : 'LANG_INDONESIAN, SUBLANG_DEFAULT',
'sr' : 'LANG_SERBIAN, SUBLANG_SERBIAN_LATIN',
'en-GB' : 'LANG_ENGLISH, SUBLANG_ENGLISH_UK',
'it' : 'LANG_ITALIAN, SUBLANG_DEFAULT',
'sk' : 'LANG_SLOVAK, SUBLANG_DEFAULT',
'et' : 'LANG_ESTONIAN, SUBLANG_DEFAULT',
'ja' : 'LANG_JAPANESE, SUBLANG_DEFAULT',
'sl' : 'LANG_SLOVENIAN, SUBLANG_DEFAULT',
'en' : 'LANG_ENGLISH, SUBLANG_ENGLISH_US',
# No L.A. sublang exists.
'es-419' : 'LANG_SPANISH, SUBLANG_SPANISH_MEXICAN',
'bn' : 'LANG_BENGALI, SUBLANG_DEFAULT',
'fa' : 'LANG_PERSIAN, SUBLANG_DEFAULT',
'gu' : 'LANG_GUJARATI, SUBLANG_DEFAULT',
'kn' : 'LANG_KANNADA, SUBLANG_DEFAULT',
'ms' : 'LANG_MALAY, SUBLANG_DEFAULT',
'ml' : 'LANG_MALAYALAM, SUBLANG_DEFAULT',
'mr' : 'LANG_MARATHI, SUBLANG_DEFAULT',
'or' : 'LANG_ORIYA, SUBLANG_DEFAULT',
'ta' : 'LANG_TAMIL, SUBLANG_DEFAULT',
'te' : 'LANG_TELUGU, SUBLANG_DEFAULT',
'am' : 'LANG_AMHARIC, SUBLANG_DEFAULT',
'sw' : 'LANG_SWAHILI, SUBLANG_DEFAULT',
'af' : 'LANG_AFRIKAANS, SUBLANG_DEFAULT',
'eu' : 'LANG_BASQUE, SUBLANG_DEFAULT',
'fr-CA' : 'LANG_FRENCH, SUBLANG_FRENCH_CANADIAN',
'gl' : 'LANG_GALICIAN, SUBLANG_DEFAULT',
'zu' : 'LANG_ZULU, SUBLANG_DEFAULT',
'pa' : 'LANG_PUNJABI, SUBLANG_PUNJABI_INDIA',
'sa' : 'LANG_SANSKRIT, SUBLANG_SANSKRIT_INDIA',
'si' : 'LANG_SINHALESE, SUBLANG_SINHALESE_SRI_LANKA',
'ne' : 'LANG_NEPALI, SUBLANG_NEPALI_NEPAL',
'ti' : 'LANG_TIGRIGNA, SUBLANG_TIGRIGNA_ERITREA',
# Pseudolocales
'ar-XB' : 'LANG_HEBREW, SUBLANG_DEFAULT',
'en-XA' : 'LANG_ENGLISH, SUBLANG_ENGLISH_US',
}
# A note on 'no-specific-language' in the following few functions:
# Some build systems may wish to call GRIT to scan for dependencies in
# a language-agnostic way, and can then specify this fake language as
# the output context. It should never be used when output is actually
# being generated.
def GetLangCharsetPair(language):
if language in _LANGUAGE_CHARSET_PAIR:
return _LANGUAGE_CHARSET_PAIR[language]
if language != 'no-specific-language':
print('Warning: GetLangCharsetPair() found undefined language %s' % language)
return ''
def GetLangDirectivePair(language):
if language in _LANGUAGE_DIRECTIVE_PAIR:
return _LANGUAGE_DIRECTIVE_PAIR[language]
# We don't check for 'no-specific-language' here because this
# function should only get called when output is being formatted,
# and at that point we would not want to get
# 'no-specific-language' passed as the language.
print('Warning: GetLangDirectivePair() found undefined language %s' % language)
return 'unknown language: see tools/grit/format/rc.py'
def GetLangIdHex(language):
if language in _LANGUAGE_CHARSET_PAIR:
langcharset = _LANGUAGE_CHARSET_PAIR[language]
lang_id = '0x' + langcharset[0:4]
return lang_id
if language != 'no-specific-language':
print('Warning: GetLangIdHex() found undefined language %s' % language)
return ''
def GetCharsetIdDecimal(language):
if language in _LANGUAGE_CHARSET_PAIR:
langcharset = _LANGUAGE_CHARSET_PAIR[language]
charset_decimal = int(langcharset[4:], 16)
return str(charset_decimal)
if language != 'no-specific-language':
print('Warning: GetCharsetIdDecimal() found undefined language %s' % language)
return ''
def GetUnifiedLangCode(language) :
r = re.compile('([a-z]{1,2})_([a-z]{1,2})')
if r.match(language) :
underscore = language.find('_')
return language[0:underscore] + '-' + language[underscore + 1:].upper()
return language
def RcSubstitutions(substituter, lang):
'''Add language-based substitutions for Rc files to the substitutor.'''
unified_lang_code = GetUnifiedLangCode(lang)
substituter.AddSubstitutions({
'GRITVERLANGCHARSETHEX': GetLangCharsetPair(unified_lang_code),
'GRITVERLANGID': GetLangIdHex(unified_lang_code),
'GRITVERCHARSETID': GetCharsetIdDecimal(unified_lang_code)})
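# Illustrative example (not part of GRIT): for lang 'pt_br',
# GetUnifiedLangCode returns 'pt-BR', so RcSubstitutions would add
#   GRITVERLANGCHARSETHEX -> '041604e4'
#   GRITVERLANGID         -> '0x0416'
#   GRITVERCHARSETID      -> '1252'   (0x04e4 in decimal)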
def _FormatHeader(root, lang, output_dir):
'''Returns the required preamble for RC files.'''
assert isinstance(lang, six.string_types)
assert isinstance(root, misc.GritNode)
# Find the location of the resource header file, so that we can include
# it.
resource_header = 'resource.h' # fall back to this
language_directive = ''
for output in root.GetOutputFiles():
if output.attrs['type'] == 'rc_header':
resource_header = os.path.abspath(output.GetOutputFilename())
resource_header = util.MakeRelativePath(output_dir, resource_header)
if output.attrs['lang'] != lang:
continue
if output.attrs['language_section'] == '':
# If no language_section is requested, no directive is added
# (Used when the generated rc will be included from another rc
# file that will have the appropriate language directive)
language_directive = ''
elif output.attrs['language_section'] == 'neutral':
# If a neutral language section is requested (default), add a
# neutral language directive
language_directive = 'LANGUAGE LANG_NEUTRAL, SUBLANG_NEUTRAL'
elif output.attrs['language_section'] == 'lang':
language_directive = 'LANGUAGE %s' % GetLangDirectivePair(lang)
resource_header = resource_header.replace('\\', '\\\\')
return '''// This file is automatically generated by GRIT. Do not edit.
#include "%s"
#include <winresrc.h>
#ifdef IDC_STATIC
#undef IDC_STATIC
#endif
#define IDC_STATIC (-1)
%s
''' % (resource_header, language_directive)
# end _FormatHeader() function
def FormatMessage(item, lang):
'''Returns a single message of a string table.'''
message = item.ws_at_start + item.Translate(lang) + item.ws_at_end
# Escape quotation marks (RC format uses doubling-up)
message = message.replace('"', '""')
# Replace linebreaks with a \n escape
message = util.LINEBREAKS.sub(r'\\n', message)
if hasattr(item.GetRoot(), 'GetSubstituter'):
substituter = item.GetRoot().GetSubstituter()
message = substituter.Substitute(message)
name_attr = item.GetTextualIds()[0]
return ' %-15s "%s"\n' % (name_attr, message)
def _FormatSection(item, lang, output_dir):
'''Writes out an .rc file section.'''
assert isinstance(lang, six.string_types)
from grit.node import structure
assert isinstance(item, structure.StructureNode)
if item.IsExcludedFromRc():
return ''
text = item.gatherer.Translate(
lang, skeleton_gatherer=item.GetSkeletonGatherer(),
pseudo_if_not_available=item.PseudoIsAllowed(),
fallback_to_english=item.ShouldFallbackToEnglish()) + '\n\n'
# Replace the language expand_variables in version rc info.
if item.ExpandVariables() and hasattr(item.GetRoot(), 'GetSubstituter'):
substituter = item.GetRoot().GetSubstituter()
text = substituter.Substitute(text)
return text
def FormatInclude(item, lang, output_dir, type=None, process_html=False):
'''Formats an item that is included in an .rc file (e.g. an ICON).
Args:
item: an IncludeNode or StructureNode
lang, output_dir: standard formatter parameters
type: .rc file resource type, e.g. 'ICON' (ignored unless item is a
StructureNode)
process_html: False/True (ignored unless item is a StructureNode)
'''
assert isinstance(lang, six.string_types)
from grit.node import structure
from grit.node import include
assert isinstance(item, (structure.StructureNode, include.IncludeNode))
if isinstance(item, include.IncludeNode):
type = item.attrs['type'].upper()
process_html = item.attrs['flattenhtml'] == 'true'
filename_only = item.attrs['filenameonly'] == 'true'
relative_path = item.attrs['relativepath'] == 'true'
else:
assert (isinstance(item, structure.StructureNode) and item.attrs['type'] in
['admin_template', 'chrome_html', 'chrome_scaled_image',
'tr_html', 'txt'])
filename_only = False
relative_path = False
# By default, we use relative pathnames to included resources so that
# sharing the resulting .rc files is possible.
#
# The FileForLanguage() function has the side effect of generating the file
# if needed (e.g. if it is an HTML file include).
file_for_lang = item.FileForLanguage(lang, output_dir)
if file_for_lang is None:
return ''
filename = os.path.abspath(file_for_lang)
if process_html:
filename = item.Process(output_dir)
elif filename_only:
filename = os.path.basename(filename)
elif relative_path:
filename = util.MakeRelativePath(output_dir, filename)
filename = filename.replace('\\', '\\\\') # escape for the RC format
if isinstance(item, structure.StructureNode) and item.IsExcludedFromRc():
return ''
name = item.attrs['name']
item_id = item.GetRoot().GetIdMap()[name]
return '// ID: %d\n%-18s %-18s "%s"\n' % (item_id, name, type, filename)
def _DoNotFormat(item, lang, output_dir):
return ''
# Formatter instance to use for each type attribute
# when formatting Structure nodes.
_STRUCTURE_FORMATTERS = {
'accelerators' : _FormatSection,
'dialog' : _FormatSection,
'menu' : _FormatSection,
'rcdata' : _FormatSection,
'version' : _FormatSection,
'admin_template' : partial(FormatInclude, type='ADM'),
'chrome_html' : partial(FormatInclude, type='BINDATA',
process_html=True),
'chrome_scaled_image' : partial(FormatInclude, type='BINDATA'),
'tr_html' : partial(FormatInclude, type='HTML'),
'txt' : partial(FormatInclude, type='TXT'),
'policy_template_metafile': _DoNotFormat,
}
def FormatStructure(item, lang, output_dir):
formatter = _STRUCTURE_FORMATTERS[item.attrs['type']]
return formatter(item, lang, output_dir)
| bsd-3-clause | 2,850,999,913,179,659,000 | 36.727083 | 81 | 0.628859 | false |
rainier-m/python-soccer | EPL-teamNews.py | 1 | 5001 | # -*- coding: utf-8 -*-
'''
Created on Jan 30, 2015
Modified on Jan 30, 2015
@author: [email protected]
A simple Python program to scrape the ESPN FC website for content.
'''
'''
Version Number of Script
'''
version = '0.01.a'
# Import Libraries needed for Scraping the various web pages
from bs4 import BeautifulSoup
import urllib2
import datetime
import requests
import os
import platform
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
# Establish the process Date & Time Stamp
ts = datetime.datetime.now().strftime("%H:%M:%S")
ds = datetime.datetime.now().strftime("%Y-%m-%d")
date = datetime.datetime.now().strftime("%Y%m%d")
# Updates the Time Stamp
def updateTS():
update = datetime.datetime.now().strftime("%H:%M:%S")
return update
# Download Image
def downloadImage(imageURL, localFileName):
response = requests.get(imageURL)
if response.status_code == 200:
print 'Downloading %s...' % (localFileName)
with open(localFileName, 'wb') as fo:
for chunk in response.iter_content(4096):
fo.write(chunk)
return True
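# Illustrative usage (URL and filename are hypothetical):
#   downloadImage('http://example.com/badge.png', 'PL-Data/imgs/badge.png')
# returns True on HTTP 200 after streaming the body in 4096-byte chunks.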
# Program Version & System Variables
parseVersion = 'ESPN Premier League Team News ' + version
print ds + ' :: ' + ts + ' :: ' + parseVersion
# Set Output Path for Windows or Mac environments
os_System = platform.system()
win_BasePath = "C:/Users/Rainier/Documents/GitHub/python-soccer"
if os_System == "Windows":
outputPath = win_BasePath + "/PL-Data/"
outputImgPath = win_BasePath + "/PL-Data/imgs/"
outputTeamPath = win_BasePath + "/PL-Data/teams/"
outputMatchPath = win_BasePath + "/PL-Data/match/"
else:
outputPath = 'PL-Data/'
outputImgPath = 'PL-Data/imgs/'
outputTeamPath = 'PL-Data/teams/'
outputMatchPath = 'PL-Data/match/'
hr = " >>> *** ====================================================== *** <<<"
shr = " >>> *** ==================== *** <<<"
prefixBBC = "http://www.bbc.com"
prefixESPN = "http://www.espnfc.us"
teamURLs = ['/club/arsenal/359/index','/club/aston-villa/362/index','/club/burnley/379/index','/club/chelsea/363/index','/club/crystal-palace/384/index','/club/everton/368/index','/club/hull-city/306/index','/club/leicester-city/375/index','/club/liverpool/364/index','/club/manchester-city/382/index','/club/manchester-united/360/index','/club/newcastle-united/361/index','/club/queens-park-rangers/334/index','/club/southampton/376/index','/club/stoke-city/336/index','/club/sunderland/366/index','/club/swansea-city/318/index','/club/tottenham-hotspur/367/index','/club/west-bromwich-albion/383/index','/club/west-ham-united/371/index']
def teamNews(x):
teamURL = x
teamName = x
teamName = teamName[6:len(teamName)-10]
teamURL = prefixESPN + teamURL
teamHTML = urllib2.urlopen(teamURL)
teamSoup = BeautifulSoup(teamHTML)
recentNews = teamSoup.find("div", {"id":"feed"})
recentNewsItems = recentNews.find_all("div", {"class":"feed-item-content"})
recapOutput = []
print "Team News Parsed :: " + teamName
for i in recentNewsItems:
recapPhotoItem = i.find("div", {"class":"thumbnail picture"})
if len(i) > 3:
# recapPhotoItem = recapPhotoItem.find("img")
# print recapPhotoItem["src"]
# with open(outputTxt, "a") as f:
# f.write('\n' + shr + '\n')
# f.write(i.prettify())
# f.write('\n' + shr + '\n')
# f.close()
# print shr
recapHeadline = i.find("h2")
recapHeadlineDetails = recapHeadline.find("a")
recapHeadlineDetails = recapHeadlineDetails["href"]
recapHeadline = recapHeadline.get_text(strip=True)
recapAge = i.find("span", {"class":"age"})
recapAge = recapAge.get_text(strip=True)
recapOutput.append(date + "|" + teamName + "|" + recapHeadline + "|" + recapHeadlineDetails + "|" + recapAge)
#print shr
# print i.prettify()
#print recapHeadlineDetails
#print shr
#recapDetails = recapHeadline.find("a")
#recapDetails = recapDetails["href"]
#print recapDetails
# print recapAge.get_text(strip=True)
#print updateTS()
#print shr
# print i
else:
#print i
#print shr
recapGameOpponents = i.find_all("div", {"class":"team-name"})
recapGameScore = i.find_all("div", {"class":"team-score"})
recapGameStatus = i.find("div", {"class":"game-info"})
recapGameHome = recapGameOpponents[0].get_text(strip=True)
recapGameAway = recapGameOpponents[1].get_text(strip=True)
recapHomeScore = recapGameScore[0].get_text(strip=True)
recapAwayScore = recapGameScore[1].get_text(strip=True)
#recapGameInfo = i.find("div", {"clas=":"game-info"})
recapOutput.append(date + "|" + teamName + "|" + recapGameHome + " " + recapHomeScore + " v. " + recapAwayScore + " "+ recapGameAway + "||")
# print i
for i in recapOutput:
print i
print hr
return recapOutput
teamNewstxt = 'teamNews.txt'
with open(teamNewstxt, "w") as f:
f.write(ds + " :: " + updateTS() + " :: " + parseVersion + '\n' )
f.close()
for i in teamURLs:
for x in teamNews(i):
with open(teamNewstxt, "a") as f:
f.write(x + '\n')
f.close()
| gpl-2.0 | -1,380,015,014,240,211,200 | 34.721429 | 639 | 0.660868 | false |
plq/spyne | spyne/protocol/_inbase.py | 1 | 24684 |
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
from __future__ import print_function
import logging
logger = logging.getLogger(__name__)
import re
import pytz
import uuid
from math import modf
from time import strptime, mktime
from datetime import timedelta, time, datetime, date
from decimal import Decimal as D, InvalidOperation
from pytz import FixedOffset
try:
from lxml import etree
from lxml import html
except ImportError:
etree = None
html = None
from spyne.protocol._base import ProtocolMixin
from spyne.model import ModelBase, XmlAttribute, Array, Null, \
ByteArray, File, ComplexModelBase, AnyXml, AnyHtml, Unicode, String, \
Decimal, Double, Integer, Time, DateTime, Uuid, Date, Duration, Boolean, Any
from spyne.error import ValidationError
from spyne.model.binary import binary_decoding_handlers, BINARY_ENCODING_USE_DEFAULT
from spyne.util import six
from spyne.model.enum import EnumBase
from spyne.model.primitive.datetime import TIME_PATTERN, DATE_PATTERN
from spyne.util.cdict import cdict
_date_re = re.compile(DATE_PATTERN)
_time_re = re.compile(TIME_PATTERN)
_duration_re = re.compile(
r'(?P<sign>-?)'
r'P'
r'(?:(?P<years>\d+)Y)?'
r'(?:(?P<months>\d+)M)?'
r'(?:(?P<days>\d+)D)?'
r'(?:T(?:(?P<hours>\d+)H)?'
r'(?:(?P<minutes>\d+)M)?'
r'(?:(?P<seconds>\d+(\.\d+)?)S)?)?'
)
class InProtocolBase(ProtocolMixin):
"""This is the abstract base class for all input protocol implementations.
Child classes can implement only the required subset of the public methods.
An output protocol must implement :func:`serialize` and
:func:`create_out_string`.
An input protocol must implement :func:`create_in_document`,
:func:`decompose_incoming_envelope` and :func:`deserialize`.
The ProtocolBase class supports the following events:
* ``before_deserialize``:
Called before the deserialization operation is attempted.
* ``after_deserialize``:
Called after the deserialization operation is finished.
The arguments the constructor takes are as follows:
:param app: The application this protocol belongs to.
:param mime_type: The mime_type this protocol should set for transports
that support this. This is a quick way to override the mime_type by
default instead of subclassing the relevant protocol implementation.
"""
def __init__(self, app=None, validator=None, mime_type=None,
ignore_wrappers=False, binary_encoding=None, string_encoding=None):
self.validator = None
super(InProtocolBase, self).__init__(app=app, mime_type=mime_type,
ignore_wrappers=ignore_wrappers,
binary_encoding=binary_encoding, string_encoding=string_encoding)
self.message = None
self.validator = None
self.set_validator(validator)
if mime_type is not None:
self.mime_type = mime_type
fsh = {
Any: self.any_from_bytes,
Null: self.null_from_bytes,
File: self.file_from_bytes,
Array: self.array_from_bytes,
Double: self.double_from_bytes,
String: self.string_from_bytes,
AnyXml: self.any_xml_from_bytes,
Boolean: self.boolean_from_bytes,
Integer: self.integer_from_bytes,
Unicode: self.unicode_from_bytes,
AnyHtml: self.any_html_from_bytes,
ByteArray: self.byte_array_from_bytes,
EnumBase: self.enum_base_from_bytes,
ModelBase: self.model_base_from_bytes,
XmlAttribute: self.xmlattribute_from_bytes,
ComplexModelBase: self.complex_model_base_from_bytes
}
self._from_bytes_handlers = cdict(fsh)
self._from_unicode_handlers = cdict(fsh)
self._from_bytes_handlers[Date] = self.date_from_bytes
self._from_bytes_handlers[Time] = self.time_from_bytes
self._from_bytes_handlers[Uuid] = self.uuid_from_bytes
self._from_bytes_handlers[Decimal] = self.decimal_from_bytes
self._from_bytes_handlers[DateTime] = self.datetime_from_bytes
self._from_bytes_handlers[Duration] = self.duration_from_bytes
self._from_unicode_handlers[Date] = self.date_from_unicode
self._from_unicode_handlers[Uuid] = self.uuid_from_unicode
self._from_unicode_handlers[Time] = self.time_from_unicode
self._from_unicode_handlers[Decimal] = self.decimal_from_unicode
self._from_unicode_handlers[DateTime] = self.datetime_from_unicode
self._from_unicode_handlers[Duration] = self.duration_from_unicode
self._datetime_dsmap = {
None: self._datetime_from_unicode,
'sec': self._datetime_from_sec,
'sec_float': self._datetime_from_sec_float,
'msec': self._datetime_from_msec,
'msec_float': self._datetime_from_msec_float,
'usec': self._datetime_from_usec,
}
def _datetime_from_sec(self, cls, value):
try:
return datetime.fromtimestamp(value)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_sec_float(self, cls, value):
try:
return datetime.fromtimestamp(value)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_msec(self, cls, value):
try:
return datetime.fromtimestamp(value // 1000)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_msec_float(self, cls, value):
try:
return datetime.fromtimestamp(value / 1000)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_usec(self, cls, value):
try:
return datetime.fromtimestamp(value / 1e6)
except TypeError:
logger.error("Invalid value %r", value)
raise
def create_in_document(self, ctx, in_string_encoding=None):
"""Uses ``ctx.in_string`` to set ``ctx.in_document``."""
def decompose_incoming_envelope(self, ctx, message):
"""Sets the ``ctx.method_request_string``, ``ctx.in_body_doc``,
``ctx.in_header_doc`` and ``ctx.service`` properties of the ctx object,
if applicable.
"""
def deserialize(self, ctx, message):
"""Takes a MethodContext instance and a string containing ONE document
instance in the ``ctx.in_string`` attribute.
Returns the corresponding native python object in the ctx.in_object
attribute.
"""
def validate_document(self, payload):
"""Method to be overriden to perform any sort of custom input
validation on the parsed input document.
"""
def set_validator(self, validator):
"""You must override this function if you want your protocol to support
validation."""
assert validator is None
self.validator = None
def from_bytes(self, class_, string, *args, **kwargs):
if string is None:
return None
if isinstance(string, six.string_types) and \
len(string) == 0 and class_.Attributes.empty_is_none:
return None
handler = self._from_bytes_handlers[class_]
return handler(class_, string, *args, **kwargs)
def from_unicode(self, class_, string, *args, **kwargs):
if string is None:
return None
#if not six.PY2:
# assert isinstance(string, str), \
# "Invalid type passed to `from_unicode`: {}".format(
# (class_, type(string), string))
cls_attrs = self.get_cls_attrs(class_)
if isinstance(string, six.string_types) and len(string) == 0 and \
cls_attrs.empty_is_none:
return None
handler = self._from_unicode_handlers[class_]
return handler(class_, string, *args, **kwargs)
def null_from_bytes(self, cls, value):
return None
def any_from_bytes(self, cls, value):
return value
def any_xml_from_bytes(self, cls, string):
try:
return etree.fromstring(string)
except etree.XMLSyntaxError as e:
raise ValidationError(string, "%%r: %r" % e)
def any_html_from_bytes(self, cls, string):
try:
return html.fromstring(string)
except etree.ParserError as e:
if e.args[0] == "Document is empty":
pass
else:
raise
def uuid_from_unicode(self, cls, string, suggested_encoding=None):
attr = self.get_cls_attrs(cls)
ser_as = attr.serialize_as
encoding = attr.encoding
if encoding is None:
encoding = suggested_encoding
retval = string
if ser_as in ('bytes', 'bytes_le'):
retval, = binary_decoding_handlers[encoding](string)
try:
retval = _uuid_deserialize[ser_as](retval)
except (ValueError, TypeError, UnicodeDecodeError) as e:
raise ValidationError(e)
return retval
def uuid_from_bytes(self, cls, string, suggested_encoding=None, **_):
attr = self.get_cls_attrs(cls)
ser_as = attr.serialize_as
encoding = attr.encoding
if encoding is None:
encoding = suggested_encoding
retval = string
if ser_as in ('bytes', 'bytes_le'):
retval, = binary_decoding_handlers[encoding](string)
elif isinstance(string, six.binary_type):
retval = string.decode('ascii')
try:
retval = _uuid_deserialize[ser_as](retval)
except ValueError as e:
raise ValidationError(e)
return retval
def unicode_from_bytes(self, cls, value):
retval = value
if isinstance(value, six.binary_type):
cls_attrs = self.get_cls_attrs(cls)
if cls_attrs.encoding is not None:
retval = six.text_type(value, cls_attrs.encoding,
errors=cls_attrs.unicode_errors)
elif self.string_encoding is not None:
retval = six.text_type(value, self.string_encoding,
errors=cls_attrs.unicode_errors)
else:
retval = six.text_type(value, errors=cls_attrs.unicode_errors)
return retval
def string_from_bytes(self, cls, value):
retval = value
cls_attrs = self.get_cls_attrs(cls)
if isinstance(value, six.text_type):
if cls_attrs.encoding is None:
raise Exception("You need to define a source encoding for "
"decoding incoming unicode values.")
else:
retval = value.encode(cls_attrs.encoding)
return retval
def decimal_from_unicode(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
if cls_attrs.max_str_len is not None and len(string) > \
cls_attrs.max_str_len:
raise ValidationError(string, "Decimal %%r longer than %d "
"characters" % cls_attrs.max_str_len)
try:
return D(string)
except InvalidOperation as e:
raise ValidationError(string, "%%r: %r" % e)
def decimal_from_bytes(self, cls, string):
return self.decimal_from_unicode(cls,
string.decode(self.default_string_encoding))
def double_from_bytes(self, cls, string):
try:
return float(string)
except (TypeError, ValueError) as e:
raise ValidationError(string, "%%r: %r" % e)
def integer_from_bytes(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
if isinstance(string, (six.text_type, six.binary_type)) and \
cls_attrs.max_str_len is not None and \
len(string) > cls_attrs.max_str_len:
raise ValidationError(string,
"Integer %%r longer than %d characters"
% cls_attrs.max_str_len)
try:
return int(string)
except ValueError:
raise ValidationError(string, "Could not cast %r to integer")
def time_from_unicode(self, cls, string):
"""Expects ISO formatted times."""
match = _time_re.match(string)
if match is None:
raise ValidationError(string, "%%r does not match regex %r " %
_time_re.pattern)
fields = match.groupdict(0)
microsec = fields.get('sec_frac')
if microsec is None or microsec == 0:
microsec = 0
else:
microsec = min(999999, int(round(float(microsec) * 1e6)))
return time(int(fields['hr']), int(fields['min']),
int(fields['sec']), microsec)
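# Illustrative example (assuming spyne's standard TIME_PATTERN groups):
#   time_from_unicode(Time, u'13:45:30.5') -> datetime.time(13, 45, 30, 500000)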
def time_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
return self.time_from_unicode(cls, string)
def date_from_unicode_iso(self, cls, string):
"""This is used by protocols like SOAP who need ISO8601-formatted dates
no matter what.
"""
try:
return date(*(strptime(string, u'%Y-%m-%d')[0:3]))
except ValueError:
match = cls._offset_re.match(string)
if match:
year = int(match.group('year'))
month = int(match.group('month'))
day = int(match.group('day'))
return date(year, month, day)
raise ValidationError(string)
def enum_base_from_bytes(self, cls, value):
if self.validator is self.SOFT_VALIDATION and not (
cls.validate_string(cls, value)):
raise ValidationError(value)
return getattr(cls, value)
def model_base_from_bytes(self, cls, value):
return cls.from_bytes(value)
def datetime_from_unicode_iso(self, cls, string):
astz = self.get_cls_attrs(cls).as_timezone
match = cls._utc_re.match(string)
if match:
tz = pytz.utc
retval = _parse_datetime_iso_match(match, tz=tz)
if astz is not None:
retval = retval.astimezone(astz)
return retval
if match is None:
match = cls._offset_re.match(string)
if match:
tz_hr, tz_min = [int(match.group(x))
for x in ("tz_hr", "tz_min")]
tz = FixedOffset(tz_hr * 60 + tz_min, {})
retval = _parse_datetime_iso_match(match, tz=tz)
if astz is not None:
retval = retval.astimezone(astz)
return retval
if match is None:
match = cls._local_re.match(string)
if match:
retval = _parse_datetime_iso_match(match)
if astz:
retval = retval.replace(tzinfo=astz)
return retval
raise ValidationError(string)
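# Illustrative examples of the three branches above:
#   '2013-08-04T20:57:55Z'      -> tz-aware datetime in UTC
#   '2013-08-04T20:57:55+02:00' -> datetime with a FixedOffset(120) tzinfo
#   '2013-08-04T20:57:55'       -> naive datetime (unless as_timezone is set)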
def datetime_from_unicode(self, cls, string):
serialize_as = self.get_cls_attrs(cls).serialize_as
return self._datetime_dsmap[serialize_as](cls, string)
def datetime_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
serialize_as = self.get_cls_attrs(cls).serialize_as
return self._datetime_dsmap[serialize_as](cls, string)
def date_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
date_format = self._get_date_format(self.get_cls_attrs(cls))
try:
if date_format is not None:
dt = datetime.strptime(string, date_format)
return date(dt.year, dt.month, dt.day)
return self.date_from_unicode_iso(cls, string)
except ValueError as e:
match = cls._offset_re.match(string)
if match:
return date(int(match.group('year')),
int(match.group('month')), int(match.group('day')))
else:
raise ValidationError(string,
"%%r: %s" % repr(e).replace("%", "%%"))
def date_from_unicode(self, cls, string):
date_format = self._get_date_format(self.get_cls_attrs(cls))
try:
if date_format is not None:
dt = datetime.strptime(string, date_format)
return date(dt.year, dt.month, dt.day)
return self.date_from_unicode_iso(cls, string)
except ValueError as e:
match = cls._offset_re.match(string)
if match:
return date(int(match.group('year')),
int(match.group('month')), int(match.group('day')))
else:
# the message from ValueError is quite nice already; use str(e)
# since e.message is Python 2-only.
raise ValidationError(str(e), "%s")
def duration_from_unicode(self, cls, string):
duration = _duration_re.match(string).groupdict(0)
if duration is None:
raise ValidationError(string,
"Time data '%%s' does not match regex '%s'" %
(_duration_re.pattern,))
days = int(duration['days'])
days += int(duration['months']) * 30
days += int(duration['years']) * 365
hours = int(duration['hours'])
minutes = int(duration['minutes'])
seconds = float(duration['seconds'])
f, i = modf(seconds)
seconds = i
microseconds = int(1e6 * f)
delta = timedelta(days=days, hours=hours, minutes=minutes,
seconds=seconds, microseconds=microseconds)
if duration['sign'] == "-":
delta *= -1
return delta
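# Illustrative example: 'P1DT2H30M' yields timedelta(days=1, hours=2,
# minutes=30); a leading '-' sign, as in '-PT5S', negates the delta.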
def duration_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
return self.duration_from_unicode(cls, string)
def boolean_from_bytes(self, cls, string):
return string.lower() in ('true', '1')
def byte_array_from_bytes(self, cls, value, suggested_encoding=None):
encoding = self.get_cls_attrs(cls).encoding
if encoding is BINARY_ENCODING_USE_DEFAULT:
encoding = suggested_encoding
return binary_decoding_handlers[encoding](value)
def file_from_bytes(self, cls, value, suggested_encoding=None):
encoding = self.get_cls_attrs(cls).encoding
if encoding is BINARY_ENCODING_USE_DEFAULT:
encoding = suggested_encoding
return File.Value(data=binary_decoding_handlers[encoding](value))
def complex_model_base_from_bytes(self, cls, string, **_):
raise TypeError("Only primitives can be deserialized from string.")
def array_from_bytes(self, cls, string, **_):
if self.get_cls_attrs(cls).serialize_as != 'sd-list':
raise TypeError("Only primitives can be deserialized from string.")
# sd-list being space-delimited list.
retval = []
inner_type, = cls._type_info.values()
for s in string.split():
retval.append(self.from_bytes(inner_type, s))
return retval
def xmlattribute_from_bytes(self, cls, value):
return self.from_bytes(cls.type, value)
def _datetime_from_unicode(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
# get parser
parser = cls_attrs.parser
# get date_format
dt_format = cls_attrs.dt_format
if dt_format is None:
dt_format = cls_attrs.date_format
if dt_format is None:
dt_format = cls_attrs.out_format
if dt_format is None:
dt_format = cls_attrs.format
# parse the string
if parser is not None:
retval = parser(self, cls, string)
elif dt_format is not None:
if six.PY2:
# FIXME: perhaps it should encode to string's encoding instead
# of utf8 all the time
if isinstance(dt_format, six.text_type):
dt_format = dt_format.encode('utf8')
if isinstance(string, six.text_type):
string = string.encode('utf8')
retval = datetime.strptime(string, dt_format)
astz = cls_attrs.as_timezone
if astz:
retval = retval.astimezone(astz)
else:
retval = self.datetime_from_unicode_iso(cls, string)
return retval
_uuid_deserialize = {
None: lambda s: uuid.UUID(s.decode('ascii') if isinstance(s, bytes) else s),
'hex': lambda s: uuid.UUID(hex=s),
'urn': lambda s: uuid.UUID(hex=s),
'bytes': lambda s: uuid.UUID(bytes=s),
'bytes_le': lambda s: uuid.UUID(bytes_le=s),
'fields': lambda s: uuid.UUID(fields=s),
'int': lambda s: uuid.UUID(int=s),
('int', int): lambda s: uuid.UUID(int=s),
('int', str): lambda s: uuid.UUID(int=int(s)),
}
if six.PY2:
_uuid_deserialize[None] = lambda s: uuid.UUID(s)
_uuid_deserialize[('int', long)] = _uuid_deserialize[('int', int)]
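# Illustrative example: _uuid_deserialize['int'](0) and
# _uuid_deserialize[None]('00000000-0000-0000-0000-000000000000')
# both yield the nil UUID.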
def _parse_datetime_iso_match(date_match, tz=None):
fields = date_match.groupdict()
year = int(fields.get('year'))
month = int(fields.get('month'))
day = int(fields.get('day'))
hour = int(fields.get('hr'))
minute = int(fields.get('min'))
second = int(fields.get('sec'))
usecond = fields.get("sec_frac")
if usecond is None:
usecond = 0
else:
# we only get the most significant 6 digits because that's what
# datetime can handle.
usecond = min(999999, int(round(float(usecond) * 1e6)))
return datetime(year, month, day, hour, minute, second, usecond, tz)
_dt_sec = lambda cls, val: \
int(mktime(val.timetuple()))
_dt_sec_float = lambda cls, val: \
mktime(val.timetuple()) + (val.microsecond / 1e6)
_dt_msec = lambda cls, val: \
int(mktime(val.timetuple())) * 1000 + (val.microsecond // 1000)
_dt_msec_float = lambda cls, val: \
mktime(val.timetuple()) * 1000 + (val.microsecond / 1000.0)
_dt_usec = lambda cls, val: \
int(mktime(val.timetuple())) * 1000000 + val.microsecond
_datetime_smap = {
'sec': _dt_sec,
'secs': _dt_sec,
'second': _dt_sec,
'seconds': _dt_sec,
'sec_float': _dt_sec_float,
'secs_float': _dt_sec_float,
'second_float': _dt_sec_float,
'seconds_float': _dt_sec_float,
'msec': _dt_msec,
'msecs': _dt_msec,
'msecond': _dt_msec,
'mseconds': _dt_msec,
'millisecond': _dt_msec,
'milliseconds': _dt_msec,
'msec_float': _dt_msec_float,
'msecs_float': _dt_msec_float,
'msecond_float': _dt_msec_float,
'mseconds_float': _dt_msec_float,
'millisecond_float': _dt_msec_float,
'milliseconds_float': _dt_msec_float,
'usec': _dt_usec,
'usecs': _dt_usec,
'usecond': _dt_usec,
'useconds': _dt_usec,
'microsecond': _dt_usec,
'microseconds': _dt_usec,
}
def _file_to_iter(f):
try:
data = f.read(65536)
while len(data) > 0:
yield data
data = f.read(65536)
finally:
f.close()
| lgpl-2.1 | 6,236,613,823,759,580,000 | 33.37883 | 84 | 0.581672 | false |
JerryYanWan/BigDL | spark/dl/src/test/resources/tf/models/vgg19.py | 1 | 1391 | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tensorflow as tf
from nets import vgg
from sys import argv
from util import run_model
def main():
"""
You can also run these commands manually to generate the pb file
1. git clone https://github.com/tensorflow/models.git
2. export PYTHONPATH=Path_to_your_model_folder
3. python vgg19.py
"""
height, width = 224, 224
inputs = tf.Variable(tf.random_uniform((1, height, width, 3)), name='input')
net, end_points = vgg.vgg_19(inputs, is_training = False)
print("nodes in the graph")
for n in end_points:
print(n + " => " + str(end_points[n]))
net_outputs = map(lambda x: tf.get_default_graph().get_tensor_by_name(x), argv[2].split(','))
run_model(net_outputs, argv[1], 'vgg_19', argv[3] == 'True')
if __name__ == "__main__":
main()
| apache-2.0 | 155,971,322,021,515,800 | 34.666667 | 97 | 0.686556 | false |
swarmer/autoscaler | autoscaler/server/scaling/algorithms/spline.py | 1 | 1065 | import math
import scipy.interpolate
from autoscaler.server.request_history import RequestHistory
from autoscaler.server.scaling.utils import parse_interval
class SplineScalingAlgorithm:
def __init__(self, algorithm_config):
self.interval_seconds = parse_interval(
algorithm_config['interval']
)
self.requests_per_instance_interval = (
algorithm_config['requests_per_instance_interval']
)
def get_instance_count(self, request_history: RequestHistory):
(interval1, interval2, interval3) = request_history.get_last_intervals(
self.interval_seconds, 3
)
x_values = [1, 2, 3]
y_values = [len(interval1), len(interval2), len(interval3)]
interpolated_function = scipy.interpolate.InterpolatedUnivariateSpline(
x_values, y_values, k=2,
)
expected_request_count = interpolated_function(len(x_values) + 1)
return max(1, math.ceil(
expected_request_count / self.requests_per_instance_interval)
)
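# Illustrative example (numbers hypothetical): with 10, 20 and 40 requests
# in the last three intervals, the degree-2 spline extrapolates 70 requests
# for the next one, so requests_per_instance_interval=25 gives
# max(1, ceil(70 / 25)) == 3 instances.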
| mit | 2,385,278,985,483,348,000 | 32.28125 | 79 | 0.656338 | false |
SunyataZero/buddhist-well-being-prototype-4 | bwb/practice_details.py | 1 | 3624 | import sched
import threading
import time
from PyQt5 import QtCore
from PyQt5 import QtWidgets
import logging
import bwb.model as model
ID_NOT_SET = -1
BUTTON_WIDTH_IT = 28
class PracticeCompositeWidget(QtWidgets.QWidget):
time_of_day_state_changed_signal = QtCore.pyqtSignal()
def __init__(self):
super().__init__()
self.id_it = ID_NOT_SET
self.scheduler = sched.scheduler(time.time, time.sleep)
vbox = QtWidgets.QVBoxLayout()
self.setLayout(vbox)
vbox.setAlignment(QtCore.Qt.AlignTop)
# ..for details
### self.details_ll = QtWidgets.QLabel("-----")
### self.details_ll.setWordWrap(True)
self.question_ll = QtWidgets.QLabel("<h4>Question</h4>")
vbox.addWidget(self.question_ll)
self.question_le = QtWidgets.QLineEdit()
self.question_le.textChanged.connect(self.on_question_text_changed)
vbox.addWidget(self.question_le)
def on_time_of_day_statechanged(self, i_new_checked_state):
self.update_db_time()
def on_time_of_day_changed(self, i_qtime):
self.update_db_time()
def update_db_time(self):
if self.id_it == ID_NOT_SET:
return
qtime = self.time_of_day_timeedit.time()
if self.time_of_day_active_qcb.checkState() == QtCore.Qt.Checked:
model.ReminderM.update_time_of_day(self.id_it, qtime.hour())
# Set a scheduled task
practice = model.ReminderM.get(self.id_it)
self.set_reminder(qtime.hour(), practice.title)
else:
model.ReminderM.update_time_of_day(self.id_it, model.TIME_NOT_SET)
self.time_of_day_state_changed_signal.emit()
def set_reminder(self, i_hour_it, i_practice_title_sg):
self.schedule_thread = threading.Thread(target=self.background_function, args=(i_hour_it, i_practice_title_sg), daemon=True)
self.schedule_thread.start()
def background_function(self, i_hour_it, i_practice_title_sg):
now = time.time()
reminder_time_qdatetime = QtCore.QDateTime.currentDateTime()
reminder_time_qdatetime.setTime(QtCore.QTime(i_hour_it, 50))
reminder_time_in_seconds_it = reminder_time_qdatetime.toMSecsSinceEpoch() / 1000
logging.debug("reminder_time_in_seconds_it = " + str(reminder_time_in_seconds_it))
self.scheduler.enterabs(reminder_time_in_seconds_it + 10, 1, self.popup_function, (i_practice_title_sg,))
# -Several events can be scheduled, (the enterabs function adds an event rather than replacing)
self.scheduler.run()  # blocks this daemon thread until scheduled events have run
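# Illustrative timing sketch: a reminder scheduled for hour 14 fires at
# 14:50:10 local time (the enterabs target plus the 10-second offset above).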
def popup_function(self, i_string):
message_box = QtWidgets.QMessageBox.information(None, i_string, (i_string))
def on_question_text_changed(self):
if self.id_it == ID_NOT_SET:
return
model.ReminderM.update_question_text(
self.id_it,
self.question_le.text().strip()
)
def change_practice(self, i_practice_id_it):
self.id_it = i_practice_id_it # storing the id locally
self.update_gui()
def update_gui(self):
###time_of_day_cb_was_blocked_bl = self.time_of_day_active_qcb.blockSignals(True)
###time_of_day_timeedit_was_blocked_bl = self.time_of_day_timeedit.blockSignals(True)
practice = model.ReminderM.get(self.id_it)
##self.details_ll.setText(practice.description)
self.question_le.setText(practice.question)
###self.time_of_day_active_qcb.blockSignals(time_of_day_cb_was_blocked_bl)
###self.time_of_day_timeedit.blockSignals(time_of_day_timeedit_was_blocked_bl)
| gpl-3.0 | 638,437,109,849,312,600 | 37.147368 | 132 | 0.651214 | false |
uw-it-aca/canvas-sis-provisioner | sis_provisioner/builders/__init__.py | 1 | 4786 | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from sis_provisioner.models.course import Course
from sis_provisioner.models.user import User
from sis_provisioner.csv.data import Collector
from sis_provisioner.csv.format import UserCSV, EnrollmentCSV
from sis_provisioner.dao.user import (
valid_net_id, get_person_by_netid, get_person_by_gmail_id)
from sis_provisioner.dao.course import (
get_section_by_id, get_registrations_by_section)
from sis_provisioner.dao.canvas import ENROLLMENT_ACTIVE
from sis_provisioner.exceptions import (
UserPolicyException, CoursePolicyException, InvalidLoginIdException)
from restclients_core.exceptions import DataFailureException
from logging import getLogger
class Builder(object):
def __init__(self, items=[]):
self.data = Collector()
self.queue_id = None
self.invalid_users = {}
self.items = items
self.logger = getLogger(__name__)
def _init_build(self, **kwargs):
return
def _process(self, item):
raise NotImplementedError
def _write(self):
return self.data.write_files()
def build(self, **kwargs):
self._init_build(**kwargs)
for item in self.items:
self._process(item)
return self._write()
def add_user_data_for_person(self, person, force=False):
"""
Creates a line of user data for the passed person. If force is not
true, the data will only be created if the person has not been
provisioned. Returns True for valid users, False otherwise.
"""
if person.uwregid in self.invalid_users:
return False
try:
valid_net_id(person.uwnetid)
except UserPolicyException as err:
self.invalid_users[person.uwregid] = True
self.logger.info("Skip user {}: {}".format(person.uwregid, err))
return False
if force is True:
self.data.add(UserCSV(person))
else:
user = User.objects.get_user(person)
if user.provisioned_date is None:
if (self.data.add(UserCSV(person)) and user.queue_id is None):
user.queue_id = self.queue_id
user.save()
return True
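# Illustrative behavior sketch: a person who fails valid_net_id is cached
# in self.invalid_users and skipped on later calls; force=True always emits
# a UserCSV row, otherwise one is emitted only for unprovisioned users.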
def add_teacher_enrollment_data(self, section, person, status='active'):
"""
Generates one teacher enrollment for the passed section and person.
"""
if self.add_user_data_for_person(person):
self.data.add(EnrollmentCSV(
section=section, instructor=person, status=status))
def add_student_enrollment_data(self, registration):
"""
Generates one student enrollment for the passed registration.
"""
if self.add_user_data_for_person(registration.person):
self.data.add(EnrollmentCSV(registration=registration))
def add_group_enrollment_data(self, login_id, section_id, role, status):
"""
Generates one enrollment for the passed group member.
"""
try:
person = get_person_by_netid(login_id)
if self.add_user_data_for_person(person):
self.data.add(EnrollmentCSV(
section_id=section_id, person=person, role=role,
status=status))
except InvalidLoginIdException:
try:
person = get_person_by_gmail_id(login_id)
if status == ENROLLMENT_ACTIVE:
self.data.add(UserCSV(person))
self.data.add(EnrollmentCSV(
section_id=section_id, person=person, role=role,
status=status))
except InvalidLoginIdException as ex:
self.logger.info("Skip group member {}: {}".format(
login_id, ex))
def add_registrations_by_section(self, section):
try:
for registration in get_registrations_by_section(section):
self.add_student_enrollment_data(registration)
except DataFailureException as ex:
self.logger.info("Skip enrollments for section {}: {}".format(
section.section_label(), ex))
def get_section_resource_by_id(self, section_id):
"""
Fetch the section resource for the passed section ID, and add to queue.
"""
try:
section = get_section_by_id(section_id)
Course.objects.add_to_queue(section, self.queue_id)
return section
except (ValueError, CoursePolicyException, DataFailureException) as ex:
Course.objects.remove_from_queue(section_id, ex)
self.logger.info("Skip section {}: {}".format(section_id, ex))
raise
| apache-2.0 | 5,952,493,172,330,509,000 | 36.390625 | 79 | 0.614919 | false |
steveb/heat | heat/tests/openstack/heat/test_remote_stack.py | 1 | 27511 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from heatclient import exc
from heatclient.v1 import stacks
import mock
from oslo_config import cfg
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common import template_format
from heat.engine.clients.os import heat_plugin
from heat.engine import environment
from heat.engine.resources.openstack.heat import remote_stack
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.engine import stack
from heat.engine import template
from heat.tests import common as tests_common
from heat.tests import utils
cfg.CONF.import_opt('action_retry_limit', 'heat.common.config')
parent_stack_template = '''
heat_template_version: 2013-05-23
resources:
remote_stack:
type: OS::Heat::Stack
properties:
context:
region_name: RegionOne
template: { get_file: remote_template.yaml }
timeout: 60
parameters:
name: foo
'''
remote_template = '''
heat_template_version: 2013-05-23
parameters:
name:
type: string
resources:
resource1:
type: GenericResourceType
outputs:
foo:
value: bar
'''
bad_template = '''
heat_template_version: 2013-05-26
parameters:
name:
type: string
resources:
resource1:
type: UnknownResourceType
outputs:
foo:
value: bar
'''
def get_stack(stack_id='c8a19429-7fde-47ea-a42f-40045488226c',
stack_name='teststack', description='No description',
creation_time='2013-08-04T20:57:55Z',
updated_time='2013-08-04T20:57:55Z',
stack_status='CREATE_COMPLETE',
stack_status_reason='',
outputs=None):
action = stack_status[:stack_status.index('_')]
status = stack_status[stack_status.index('_') + 1:]
data = {
'id': stack_id,
'stack_name': stack_name,
'description': description,
'creation_time': creation_time,
'updated_time': updated_time,
'stack_status': stack_status,
'stack_status_reason': stack_status_reason,
'action': action,
'status': status,
'outputs': outputs or None,
}
return stacks.Stack(mock.MagicMock(), data)
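# Illustrative note: get_stack(stack_status='DELETE_FAILED') returns a stack
# whose .action == 'DELETE' and .status == 'FAILED'; the tests below use this
# split to drive the resource through its state machine.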
class FakeClients(object):
def __init__(self, context, region_name=None):
self.ctx = context
self.region_name = region_name or 'RegionOne'
self.hc = None
self.plugin = None
def client(self, name):
if self.region_name in ['RegionOne', 'RegionTwo']:
if self.hc is None:
self.hc = mock.MagicMock()
return self.hc
else:
raise Exception('Failed connecting to Heat')
def client_plugin(self, name):
if self.plugin is None:
self.plugin = heat_plugin.HeatClientPlugin(self.ctx)
return self.plugin
class RemoteStackTest(tests_common.HeatTestCase):
def setUp(self):
super(RemoteStackTest, self).setUp()
self.this_region = 'RegionOne'
self.that_region = 'RegionTwo'
self.bad_region = 'RegionNone'
cfg.CONF.set_override('action_retry_limit', 0, enforce_type=True)
self.parent = None
self.heat = None
self.client_plugin = None
self.this_context = None
self.old_clients = None
def unset_clients_property():
if self.this_context is not None:
type(self.this_context).clients = self.old_clients
self.addCleanup(unset_clients_property)
def initialize(self):
parent, rsrc = self.create_parent_stack(remote_region='RegionTwo')
self.parent = parent
self.heat = rsrc._context().clients.client("heat")
self.client_plugin = rsrc._context().clients.client_plugin('heat')
def create_parent_stack(self, remote_region=None, custom_template=None):
snippet = template_format.parse(parent_stack_template)
self.files = {
'remote_template.yaml': custom_template or remote_template
}
region_name = remote_region or self.this_region
props = snippet['resources']['remote_stack']['properties']
# context property is not required, default to current region
if remote_region is None:
del props['context']
else:
props['context']['region_name'] = region_name
if self.this_context is None:
self.this_context = utils.dummy_context(
region_name=self.this_region)
tmpl = template.Template(snippet, files=self.files)
parent = stack.Stack(self.this_context, 'parent_stack', tmpl)
# parent context checking
ctx = parent.context.to_dict()
self.assertEqual(self.this_region, ctx['region_name'])
self.assertEqual(self.this_context.to_dict(), ctx)
parent.store()
resource_defns = parent.t.resource_definitions(parent)
rsrc = remote_stack.RemoteStack(
'remote_stack_res',
resource_defns['remote_stack'],
parent)
# remote stack resource checking
self.assertEqual(60, rsrc.properties.get('timeout'))
remote_context = rsrc._context()
hc = FakeClients(self.this_context, rsrc._region_name)
if self.old_clients is None:
self.old_clients = type(remote_context).clients
type(remote_context).clients = mock.PropertyMock(return_value=hc)
return parent, rsrc
def create_remote_stack(self):
# This method default creates a stack on RegionTwo (self.other_region)
defaults = [get_stack(stack_status='CREATE_IN_PROGRESS'),
get_stack(stack_status='CREATE_COMPLETE')]
if self.parent is None:
self.initialize()
# prepare clients to return status
self.heat.stacks.create.return_value = {'stack': get_stack().to_dict()}
self.heat.stacks.get = mock.MagicMock(side_effect=defaults)
rsrc = self.parent['remote_stack']
scheduler.TaskRunner(rsrc.create)()
return rsrc
def test_create_remote_stack_default_region(self):
parent, rsrc = self.create_parent_stack()
self.assertEqual((rsrc.INIT, rsrc.COMPLETE), rsrc.state)
self.assertEqual(self.this_region, rsrc._region_name)
ctx = rsrc.properties.get('context')
self.assertIsNone(ctx)
self.assertIsNone(rsrc.validate())
def test_create_remote_stack_this_region(self):
parent, rsrc = self.create_parent_stack(remote_region=self.this_region)
self.assertEqual((rsrc.INIT, rsrc.COMPLETE), rsrc.state)
self.assertEqual(self.this_region, rsrc._region_name)
ctx = rsrc.properties.get('context')
self.assertEqual(self.this_region, ctx['region_name'])
self.assertIsNone(rsrc.validate())
def test_create_remote_stack_that_region(self):
parent, rsrc = self.create_parent_stack(remote_region=self.that_region)
self.assertEqual((rsrc.INIT, rsrc.COMPLETE), rsrc.state)
self.assertEqual(self.that_region, rsrc._region_name)
ctx = rsrc.properties.get('context')
self.assertEqual(self.that_region, ctx['region_name'])
self.assertIsNone(rsrc.validate())
def test_create_remote_stack_bad_region(self):
parent, rsrc = self.create_parent_stack(remote_region=self.bad_region)
self.assertEqual((rsrc.INIT, rsrc.COMPLETE), rsrc.state)
self.assertEqual(self.bad_region, rsrc._region_name)
ctx = rsrc.properties.get('context')
self.assertEqual(self.bad_region, ctx['region_name'])
ex = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
msg = ('Cannot establish connection to Heat endpoint '
'at region "%s"' % self.bad_region)
self.assertIn(msg, six.text_type(ex))
def test_remote_validation_failed(self):
parent, rsrc = self.create_parent_stack(remote_region=self.that_region,
custom_template=bad_template)
self.assertEqual((rsrc.INIT, rsrc.COMPLETE), rsrc.state)
self.assertEqual(self.that_region, rsrc._region_name)
ctx = rsrc.properties.get('context')
self.assertEqual(self.that_region, ctx['region_name'])
# not setting or using self.heat because this test case is a special
# one with the RemoteStack resource initialized but not created.
heat = rsrc._context().clients.client("heat")
# heatclient.exc.BadRequest is the exception returned by a failed
# validation
heat.stacks.validate = mock.MagicMock(side_effect=exc.HTTPBadRequest)
ex = self.assertRaises(exception.StackValidationFailed, rsrc.validate)
msg = ('Failed validating stack template using Heat endpoint at region'
' "%s"') % self.that_region
self.assertIn(msg, six.text_type(ex))
def test_create(self):
rsrc = self.create_remote_stack()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c',
rsrc.resource_id)
env = environment.get_child_environment(rsrc.stack.env,
{'name': 'foo'})
args = {
'stack_name': rsrc.physical_resource_name(),
'template': template_format.parse(remote_template),
'timeout_mins': 60,
'disable_rollback': True,
'parameters': {'name': 'foo'},
'files': self.files,
'environment': env.user_env_as_dict(),
}
self.heat.stacks.create.assert_called_with(**args)
self.assertEqual(2, len(self.heat.stacks.get.call_args_list))
def test_create_failed(self):
returns = [get_stack(stack_status='CREATE_IN_PROGRESS'),
get_stack(stack_status='CREATE_FAILED',
stack_status_reason='Remote stack creation '
'failed')]
# Note: only this test case does an out-of-band initialization; most of
# the other test cases will have self.parent initialized.
if self.parent is None:
self.initialize()
self.heat.stacks.create.return_value = {'stack': get_stack().to_dict()}
self.heat.stacks.get = mock.MagicMock(side_effect=returns)
rsrc = self.parent['remote_stack']
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.create))
error_msg = ('ResourceInError: resources.remote_stack: '
'Went to status CREATE_FAILED due to '
'"Remote stack creation failed"')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
def test_delete(self):
returns = [get_stack(stack_status='DELETE_IN_PROGRESS'),
get_stack(stack_status='DELETE_COMPLETE')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=returns)
self.heat.stacks.delete = mock.MagicMock()
remote_stack_id = rsrc.resource_id
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.heat.stacks.delete.assert_called_with(stack_id=remote_stack_id)
def test_delete_already_gone(self):
rsrc = self.create_remote_stack()
self.heat.stacks.delete = mock.MagicMock(
side_effect=exc.HTTPNotFound())
self.heat.stacks.get = mock.MagicMock(side_effect=exc.HTTPNotFound())
remote_stack_id = rsrc.resource_id
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.heat.stacks.delete.assert_called_with(stack_id=remote_stack_id)
def test_delete_failed(self):
returns = [get_stack(stack_status='DELETE_IN_PROGRESS'),
get_stack(stack_status='DELETE_FAILED',
stack_status_reason='Remote stack deletion '
'failed')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=returns)
self.heat.stacks.delete = mock.MagicMock()
remote_stack_id = rsrc.resource_id
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.delete))
error_msg = ('ResourceInError: resources.remote_stack: '
'Went to status DELETE_FAILED due to '
'"Remote stack deletion failed"')
self.assertIn(error_msg, six.text_type(error))
self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
self.heat.stacks.delete.assert_called_with(stack_id=remote_stack_id)
self.assertEqual(rsrc.resource_id, remote_stack_id)
def test_attribute(self):
rsrc = self.create_remote_stack()
outputs = [
{
'output_key': 'foo',
'output_value': 'bar'
}
]
created_stack = get_stack(stack_name='stack1', outputs=outputs)
self.heat.stacks.get = mock.MagicMock(return_value=created_stack)
self.assertEqual('stack1', rsrc.FnGetAtt('stack_name'))
self.assertEqual('bar', rsrc.FnGetAtt('outputs')['foo'])
self.heat.stacks.get.assert_called_with(
stack_id='c8a19429-7fde-47ea-a42f-40045488226c')
def test_attribute_failed(self):
rsrc = self.create_remote_stack()
error = self.assertRaises(exception.InvalidTemplateAttribute,
rsrc.FnGetAtt, 'non-existent_property')
self.assertEqual(
'The Referenced Attribute (remote_stack non-existent_property) is '
'incorrect.',
six.text_type(error))
def test_snapshot(self):
stacks = [get_stack(stack_status='SNAPSHOT_IN_PROGRESS'),
get_stack(stack_status='SNAPSHOT_COMPLETE')]
snapshot = {
'id': 'a29bc9e25aa44f99a9a3d59cd5b0e263',
'status': 'IN_PROGRESS'
}
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
self.heat.stacks.snapshot = mock.MagicMock(return_value=snapshot)
scheduler.TaskRunner(rsrc.snapshot)()
self.assertEqual((rsrc.SNAPSHOT, rsrc.COMPLETE), rsrc.state)
self.assertEqual('a29bc9e25aa44f99a9a3d59cd5b0e263',
rsrc.data().get('snapshot_id'))
self.heat.stacks.snapshot.assert_called_with(
stack_id=rsrc.resource_id)
def test_restore(self):
snapshot = {
'id': 'a29bc9e25aa44f99a9a3d59cd5b0e263',
'status': 'IN_PROGRESS'
}
remote_stack = mock.MagicMock()
remote_stack.action = 'SNAPSHOT'
remote_stack.status = 'COMPLETE'
parent, rsrc = self.create_parent_stack()
rsrc.action = rsrc.SNAPSHOT
heat = rsrc._context().clients.client("heat")
heat.stacks.snapshot = mock.MagicMock(return_value=snapshot)
heat.stacks.get = mock.MagicMock(return_value=remote_stack)
scheduler.TaskRunner(parent.snapshot, None)()
self.assertEqual((parent.SNAPSHOT, parent.COMPLETE), parent.state)
data = parent.prepare_abandon()
remote_stack_snapshot = {
'snapshot': {
'id': 'a29bc9e25aa44f99a9a3d59cd5b0e263',
'status': 'COMPLETE',
'data': {
'files': data['files'],
'environment': data['environment'],
'template': template_format.parse(
data['files']['remote_template.yaml'])
}
}
}
fake_snapshot = collections.namedtuple(
'Snapshot', ('data', 'stack_id'))(data, parent.id)
heat.stacks.snapshot_show = mock.MagicMock(
return_value=remote_stack_snapshot)
self.patchobject(rsrc, 'update').return_value = None
rsrc.action = rsrc.UPDATE
rsrc.status = rsrc.COMPLETE
remote_stack.action = 'UPDATE'
parent.restore(fake_snapshot)
self.assertEqual((parent.RESTORE, parent.COMPLETE), parent.state)
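        # The flow above mirrors a stack-level restore: the parent stack is
        # snapshotted, the remote stack's snapshot payload is served back
        # through the mocked snapshot_show(), and restore() drives the
        # resource through an UPDATE into (RESTORE, COMPLETE).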
def test_check(self):
stacks = [get_stack(stack_status='CHECK_IN_PROGRESS'),
get_stack(stack_status='CHECK_COMPLETE')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
self.heat.actions.check = mock.MagicMock()
scheduler.TaskRunner(rsrc.check)()
self.assertEqual((rsrc.CHECK, rsrc.COMPLETE), rsrc.state)
self.heat.actions.check.assert_called_with(stack_id=rsrc.resource_id)
def test_check_failed(self):
returns = [get_stack(stack_status='CHECK_IN_PROGRESS'),
get_stack(stack_status='CHECK_FAILED',
stack_status_reason='Remote stack check failed')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=returns)
        self.heat.actions.check = mock.MagicMock()
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.check))
error_msg = ('ResourceInError: resources.remote_stack: '
'Went to status CHECK_FAILED due to '
'"Remote stack check failed"')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.CHECK, rsrc.FAILED), rsrc.state)
self.heat.actions.check.assert_called_with(stack_id=rsrc.resource_id)
def test_resume(self):
stacks = [get_stack(stack_status='RESUME_IN_PROGRESS'),
get_stack(stack_status='RESUME_COMPLETE')]
rsrc = self.create_remote_stack()
rsrc.action = rsrc.SUSPEND
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
self.heat.actions.resume = mock.MagicMock()
scheduler.TaskRunner(rsrc.resume)()
self.assertEqual((rsrc.RESUME, rsrc.COMPLETE), rsrc.state)
self.heat.actions.resume.assert_called_with(stack_id=rsrc.resource_id)
def test_resume_failed(self):
returns = [get_stack(stack_status='RESUME_IN_PROGRESS'),
get_stack(stack_status='RESUME_FAILED',
stack_status_reason='Remote stack resume failed')]
rsrc = self.create_remote_stack()
rsrc.action = rsrc.SUSPEND
self.heat.stacks.get = mock.MagicMock(side_effect=returns)
self.heat.actions.resume = mock.MagicMock()
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.resume))
error_msg = ('ResourceInError: resources.remote_stack: '
'Went to status RESUME_FAILED due to '
'"Remote stack resume failed"')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.RESUME, rsrc.FAILED), rsrc.state)
self.heat.actions.resume.assert_called_with(stack_id=rsrc.resource_id)
def test_resume_failed_not_created(self):
self.initialize()
rsrc = self.parent['remote_stack']
rsrc.action = rsrc.SUSPEND
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.resume))
error_msg = ('Error: resources.remote_stack: '
'Cannot resume remote_stack, resource not found')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.RESUME, rsrc.FAILED), rsrc.state)
def test_suspend(self):
stacks = [get_stack(stack_status='SUSPEND_IN_PROGRESS'),
get_stack(stack_status='SUSPEND_COMPLETE')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
self.heat.actions.suspend = mock.MagicMock()
scheduler.TaskRunner(rsrc.suspend)()
self.assertEqual((rsrc.SUSPEND, rsrc.COMPLETE), rsrc.state)
self.heat.actions.suspend.assert_called_with(stack_id=rsrc.resource_id)
def test_suspend_failed(self):
stacks = [get_stack(stack_status='SUSPEND_IN_PROGRESS'),
get_stack(stack_status='SUSPEND_FAILED',
stack_status_reason='Remote stack suspend failed')]
rsrc = self.create_remote_stack()
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
self.heat.actions.suspend = mock.MagicMock()
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.suspend))
error_msg = ('ResourceInError: resources.remote_stack: '
'Went to status SUSPEND_FAILED due to '
'"Remote stack suspend failed"')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.SUSPEND, rsrc.FAILED), rsrc.state)
        # suspend was attempted on the remote stack before it went FAILED
        self.heat.actions.suspend.assert_called_with(stack_id=rsrc.resource_id)
def test_suspend_failed_not_created(self):
self.initialize()
rsrc = self.parent['remote_stack']
# Note: the resource is not created so far
self.heat.actions.suspend = mock.MagicMock()
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.suspend))
error_msg = ('Error: resources.remote_stack: '
'Cannot suspend remote_stack, resource not found')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.SUSPEND, rsrc.FAILED), rsrc.state)
        # assert suspend was not called
        self.heat.actions.suspend.assert_not_called()
def test_update(self):
stacks = [get_stack(stack_status='UPDATE_IN_PROGRESS'),
get_stack(stack_status='UPDATE_COMPLETE')]
rsrc = self.create_remote_stack()
props = dict(rsrc.properties)
props['parameters']['name'] = 'bar'
update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
rsrc.type(),
props)
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertEqual('bar', rsrc.properties.get('parameters')['name'])
env = environment.get_child_environment(rsrc.stack.env,
{'name': 'bar'})
fields = {
'stack_id': rsrc.resource_id,
'template': template_format.parse(remote_template),
'timeout_mins': 60,
'disable_rollback': True,
'parameters': {'name': 'bar'},
'files': self.files,
'environment': env.user_env_as_dict(),
}
self.heat.stacks.update.assert_called_with(**fields)
self.assertEqual(2, len(self.heat.stacks.get.call_args_list))
def test_update_with_replace(self):
rsrc = self.create_remote_stack()
props = dict(rsrc.properties)
props['context']['region_name'] = 'RegionOne'
update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
rsrc.type(),
props)
self.assertRaises(exception.UpdateReplace,
scheduler.TaskRunner(rsrc.update, update_snippet))
def test_update_failed(self):
stacks = [get_stack(stack_status='UPDATE_IN_PROGRESS'),
get_stack(stack_status='UPDATE_FAILED',
stack_status_reason='Remote stack update failed')]
rsrc = self.create_remote_stack()
props = dict(rsrc.properties)
props['parameters']['name'] = 'bar'
update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
rsrc.type(),
props)
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.update,
update_snippet))
        error_msg = ('ResourceInError: resources.remote_stack: '
                     'Went to status UPDATE_FAILED due to '
                     '"Remote stack update failed"')
self.assertEqual(error_msg, six.text_type(error))
self.assertEqual((rsrc.UPDATE, rsrc.FAILED), rsrc.state)
self.assertEqual(2, len(self.heat.stacks.get.call_args_list))
def test_update_no_change(self):
stacks = [get_stack(stack_status='UPDATE_IN_PROGRESS'),
get_stack(stack_status='UPDATE_COMPLETE')]
rsrc = self.create_remote_stack()
props = dict(rsrc.properties)
update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
rsrc.type(),
props)
self.heat.stacks.get = mock.MagicMock(side_effect=stacks)
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
def test_remote_stack_refid(self):
t = template_format.parse(parent_stack_template)
stack = utils.parse_stack(t)
rsrc = stack['remote_stack']
rsrc.resource_id = 'xyz'
self.assertEqual('xyz', rsrc.FnGetRefId())
def test_remote_stack_refid_convergence_cache_data(self):
t = template_format.parse(parent_stack_template)
cache_data = {'remote_stack': {
'uuid': mock.ANY,
'id': mock.ANY,
'action': 'CREATE',
'status': 'COMPLETE',
'reference_id': 'convg_xyz'
}}
stack = utils.parse_stack(t, cache_data=cache_data)
rsrc = stack['remote_stack']
self.assertEqual('convg_xyz', rsrc.FnGetRefId())
def test_update_in_check_failed_state(self):
rsrc = self.create_remote_stack()
rsrc.state_set(rsrc.CHECK, rsrc.FAILED)
props = dict(rsrc.properties)
update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
rsrc.type(),
props)
self.assertRaises(exception.UpdateReplace,
scheduler.TaskRunner(rsrc.update, update_snippet))
| apache-2.0 | -2,916,390,377,502,339,000 | 38.871014 | 79 | 0.601178 | false |
pytorch/fairseq | fairseq/model_parallel/models/pipeline_parallel_transformer/model.py | 1 | 33560 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.model_parallel.models.pipeline_parallel_transformer.layers import (
Embedding,
TransformerDecoderEmbedding,
TransformerDecoderLayer,
TransformerDecoderOutputLayer,
TransformerEncoderEmbedding,
TransformerEncoderLayer,
TransformerEncoderLayerNorm,
)
from fairseq.models import (
BaseFairseqModel,
FairseqDecoder,
FairseqEncoder,
register_model,
register_model_architecture,
)
from fairseq.models.fairseq_encoder import EncoderOut
from fairseq.models.transformer import (
base_architecture,
transformer_iwslt_de_en,
transformer_wmt_en_de_big,
)
from fairseq.modules import SinusoidalPositionalEmbedding
logger = logging.getLogger(__name__)
DEFAULT_MAX_SOURCE_POSITIONS = 1024
DEFAULT_MAX_TARGET_POSITIONS = 1024
TORCH_PIPE = False
RPC_INIT = False
def import_pipe():
global TORCH_PIPE
global RPC_INIT
try:
from torch.distributed.pipeline.sync import Pipe # noqa
global Pipe
from torch.distributed.pipeline.sync.utils import partition_model
global partition_model
from torch.distributed import rpc
import tempfile
TORCH_PIPE = True
# Initialize single process RPC agent since TORCH_PIPE requires
# RRef. RRef depends on RPC being initialized and as a result we initialize
# RPC with a single node.
tmpfile = tempfile.NamedTemporaryFile()
if not RPC_INIT:
rpc.init_rpc(
name="worker",
rank=0,
world_size=1,
rpc_backend_options=rpc.TensorPipeRpcBackendOptions(
init_method="file://{}".format(tmpfile.name),
)
)
RPC_INIT = True
logger.info('Using torch pipe')
except ImportError:
try:
from fairscale.nn import Pipe # noqa
logger.info('Using fairscale pipe')
except ImportError:
raise ImportError("Please install fairscale with: pip install fairscale")
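# Note (illustrative, not from the original file): import_pipe() is
# effectively idempotent -- the module-level TORCH_PIPE and RPC_INIT flags
# guard it, so the repeated calls made from the model/encoder/decoder
# constructors below only initialize the single-process RPC agent once.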
@register_model("pipeline_parallel_transformer")
class PipelineParallelTransformerModel(BaseFairseqModel):
def __init__(self, encoder, decoder, balance, devices, chunks, checkpoint):
import_pipe()
super().__init__()
assert isinstance(encoder, FairseqEncoder)
assert isinstance(decoder, FairseqDecoder)
encoder_module_list = (
[encoder.embedding_layer]
+ list(encoder.encoder_layers)
+ [encoder.final_layer_norm]
)
self.num_encoder_modules = len(encoder_module_list)
decoder_module_list = (
[decoder.embedding_layer]
+ list(decoder.decoder_layers)
+ [decoder.decoder_output_layer]
)
self.num_decoder_modules = len(decoder_module_list)
module_list = encoder_module_list + decoder_module_list
self.devices = devices
if TORCH_PIPE:
self.model = Pipe(
partition_model(nn.Sequential(*module_list), balance, devices),
chunks=chunks,
checkpoint=checkpoint,
)
else:
self.model = Pipe(
nn.Sequential(*module_list),
balance=balance,
devices=devices,
chunks=chunks,
checkpoint=checkpoint,
)
self.encoder_max_positions = self.max_positions_helper(
encoder.embedding_layer, "max_source_positions"
)
self.decoder_max_positions = self.max_positions_helper(
decoder.embedding_layer, "max_target_positions"
)
self.adaptive_softmax = getattr(decoder, "adaptive_softmax", None)
# Note: To be populated during inference
self.encoder = None
self.decoder = None
def forward(self, src_tokens, src_lengths, prev_output_tokens):
if self.training:
input_lst = [src_tokens, src_lengths, prev_output_tokens]
input = tuple(i.to(self.devices[0], non_blocking=True) for i in input_lst)
if TORCH_PIPE:
return self.model(input).local_value()
else:
return self.model(input)
else:
assert self.encoder is not None and self.decoder is not None, (
"encoder and decoder need to be initialized by "
+ "calling the `prepare_for_inference_()` method"
)
            # run the standalone halves built by prepare_for_inference_();
            # the encoder takes (src_tokens, src_lengths) and the decoder
            # consumes the resulting EncoderOut
            encoder_output_tuple = self.encoder(src_tokens, src_lengths)
            return self.decoder(
                prev_output_tokens, encoder_out=encoder_output_tuple
            )
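    # Hedged usage sketch (not part of the original file; `args`, `task` and
    # the input tensors are hypothetical placeholders):
    #
    #   model = PipelineParallelTransformerModel.build_model(args, task)
    #   logits = model(src_tokens, src_lengths, prev_output_tokens)  # training
    #
    # For inference, prepare_for_inference_() must be called first so that
    # the Pipe partitions are re-split into standalone encoder/decoder
    # halves.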
def prepare_for_inference_(self, cfg):
if self.encoder is not None and self.decoder is not None:
logger.info("Encoder and Decoder already initialized")
return
encoder_module_list = []
decoder_module_list = []
module_count = 0
for partition in self.model.partitions:
for module in partition:
if module_count < self.num_encoder_modules:
encoder_module_list.append(module)
else:
decoder_module_list.append(module)
module_count += 1
self.model = None
self.encoder = TransformerEncoder(cfg.distributed_training, None, None, encoder_module_list)
self.decoder = TransformerDecoder(
cfg.distributed_training, None, None, decoder_module_list=decoder_module_list
)
@staticmethod
def add_args(parser):
"""Add model-specific arguments to the parser."""
# fmt: off
parser.add_argument('--activation-fn',
choices=utils.get_available_activation_fns(),
help='activation function to use')
parser.add_argument('--dropout', type=float, metavar='D',
help='dropout probability')
parser.add_argument('--attention-dropout', type=float, metavar='D',
help='dropout probability for attention weights')
parser.add_argument('--activation-dropout', '--relu-dropout', type=float, metavar='D',
help='dropout probability after activation in FFN.')
parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
help='path to pre-trained encoder embedding')
parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
help='encoder embedding dimension')
parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
help='encoder embedding dimension for FFN')
parser.add_argument('--encoder-layers', type=int, metavar='N',
help='num encoder layers')
parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
help='num encoder attention heads')
parser.add_argument('--encoder-normalize-before', action='store_true',
help='apply layernorm before each encoder block')
parser.add_argument('--encoder-learned-pos', action='store_true',
help='use learned positional embeddings in the encoder')
parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
help='path to pre-trained decoder embedding')
parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
help='decoder embedding dimension')
parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
help='decoder embedding dimension for FFN')
parser.add_argument('--decoder-layers', type=int, metavar='N',
help='num decoder layers')
parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
help='num decoder attention heads')
parser.add_argument('--decoder-learned-pos', action='store_true',
help='use learned positional embeddings in the decoder')
parser.add_argument('--decoder-normalize-before', action='store_true',
help='apply layernorm before each decoder block')
parser.add_argument('--share-decoder-input-output-embed', action='store_true',
help='share decoder input and output embeddings')
parser.add_argument('--share-all-embeddings', action='store_true',
help='share encoder, decoder and output embeddings'
' (requires shared dictionary and embed dim)')
parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true',
help='if set, disables positional embeddings (outside self attention)')
parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR',
help='comma separated list of adaptive softmax cutoff points. '
'Must be used with adaptive_loss criterion'),
parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D',
help='sets adaptive softmax dropout for the tail projections')
parser.add_argument('--num-embedding-chunks', type=int, metavar='N', default=1,
help='Number of embedding layer chunks (enables more even distribution'
'of optimizer states across data parallel nodes'
'when using optimizer state sharding and'
'a big embedding vocabulary)')
# fmt: on
@classmethod
def build_model_base(cls, args, task):
"""Build a new model instance."""
# make sure all arguments are present in older models
base_architecture(args)
if not hasattr(args, "max_source_positions"):
args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS
if not hasattr(args, "max_target_positions"):
args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS
src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
def build_embedding(dictionary, embed_dim, path=None, num_embed_chunks=1):
assert embed_dim % num_embed_chunks == 0, (
f"Number of embedding chunks = {num_embed_chunks} should be "
+ f"divisible by the embedding dimension = {embed_dim}"
)
assert path is None or num_embed_chunks == 1, (
"Loading embedding from a path with number of embedding chunks > 1"
+ " is not yet supported"
)
num_embeddings = len(dictionary)
padding_idx = dictionary.pad()
# if provided, load from preloaded dictionaries
if path:
emb = Embedding(num_embeddings, embed_dim, padding_idx)
embed_dict = utils.parse_embedding(path)
utils.load_embedding(embed_dict, dictionary, emb)
else:
embed_chunk_dim = embed_dim // num_embed_chunks
emb = nn.ModuleList()
for i in range(num_embed_chunks):
emb.append(Embedding(num_embeddings, embed_chunk_dim, padding_idx))
return emb
num_embed_chunks = args.num_embedding_chunks
if args.share_all_embeddings:
if src_dict != tgt_dict:
raise ValueError("--share-all-embeddings requires a joined dictionary")
if args.encoder_embed_dim != args.decoder_embed_dim:
raise ValueError(
"--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim"
)
if args.decoder_embed_path and (
args.decoder_embed_path != args.encoder_embed_path
):
raise ValueError(
"--share-all-embeddings not compatible with --decoder-embed-path"
)
encoder_embed_tokens = build_embedding(
src_dict,
args.encoder_embed_dim,
args.encoder_embed_path,
num_embed_chunks,
)
decoder_embed_tokens = encoder_embed_tokens
args.share_decoder_input_output_embed = True
else:
assert args.share_decoder_input_output_embed or num_embed_chunks == 1, (
"Not sharing decoder I/O embeddings is not yet supported with number of "
+ "embedding chunks > 1"
)
encoder_embed_tokens = build_embedding(
src_dict,
args.encoder_embed_dim,
args.encoder_embed_path,
num_embed_chunks,
)
decoder_embed_tokens = build_embedding(
tgt_dict,
args.decoder_embed_dim,
args.decoder_embed_path,
num_embed_chunks,
)
encoder = cls.build_encoder(args, src_dict, encoder_embed_tokens)
decoder = cls.build_decoder(args, tgt_dict, decoder_embed_tokens)
return (encoder, decoder)
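    # Worked example (illustrative): with --encoder-embed-dim 512 and
    # --num-embedding-chunks 4, build_embedding() above returns an
    # nn.ModuleList of four Embedding(vocab, 128) chunks whose outputs are
    # concatenated back to 512 dims (see the emb_dim summation in
    # TransformerEncoder.__init__), letting optimizer state sharding spread
    # a large embedding table more evenly across data-parallel nodes.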
@classmethod
def build_encoder(cls, args, src_dict, embed_tokens):
return TransformerEncoder(args, src_dict, embed_tokens)
@classmethod
def build_decoder(cls, args, tgt_dict, embed_tokens):
return TransformerDecoder(args, tgt_dict, embed_tokens)
@classmethod
def build_model(cls, args, task):
encoder, decoder = cls.build_model_base(args, task)
return PipelineParallelTransformerModel(
encoder=encoder,
decoder=decoder,
balance=utils.eval_str_list(args.pipeline_balance, type=int),
devices=utils.eval_str_list(args.pipeline_devices, type=int),
chunks=args.pipeline_chunks,
checkpoint=args.pipeline_checkpoint,
)
def output_layer(self, features, **kwargs):
"""Project features to the default output size (typically vocabulary size)."""
return self.decoder.output_layer(features, **kwargs)
def max_positions(self):
"""Maximum length supported by the model."""
return (self.encoder_max_positions, self.decoder_max_positions)
def max_positions_helper(
self, embedding_layer, max_positions_field="max_source_positions"
):
"""Maximum input length supported by the encoder or decoder."""
if embedding_layer.embed_positions is None:
return getattr(embedding_layer, max_positions_field)
return min(
getattr(embedding_layer, max_positions_field),
embedding_layer.embed_positions.max_positions,
)
def get_normalized_probs(self, net_output, log_probs, sample=None):
"""Get normalized probabilities (or log probs) from a net's output."""
if hasattr(self, "adaptive_softmax") and self.adaptive_softmax is not None:
if sample is not None:
assert "target" in sample
target = sample["target"]
else:
target = None
out = self.adaptive_softmax.get_log_prob(net_output, target=target)
return out.exp_() if not log_probs else out
# A Pipe() module returns a tuple of tensors as the output.
# In this case, the tuple has one element - the output tensor of logits
logits = net_output if isinstance(net_output, torch.Tensor) else net_output[0]
if log_probs:
return utils.log_softmax(logits, dim=-1, onnx_trace=False)
else:
return utils.softmax(logits, dim=-1, onnx_trace=False)
def max_decoder_positions(self):
"""Maximum length supported by the decoder."""
return self.decoder_max_positions
def load_state_dict(self, state_dict, strict=True, model_cfg=None):
"""Copies parameters and buffers from *state_dict* into this module and
its descendants.
Overrides the method in :class:`nn.Module`. Compared with that method
this additionally "upgrades" *state_dicts* from old checkpoints.
"""
self.upgrade_state_dict(state_dict)
is_regular_transformer = not any("model.partitions" in k for k in state_dict)
if is_regular_transformer:
state_dict = self.convert_to_pipeline_parallel_state_dict(state_dict)
return super().load_state_dict(state_dict, strict)
def convert_to_pipeline_parallel_state_dict(self, state_dict):
new_state_dict = self.state_dict()
encoder_layer_idx = 0
decoder_layer_idx = 0
encoder_key_suffixes = [
"self_attn.k_proj.weight",
"self_attn.k_proj.bias",
"self_attn.v_proj.weight",
"self_attn.v_proj.bias",
"self_attn.q_proj.weight",
"self_attn.q_proj.bias",
"self_attn.out_proj.weight",
"self_attn.out_proj.bias",
"self_attn_layer_norm.weight",
"self_attn_layer_norm.bias",
"fc1.weight",
"fc1.bias",
"fc2.weight",
"fc2.bias",
"final_layer_norm.weight",
"final_layer_norm.bias",
]
decoder_key_suffixes = [
"self_attn.k_proj.weight",
"self_attn.k_proj.bias",
"self_attn.v_proj.weight",
"self_attn.v_proj.bias",
"self_attn.q_proj.weight",
"self_attn.q_proj.bias",
"self_attn.out_proj.weight",
"self_attn.out_proj.bias",
"self_attn_layer_norm.weight",
"self_attn_layer_norm.bias",
"encoder_attn.k_proj.weight",
"encoder_attn.k_proj.bias",
"encoder_attn.v_proj.weight",
"encoder_attn.v_proj.bias",
"encoder_attn.q_proj.weight",
"encoder_attn.q_proj.bias",
"encoder_attn.out_proj.weight",
"encoder_attn.out_proj.bias",
"encoder_attn_layer_norm.weight",
"encoder_attn_layer_norm.bias",
"fc1.weight",
"fc1.bias",
"fc2.weight",
"fc2.bias",
"final_layer_norm.weight",
"final_layer_norm.bias",
]
for pid, partition in enumerate(self.model.partitions):
logger.info(f"Begin Partition {pid}")
for mid, module in enumerate(partition):
# fmt: off
if isinstance(module, TransformerEncoderEmbedding):
new_state_dict[f'model.partitions.{pid}.{mid}.embed_tokens.weight'] = state_dict['encoder.embed_tokens.weight']
new_state_dict[f'model.partitions.{pid}.{mid}.embed_positions._float_tensor'] = state_dict['encoder.embed_positions._float_tensor']
if isinstance(module, TransformerEncoderLayer):
for suffix in encoder_key_suffixes:
new_state_dict[f'model.partitions.{pid}.{mid}.{suffix}'] = state_dict[f'encoder.layers.{encoder_layer_idx}.{suffix}']
encoder_layer_idx += 1
if isinstance(module, TransformerDecoderLayer):
for suffix in decoder_key_suffixes:
new_state_dict[f'model.partitions.{pid}.{mid}.{suffix}'] = state_dict[f'decoder.layers.{decoder_layer_idx}.{suffix}']
decoder_layer_idx += 1
if isinstance(module, TransformerEncoderLayerNorm):
if 'encoder.layer_norm.weight' in state_dict:
new_state_dict[f'model.partitions.{pid}.{mid}.layer_norm.weight'] = state_dict['encoder.layer_norm.weight']
new_state_dict[f'model.partitions.{pid}.{mid}.layer_norm.bias'] = state_dict['encoder.layer_norm.bias']
if isinstance(module, TransformerDecoderEmbedding):
new_state_dict[f'model.partitions.{pid}.{mid}.embed_tokens.weight'] = state_dict['decoder.embed_tokens.weight']
new_state_dict[f'model.partitions.{pid}.{mid}.embed_positions._float_tensor'] = state_dict['decoder.embed_positions._float_tensor']
if isinstance(module, TransformerDecoderOutputLayer):
new_state_dict[f'model.partitions.{pid}.{mid}.output_projection.weight'] = state_dict['decoder.output_projection.weight']
# fmt: on
return new_state_dict
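    # Hedged usage sketch (names are hypothetical, not from this file):
    # loading a checkpoint trained with the regular transformer into the
    # pipeline model goes through the conversion above automatically:
    #
    #   state_dict = torch.load("plain_transformer.pt")["model"]
    #   pipeline_model.load_state_dict(state_dict)
    #
    # load_state_dict() detects the absence of "model.partitions" keys and
    # remaps e.g. "encoder.layers.0.fc1.weight" to
    # "model.partitions.<pid>.<mid>.fc1.weight".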
class TransformerEncoder(FairseqEncoder):
"""
Transformer encoder consisting of *args.encoder_layers* layers. Each layer
is a :class:`TransformerEncoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): encoding dictionary
embed_tokens (torch.nn.Embedding): input embedding
"""
def __init__(self, args, dictionary, embed_tokens, encoder_module_list=None):
super().__init__(dictionary)
self.register_buffer("version", torch.Tensor([3]))
import_pipe()
self.use_pipeline = encoder_module_list is not None
if not self.use_pipeline:
self.embedding_layer = TransformerEncoderEmbedding(args, embed_tokens)
            self.encoder_layers = nn.Sequential(*[
                TransformerEncoderLayer(args)
                for _ in range(args.encoder_layers)
            ])
if isinstance(embed_tokens, nn.ModuleList):
emb_dim = sum(e.embedding_dim for e in embed_tokens)
else:
emb_dim = embed_tokens.embedding_dim
self.final_layer_norm = TransformerEncoderLayerNorm(args, emb_dim)
else:
encoder_balance = utils.eval_str_list(
args.pipeline_encoder_balance, type=int
)
encoder_devices = utils.eval_str_list(
args.pipeline_encoder_devices, type=int
)
assert sum(encoder_balance) == len(encoder_module_list), (
f"Sum of encoder_balance={encoder_balance} is not equal "
+ f"to num_encoder_modules={len(encoder_module_list)}"
)
if TORCH_PIPE:
self.model = Pipe(
module=partition_model(nn.Sequential(*encoder_module_list), encoder_balance, encoder_devices),
chunks=args.pipeline_chunks,
checkpoint=args.pipeline_checkpoint,
)
else:
self.model = Pipe(
module=nn.Sequential(*encoder_module_list),
balance=encoder_balance,
devices=encoder_devices,
chunks=args.pipeline_chunks,
checkpoint=args.pipeline_checkpoint,
)
def forward(self, src_tokens, src_lengths):
"""
Args:
input_tuple(
src_tokens (LongTensor): tokens in the source language of shape
`(batch, src_len)`
src_lengths (torch.LongTensor): lengths of each source sentence of
shape `(batch)`
)
Returns:
output_tuple(
- **encoder_out** (Tensor): the last encoder layer's output of
shape `(src_len, batch, embed_dim)`
- **encoder_padding_mask** (ByteTensor): the positions of
padding elements of shape `(batch, src_len)`
- prev_output_tokens
- **encoder_states** (List[Tensor]): all intermediate
hidden states of shape `(src_len, batch, embed_dim)`.
Only populated if *return_all_hiddens* is True.
)
"""
dummy_prev_output_tokens = torch.zeros(
1, dtype=src_tokens.dtype, device=src_tokens.device
)
input_tuple = (src_tokens, src_lengths, dummy_prev_output_tokens)
if self.use_pipeline:
input_tuple = tuple(i.to(self.model.devices[0]) for i in input_tuple)
if TORCH_PIPE:
encoder_out = self.model(input_tuple).local_value()
else:
encoder_out = self.model(input_tuple)
else:
encoder_embed_output_tuple = self.embedding_layer(input_tuple)
encoder_layers_output = self.encoder_layers(encoder_embed_output_tuple)
encoder_out = self.final_layer_norm(encoder_layers_output)
# first element is the encoder output
# second element is the encoder padding mask
# the remaining elements of EncoderOut are not computed by
# the PipelineParallelTransformer
return EncoderOut(encoder_out[0], encoder_out[1], None, None, None, None)
def reorder_encoder_out(self, encoder_out, new_order):
"""
Reorder encoder output according to *new_order*.
Args:
encoder_out: output from the ``forward()`` method
new_order (LongTensor): desired order
Returns:
*encoder_out* rearranged according to *new_order*
"""
if encoder_out.encoder_out is not None:
encoder_out = encoder_out._replace(
encoder_out=encoder_out.encoder_out.index_select(1, new_order)
)
if encoder_out.encoder_padding_mask is not None:
encoder_out = encoder_out._replace(
encoder_padding_mask=encoder_out.encoder_padding_mask.index_select(
0, new_order
)
)
if encoder_out.encoder_embedding is not None:
encoder_out = encoder_out._replace(
encoder_embedding=encoder_out.encoder_embedding.index_select(
0, new_order
)
)
if encoder_out.encoder_states is not None:
for idx, state in enumerate(encoder_out.encoder_states):
encoder_out.encoder_states[idx] = state.index_select(1, new_order)
return encoder_out
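    # Illustrative example (assumed beam-search usage, not from the original
    # file): with a batch of 2 sentences and beam size 2, new_order would be
    # tensor([0, 0, 1, 1]), duplicating each sentence's encoder states along
    # the batch dimension before the beams are decoded in parallel.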
def max_positions(self):
"""Maximum input length supported by the encoder."""
if self.embedding_layer.embed_positions is None:
return self.embedding_layer.max_source_positions
return min(
self.embedding_layer.max_source_positions,
self.embedding_layer.embed_positions.max_positions,
)
class TransformerDecoder(FairseqDecoder):
"""
Transformer decoder consisting of *args.decoder_layers* layers. Each layer
is a :class:`TransformerDecoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): decoding dictionary
embed_tokens (torch.nn.Embedding): output embedding
no_encoder_attn (bool, optional): whether to attend to encoder outputs
(default: False).
"""
def __init__(
self,
args,
dictionary,
embed_tokens,
no_encoder_attn=False,
decoder_module_list=None,
):
super().__init__(dictionary)
self.register_buffer("version", torch.Tensor([3]))
import_pipe()
self.use_pipeline = decoder_module_list is not None
if not self.use_pipeline:
self.embedding_layer = TransformerDecoderEmbedding(args, embed_tokens)
self.decoder_layers = nn.Sequential(*[
TransformerDecoderLayer(args, no_encoder_attn)
for _ in range(args.decoder_layers)
])
self.decoder_output_layer = TransformerDecoderOutputLayer(
args, embed_tokens, dictionary
)
else:
decoder_balance = utils.eval_str_list(
args.pipeline_decoder_balance, type=int
)
decoder_devices = utils.eval_str_list(
args.pipeline_decoder_devices, type=int
)
assert sum(decoder_balance) == len(decoder_module_list), (
f"Sum of decoder_balance={decoder_balance} is not equal "
+ f"to num_decoder_modules={len(decoder_module_list)}"
)
if TORCH_PIPE:
self.model = Pipe(
module=partition_model(nn.Sequential(*decoder_module_list), decoder_balance, decoder_devices),
chunks=args.pipeline_chunks,
checkpoint=args.pipeline_checkpoint,
)
else:
self.model = Pipe(
module=nn.Sequential(*decoder_module_list),
balance=decoder_balance,
devices=decoder_devices,
chunks=args.pipeline_chunks,
checkpoint=args.pipeline_checkpoint,
)
def forward(
self,
prev_output_tokens,
encoder_out=None,
):
"""
Args:
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for teacher forcing
encoder_out (optional): output from the encoder, used for
encoder-side attention
incremental_state (dict): dictionary used for storing state during
:ref:`Incremental decoding`
features_only (bool, optional): only return features without
applying output layer (default: False).
Returns:
tuple:
- the decoder's output of shape `(batch, tgt_len, vocab)`
- a dictionary with any model-specific outputs
"""
input_tuple = (
encoder_out.encoder_out,
encoder_out.encoder_padding_mask,
prev_output_tokens,
)
if self.use_pipeline:
input_tuple = tuple(i.to(self.model.devices[0]) for i in input_tuple)
if TORCH_PIPE:
return (self.model(input_tuple).local_value(),)
else:
return (self.model(input_tuple),)
else:
embed_layer_output = self.embedding_layer(input_tuple)
state = self.decoder_layers(embed_layer_output)
return (self.decoder_output_layer(state),)
def output_layer(self, features, **kwargs):
"""Project features to the vocabulary size."""
if self.adaptive_softmax is None:
# project back to size of vocabulary
if self.share_input_output_embed:
return F.linear(features, self.embed_tokens.weight)
else:
return F.linear(features, self.embed_out)
else:
return features
def max_positions(self):
"""Maximum output length supported by the decoder."""
if self.embedding_layer.embed_positions is None:
return self.embedding_layer.max_target_positions
return min(
self.embedding_layer.max_target_positions,
self.embedding_layer.embed_positions.max_positions,
)
def buffered_future_mask(self, tensor):
dim = tensor.size(0)
if (
not hasattr(self, "_future_mask")
or self._future_mask is None
or self._future_mask.device != tensor.device
or self._future_mask.size(0) < dim
):
self._future_mask = torch.triu(
utils.fill_with_neg_inf(tensor.new(dim, dim)), 1
)
return self._future_mask[:dim, :dim]
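    # Worked example (illustrative): for dim=3 the buffered mask is
    #
    #   [[0., -inf, -inf],
    #    [0.,   0., -inf],
    #    [0.,   0.,   0.]]
    #
    # i.e. position i may only attend to positions <= i, which enforces the
    # autoregressive property during training.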
def upgrade_state_dict_named(self, state_dict, name):
"""Upgrade a (possibly old) state dict for new versions of fairseq."""
if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
weights_key = "{}.embed_positions.weights".format(name)
if weights_key in state_dict:
del state_dict[weights_key]
state_dict[
"{}.embed_positions._float_tensor".format(name)
] = torch.FloatTensor(1)
for i in range(len(self.layers)):
# update layer norms
layer_norm_map = {
"0": "self_attn_layer_norm",
"1": "encoder_attn_layer_norm",
"2": "final_layer_norm",
}
for old, new in layer_norm_map.items():
for m in ("weight", "bias"):
k = "{}.layers.{}.layer_norms.{}.{}".format(name, i, old, m)
if k in state_dict:
state_dict[
"{}.layers.{}.{}.{}".format(name, i, new, m)
] = state_dict[k]
del state_dict[k]
version_key = "{}.version".format(name)
if utils.item(state_dict.get(version_key, torch.Tensor([1]))[0]) <= 2:
# earlier checkpoints did not normalize after the stack of layers
self.layer_norm = None
self.normalize = False
state_dict[version_key] = torch.Tensor([1])
return state_dict
@register_model_architecture(
"pipeline_parallel_transformer", "transformer_iwslt_de_en_pipeline_parallel"
)
def transformer_iwslt_de_en_dist(args):
transformer_iwslt_de_en(args)
@register_model_architecture(
"pipeline_parallel_transformer", "transformer_wmt_en_de_big_pipeline_parallel"
)
def transformer_wmt_en_de_big_dist(args):
transformer_wmt_en_de_big(args)
| mit | -946,977,085,584,911,400 | 42.754889 | 151 | 0.57947 | false |
6112/project-euler | problems/051.py | 1 | 2678 | #encoding=utf-8
## SOLVED 2014/04/18
## 121313
# By replacing the 1st digit of the 2-digit number *3, it turns out that six of
# the nine possible values: 13, 23, 43, 53, 73, and 83, are all prime.
# By replacing the 3rd and 4th digits of 56**3 with the same digit, this
# 5-digit number is the first example having seven primes among the ten
# generated numbers, yielding the family: 56003, 56113, 56333, 56443, 56663,
# 56773, and 56993. Consequently 56003, being the first member of this family,
# is the smallest prime with this property.
# Find the smallest prime which, by replacing part of the number (not
# necessarily adjacent digits) with the same digit, is part of an eight prime
# value family.
import helpers.prime as prime
# number of replacements of digits that have to work
FAMILY_SIZE = 8
def euler():
# for each "starting" prime number
for prime_number in prime.primes(200000):
# list of integers for each digit
prime_number_digits = list(int(digit) for digit in str(prime_number))
# set (without duplicates) of the digits in the prime number
prime_number_digit_set = set(prime_number_digits)
# for each digit that could be replaced in the prime number
for base_digit in prime_number_digit_set:
# number of digit replacements that are actual prime numbers
prime_count = 0
# never replace the first digit with a zero
replacements = range(10) if prime_number_digits[0] != base_digit \
else range(1, 10)
# for each possible digit replacement
for replacement_digit in replacements:
# replace the digit base_digit with replacement_digit
modified_digits = replace(prime_number_digits, base_digit,
replacement_digit)
# convert that list to a number
modified_number = int(''.join(str(digit) \
for digit in modified_digits))
# if it's a prime, increment the prime count (duh)
if prime.is_prime(modified_number):
prime_count += 1
# return if the answer if we found it
if prime_count == FAMILY_SIZE:
return prime_number
def replace(xs, base, replacement):
"""Replaces every 'base' in 'xs' with 'replacement'. Non destructive.
Args:
xs: Initial list of elements.
base: Element to be replaced in the new list.
replacement: Element to replace that value with.
Returns:
A new list with the replacement applied."""
return [x if x != base else replacement for x in xs]
| mit | -6,838,346,259,325,526,000 | 42.901639 | 79 | 0.641897 | false |
etkirsch/legends-of-erukar | erukar/content/skills/brutality/Cleave.py | 1 | 4561 | from erukar.system.engine import Attack, Damage, DamageScalar
from erukar.ext.math import Navigator
class Cleave(Attack):
Name = 'Cleave'
ShowInLists = True
Description = 'Cleave with {} at {}'
CurrentLevel = 'Swing wildly, inflicting {:0.1f}% damage per tile '\
'in three spaces at once. Cleave uses two action points and '\
'only rolls an attack once. Damage will hit in a in a 90 degree '\
'arc centered on the specified space and will only hit hostile '\
'creatures.'
NextLevel = 'Increases percentage of damage per tile to {:0.1f}%.'
OnSwing = 'You swing your {weapon} in the air wildly!'
SeeSwing = '{attacker} swings its {weapon} wildly!'
HitNothing = 'You fail to hit any target with your Cleave attack!'
def ap_cost(self, *_):
return 2
def command(self, creature, weapon, loc):
return {
'command': 'ActivateAbility',
'abilityModule': self.__module__,
'cost': self.ap_cost(None, None),
'description': self.format_description(creature, weapon, loc),
'weapon': str(weapon.uuid),
'interaction_target': loc,
}
def format_description(self, target, weapon, loc):
return self.Description.format(weapon.alias(), loc)
def valid_at(self, cmd, loc):
player = cmd.args['player_lifeform']
if player.action_points() < self.ap_cost(cmd, loc):
return False
if not any(Attack.weapons_in_range(player, loc)):
return False
return True
def action_for_map(self, cmd, loc):
player = cmd.args['player_lifeform']
for weapon in Attack.weapons_in_range(player, loc):
yield self.command(player, weapon, loc)
def perform_attack(self, cmd, player, weapon, target):
roll = self.calc_attack_roll(cmd, player, weapon, target)
targets = list(self.affected_enemies(cmd))
if len(targets) < 1:
whoops = Cleave.HitNothing.format(weapon.alias())
cmd.append_result(player.uid, whoops)
for enemy in targets:
self.perform_sub_attack(cmd, player, weapon, enemy, roll)
return cmd.succeed()
def calc_attack_roll(self, cmd, player, weapon, target):
weapon.on_calculate_attack(cmd)
roll = player.calculate_attack_roll(0.8, target)
for mod in self.possible_modifiers:
mod_name = 'modify_attack_roll'
roll = mod.modify_element(mod_name, roll)
return roll
def affected_enemies(self, cmd):
player = cmd.args['player_lifeform']
at = cmd.args['interaction_target']
for loc in self.affected_tiles(player, at):
for enemy in cmd.world.creatures_at(player, loc):
yield enemy
def perform_sub_attack(self, cmd, player, weapon, enemy, roll):
if not enemy.is_hostile_to(player):
return
hit = self.attack_succeeded(cmd, player, weapon, enemy, roll)
if not hit:
return
self.do_damage(cmd, player, weapon, enemy)
self.check_for_kill(cmd, player, weapon, enemy)
def is_in_valid_range(self, player, weapon, target):
dist = Navigator.distance(player.coordinates, target)
return dist <= weapon.attack_range(player)
    @staticmethod
    def mit_carried_through(level):
if level < 5:
return 1.0
return 1.0 - (level - 4) * 0.10
    @staticmethod
    def multiplier(level):
        if level > 4:
            return 1.0
        # assumed linear ramp: 80% of damage at level 1 up to 95% at level 4,
        # capped at 100% from level 5 on
        return 0.75 + 0.05 * level
def current_level_description(self):
percent = Cleave.multiplier(self.level) * 100.0
return self.CurrentLevel.format(percent)
def next_level_description(self):
percent = Cleave.multiplier(self.level + 1) * 100.0
return self.NextLevel.format(percent)
def affected_tiles(self, player, loc):
p_x, p_y = player.coordinates
x, y = loc
if p_x == x:
return Cleave.horizontal_tiles(p_x, y)
if p_y == y:
return Cleave.vertical_tiles(x, p_y)
return Cleave.arc_tiles(x, y, p_x, p_y)
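    # Illustrative example (not from the original file): a player at (3, 0)
    # cleaving at (3, 4) shares the target's x, so horizontal_tiles() returns
    # [(2, 4), (3, 4), (4, 4)] -- the target tile plus its two horizontal
    # neighbours.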
    @staticmethod
    def horizontal_tiles(x, y):
        return [(x-1, y), (x, y), (x+1, y)]
    @staticmethod
    def vertical_tiles(x, y):
        return [(x, y-1), (x, y), (x, y+1)]
    @staticmethod
    def arc_tiles(x, y, p_x, p_y):
tiles = [(p_x, p_y)]
# X Tiles
tiles += [(p_x+1, p_y)] if x < p_x\
else [(p_x-1, p_y)]
# Y Tiles
tiles += [(p_x, p_y+1)] if y < p_y\
else [(p_x, p_y-1)]
return tiles
| agpl-3.0 | -1,209,525,495,349,219,600 | 34.913386 | 74 | 0.585398 | false |
RNAcentral/rnacentral-import-pipeline | tests/databases/ensembl/genomes/plants_test.py | 1 | 26982 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import attr
import pytest
from rnacentral_pipeline.databases import data as dat
from rnacentral_pipeline.databases.helpers import publications as pubs
from rnacentral_pipeline.databases.ensembl import plants
from . import helpers
@pytest.fixture(scope="module") # pylint: disable=no-member
def cress_2():
return helpers.parse(
plants.parse,
"data/ensembl_plants/Arabidopsis_thaliana.TAIR10.40.chromosome.2.dat",
)
@pytest.fixture(scope="module") # pylint: disable=no-member
def oryza_9():
return helpers.parse(
plants.parse,
"data/ensembl_plants/Oryza_barthii.O.barthii_v1.41.chromosome.9.dat",
)
@pytest.fixture(scope="module") # pylint: disable=no-member
def hordeum_pt():
return helpers.parse(
plants.parse, "data/ensembl_plants/Hordeum_vulgare.IBSC_v2.41.chromosome.Pt.dat"
)
@pytest.fixture(scope="module") # pylint: disable=no-member
def zea_7():
return helpers.parse(
plants.parse, "data/ensembl_plants/Zea_mays.B73_RefGen_v4.41.chromosome.7.dat"
)
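# Note (illustrative): each fixture above parses its EMBL flat file once per
# test module via helpers.parse(), and the tests below look individual
# entries up by accession with helpers.entry_for()/helpers.has_entry_for().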
def test_can_parse_data(cress_2):
val = attr.asdict(helpers.entry_for(cress_2, "ENSEMBL_PLANTS:AT2G01010.1"))
assert val == attr.asdict(
dat.Entry(
primary_id="AT2G01010.1",
accession="ENSEMBL_PLANTS:AT2G01010.1",
ncbi_tax_id=3702,
database="ENSEMBL_PLANTS",
sequence=(
"TACCTGGTTGATCCTGCCAGTAGTCATATGCTTGTCTCAAAGATTAAGCCATGCATGTGT"
"AAGTATGAACGAATTCAGACTGTGAAACTGCGAATGGCTCATTAAATCAGTTATAGTTTG"
"TTTGATGGTAACTACTACTCGGATAACCGTAGTAATTCTAGAGCTAATACGTGCAACAAA"
"CCCCGACTTATGGAAGGGACGCATTTATTAGATAAAAGGTCGACGCGGGCTCTGCCCGTT"
"GCTCTGATGATTCATGATAACTCGACGGATCGCATGGCCTCTGTGCTGGCGACGCATCAT"
"TCAAATTTCTGCCCTATCAACTTTCGATGGTAGGATAGTGGCCTACCATGGTGGTAACGG"
"GTGACGGAGAATTAGGGTTCGATTCCGGAGAGGGAGCCTGAGAAACGGCTACCACATCCA"
"AGGAAGGCAGCAGGCGCGCAAATTACCCAATCCTGACACGGGGAGGTAGTGACAATAAAT"
"AACAATACTGGGCTCTTTCGAGTCTGGTAATTGGAATGAGTACAATCTAAATCCCTTAAC"
"GAGGATCCATTGGAGGGCAAGTCTGGTGCCAGCAGCCGCGGTAATTCCAGCTCCAATAGC"
"GTATATTTAAGTTGTTGCAGTTAAAAAGCTCGTAGTTGAACCTTGGGATGGGTCGGCCGG"
"TCCGCCTTTGGTGTGCATTGGTCGGCTTGTCCCTTCGGTCGGCGATACGCTCCTGGTCTT"
"AATTGGCCGGGTCGTGCCTCCGGCGCTGTTACTTTGAAGAAATTAGAGTGCTCAAAGCAA"
"GCCTACGCTCTGGATACATTAGCATGGGATAACATCATAGGATTTCGATCCTATTGTGTT"
"GGCCTTCGGGATCGGAGTAATGATTAACAGGGACAGTCGGGGGCATTCGTATTTCATAGT"
"CAGAGGTGAAATTCTTGGATTTATGAAAGACGAACAACTGCGAAAGCATTTGCCAAGGAT"
"GTTTTCATTAATCAAGAACGAAAGTTGGGGGCTCGAAGACGATCAGATACCGTCCTAGTC"
"TCAACCATAAACGATGCCGACCAGGGATCAGCGGATGTTGCTTATAGGACTCCGCTGGCA"
"CCTTATGAGAAATCAAAGTTTTTGGGTTCCGGGGGGAGTATGGTCGCAAGGCTGAAACTT"
"AAAGGAATTGACGGAAGGGCACCACCAGGAGTGGAGCCTGCGGCTTAATTTGACTCAACA"
"CGGGGAAACTTACCAGGTCCAGACATAGTAAGGATTGACAGACTGAGAGCTCTTTCTTGA"
"TTCTATGGGTGGTGGTGCATGGCCGTTCTTAGTTGGTGGAGCGATTTGTCTGGTTAATTC"
"CGTTAATGAACGAGACCTCAGCCTGCTAACTAGCTACGTGGAGGCATCCCTTCACGGCCG"
"GCTTCTTAGAGGGACTATGGCCGTTTAGGCCAAGGAAGTTTGAGGCAATAACAGGTCTGT"
"GATGCCCTTAGATGTTCTGGGCCGCACGCGCGCTACACTGATGTATTCAACGAGTTCACA"
"CCTTGGCCGACAGGCCCGGGTAATCTTTGAAATTTCATCGTGATGGGGATAGATCATTGC"
"AATTGTTGGTCTTCAACGAGGAATTCCTAGTAAGCGCGAGTCATCAGCTCGCGTTGACTA"
"CGTCCCTGCCCTTTGTACACACCGCCCGTCGCTCCTACCGATTGAATGATCCGGTGAAGT"
"GTTCGGATCGCGGCGACGTGGGTGGTTCGCCGCCCGCGACGTCGCGAGAAGTCCACTAAA"
"CCTTATCATTTAGAGGAAGGAGAAGTCGTAACAAGGTTTCCGTAGGTGAACCTGCGGAAG"
"GATCATTG"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=1,
exons=[dat.Exon(start=3706, stop=5513)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="rRNA",
url="",
seq_version="1",
note_data={},
xref_data={
"RefSeq": ["NR_139968.1"],
"TAIR": ["AT2G01010.1"],
},
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="AT2G01010",
description="Arabidopsis thaliana (thale-cress) rRNA",
references=[pubs.reference(29092050)],
)
)
def test_can_create_tair_entry(cress_2):
val = attr.asdict(helpers.entry_for(cress_2, "TAIR:AT2G01010.1"))
assert val == attr.asdict(
dat.Entry(
primary_id="AT2G01010.1",
accession="TAIR:AT2G01010.1",
ncbi_tax_id=3702,
database="TAIR",
sequence=(
"TACCTGGTTGATCCTGCCAGTAGTCATATGCTTGTCTCAAAGATTAAGCCATGCATGTGT"
"AAGTATGAACGAATTCAGACTGTGAAACTGCGAATGGCTCATTAAATCAGTTATAGTTTG"
"TTTGATGGTAACTACTACTCGGATAACCGTAGTAATTCTAGAGCTAATACGTGCAACAAA"
"CCCCGACTTATGGAAGGGACGCATTTATTAGATAAAAGGTCGACGCGGGCTCTGCCCGTT"
"GCTCTGATGATTCATGATAACTCGACGGATCGCATGGCCTCTGTGCTGGCGACGCATCAT"
"TCAAATTTCTGCCCTATCAACTTTCGATGGTAGGATAGTGGCCTACCATGGTGGTAACGG"
"GTGACGGAGAATTAGGGTTCGATTCCGGAGAGGGAGCCTGAGAAACGGCTACCACATCCA"
"AGGAAGGCAGCAGGCGCGCAAATTACCCAATCCTGACACGGGGAGGTAGTGACAATAAAT"
"AACAATACTGGGCTCTTTCGAGTCTGGTAATTGGAATGAGTACAATCTAAATCCCTTAAC"
"GAGGATCCATTGGAGGGCAAGTCTGGTGCCAGCAGCCGCGGTAATTCCAGCTCCAATAGC"
"GTATATTTAAGTTGTTGCAGTTAAAAAGCTCGTAGTTGAACCTTGGGATGGGTCGGCCGG"
"TCCGCCTTTGGTGTGCATTGGTCGGCTTGTCCCTTCGGTCGGCGATACGCTCCTGGTCTT"
"AATTGGCCGGGTCGTGCCTCCGGCGCTGTTACTTTGAAGAAATTAGAGTGCTCAAAGCAA"
"GCCTACGCTCTGGATACATTAGCATGGGATAACATCATAGGATTTCGATCCTATTGTGTT"
"GGCCTTCGGGATCGGAGTAATGATTAACAGGGACAGTCGGGGGCATTCGTATTTCATAGT"
"CAGAGGTGAAATTCTTGGATTTATGAAAGACGAACAACTGCGAAAGCATTTGCCAAGGAT"
"GTTTTCATTAATCAAGAACGAAAGTTGGGGGCTCGAAGACGATCAGATACCGTCCTAGTC"
"TCAACCATAAACGATGCCGACCAGGGATCAGCGGATGTTGCTTATAGGACTCCGCTGGCA"
"CCTTATGAGAAATCAAAGTTTTTGGGTTCCGGGGGGAGTATGGTCGCAAGGCTGAAACTT"
"AAAGGAATTGACGGAAGGGCACCACCAGGAGTGGAGCCTGCGGCTTAATTTGACTCAACA"
"CGGGGAAACTTACCAGGTCCAGACATAGTAAGGATTGACAGACTGAGAGCTCTTTCTTGA"
"TTCTATGGGTGGTGGTGCATGGCCGTTCTTAGTTGGTGGAGCGATTTGTCTGGTTAATTC"
"CGTTAATGAACGAGACCTCAGCCTGCTAACTAGCTACGTGGAGGCATCCCTTCACGGCCG"
"GCTTCTTAGAGGGACTATGGCCGTTTAGGCCAAGGAAGTTTGAGGCAATAACAGGTCTGT"
"GATGCCCTTAGATGTTCTGGGCCGCACGCGCGCTACACTGATGTATTCAACGAGTTCACA"
"CCTTGGCCGACAGGCCCGGGTAATCTTTGAAATTTCATCGTGATGGGGATAGATCATTGC"
"AATTGTTGGTCTTCAACGAGGAATTCCTAGTAAGCGCGAGTCATCAGCTCGCGTTGACTA"
"CGTCCCTGCCCTTTGTACACACCGCCCGTCGCTCCTACCGATTGAATGATCCGGTGAAGT"
"GTTCGGATCGCGGCGACGTGGGTGGTTCGCCGCCCGCGACGTCGCGAGAAGTCCACTAAA"
"CCTTATCATTTAGAGGAAGGAGAAGTCGTAACAAGGTTTCCGTAGGTGAACCTGCGGAAG"
"GATCATTG"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=1,
exons=[dat.Exon(start=3706, stop=5513)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="rRNA",
url="",
seq_version="1",
note_data={},
xref_data={
"RefSeq": ["NR_139968.1"],
},
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="AT2G01010",
description="Arabidopsis thaliana (thale-cress) rRNA",
references=[pubs.reference(29092050)],
)
)
@pytest.mark.parametrize(
"accession,status",
[
("TAIR:AT2G01010.1", True),
("TAIR:ENSRNA049757808-T1", False),
],
)
def generates_expected_inferred_entries(cress_2, accession, status):
assert helpers.has_entry_for(cress_2, accession) == status
def test_can_get_with_odd_rna_type(cress_2):
val = attr.asdict(helpers.entry_for(cress_2, "ENSEMBL_PLANTS:AT2G03895.1"))
assert val == attr.asdict(
dat.Entry(
primary_id="AT2G03895.1",
accession="ENSEMBL_PLANTS:AT2G03895.1",
ncbi_tax_id=3702,
database="ENSEMBL_PLANTS",
sequence=(
"GGTGGTCTCTGTTGGTGAATCGTCGTCATTGAGAGCTGACACCGGCCCAAAGCCTTTGCT"
"CCGGCGTTGCGTGACGGAGTATCGGAGTCCAGCTTCCCTCCACGAATTGCAGAAAGTTAC"
"AGCGTAAGGACAACGCTGCTTTGTAGGCGAACCCAAGTTGCGAGTGGTGAGGCGGAAATG"
"GTGGATAAGAGCAGAACTAGTGCTTGTGCTGCTC"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=1,
exons=[dat.Exon(start=84873, stop=85086)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="lncRNA",
url="",
seq_version="1",
note_data={},
xref_data={
"RefSeq": ["NR_139974.1"],
},
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="AT2G03895",
description="Arabidopsis thaliana (thale-cress) lncRNA AT2G03895",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_a_trna(cress_2):
val = attr.asdict(helpers.entry_for(cress_2, "ENSEMBL_PLANTS:ENSRNA049492366-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049492366-T1",
accession="ENSEMBL_PLANTS:ENSRNA049492366-T1",
ncbi_tax_id=3702,
database="ENSEMBL_PLANTS",
sequence=(
"GCTGGAATAGCTCAGTTGGTTAGAGCGTGTGGCTGTTAACCACAAGGTCGGAGGTTCGAC"
"CCCTCCTTCTAGCG"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=1,
exons=[dat.Exon(start=102065, stop=102138)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="tRNA",
url="",
seq_version="1",
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="ENSRNA049492366",
locus_tag="tRNA-Asn",
description="Arabidopsis thaliana (thale-cress) tRNA-Asn for anticodon GUU",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_gene_with_minimal_metadata(cress_2):
assert attr.asdict(
helpers.entry_for(cress_2, "ENSEMBL_PLANTS:AT2G03905.1")
) == attr.asdict(
dat.Entry(
primary_id="AT2G03905.1",
accession="ENSEMBL_PLANTS:AT2G03905.1",
ncbi_tax_id=3702,
database="ENSEMBL_PLANTS",
sequence=(
"CGCCGTTAGTCCGTGAGGAGAAAATAGGCCCACTCTGGCACACTCTCTCTGGGTTTAGGT"
"TTAGGTTTTTTTGGGGCTCTCTATCCTAAGAAACTAGGAGACATCACACTTCACCAAGTC"
"TACTTATCGACAATTTTATCGTATCACCATAACGACAATAAGGGCCGGACTAATGTTTGT"
"ACACATGTCCTCTCCTTTTACCCTT"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=-1,
exons=[dat.Exon(start=122882, stop=123086)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="lncRNA",
url="",
seq_version="1",
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="AT2G03905",
description="Arabidopsis thaliana (thale-cress) lncRNA AT2G03905",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_premirna(cress_2):
val = attr.asdict(helpers.entry_for(cress_2, "ENSEMBL_PLANTS:ENSRNA049757815-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049757815-T1",
accession="ENSEMBL_PLANTS:ENSRNA049757815-T1",
ncbi_tax_id=3702,
database="ENSEMBL_PLANTS",
sequence=(
"CGTAAAGCAGGTGATTCACCAATTTAGGTTTACATCCACAGTGTGGAAGACACTGAAGGA"
"CCTAAACTAACAAAGGTAAACGGCTCAGTGTGCGGGGTATTACACTCGGTTTAATGTCTG"
"AATGCGATAATCCGCACGATGATCTCTTTATCTTTGTTTGTTTAGGTCCCTTAGTTTCTT"
"CTATACCGTGAATCCAATCCTGTATTGGATGAGCTGGTTTATGACC"
),
regions=[
dat.SequenceRegion(
chromosome="2",
strand=-1,
exons=[dat.Exon(start=771337, stop=771562)],
assembly_id="TAIR10",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="pre_miRNA",
url="",
seq_version="1",
species="Arabidopsis thaliana",
common_name="thale-cress",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"eudicotyledons; Gunneridae; Pentapetalae; rosids; malvids; "
"Brassicales; Brassicaceae; Camelineae; Arabidopsis; "
"Arabidopsis thaliana"
),
gene="ENSRNA049757815",
locus_tag="ath-MIR840",
description="Arabidopsis thaliana (thale-cress) pre miRNA ath-MIR840",
references=[pubs.reference(29092050)],
)
)
# OBART09G00240.1 transposable_elements
# ENSRNA049475598-T1 sense_intronic
@pytest.mark.skip()
def test_skips_transposable_elements(oryza_9):
pass
def test_can_parse_rice_trna(oryza_9):
val = attr.asdict(helpers.entry_for(oryza_9, "ENSEMBL_PLANTS:ENSRNA049456349-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049456349-T1",
accession="ENSEMBL_PLANTS:ENSRNA049456349-T1",
ncbi_tax_id=65489,
database="ENSEMBL_PLANTS",
sequence="TCCGTTGTAGTCTAGCTGGTTAGGATACTCGGCTCTCACCCGAGAGACCCGGGTTCGAGTCCCGGCAACGGAA",
regions=[
dat.SequenceRegion(
chromosome="9",
strand=1,
exons=[dat.Exon(start=747032, stop=747104)],
assembly_id="O.barthii_v1",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="tRNA",
url="",
seq_version="1",
species="Oryza barthii",
common_name="African wild rice",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"Liliopsida; Poales; Poaceae; BOP clade; Oryzoideae; "
"Oryzeae; Oryzinae; Oryza; Oryza barthii"
),
gene="ENSRNA049456349",
locus_tag="tRNA-Glu",
description="Oryza barthii (African wild rice) tRNA-Glu for anticodon CUC",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_rice_snorna(oryza_9):
val = attr.asdict(helpers.entry_for(oryza_9, "ENSEMBL_PLANTS:ENSRNA049475670-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049475670-T1",
accession="ENSEMBL_PLANTS:ENSRNA049475670-T1",
ncbi_tax_id=65489,
database="ENSEMBL_PLANTS",
sequence="AAAAAAGCAGGATGCTGTGTTCTCTATAAGCAGTGTCCTCGTAAATTTTAGGAACATGTTTCATCGTTATTGGGTGAACCGTTGGGCTATTCAATGTCCATTGGTTCAGTAAATGATGGCACATTT",
regions=[
dat.SequenceRegion(
chromosome="9",
strand=-1,
exons=[dat.Exon(start=3344023, stop=3344148)],
assembly_id="O.barthii_v1",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="snoRNA",
url="",
seq_version="1",
species="Oryza barthii",
common_name="African wild rice",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"Liliopsida; Poales; Poaceae; BOP clade; Oryzoideae; "
"Oryzeae; Oryzinae; Oryza; Oryza barthii"
),
gene="ENSRNA049475670",
locus_tag="snoR74",
description="Oryza barthii (African wild rice) small nucleolar RNA snoR74",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_rice_pre_mirna(oryza_9):
val = attr.asdict(helpers.entry_for(oryza_9, "ENSEMBL_PLANTS:ENSRNA049475651-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049475651-T1",
accession="ENSEMBL_PLANTS:ENSRNA049475651-T1",
ncbi_tax_id=65489,
database="ENSEMBL_PLANTS",
sequence="CCTCCCCGCCGGACCTCCCAGTGAGGAGGCTAGGGCCGCCAGGTCCGGTGATCCCATTCTCCTTGCCGGCGGATTCTGCGCCCTAGA",
regions=[
dat.SequenceRegion(
chromosome="9",
strand=-1,
exons=[dat.Exon(start=3622031, stop=3622117)],
assembly_id="O.barthii_v1",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="pre_miRNA",
url="",
seq_version="1",
species="Oryza barthii",
common_name="African wild rice",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"Liliopsida; Poales; Poaceae; BOP clade; Oryzoideae; "
"Oryzeae; Oryzinae; Oryza; Oryza barthii"
),
gene="ENSRNA049475651",
locus_tag="MIR1846",
description="Oryza barthii (African wild rice) microRNA MIR1846",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_rice_u6(oryza_9):
val = attr.asdict(helpers.entry_for(oryza_9, "ENSEMBL_PLANTS:ENSRNA049475710-T1"))
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049475710-T1",
accession="ENSEMBL_PLANTS:ENSRNA049475710-T1",
ncbi_tax_id=65489,
database="ENSEMBL_PLANTS",
sequence="GTAGCTTATATACGCTGCTGTGCATAAAATTGAAACGATACAGAGAAGATTAGCATGGCCCCTGCGCAAGGAAGACGCACACAAATCGAGAAGTGGTCCAAATTTTT",
regions=[
dat.SequenceRegion(
chromosome="9",
strand=1,
exons=[dat.Exon(start=6092721, stop=6092827)],
assembly_id="O.barthii_v1",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="snRNA",
url="",
seq_version="1",
species="Oryza barthii",
common_name="African wild rice",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"Liliopsida; Poales; Poaceae; BOP clade; Oryzoideae; "
"Oryzeae; Oryzinae; Oryza; Oryza barthii"
),
gene="ENSRNA049475710",
locus_tag="U6",
description="Oryza barthii (African wild rice) U6 spliceosomal RNA",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_barley_antisense(hordeum_pt):
val = attr.asdict(
helpers.entry_for(hordeum_pt, "ENSEMBL_PLANTS:ENSRNA049483195-T1")
)
assert val == attr.asdict(
dat.Entry(
primary_id="ENSRNA049483195-T1",
accession="ENSEMBL_PLANTS:ENSRNA049483195-T1",
ncbi_tax_id=112509,
database="ENSEMBL_PLANTS",
sequence="AATAACCAAATATAACACTGGGACTAAGGGTCAAATTGGTAATTTTTCTTACATCTCCCCCCCCAGGGGCCCAGGTATCATATACACCGCCAAAATAAAGAGCCTTGAGTACTAGAAGAAAAGCACCTAGACCTAACAAAATTAAGTGAATACCCAAAATTGTAGTCATTTTATTTCTATCTTTCCA",
regions=[
dat.SequenceRegion(
chromosome="Pt",
strand=-1,
exons=[dat.Exon(start=10076, stop=10262)],
assembly_id="IBSC_v2",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="antisense_RNA",
url="",
seq_version="1",
species="Hordeum vulgare subsp. vulgare",
common_name="two-rowed barley",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; Embryophyta; "
"Tracheophyta; Spermatophyta; Magnoliopsida; Liliopsida; "
"Poales; Poaceae; BOP clade; Pooideae; Triticodae; "
"Triticeae; Hordeinae; Hordeum; Hordeum vulgare subsp. vulgare"
),
gene="ENSRNA049483195",
locus_tag="IsrR",
description="Hordeum vulgare subsp. vulgare (two-rowed barley) antisense RNA which regulates isiA expression",
references=[pubs.reference(29092050)],
)
)
def test_can_parse_zea_lincrna(zea_7):
val = attr.asdict(helpers.entry_for(zea_7, "ENSEMBL_PLANTS:Zm00001d001070_T001"))
assert val == attr.asdict(
dat.Entry(
primary_id="Zm00001d001070_T001",
accession="ENSEMBL_PLANTS:Zm00001d001070_T001",
ncbi_tax_id=4577,
database="ENSEMBL_PLANTS",
sequence=(
"GTATGGAACACGGCCGACCCCAGGCATTGGCTTCTGGAGGTTGAAGATGGGCGCATGTCC"
"GAGCGATCGGATGTGAATGGCTGTGGATAGTTGCGTGGTAGTGGTGGATGGCCAATCACT"
"GGCGTAGCCATCGCCCTGGGTGCAGAACGTGGTCCGTATGGGGTCAGCTATGGCGCCGCC"
"GCGCCGGACCCTGTTCACCTCCGTGGTTGCGGCCAGTGTGGGAAGATGGGCGAGCGCCGT"
"TGGTATGGCCTGGAGCGGCTAGGATTAGGTGAGCACCTGGGTTGGGCGGGTTAAGTCCTG"
"GGCGGTTAGAT"
),
regions=[
dat.SequenceRegion(
chromosome="7",
strand=-1,
exons=[dat.Exon(start=359423, stop=359733)],
assembly_id="B73_RefGen_v4",
coordinate_system=dat.CoordinateSystem.one_based(),
)
],
rna_type="lncRNA",
url="",
seq_version="1",
species="Zea mays",
common_name="maize",
lineage=(
"Eukaryota; Viridiplantae; Streptophyta; "
"Embryophyta; Tracheophyta; Spermatophyta; Magnoliopsida; "
"Liliopsida; Poales; Poaceae; PACMAD clade; Panicoideae; "
"Andropogonodae; Andropogoneae; Tripsacinae; Zea; Zea mays"
),
gene="Zm00001d001070",
description="Zea mays (maize) lncRNA Zm00001d001070",
references=[pubs.reference(29092050)],
)
)
def test_does_not_generate_tair_for_others(zea_7):
assert helpers.has_entry_for(zea_7, "TAIR:Zm00001d001070_T001") is False
assert helpers.has_entry_for(zea_7, "ENSEMBL_PLANTS:Zm00001d001070_T001") is True
def test_does_not_create_ncRNA_rna_type_zea_7(zea_7):
for entry in zea_7:
assert entry.rna_type != "ncRNA"
def test_does_not_create_ncRNA_rna_type_cress_2(cress_2):
for entry in cress_2:
assert entry.rna_type != "ncRNA"
def test_does_not_create_ncRNA_rna_type_oryza_9(oryza_9):
for entry in oryza_9:
assert entry.rna_type != "ncRNA"
def test_does_not_create_ncRNA_rna_type_hordeum_pt(hordeum_pt):
for entry in hordeum_pt:
assert entry.rna_type != "ncRNA"
| apache-2.0 | -8,262,563,082,367,672,000 | 40.767802 | 211 | 0.595879 | false |
sedden/django-basic-apps | basic/tools/shortcuts.py | 1 | 1190 | import os.path
import hashlib
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.http import HttpResponseRedirect
def get_image_path(instance, filename):
"""
Converts an image filename to a hash.
"""
name = hashlib.md5("%s" % instance.id).hexdigest()
ext = os.path.splitext(filename)
return os.path.join("%s/%s" % (instance._meta.app_label, instance._meta.module_name), '%s%s' % (name, ext[1]))
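# A minimal usage sketch (assumption: typical Django model wiring, not part of
# this module). get_image_path is meant to be passed as `upload_to`, so Django
# calls it with the model instance and the uploaded filename:
#
#     class Photo(models.Model):
#         image = models.ImageField(upload_to=get_image_path)
#
# An upload named "portrait.jpg" for instance id 42 would then be stored under
# "<app_label>/<module_name>/<md5 of '42'>.jpg".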
def render(request, *args, **kwargs):
"""
Simple wrapper for render_to_response.
"""
kwargs['context_instance'] = RequestContext(request)
return render_to_response(*args, **kwargs)
def redirect(request, obj=None):
"""
Simple wrapper for HttpResponseRedirect that checks the request for a
'next' GET parameter then falls back to a given object or url string.
"""
next = request.GET.get('next', None)
redirect_url = '/'
if next:
redirect_url = next
elif isinstance(obj, str):
redirect_url = obj
elif obj and hasattr(obj, 'get_absolute_url'):
redirect_url = obj.get_absolute_url()
return HttpResponseRedirect(redirect_url) | bsd-3-clause | -5,613,188,190,341,763,000 | 28.775 | 114 | 0.67395 | false |
geo2tag-logistics/Geo2Logistics | logistics/Geo2TagService.py | 1 | 6073 | import json
import requests
from logistics.models import Fleet
SERVER_URL = "http://demo.geo2tag.org/instance/"
BASE_SERVICE_NAME = "testservice"
SERVICE_NAME = BASE_SERVICE_NAME
channel_dict = {}
points_dict = {}
def getSerivceUrl():
return SERVER_URL + "service/" + SERVICE_NAME
def one_time_startup():
print("Application startup execution")
createService()
clearAllFleetChannels()
def createService():
# m = hashlib.md5()
# m.update(socket.gethostbyname(socket.getfqdn()).encode('utf-8'))
# global SERVICE_NAME
# SERVICE_NAME = BASE_SERVICE_NAME + "_" + str(m.hexdigest())
# print("SERVICE_NAME: "+SERVICE_NAME)
#
# url = SERVER_URL + 'service'
# data = {'name': SERVICE_NAME}
# request = requests.post(url, data=data)
# print(request.text)
pass
# returns the map url (used when opening driver-fleet-id)
def getFleetMap(fleet_id):
try:
fleet = Fleet.objects.get(id=fleet_id)
channel_id = getOrCreateFleetChannel(fleet)
except:
channel_id = "none"
return getSerivceUrl() + "/map?zoom=10&latitude=59.8944&longitude=30.2642&channel_ids=[\""+str(channel_id)+"\"]"
# creates a channel for the fleet if it does not exist (used when a point is added in updateDriverPos)
# returns the channel oid for the fleet
def getOrCreateFleetChannel(fleet):
try:
channel_oid = channel_dict.get(fleet.id, None)
if channel_oid is not None:
return channel_oid
print("create channel for fleet " + str(fleet))
url = getSerivceUrl() + '/channel'
full_name = str(fleet.name) + "_" + str(fleet.id)
data = {'name': full_name, 'json': {'name': str(fleet.name), 'id': str(fleet.id), 'owner': fleet.owner.first_name+' '+fleet.owner.last_name}}
request = requests.post(url, data=data)
response = request.text
channel_exists = response == 'null'
if channel_exists:
print(full_name+' already exists : '+str(channel_exists))
oid = None
else:
oid = json.loads(response)["$oid"]
channel_dict[fleet.id] = oid
return oid
except Exception as e:
print("EXCEPTION WHILE createFleetChannel: " + str(e))
# deletes the fleet's channel (used when the fleet is deleted)
def deleteFleetChannel(fleet):
try:
channel_oid = channel_dict.get(fleet.id)
headers = {'content-type': 'application/json'}
url = getSerivceUrl() + "/channel/" + channel_oid
request = requests.delete(url, headers=headers)
channel_dict.pop(fleet.id)
print("delete channel of fleet " + str(fleet) +" result: "+request.text)
except Exception as e:
print("EXCEPTION WHILE deleteFleetChannel: " + str(e))
# deletes all channels (used on application startup)
def clearAllFleetChannels():
print("delete all channels")
try:
url = getSerivceUrl() + '/channel?number=0'
request = requests.get(url)
response = request.text
print(response)
parsed_string = json.loads(response)
for channel in parsed_string:
channel_oid = channel["_id"]["$oid"]
headers = {'content-type': 'application/json'}
url = getSerivceUrl() + "/channel/" + channel_oid
print("DELETE " + url)
requests.delete(url, headers=headers)
channel_dict.clear()
points_dict.clear()
except Exception as e:
print("EXCEPTION WHILE clearAllFleetChannels: " + str(e))
# updates the driver's current position (used by api/driver/update_pos/)
def updateDriverPos(fleet, driver, lat, lon):
try:
channel_oid = getOrCreateFleetChannel(fleet)
if channel_oid is not None:
point_oid = points_dict.get(driver.id, None)
url = getSerivceUrl() + '/point'
data = [{"lon": float(lat), "lat": float(lon), "alt": 1.1,
"json": {"name": driver.first_name + " " + driver.last_name}, "channel_id": channel_oid}]
if point_oid is None:
request = requests.post(url, data=json.dumps(data))
point_oid = json.loads(request.text)[0]
points_dict[driver.id] = point_oid
print("added point " + str(lat) + " " + str(lon) + " for driver " + str(driver) + " in fleet " + str(fleet) + " result: "+request.text)
else:
# delete old
del_url = getSerivceUrl() + '/point/' + point_oid
request = requests.delete(del_url)
success = request.text == '{}'
if success:
points_dict.pop(driver.id)
# add new
request = requests.post(url, data=json.dumps(data))
point_oid = json.loads(request.text)[0]
points_dict[driver.id] = point_oid
print("updated point " + str(lat) + " " + str(lon) + " for driver " + str(driver) + " in fleet " + str(fleet) + " result: " + request.text)
else:
print("error while delete "+request.text)
except Exception as e:
print("EXCEPTION WHILE updateDriverPos: " + str(e))
# deletes the point for the driver in the given fleet (used when the driver leaves the fleet or a trip ends)
def deleteDriverPos(fleet, driver):
try:
point_oid = points_dict.get(driver.id)
url = getSerivceUrl() + '/point/' + point_oid
request = requests.delete(url)
points_dict.pop(driver.id)
print("cleared position for driver " + str(driver) + " from fleet " + str(fleet) + " result: "+request.text)
except Exception as e:
print("EXCEPTION WHILE deleteDriverPos: " + str(e))
| apache-2.0 | -5,767,976,916,874,057,000 | 35.194969 | 159 | 0.596003 | false |
jimjing/smores_choose_behavior | src/smores_choose_behavior/visualizer.py | 1 | 1422 | #!/usr/bin/env python
import threading
import rospy
from visualization_msgs.msg import Marker, MarkerArray
class Visualizer(object):
def __init__(self):
self._current_pub_path = []
self.run = False
self.t = None
self.t = threading.Thread(target=self._startPathPub)
self.t.setDaemon(True)
self.run = True
self.t.start()
def stop(self):
self.run = False
self.t.join()
def _startPathPub(self):
path_marker_pub = rospy.Publisher('PathMarker', MarkerArray, queue_size=10)
        rate = rospy.Rate(1) # 1 Hz
while self.run and not rospy.is_shutdown():
id = 1
m_array = MarkerArray()
for pt in self._current_pub_path:
m = Marker()
                m.header.frame_id = "camera_link"
                m.header.stamp = rospy.Time()
                m.ns = "my_namespace"
m.id = id
m.type = Marker.SPHERE
m.action = Marker.ADD
m.pose = pt
m.scale.x = 0.05
m.scale.y = 0.05
m.scale.z = 0.05
m.color.r = 0.5
m.color.a = 1.0
m_array.markers.append(m)
id += 1
path_marker_pub.publish(m_array)
rate.sleep()
def setPubPath(self, path):
self._current_pub_path = path
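# Hedged usage sketch (assumes a running ROS master and a list of
# geometry_msgs Pose objects; not part of the original node):
#
#     rospy.init_node('path_visualizer')
#     viz = Visualizer()             # starts the publisher thread
#     viz.setPubPath(list_of_poses)  # poses rendered as spheres in RViz
#     rospy.spin()
#     viz.stop()                     # join the thread on shutdown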
| gpl-3.0 | -7,178,490,557,951,756,000 | 27.44 | 83 | 0.496484 | false |
ComputerNetworks-UFRGS/OpERA | python/device/radioDevice.py | 1 | 9121 | """
Copyright 2013 OpERA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from gnuradio import gr #pylint: disable=F0401
from abstractDevice import AbstractDevice
from uhdDevice import *
from uhdArch import *
import time
#::TODO:: documentacao das funcoes das classes
class APath(object):
"""
Class to handle a single path connection/disconnection
"""
PENDING = 0
CONNECTED = 1
DISABLED = 2
def __init__(self, source, arch, sink):
"""
CTOR
@param source
@param arch
@param sink
"""
if isinstance(source, UHDBase):
source = source.uhd
if isinstance(sink, UHDBase):
sink = sink.uhd
if source and not isinstance(source, tuple):
source = (source, 0)
if arch and not isinstance(arch, tuple):
arch = (arch, 0)
if sink and not isinstance(sink, tuple):
sink = (sink, 0)
self._source = source
self._arch = arch
self._sink = sink
self._state = APath.PENDING
def __hasattr__(self, name):
"""
        Attribute lookup helper mirroring __getattr__. Note that __hasattr__
        is not a Python protocol method, so the builtin hasattr never calls
        it; it is only useful when invoked explicitly.
"""
if self._source and hasattr(self._source[0], name):
return hasattr(self._source[0], name)
elif self._arch and hasattr(self._arch[0], name):
return hasattr(self._arch[0], name)
elif self._sink and hasattr(self._sink[0], name):
return hasattr(self._sink[0], name)
raise AttributeError
def __getattr__(self, name):
"""
Function override.
"""
if self._source and hasattr(self._source[0], name):
return getattr(self._source[0], name)
elif self._arch and hasattr(self._arch[0], name):
return getattr(self._arch[0], name)
elif self._sink and hasattr(self._sink[0], name):
return getattr(self._sink[0], name)
raise AttributeError("%s not found in wrapper APath" % name)
def connect(self, tb):
"""
@param tb An OpERAFlow instance.
"""
if self.is_connected():
return
if self._source:
isinstance(self._source[0], UHDBaseArch) and self._source[0].pre_connect(tb)
if self._sink:
isinstance(self._sink[0], UHDBaseArch) and self._sink[0].pre_connect(tb)
if self._arch:
isinstance(self._arch[0], UHDBaseArch) and self._arch[0].pre_connect(tb)
if self._arch:
self._source and tb.connect(self._source, self._arch)
self._sink and tb.connect(self._arch, self._sink)
else:
self._source and self._sink and tb.connect(self._source, self._sink)
self._state = APath.CONNECTED
def disconnect(self, tb):
"""
@param tb OpERAFlow instance.
"""
if not self.is_connected():
return
if self._arch:
self._source and tb.disconnect(self._source, self._arch)
self._sink and tb.disconnect(self._arch, self._sink)
else:
self._source and self._sink and tb.disconnect(self._source, self._sink)
self._state = APath.DISABLED
def get_state(self):
"""
"""
return self._state
def is_connected(self):
"""
"""
return self._state == APath.CONNECTED
def is_disabled(self):
"""
"""
return self._state == APath.DISABLED
def is_pending(self):
"""
"""
return self._state == APath.PENDING
class RadioDevice(AbstractDevice):
"""
"""
def __init__(self, name="RadioDevice"):
"""
CTOR
@param name
"""
AbstractDevice.__init__(self, name=name)
# Dictionary of all UHD devices
# Dictionary of AbstractArch of this radio device
self._dict_of_uhds = {}
self._dict_of_archs = {}
self._tb = None
# We need this flag because lock/unlock in OpERAFlow is not working
# To avoid performing the "RadioDevice::connect" 1+ times, we control it with this flag.
self._pending_done = False
def add_arch(self, source, sink, arch, name, uhd_device):
"""
        Add a reference to an arch in which this radio device is a source/sink.
@param source Arch source.
@param sink Architecture sink.
@param arch AbstractArch device implementation.
        @param name Name of the architecture.
@param uhd_device UHD device. Should be source or sink.
"""
# The arch has a reference to the radio.
if hasattr(arch, 'set_radio_device'):
arch.set_radio_device(uhd_device)
self._dict_of_archs[name] = APath(source=source, arch=arch, sink=sink)
self._dict_of_uhds[name] = uhd_device
# makes the name be accessible by doing radio.$name
setattr(self, name, self._dict_of_archs[name])
def disable_arch(self, name):
"""
@param name
"""
# Arch is not enabled
if not name in self._dict_of_archs:
raise AttributeError
# ::TRICKY::
# lock()/unlock() are not working with python sync blocks.
# So, we use stop/wait/start
# For more info check the link:
# http://gnuradio.org/redmine/issues/594
self._tb.stop()
self._tb.wait()
self._dict_of_archs[name].disconnect(self._tb)
self._tb.start()
def enable_arch(self, name):
"""
@param name
"""
# Arch is not enabled
if not name in self._dict_of_archs:
raise AttributeError
self._tb.stop()
self._tb.wait()
self._dict_of_archs[name].connect(self._tb)
self._tb.start()
def connect(self):
"""
"""
if self._pending_done:
return
self._pending_done = True
for x in self._dict_of_archs.itervalues():
x.connect(self._tb)
def set_top_block(self, tb):
"""
@param tb Set the top block.
"""
self._tb = tb
def __getattr__(self, name):
"""
Search for a parameter/function in all archs of this Radio.
        So, a programmer that does radio.function activates this __getattr__
function, which searches for 'function' in all architectures.
@param name Name of parameter/function.
"""
if name == "_dict_of_archs":
            return object.__getattribute__(self, "_dict_of_archs") #pylint: disable=E1101
else:
# search for method in the architectures
for key in self._dict_of_archs:
if hasattr(self._dict_of_archs[key], name):
return getattr(self._dict_of_archs[key], name)
raise AttributeError("%r object has no attribute %s" % (self.__class__, name))
### Implementations required for the AbstractDevice
def __getter(self, str_callback):
"""
        A generic getter for this class.
@param str_callback String with the name of the real getter function.
"""
arr = []
for uhd in self._dict_of_uhds.values():
uhd and arr.append(getattr(uhd, str_callback)())
return arr
def _get_center_freq(self):
"""
"""
return self.__getter('get_center_freq')
def _set_center_freq(self, center_freq):
"""
@param center_freq
"""
for uhd in self._dict_of_uhds.values():
uhd and uhd.set_center_freq(center_freq)
return center_freq
def _get_samp_rate(self):
"""
Device sample rate getter.
"""
return self.__getter('get_samp_rate')
def _set_samp_rate(self, samp_rate):
"""
@param samp_rate
"""
for uhd in self._dict_of_uhds.values():
uhd and uhd.set_samp_rate(samp_rate)
return samp_rate
def _get_gain(self):
"""
Device gain getter.
"""
return self.__getter('get_gain')
def _set_gain(self, gain):
"""
@param gain
"""
for uhd in self._dict_of_uhds.values():
uhd and uhd.set_gain(gain)
return gain
def _get_bandwidth(self):
"""
Get the device's bandwidth.
@return
"""
return self.__getter('get_bandwidth')
def _set_bandwidth(self, bw):
"""
@param bw
"""
for uhd in self._dict_of_uhds.values():
uhd and uhd.set_bandwidth(bw)
return bw
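# Hedged wiring sketch (uhd_source, my_arch and tb are assumed objects from
# the surrounding OpERA framework, not defined in this module):
#
#     radio = RadioDevice(name="radio")
#     radio.set_top_block(tb)
#     radio.add_arch(source=uhd_source, sink=None, arch=my_arch,
#                    name="sensing", uhd_device=uhd_source)
#     radio.connect()                # connects every pending APath
#     radio.disable_arch("sensing")  # stop/disconnect/start around the change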
| apache-2.0 | -5,384,129,354,236,725,000 | 25.591837 | 97 | 0.558162 | false |
tramin/frobo | frobo_nav/nodes/nav_calibrate_linear.py | 1 | 5813 | #!/usr/bin/env python
""" nav_square.py - Version 1.1 2013-12-20
    A basic demo of using odometry data to move the robot
along a square trajectory.
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2012 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy
from geometry_msgs.msg import Twist, Point, Quaternion
import tf
from frobo_nav.transform_utils import quat_to_angle, normalize_angle
from math import degrees, radians, copysign, sqrt, pow, pi
class NavSquare():
def __init__(self):
# Give the node a name
rospy.init_node('nav_square', anonymous=False)
# Set rospy to execute a shutdown function when terminating the script
rospy.on_shutdown(self.shutdown)
# How fast will we check the odometry values?
rate = 20
# Set the equivalent ROS rate variable
r = rospy.Rate(rate)
# Set the parameters for the target square
goal_distance = rospy.get_param("~goal_distance", 3.0) # meters
goal_angle = rospy.get_param("~goal_angle", radians(90)) # degrees converted to radians
linear_speed = rospy.get_param("~linear_speed", 0.1) # meters per second
angular_speed = rospy.get_param("~angular_speed", 0.6) # radians per second
angular_tolerance = rospy.get_param("~angular_tolerance", radians(0)) # degrees to radians
# Publisher to control the robot's speed
        self.cmd_vel = rospy.Publisher('/cmd_vel', Twist, queue_size=5)
# The base frame is base_footprint for the TurtleBot but base_link for Pi Robot
self.base_frame = rospy.get_param('~base_frame', '/base_link')
# The odom frame is usually just /odom
self.odom_frame = rospy.get_param('~odom_frame', '/odom')
# Initialize the tf listener
self.tf_listener = tf.TransformListener()
# Give tf some time to fill its buffer
rospy.sleep(2)
# Set the odom frame
self.odom_frame = '/odom'
# Find out if the robot uses /base_link or /base_footprint
try:
self.tf_listener.waitForTransform(self.odom_frame, '/base_footprint', rospy.Time(), rospy.Duration(1.0))
self.base_frame = '/base_footprint'
except (tf.Exception, tf.ConnectivityException, tf.LookupException):
try:
self.tf_listener.waitForTransform(self.odom_frame, '/base_link', rospy.Time(), rospy.Duration(1.0))
self.base_frame = '/base_link'
except (tf.Exception, tf.ConnectivityException, tf.LookupException):
rospy.loginfo("Cannot find transform between /odom and /base_link or /base_footprint")
rospy.signal_shutdown("tf Exception")
# Initialize the position variable as a Point type
position = Point()
# Initialize the movement command
move_cmd = Twist()
# Set the movement command to forward motion
move_cmd.linear.x = linear_speed
# Get the starting position values
(position, rotation) = self.get_odom()
initPosition = position
rospy.loginfo("Initial position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees")
x_start = position.x
y_start = position.y
# Keep track of the distance traveled
distance = 0
# Enter the loop to move along a side
while distance < goal_distance and not rospy.is_shutdown():
# Publish the Twist message and sleep 1 cycle
self.cmd_vel.publish(move_cmd)
r.sleep()
# Get the current position
(position, rotation) = self.get_odom()
# Compute the Euclidean distance from the start
distance = sqrt(pow((position.x - x_start), 2) +
pow((position.y - y_start), 2))
# Stop the robot when we are done
self.cmd_vel.publish(Twist())
#print result
(position, rotation) = self.get_odom()
rospy.loginfo("Final position at " + str(position) + " and rotation " + str(degrees(rotation)) + " degrees")
rospy.loginfo("Difference (position.x - initPosition.x) " + str(position.x - initPosition.x) + "m")
def get_odom(self):
# Get the current transform between the odom and base frames
try:
(trans, rot) = self.tf_listener.lookupTransform(self.odom_frame, self.base_frame, rospy.Time(0))
except (tf.Exception, tf.ConnectivityException, tf.LookupException):
rospy.loginfo("TF Exception")
return
return (Point(*trans), quat_to_angle(Quaternion(*rot)))
def shutdown(self):
# Always stop the robot when shutting down the node
rospy.loginfo("Stopping the robot...")
self.cmd_vel.publish(Twist())
rospy.sleep(1)
if __name__ == '__main__':
try:
NavSquare()
except rospy.ROSInterruptException:
rospy.loginfo("Navigation terminated.")
| mit | 3,346,981,609,078,906,400 | 39.089655 | 116 | 0.607088 | false |
morgangiraud/openai-rl | main.py | 1 | 6781 | import gym, os, time, json, random, sys
import tensorflow as tf
import numpy as np
from agents import make_agent, get_agent_class
from hpsearch.hyperband import Hyperband, run_params
from hpsearch import fullsearch
from hpsearch import randomsearch
dir = os.path.dirname(os.path.realpath(__file__))
flags = tf.app.flags
# HP search
flags.DEFINE_boolean('randomsearch', False, 'Perform a random search fixing one HP at a time')
flags.DEFINE_boolean('fullsearch', False, 'Perform a full search of hyperparameter space (hyperband -> lr search -> hyperband with best lr)')
flags.DEFINE_string('fixed_params', "{}", 'JSON inputs to fix some params in a random search, ex: \'{"lr": 0.001}\'')
# Hyperband
flags.DEFINE_boolean('hyperband', False, 'Perform a hyperband search of hyperparameters')
flags.DEFINE_boolean('dry_run', False, 'Perform a hyperband dry_run')
flags.DEFINE_integer('nb_process', 4, 'Number of parallel process to perform a hyperband search')
flags.DEFINE_integer('games_per_epoch', 100, 'Number of games played per training epoch')
# Agent
flags.DEFINE_string('agent_name', 'DQNAgent', 'Name of the agent')
flags.DEFINE_boolean('best', False, 'Use the best known configuration')
flags.DEFINE_float('initial_q_value', 0., 'Initial Q values in the Tabular case')
flags.DEFINE_float('initial_mean', 0., 'Initial mean for NN')
flags.DEFINE_float('initial_stddev', 1e-1, 'Initial standard deviation for NN')
flags.DEFINE_float('lambda', .9, 'Lambda parameters used with eligibility traces')
flags.DEFINE_float('discount', .999, 'Discount factor')
flags.DEFINE_float('lr', 1e-3, 'Learning rate for actor/controller network')
flags.DEFINE_float('m_lr', 1e-3, 'Learning rate for the predictive model/critic')
flags.DEFINE_integer('lr_decay_steps', 50000, 'Learning rate decay steps for tabular methods')
flags.DEFINE_integer('nb_units', 20, 'Number of hidden units in Deep learning agents')
flags.DEFINE_float('critic_lr', 1e-3, 'Learning rate for the critic part of AC agents')
flags.DEFINE_float('nb_critic_iter', 16, 'Number of iterations used to fit the critic')
flags.DEFINE_integer('n_step', 4, 'Number of steps used in the TD(n) algorithm')
flags.DEFINE_float('reg_coef', 1e-2, 'regularization coefficient')
# CM agents
flags.DEFINE_integer('nb_sleep_iter', 100, 'Used in CM models: number of steps used to train the actor in the environment')
flags.DEFINE_integer('nb_wake_iter', 50, 'Used in CM models: number of steps used to train the predictive model of the environment')
flags.DEFINE_float('initial_m_stddev', 2e-1, 'Initial standard deviation for the predictive model')
flags.DEFINE_integer('nb_m_units', 50, 'Number of hidden units for the predictive model')
# Policy
flags.DEFINE_boolean('UCB', False, 'Use the UCB policy for tabular agents')
flags.DEFINE_integer('N0', 100, 'Offset used in the decay algorithm of epsilon')
flags.DEFINE_float('min_eps', 1e-2, 'Limit after which the decay stops')
# Experience replay
flags.DEFINE_integer('er_batch_size', 512, 'Batch size of the experience replay learning')
flags.DEFINE_integer('er_epoch_size', 50, 'Number of sampled contained in an epoch of experience replay')
flags.DEFINE_integer('er_rm_size', 20000, 'Size of the replay memory buffer')
flags.DEFINE_integer('update_every', 20, 'Update the fixed Q network every chosen step')
# Environment
flags.DEFINE_string('env_name', 'CartPole-v0', 'The name of gym environment to use')
flags.DEFINE_boolean('debug', False, 'Debug mode')
flags.DEFINE_integer('max_iter', 2000, 'Number of training steps')
flags.DEFINE_string('result_dir', dir + '/results/' + flags.FLAGS.env_name + '/' + flags.FLAGS.agent_name + '/' + str(int(time.time())), 'Name of the directory to store/log the agent (if it exists, the agent will be loaded from it)')
flags.DEFINE_boolean('play', False, 'Load an agent for playing')
flags.DEFINE_integer('play_nb', 10, 'Number of games to play')
flags.DEFINE_integer('random_seed', random.randint(0, 2**32 - 1), 'Value of random seed')
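# Example invocations, as a hedged sketch (flag names are the ones defined
# above; the available agent names depend on the agents package):
#
#     python main.py --agent_name=DQNAgent --env_name=CartPole-v0 \
#         --max_iter=2000
#     python main.py --hyperband --nb_process=4
#     python main.py --play --play_nb=10 --result_dir=results/my_run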
def main(_):
config = flags.FLAGS.__flags.copy()
config["fixed_params"] = json.loads(config["fixed_params"])
# if os.path.isfile(config['result_dir'] + '/config.json'):
# print("Overriding shell configuration with the one found in " + config['result_dir'])
# with open(config['result_dir'] + '/config.json', 'r') as f:
# config = json.loads(f.read())
if config['hyperband']:
print('Starting hyperband search')
config['result_dir_prefix'] = dir + '/results/hyperband/' + str(int(time.time()))
get_params = get_agent_class(config).get_random_config
hb = Hyperband( get_params, run_params )
results = hb.run(config, skip_last=True, dry_run=config['dry_run'])
if not os.path.exists(config['result_dir_prefix']):
os.makedirs(config['result_dir_prefix'])
with open(config['result_dir_prefix'] + '/hb_results.json', 'w') as f:
json.dump(results, f)
elif config['fullsearch']:
print('*** Starting full search')
config['result_dir_prefix'] = dir + '/results/fullsearch/' + str(int(time.time())) + '-' + config['agent_name']
os.makedirs(config['result_dir_prefix'])
print('*** Starting first pass: full random search')
summary = fullsearch.first_pass(config)
with open(config['result_dir_prefix'] + '/fullsearch_results1.json', 'w') as f:
json.dump(summary, f)
print('*** Starting second pass: Learning rate search')
best_agent_config = summary['results'][0]['params']
summary = fullsearch.second_pass(config, best_agent_config)
with open(config['result_dir_prefix'] + '/fullsearch_results2.json', 'w') as f:
json.dump(summary, f)
print('*** Starting third pass: Hyperband search with best lr')
best_lr = summary['results'][0]['lr']
summary = fullsearch.third_pass(config, best_lr)
with open(config['result_dir_prefix'] + '/fullsearch_results3.json', 'w') as f:
json.dump(summary, f)
elif config['randomsearch']:
print('*** Starting random search')
config['result_dir_prefix'] = dir + '/results/randomsearch/' + str(int(time.time())) + '-' + config['agent_name']
os.makedirs(config['result_dir_prefix'])
summary = randomsearch.search(config)
with open(config['result_dir_prefix'] + '/fullsearch_results1.json', 'w') as f:
json.dump(summary, f)
else:
env = gym.make(config['env_name'])
agent = make_agent(config, env)
if config['play']:
for i in range(config['play_nb']):
agent.play(env)
else:
agent.train()
agent.save()
if __name__ == '__main__':
tf.app.run() | mit | 6,664,872,669,113,156,000 | 49.992481 | 233 | 0.679546 | false |
rjl09c/ysp2017 | katiehessian.py | 1 | 7324 | import yt
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import pylab
from yt.analysis_modules.halo_finding.api import HaloFinder
from pylab import*
from numpy import ma
from numpy import linalg as LA
#derives vel with respect to x
def derivx(vel,xcoords):
distance = xcoords[1][0] - xcoords[0][0]
velxdx = np.zeros((320,320))
for i in range(len(vel)):
for x in range(len(vel)):
if 0 < i < len(vel) - 1:
velxdx[i,x] = ((-1/2) * vel[i-1][x]) + ((1/2) * vel[i+1][x])
elif i == 0:
velxdx[i,x] = (((-3/2) * vel[i][x]) + (2 * vel[i+1][x]) + ((-1/2) * vel[i+2][x]))
elif i == len(vel) - 1:
                velxdx[i,x] = ((3/2) * vel[i][x]) + (-2 * vel[i-1][x]) + ((1/2) * vel[i-2][x])
return velxdx/distance
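# The stencils in derivx/derivy are the standard first-order finite
# differences on a uniform grid with spacing h (shown here as a math sketch):
#   interior (central):    f' ~ (-f[i-1] + f[i+1]) / (2h)
#   left edge (forward):   f' ~ (-3/2*f[i] + 2*f[i+1] - 1/2*f[i+2]) / h
#   right edge (backward): f' ~ ( 3/2*f[i] - 2*f[i-1] + 1/2*f[i-2]) / h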
#derives vel with respect to y
def derivy(vel,xcoords):
distance = xcoords[1][0] - xcoords[0][0]
velydy = np.zeros((320,320))
for i in range(len(vel)):
for x in range(len(vel)):
if 0 < x < len(vel) - 1:
velydy[i,x] = (((-1/2) * vel[i][x-1]) + ((1/2) * vel[i][x+1]))
elif x == 0:
velydy[i,x] = (((-3/2)*vel[i][x]) + (2*vel[i][x+1]) + ((-1/2) * vel[i][x + 2]))
elif x == len(vel) - 1:
                velydy[i,x] = (((3/2)*vel[i][x]) + (-2*vel[i][x-1]) + ((1/2) * vel[i][x-2]))
return velydy/distance
#second derivative of vel with respect to x
def deriv2x(vel,xcoords):
distance = xcoords[1][0] - xcoords[0][0]
velxdx = np.zeros((320,320))
for i in range(len(vel)):
for x in range(len(vel)):
if 0 < i < len(vel) - 1:
velxdx[i,x] = (vel[i-1][x]) + (-2 * vel[i][x]) + (vel[i+1][x])
elif i == 0:
velxdx[i,x] = ((2 * vel[i][x]) + (-5 * vel[i+1][x]) + (4* vel[i+2][x]) + (-1 * vel[i+3][x]))
elif i == len(vel) - 1:
                velxdx[i,x] = ((2 * vel[i][x]) + (-5 * vel[i-1][x]) + (4 * vel[i-2][x]) + (-1 * vel[i-3][x]))
    return velxdx/(distance**2)
#second derivative of vel with respect to y
def deriv2y(vel,xcoords):
distance = xcoords[1][0] - xcoords[0][0]
velydy = np.zeros((320,320))
for i in range(len(vel)):
for x in range(len(vel)):
if 0 < x < len(vel) - 1:
velydy[i,x] = ((vel[i][x-1]) + (-2 * vel[i][x]) + (vel[i][x+1]))
elif x == 0:
velydy[i,x] = (((2)*vel[i][x]) + (-5 * vel[i][x+1]) + ((4) * vel[i][x+2]) + (-1 * vel[i][x+3]))
elif x == len(vel) - 1:
velydy[i,x] = (((2) * vel[i][x]) + (-5 * vel[i][x - 1]) + ((4) * vel[i][x-2]) + (-1 * vel[i][x-3]))
    return velydy/(distance**2)
#second derivative of a mixed derivative
def mixed_deriv(xcoords, ycoords, vel):
distx = xcoords[1][0] - xcoords[0][0]
disty = ycoords[0][1] - ycoords[0][0]
mixed = np.zeros((320,320))
veldx = derivx(vel, xcoords)
veldy = derivy(veldx, xcoords) #takes deriv of vel with respect to x and derives that in the y direction
for i in range(len(vel)):
for x in range(len(vel)):
if 0 < i < len(vel) - 1 and 0 < x < len(vel) - 1:
mixed[i][x] = ((vel[i+1][x+1]) - (vel[i+1][x-1]) - (vel[i-1][x+1]) + (vel[i-1][x-1]))/(4*distx*disty)
#if on edges derives with respect to x first
elif i == 0 or i == len(vel) - 1 or x == 0 or x == len(vel) - 1:
mixed[i][x]=veldy[i][x]
return mixed
#create hessian matrix for each point
def hess(xcoords, ycoords, vel):
veldx = deriv2x(vel, xcoords) #retrieves the second derivatives of the velocity in the x direction
veldy = deriv2y(vel, xcoords) #retrieves the second derivatives of the velocity in the y direction
mixed = mixed_deriv(xcoords, ycoords, vel) #retrieves the second mixed derivatives of the velocity
    allhessian = [[[] for j in range(320)] for i in range(320)]
    for j in range(len(veldx)):
        for k in range(len(veldx)):
            # allocate a fresh 2x2 matrix per point so the entries stored in
            # allhessian do not all alias the same array
            hessian = np.zeros((2,2))
            for i in range(len(hessian)):
for x in range(len(hessian)):
if i == 0 and x == 1:
hessian[i,x] = mixed[j,k]
hessian[i+1][x-1] = mixed[j,k]
elif x == 0 and i == 0:
hessian[i,x] = veldx[j,k]
elif x == 1 and i == 1:
hessian[i,x] = veldy[j,k]
allhessian[j][k] = hessian
allhessian = np.array(allhessian)
return allhessian
#find determinant
def determinant(allhessian):
deters = np.zeros((320,320))
for j in range(len(allhessian)):
for k in range(len(allhessian)):
x = allhessian[j,k]
deters[j,k] = (x[0,0]*x[1,1]) - (x[1,0]*x[0,1])
return deters
#find magnitude
def magnitude(velx,vely, xcoords):
mag = np.zeros((320,320))
yderiv = derivy(vely, xcoords)
xderiv = derivx(velx, xcoords)
for i in range(len(xderiv)):
for x in range(len(xderiv)):
mag[i][x] = (((yderiv[i,x]**2) + (xderiv[i,x]**2))**.5)
return mag
#finds extrema and saddlepoints
def extrema(allhessian, velx, vely, xcoords):
deters = determinant(allhessian)
extrem = np.zeros((320,320))
mag = magnitude(velx, vely, xcoords)
for j in range(len(extrem)):
for k in range(len(extrem)):
if mag[j][k] == 0:
if deters[j,k] < 0:
extrem[j, k] = -1
elif deters[j,k] == 0:
extrem[j,k] = 0
else:
x = allhessian[j,k]
                if deters[j,k] > 0 and x[0,0] > 0:
                    extrem[j, k] = -2
                elif deters[j,k] > 0 and x[0,0] < 0:
                    extrem[j, k] = 2
return extrem
#creates jacobia matrix for each point
def jacobian(xcoords,velx, vely):
xx = derivx(velx, xcoords)
xy = derivy(velx, xcoords)
yx = derivx(vely, xcoords)
yy = derivy(vely, xcoords)
    alljacob = [[[] for j in range(320)] for i in range(320)]
    for j in range(len(xx)):
        for k in range(len(xx)):
            # allocate a fresh 2x2 matrix per point and fill all four entries
            jacob = np.zeros((2,2))
            for i in range(len(jacob)):
                for c in range(len(jacob)):
                    if c == 0 and i == 0:
                        jacob[i][c] = xx[j][k]
                    elif c == 1 and i == 0:
                        jacob[i][c] = xy[j][k]
                    elif c == 0 and i == 1:
                        jacob[i][c] = yx[j][k]
                    elif c == 1 and i == 1:
                        jacob[i][c] = yy[j][k]
alljacob[j][k] = jacob
alljacob = np.array(alljacob)
return alljacob
#obtains eigenvalues for all points' jacobian matrices and then checks the extrema
def evals(alljacob):
eigen = [[[] for j in range(320)] for i in range(320)]
extrema = np.zeros((320,320))
for j in range(len(alljacob)):
for k in range(len(alljacob)):
x = alljacob[j,k]
eigen[j][k] = LA.eigvalsh(x)
y = eigen [j][k]
if y[0]>0 and y[1] > 0:
extrema[j,k] = 2
elif y[0]<0 and y[1] <0:
extrema[j,k] = -2
elif y[0]*y[1] < 0:
extrema[j,k] = 3
return extrema
#loads files and calls hess function
def main():
ds = yt.load("kh_mhd_Ma=0.803333333333At=0.0hdf5_chk_0000")
ad = ds.covering_grid(level=0, left_edge=ds.index.grids[0].LeftEdge,
dims=ds.domain_dimensions)
xcoords = np.array(ad["x"])
ycoords = np.array(ad["y"])
velx = np.array(ad["velx"])
vely = np.array(ad["vely"])
ds1 = yt.load("kh_mhd_Ma=0.803333333333At=0.0hdf5_chk_0001")
dd = ds1.covering_grid(level=0, left_edge=ds1.index.grids[0].LeftEdge, dims=ds1.domain_dimensions)
xcoords1 = np.array(dd["x"])
ycoords1 = np.array(dd["y"])
velx1 = np.array(dd["velx"])
vely1 = np.array(dd["vely"])
#creates Hessian matrix for x velocity for file 1
extrema(hess(xcoords, ycoords, velx), velx, vely, xcoords)
    #creates Hessian matrix for y velocity for file 1
    extrema(hess(xcoords, ycoords, vely), velx, vely, xcoords)
#prints extrema for file1
print(evals(jacobian(xcoords, velx, vely)))
'''plt.figure()
plt.scatter(xcoords, ycoords,c=evals(jacobian(xcoords, velx, vely)), marker= 'o',edgecolor='none')
cb = plt.colorbar()
cb.set_label('Extrema')
plt.show()'''
main()
| gpl-3.0 | -7,559,592,820,612,261,000 | 24.255172 | 106 | 0.58834 | false |
tehpug/Ares | leagues/models.py | 1 | 5319 | # -*- coding: utf-8 -*-
from celery.result import AsyncResult
from django.core.exceptions import ValidationError
from django.db import models, connection
from django.utils import timezone
def validate_even(value):
"""
Validate a number to be even
"""
if value % 2 != 0:
raise ValidationError('%(value)s is not an even number',
params={'value': value})
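# For example (illustrative only): validate_even(4) passes silently, while
# validate_even(3) raises ValidationError("3 is not an even number").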
class League(models.Model):
"""
Leagues for robots
"""
title = models.CharField(
'title',
max_length=150,
unique=True,
help_text='Required. 150 characters or fewer.',
error_messages={
'unique': 'A league with that name already exists.',
},
)
description = models.CharField(max_length=1000, blank=True, null=True)
finished = models.BooleanField('finished', default=False)
registration_start = models.DateTimeField('registration start time')
registration_end = models.DateTimeField('registration end time')
start = models.DateField('league start date')
# Times to schedule matches within a day
match_start_time = models.TimeField('matches start time')
match_end_time = models.TimeField('matches end time')
num_robots = models.PositiveSmallIntegerField('number of robots',
validators=[validate_even])
game = models.ForeignKey('games.Game', related_name='leagues',
on_delete=models.CASCADE)
robots = models.ManyToManyField('robots.Robot', blank=True,
related_name='leagues')
_match_scheduler_id = models.CharField('match scheduler id',
max_length=36, null=True)
def __init__(self, *args, **kwargs):
super(League, self).__init__(*args, **kwargs)
self.__original_registration_end = self.registration_end
@property
def match_scheduler(self):
"""
Match scheduler task
"""
if self._match_scheduler_id:
return AsyncResult(self._match_scheduler_id)
return None
@match_scheduler.setter
def match_scheduler(self, scheduler):
if isinstance(scheduler, AsyncResult):
self._match_scheduler_id = scheduler.id
@match_scheduler.deleter
def match_scheduler(self):
self._match_scheduler_id = None
def has_schedule_changed(self):
"""
Check if the league schedule has changed or not
"""
return self.registration_end != self.__original_registration_end
def clean(self):
"""
Validate the the values
"""
now = timezone.now()
if self.registration_start <= now:
raise ValidationError(
u'Registration starting time must be after now')
if self.registration_end <= self.registration_start:
raise ValidationError(
u'Registration ending time must be after its starting')
if self.start <= self.registration_end.date():
raise ValidationError(
u'League starting time must be after registration ending time')
if self.match_end_time <= self.match_start_time:
raise ValidationError(
u'Match ending time must be after its starting time')
def __str__(self):
return '{} ({})'.format(self.title, self.game)
@staticmethod
def get_table():
"""
        Build the league standings table from finished matches
"""
table = list()
with connection.cursor() as c:
c.execute("""SELECT robot_id, name,
SUM(P) AS P, SUM(W) AS W, SUM(L) AS L, SUM(D) AS D, SUM(robot1_score) AS GF
, SUM(robot2_score) AS GA, SUM(GD) AS GD, SUM(PTS) AS PTS
FROM
(SELECT robot1_id AS robot_id, robots_robot.name AS name, 1 AS P,
CASE WHEN robot1_score > robot2_score THEN 1 ELSE 0 END AS W,
CASE WHEN robot1_score < robot2_score THEN 1 ELSE 0 END AS L,
CASE WHEN robot1_score = robot2_score THEN 1 ELSE 0 END AS D,
robot1_score, robot2_score, robot1_score-robot2_score AS GD,
CASE
WHEN robot1_score > robot2_score THEN 3
WHEN robot1_score < robot2_score THEN 0
ELSE 1 END AS PTS
FROM matches_match
LEFT JOIN robots_robot ON matches_match.robot1_id=robots_robot.id
WHERE matches_match.finished != 0
UNION
SELECT robot2_id, robots_robot.name,
1 AS Played,
            CASE WHEN robot2_score > robot1_score THEN 1 ELSE 0 END,
            CASE WHEN robot2_score < robot1_score THEN 1 ELSE 0 END,
            CASE WHEN robot1_score = robot2_score THEN 1 ELSE 0 END,
            robot2_score, robot1_score, robot2_score-robot1_score,
            CASE
            WHEN robot2_score > robot1_score THEN 3
            WHEN robot2_score < robot1_score THEN 0
            ELSE 1 END
FROM matches_match
LEFT JOIN robots_robot ON matches_match.robot2_id=robots_robot.id
WHERE matches_match.finished != 0)
GROUP BY robot_id
ORDER BY P DESC, PTS DESC, GD DESC""")
for row in c.fetchall():
table.append({
'robot_id': row[0],
'name': row[1],
'played': row[2],
'won': row[3],
'lost': row[4],
'drawn': row[5],
'GF': row[6],
'GA': row[7],
'GD': row[8],
'points': row[9]})
return table
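    # A returned row is a plain dict, e.g. (values are illustrative only):
    #   {'robot_id': 1, 'name': 'bot-a', 'played': 2, 'won': 1, 'lost': 0,
    #    'drawn': 1, 'GF': 5, 'GA': 3, 'GD': 2, 'points': 4}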
| gpl-3.0 | 6,876,304,096,557,620,000 | 34.225166 | 79 | 0.602369 | false |
Bajoo/client-pc | tests/unit_tests/filesync/task_consumer_test.py | 1 | 6616 | # -*- coding:utf-8 -*-
import threading
import pytest
from bajoo.filesync import task_consumer
from bajoo.promise import Promise
class TestTaskConsumer(object):
def _make_external_promise(self):
"""Helper used to make stub Promise.
Returns:
Promise, resolve, reject: the promise and its callbacks.
"""
callbacks = []
def executor(resolve, reject):
callbacks.append(resolve)
callbacks.append(reject)
return Promise(executor), callbacks[0], callbacks[1]
def test_add_empty_task(self):
"""Add a task who is an almost empty generator."""
with task_consumer.Context():
task_executed = []
def task():
task_executed.append(True)
yield
promise = task_consumer.add_task(task)
promise.result(0.01)
assert task_executed
def test_add_task_returning_value(self):
"""Add a simple task who must return a value."""
with task_consumer.Context():
def task():
yield 56
promise = task_consumer.add_task(task)
assert promise.result(0.01) is 56
def test_add_task_multistep(self):
"""Add a task who has to wait other external tasks (promise)."""
p1, resolve, _ = self._make_external_promise()
p2, resolve2, _ = self._make_external_promise()
def task():
value = yield p1
assert value is 44
value2 = yield p2
yield value2 * 2
with task_consumer.Context():
p_task = task_consumer.add_task(task)
resolve(44)
resolve2(26)
assert p_task.result(0.01) is 52
def test_all_step_use_dedicated_thread(self):
"""Ensures the code in a task is always executed in a filesync thread.
The generator code is always executed in a thread belonging to the
filesync threads.
"""
main_thread = threading.current_thread().ident
p1, resolve, _ = self._make_external_promise()
p2, resolve2, _ = self._make_external_promise()
def task():
assert threading.current_thread().ident is not main_thread
yield p1
assert threading.current_thread().ident is not main_thread
yield p2
assert threading.current_thread().ident is not main_thread
yield Promise.resolve(None)
assert threading.current_thread().ident is not main_thread
with task_consumer.Context():
p_task = task_consumer.add_task(task)
resolve(None)
resolve2(None)
p_task.result(0.01)
def test_add_task_waiting_rejected_promise(self):
"""Add a task who should fail due to a rejected promise."""
class Err(Exception):
pass
def task():
yield Promise.resolve('OK')
yield Promise.reject(Err())
with task_consumer.Context():
p = task_consumer.add_task(task)
with pytest.raises(Err):
p.result(0.01)
def test_add_task_catching_rejected_promise(self):
"""Add a task who will catch a rejected promise."""
class Err(Exception):
pass
def task():
yield Promise.resolve('OK')
with pytest.raises(Err):
yield Promise.reject(Err())
yield 'OK'
with task_consumer.Context():
p = task_consumer.add_task(task)
assert p.result(0.01) == 'OK'
def test_add_failing_task(self):
"""Add a task who will raises an Exception."""
class Err(Exception):
pass
def task():
yield Promise.resolve(True)
raise Err()
with task_consumer.Context():
p = task_consumer.add_task(task)
with pytest.raises(Err):
p.result(0.1)
def test_add_many_tasks(self):
"""Add 100 new tasks and wait them all."""
promises = []
def task():
yield Promise.resolve(1)
yield Promise.resolve(2)
yield Promise.resolve(3)
yield 1
with task_consumer.Context():
for i in range(40):
promises.append(task_consumer.add_task(task))
result = Promise.all(promises).result(0.1)
print(result)
assert sum(result) is 40
def test_add_concurrent_tasks(self):
"""Add three tasks who are required to run at the same time.
The task A will wait the Task B, then B will wait A.
This test "force" the tasks to be executed in a non-linear order.
"""
p1_a, r1_a, _ = self._make_external_promise()
p1_b, r1_b, _ = self._make_external_promise()
p1_c, r1_c, _ = self._make_external_promise()
p2_a, r2_a, _ = self._make_external_promise()
p2_b, r2_b, _ = self._make_external_promise()
p2_c, r2_c, _ = self._make_external_promise()
def task_A():
r1_a(None)
yield p1_b
r2_a(None)
yield p2_c
yield 'A'
def task_B():
r1_b(None)
yield p1_c
r2_b(None)
yield p2_a
yield 'B'
def task_C():
r1_c(None)
yield p1_a
r2_c(None)
yield p2_b
yield 'C'
with task_consumer.Context():
results = Promise.all([
task_consumer.add_task(task_A),
task_consumer.add_task(task_B),
task_consumer.add_task(task_C)
]).result(0.01)
assert results == list('ABC')
def test_ensure_task_generator_are_closed(self):
"""Ensure the task generators are properly closed after use.
        If a generator has yielded the final result and the caller doesn't want
        to iterate until the end, the caller must close the generator.
Closing the generator will raise an exception GeneratorExit, and so
allow the generator to clean resources.
Without the close, resources locked by `with` will not be released.
"""
is_generator_closed = []
def task():
try:
yield 'RESULT'
except GeneratorExit:
is_generator_closed.append(True)
with task_consumer.Context():
p = task_consumer.add_task(task)
assert p.result(0.01) == 'RESULT'
assert is_generator_closed
| gpl-3.0 | 6,029,750,878,475,561,000 | 29.348624 | 78 | 0.546705 | false |
visio2img/visio2img | setup.py | 1 | 1585 | import sys
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Documentation',
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
'Topic :: Office/Business :: Office Suites',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
if sys.version_info > (3, 0):
test_requires = []
else:
test_requires = ['mock']
setup(
name='visio2img',
version='1.3.0',
description='MS-Visio file (.vsd, .vsdx) to images converter',
long_description=open('README.rst').read(),
author='Yassu',
author_email='[email protected]',
maintainer='Takeshi KOMIYA',
maintainer_email='[email protected]',
url='https://github.com/visio2img/visio2img',
classifiers=classifiers,
packages=['visio2img'],
tests_require=test_requires,
entry_points="""
[console_scripts]
visio2img = visio2img.visio2img:main
"""
)
| apache-2.0 | -7,219,872,094,020,648,000 | 31.346939 | 67 | 0.635962 | false |
azide0x37/tableFramer | tableFramer.py | 1 | 1267 | import requests
import json
from bs4 import BeautifulSoup
from collections import OrderedDict
class tableFramer:
def __init__(self, url):
self.url = url
self.response = requests.get(url, headers = {'User-Agent': 'Mozilla/5.0'})
def __call__(self):
souped = BeautifulSoup(self.response.text)
tableHead = souped.find('thead')
        # use the header cell text as keys so rows can be JSON-serialized later
        colNames = [th.get_text().strip() for th in tableHead.findAll('th')]
print "colNames", colNames
table = souped.find('table', summary = "Table listing details of the accident.")
rows = table.findAll('tr', class_ = "infoCell")
print "rows", rows
dataset = []
for tr in rows:
cols = tr.findAll('td')
rowData = OrderedDict()
counter = 1
for td in cols[1:]:
                text = ''.join(td.find(text=True) or '')
try:
rowData[colNames[counter]] = text
counter += 1
except:
counter = 0
continue
dataset.append(rowData)
return json.dumps(dataset)#, indent=4, separators=(',',':'))
crashData = tableFramer('http://www.mshp.dps.missouri.gov/HP68/SearchAction')
print crashData()
| mit | 2,197,101,322,134,218,800 | 29.166667 | 88 | 0.535912 | false |
philipkershaw/ndg_security_server | ndg/security/server/wsgi/utils.py | 2 | 7507 | """NDG Security WSGI utilities
MashMyData Project
"""
__author__ = "P J Kershaw"
__date__ = "21/08/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "[email protected]"
__revision__ = "$Id$"
import logging
log = logging.getLogger(__name__)
class FileObjResponseIterator(object):
"""Helper class creates iterable WSGI response based on a given block size
"""
DEFAULT_BLK_SIZE = 1024
BYTE_RANGE_PREFIX = 'bytes='
BYTE_RANGE_SEP = '-'
CONTENT_RANGE_FIELDNAME = 'Content-range'
CONTENT_RANGE_FORMAT_STR = "bytes %d-%d/%d"
INVALID_CONTENT_RANGE_FORMAT_STR = "bytes */%d"
__slots__ = (
'file_obj',
'file_size',
'__block_size',
'read_lengths',
'content_length',
'content_range',
'content_range_hdr',
'closed_method'
)
class IteratorError(Exception):
"""Base exception type for exceptions raised from
FileObjResponseIterator class instances"""
class InvalidRangeRequest(IteratorError):
"""Raise for an invalid byte range requested"""
def __init__(self, *arg, **kw):
FileObjResponseIterator.IteratorError.__init__(self, *arg, **kw)
if len(arg) > 1:
self.content_range_hdr = arg[1]
else:
self.content_range_hdr = None
class InvalidRangeRequestSyntax(IteratorError):
"""Raise for invalid range request syntax"""
def __init__(self, file_obj, file_size=-1, request_range=None,
block_size=DEFAULT_BLK_SIZE):
'''Open a file and set the blocks for reading, any input range set and
the response size
'''
self.file_obj = file_obj
self.file_size = file_size
# Find method of determining whether the file object is closed.
if hasattr(file_obj, 'closed'):
# Standard file interface has optional 'closed' attribute.
self.closed_method = lambda : self.file_obj.closed
elif hasattr(file_obj, 'isclosed'):
# httplib.HTTPResponse has a non-standard 'isclosed' method.
self.closed_method = self.file_obj.isclosed
elif hasattr(file_obj, 'fp'):
# urllib.addbase and derived classes returned by urllib and urllib2:
            self.closed_method = lambda : self.file_obj.fp is None
else:
self.closed_method = None
# the length of the content to return - this will be different to the
# file size if the client a byte range header field setting
self.content_length = 0
# None unless a valid input range was given
self.content_range = None
# Formatted for HTTP content range header field
self.content_range_hdr = None
# This will call the relevant set property method
self.block_size = block_size
# Array of blocks lengths for iterator to use to read the file
self.read_lengths = []
if request_range is not None:
# Prepare a content range header in case the range specified is
# invalid
content_range_hdr = (self.__class__.CONTENT_RANGE_FIELDNAME,
self.__class__.INVALID_CONTENT_RANGE_FORMAT_STR %
self.file_size)
try:
# Remove 'bytes=' prefix
rangeVals = request_range.split(
self.__class__.BYTE_RANGE_PREFIX)[-1]
# Convert into integers taking into account that a value may be
# absent
startStr, endStr = rangeVals.split(
self.__class__.BYTE_RANGE_SEP)
start = int(startStr or 0)
end = int(endStr or self.file_size - 1)
except ValueError:
raise self.__class__.InvalidRangeRequestSyntax('Invalid format '
'for request range %r' % request_range)
# Verify range bounds
if start > end:
raise self.__class__.InvalidRangeRequest('Range start index %r '
'is greater than the end index %r' %
(start, end), content_range_hdr)
elif start < 0:
raise self.__class__.InvalidRangeRequest('Range start index %r '
'is less than zero' %
start,
content_range_hdr)
elif end >= self.file_size:
# This is not an error -
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
log.warning('Range end index %r is greater than the length %r '
'of the requested resource - reseting to %r',
end, self.file_size, self.file_size - 1)
end = self.file_size - 1
# Set the total content length to return
self.content_length = end + 1 - start
self.content_range = (start, end)
self.content_range_hdr = (
self.__class__.CONTENT_RANGE_FIELDNAME,
self.__class__.CONTENT_RANGE_FORMAT_STR %
(self.content_range + (self.file_size,))
)
try:
self.file_obj.seek(start)
except AttributeError:
# File seek method is optional.
pass
else:
# Set the total content length to return
self.content_length = self.file_size
        nReads = self.content_length // self.block_size
lastReadLen = self.content_length % self.block_size
self.read_lengths = [self.block_size] * nReads
if lastReadLen > 0:
nReads += 1
self.read_lengths.append(lastReadLen)
def __iter__(self):
'''Read the file object a block at a time'''
# Leave read_lengths attribute intact
read_lengths = self.read_lengths[:]
while (self.content_length < 0) or (len(read_lengths) > 0):
if self.content_length < 0:
if self.closed_method():
return
amt = self.block_size
else:
amt = read_lengths.pop()
output = self.file_obj.read(amt)
if not output:
self.close()
yield output
def close(self):
"""Closes the file object.
"""
self.file_obj.close()
@property
def block_size(self):
"""block size for reading the file in the iterator and returning a
response
"""
return self.__block_size
@block_size.setter
def block_size(self, value):
"""block size for reading the file in the iterator and returning a
response
"""
self.__block_size = int(value)
        if self.__block_size <= 0:
raise ValueError('Expecting positive integer value for block size '
'attribute')
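# Illustrative walk-through (hypothetical request, not taken from the
# original source): for file_size=1000 and request_range='bytes=200-699' the
# parsing above gives start=200, end=699, content_length=500 and a header
# value of 'bytes 200-699/1000'; with block_size=256 the iterator then reads
# the 500 bytes in chunks of 256 plus a final chunk of 500 % 256 = 244.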
| bsd-3-clause | 3,673,313,203,661,367,000 | 37.497436 | 83 | 0.520714 | false |
zjkl19/AbaqusPython | SimpleSupportBeam/SimpleSupportBeam01.py | 1 | 7518 | # -*- coding: mbcs -*-
#auto generated by GUI operation.
#the template of the original example:ExpAbq00.py
#link:http://www.020fea.com/a/5/152/11521.html
#explanation:
#structure:simple supported beam
#load:ConcentratedForce in the midSpan
#post:none
#comment by lindinan on 2017-08-29
#
from part import *
from material import *
from section import *
from assembly import *
from step import *
from interaction import *
from load import *
from mesh import *
from optimization import *
from job import *
from sketch import *
from visualization import *
from connectorBehavior import *
mdb.models['Model-1'].ConstrainedSketch(name='__profile__', sheetSize=10.0)
mdb.models['Model-1'].sketches['__profile__'].Line(point1=(0.0, 0.0), point2=(
2.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((1.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].HorizontalConstraint(
addUndoState=False, entity=
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((1.0, 0.0),
))
mdb.models['Model-1'].sketches['__profile__'].Line(point1=(2.0, 0.0), point2=(
4.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((3.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].HorizontalConstraint(
addUndoState=False, entity=
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((3.0, 0.0),
))
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((1.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((3.0, 0.0))
mdb.models['Model-1'].sketches['__profile__'].ParallelConstraint(addUndoState=
False, entity1=
mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((1.0, 0.0), )
, entity2=mdb.models['Model-1'].sketches['__profile__'].geometry.findAt((
3.0, 0.0), ))
mdb.models['Model-1'].Part(dimensionality=THREE_D, name='Part-1', type=
DEFORMABLE_BODY)
mdb.models['Model-1'].parts['Part-1'].BaseWire(sketch=
mdb.models['Model-1'].sketches['__profile__'])
del mdb.models['Model-1'].sketches['__profile__']
mdb.models['Model-1'].IProfile(b1=0.1, b2=0.1, h=0.2, l=0.1, name='Profile-1',
t1=0.01, t2=0.01, t3=0.01)
mdb.models['Model-1'].BeamSection(alphaDamping=0.0, beamShape=CONSTANT,
betaDamping=0.0, centroid=(0.0, 0.0), compositeDamping=0.0,
consistentMassMatrix=False, dependencies=0, integration=BEFORE_ANALYSIS,
name='Section-1', poissonRatio=0.28, profile='Profile-1', shearCenter=(0.0,
0.0), table=((210000000000.0, 82030000000.0), ), temperatureDependency=OFF,
thermalExpansion=OFF)
mdb.models['Model-1'].parts['Part-1'].Set(edges=
mdb.models['Model-1'].parts['Part-1'].edges.findAt(((0.5, 0.0, 0.0), ), ((
2.5, 0.0, 0.0), ), ), name='Set-1')
mdb.models['Model-1'].parts['Part-1'].SectionAssignment(offset=0.0,
offsetField='', offsetType=MIDDLE_SURFACE, region=
mdb.models['Model-1'].parts['Part-1'].sets['Set-1'], sectionName=
'Section-1', thicknessAssignment=FROM_SECTION)
mdb.models['Model-1'].rootAssembly.DatumCsysByDefault(CARTESIAN)
mdb.models['Model-1'].rootAssembly.Instance(dependent=OFF, name='Part-1-1',
part=mdb.models['Model-1'].parts['Part-1'])
mdb.models['Model-1'].StaticStep(name='Step-1', previous='Initial')
mdb.models['Model-1'].rootAssembly.Set(name='Set-1', vertices=
mdb.models['Model-1'].rootAssembly.instances['Part-1-1'].vertices.findAt(((
0.0, 0.0, 0.0), )))
mdb.models['Model-1'].DisplacementBC(amplitude=UNSET, createStepName='Step-1',
distributionType=UNIFORM, fieldName='', fixed=OFF, localCsys=None, name=
'BC-1', region=mdb.models['Model-1'].rootAssembly.sets['Set-1'], u1=0.0,
u2=0.0, u3=0.0, ur1=0.0, ur2=0.0, ur3=UNSET)
mdb.models['Model-1'].rootAssembly.Set(name='Set-2', vertices=
mdb.models['Model-1'].rootAssembly.instances['Part-1-1'].vertices.findAt(((
4.0, 0.0, 0.0), )))
mdb.models['Model-1'].DisplacementBC(amplitude=UNSET, createStepName='Step-1',
distributionType=UNIFORM, fieldName='', fixed=OFF, localCsys=None, name=
'BC-2', region=mdb.models['Model-1'].rootAssembly.sets['Set-2'], u1=UNSET,
u2=0.0, u3=0.0, ur1=0.0, ur2=0.0, ur3=UNSET)
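# Note: BC-1 pins the left end (u1=u2=u3=0 at x=0) while BC-2 releases u1 at
# x=4, acting as a roller - together these model the simple supports.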
mdb.models['Model-1'].rootAssembly.Set(name='Set-3', vertices=
mdb.models['Model-1'].rootAssembly.instances['Part-1-1'].vertices.findAt(((
2.0, 0.0, 0.0), )))
mdb.models['Model-1'].ConcentratedForce(cf2=-10000.0, createStepName='Step-1',
distributionType=UNIFORM, field='', localCsys=None, name='Load-1', region=
mdb.models['Model-1'].rootAssembly.sets['Set-3'])
mdb.models['Model-1'].rootAssembly.seedPartInstance(deviationFactor=0.1,
minSizeFactor=0.1, regions=(
mdb.models['Model-1'].rootAssembly.instances['Part-1-1'], ), size=0.2)
mdb.models['Model-1'].rootAssembly.generateMesh(regions=(
mdb.models['Model-1'].rootAssembly.instances['Part-1-1'], ))
mdb.Job(atTime=None, contactPrint=OFF, description='', echoPrint=OFF,
explicitPrecision=SINGLE, getMemoryFromAnalysis=True, historyPrint=OFF,
memory=90, memoryUnits=PERCENTAGE, model='Model-1', modelPrint=OFF,
multiprocessingMode=DEFAULT, name='ExpAbq00', nodalOutputPrecision=SINGLE,
numCpus=1, numGPUs=0, queue=None, scratch='', type=ANALYSIS,
userSubroutine='', waitHours=0, waitMinutes=0)
mdb.models['Model-1'].parts['Part-1'].assignBeamSectionOrientation(method=
N1_COSINES, n1=(0.0, 0.0, 1.0), region=Region(
edges=mdb.models['Model-1'].parts['Part-1'].edges.findAt(((0.5, 0.0, 0.0),
), ((2.5, 0.0, 0.0), ), )))
mdb.models['Model-1'].rootAssembly.regenerate()
mdb.jobs['ExpAbq00'].submit(consistencyChecking=OFF)
mdb.jobs['ExpAbq00']._Message(STARTED, {'phase': BATCHPRE_PHASE,
'clientHost': 'bdl-PC', 'handle': 0, 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(ODB_FILE, {'phase': BATCHPRE_PHASE,
'file': 'D:\\SIMULIA\\Temp\\ExpAbq00.odb', 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(COMPLETED, {'phase': BATCHPRE_PHASE,
'message': 'Analysis phase complete', 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(STARTED, {'phase': STANDARD_PHASE,
'clientHost': 'bdl-PC', 'handle': 60784, 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(STEP, {'phase': STANDARD_PHASE, 'stepId': 1,
'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(ODB_FRAME, {'phase': STANDARD_PHASE, 'step': 0,
'frame': 0, 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(STATUS, {'totalTime': 0.0, 'attempts': 0,
'timeIncrement': 1.0, 'increment': 0, 'stepTime': 0.0, 'step': 1,
'jobName': 'ExpAbq00', 'severe': 0, 'iterations': 0,
'phase': STANDARD_PHASE, 'equilibrium': 0})
mdb.jobs['ExpAbq00']._Message(MEMORY_ESTIMATE, {'phase': STANDARD_PHASE,
'jobName': 'ExpAbq00', 'memory': 23.6787071228027})
mdb.jobs['ExpAbq00']._Message(ODB_FRAME, {'phase': STANDARD_PHASE, 'step': 0,
'frame': 1, 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(STATUS, {'totalTime': 1.0, 'attempts': 1,
'timeIncrement': 1.0, 'increment': 1, 'stepTime': 1.0, 'step': 1,
'jobName': 'ExpAbq00', 'severe': 0, 'iterations': 1,
'phase': STANDARD_PHASE, 'equilibrium': 1})
mdb.jobs['ExpAbq00']._Message(END_STEP, {'phase': STANDARD_PHASE, 'stepId': 1,
'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(COMPLETED, {'phase': STANDARD_PHASE,
'message': 'Analysis phase complete', 'jobName': 'ExpAbq00'})
mdb.jobs['ExpAbq00']._Message(JOB_COMPLETED, {
'time': 'Sat Aug 05 12:09:15 2017', 'jobName': 'ExpAbq00'})
# Save by bdl on 2017_08_05-12.10.39; build 6.13-1 2013_05_16-10.28.56 126354
| mit | -4,320,695,320,007,110,000 | 51.573427 | 80 | 0.672918 | false |
selmanj/repel | datasets/recipe/synthetic/synth.py | 1 | 3044 | #!/usr/bin/python
#
# Generates synthetic recipe domains.
# RUNS ON PYTHON 3.x, not 2.7
# GENERAL DIRECTIONS
# 1. Plate items down
# * Cup, bowl, cakemix
# 2. Pour cakemix into bowl
# 3. Mix bowl
# 4. Place oil bottle
# 5. pour oil bottle into mixing bowl
# 6. MIX
# 7. Place 1-2 more bowls, pour and mix
#
# So we mainly have variation in how we bring items out, and a bit in how we
# pour/mix. But essentially we will always be pouring and mixing.
# number of frames on average between actions
delayBetweenActsMean = 130
delayBetweenActsDev = 100
shortestLength = 20
putDownActsMean = 100
putDownActsDev = 50
pourActsMean = 200
pourActsDev = 75
mixActsMean = 400
mixActsDev = 200
pickupMean = 100
pickupDev = 30
import argparse
import random
def normalvariateMin(mean, stddev, min):
sample = random.normalvariate(mean, stddev)
if sample < min:
sample = min
return round(sample)
def interval(mean, stddev, offset):
'''Get an interval for a generic event (including delay) from offset'''
start = offset+normalvariateMin(mean, stddev, shortestLength)
end = start+normalvariateMin(mean, stddev, shortestLength)
return (start, end)
def placeInterval(offset):
'''Get an interval for a place event (including delay) from offset'''
start = offset+normalvariateMin(delayBetweenActsMean, delayBetweenActsDev, shortestLength)
end = start+normalvariateMin(putDownActsMean, putDownActsDev, shortestLength)
return (start, end)
def outputAsREPEL(basicEvents):
    # determine the overall interval spanned by the events
    maxStart = None
    maxEnd = None
    for pred, start, end in basicEvents:
        if maxStart is None or maxStart > start:
            maxStart = start
        if maxEnd is None or maxEnd < end:
            maxEnd = end
# add a small delay to the end
maxEnd = maxEnd+normalvariateMin(delayBetweenActsMean, delayBetweenActsDev, shortestLength)
output = ''
for pred, start, end in basicEvents:
if pred[0] == 'place':
output += 'D-OnTable({0}) @ [{1} : {2}]\n'.format(pred[1], end+1, maxEnd)
output += 'D-HandInFrame() @ [{0} : {1}]\n'.format(start, end+10)
if pred[0] == 'pour':
output += 'D-Pour() @ [{0} : {1}]\n'.format(start, end)
print(output)
parser = argparse.ArgumentParser(description='Generate synthetic data.')
parser.add_argument('-s', '--seed', type=int, default=1)
args = parser.parse_args()
seed = args.seed
random.seed(seed)
# choose our order for putting down cup, bowl, then cakemix
bowlOrder = ['cup', 'bowl', 'cakemix']
random.shuffle(bowlOrder)
basicEvents = []
end = 0
for i in range(len(bowlOrder)):
start, end = placeInterval(end)
basicEvents.append((['place', bowlOrder.pop()], start, end))
# pour cakemix into bowl
start, end = interval(pourActsMean, pourActsDev, end)
basicEvents.append((['pour', 'cakemix', 'bowl'], start, end))
# perform a mix
start,end = interval(mixActsMean, mixActsDev, end)
basicEvents.append((['mix', 'bowl'], start, end))
outputAsREPEL(basicEvents)
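# Sample output shape (hypothetical timestamps; actual values depend on the
# RNG seed passed via --seed):
#   D-OnTable(cup) @ [251 : 1834]
#   D-HandInFrame() @ [173 : 261]
#   D-Pour() @ [905 : 1120]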
| mit | -6,441,630,987,821,203,000 | 27.448598 | 95 | 0.68134 | false |
nugget/home-assistant | homeassistant/components/google_assistant/trait.py | 1 | 32913 | """Implement the Google Smart Home traits."""
import logging
from homeassistant.components import (
cover,
group,
fan,
input_boolean,
media_player,
light,
lock,
scene,
script,
switch,
vacuum,
)
from homeassistant.components.climate import const as climate
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_LOCKED,
STATE_OFF,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
)
from homeassistant.core import DOMAIN as HA_DOMAIN
from homeassistant.util import color as color_util, temperature as temp_util
from .const import ERR_VALUE_OUT_OF_RANGE
from .helpers import SmartHomeError
_LOGGER = logging.getLogger(__name__)
PREFIX_TRAITS = 'action.devices.traits.'
TRAIT_ONOFF = PREFIX_TRAITS + 'OnOff'
TRAIT_DOCK = PREFIX_TRAITS + 'Dock'
TRAIT_STARTSTOP = PREFIX_TRAITS + 'StartStop'
TRAIT_BRIGHTNESS = PREFIX_TRAITS + 'Brightness'
TRAIT_COLOR_SPECTRUM = PREFIX_TRAITS + 'ColorSpectrum'
TRAIT_COLOR_TEMP = PREFIX_TRAITS + 'ColorTemperature'
TRAIT_SCENE = PREFIX_TRAITS + 'Scene'
TRAIT_TEMPERATURE_SETTING = PREFIX_TRAITS + 'TemperatureSetting'
TRAIT_LOCKUNLOCK = PREFIX_TRAITS + 'LockUnlock'
TRAIT_FANSPEED = PREFIX_TRAITS + 'FanSpeed'
TRAIT_MODES = PREFIX_TRAITS + 'Modes'
PREFIX_COMMANDS = 'action.devices.commands.'
COMMAND_ONOFF = PREFIX_COMMANDS + 'OnOff'
COMMAND_DOCK = PREFIX_COMMANDS + 'Dock'
COMMAND_STARTSTOP = PREFIX_COMMANDS + 'StartStop'
COMMAND_PAUSEUNPAUSE = PREFIX_COMMANDS + 'PauseUnpause'
COMMAND_BRIGHTNESS_ABSOLUTE = PREFIX_COMMANDS + 'BrightnessAbsolute'
COMMAND_COLOR_ABSOLUTE = PREFIX_COMMANDS + 'ColorAbsolute'
COMMAND_ACTIVATE_SCENE = PREFIX_COMMANDS + 'ActivateScene'
COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = (
PREFIX_COMMANDS + 'ThermostatTemperatureSetpoint')
COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = (
PREFIX_COMMANDS + 'ThermostatTemperatureSetRange')
COMMAND_THERMOSTAT_SET_MODE = PREFIX_COMMANDS + 'ThermostatSetMode'
COMMAND_LOCKUNLOCK = PREFIX_COMMANDS + 'LockUnlock'
COMMAND_FANSPEED = PREFIX_COMMANDS + 'SetFanSpeed'
COMMAND_MODES = PREFIX_COMMANDS + 'SetModes'
TRAITS = []
def register_trait(trait):
"""Decorate a function to register a trait."""
TRAITS.append(trait)
return trait
def _google_temp_unit(units):
"""Return Google temperature unit."""
if units == TEMP_FAHRENHEIT:
return 'F'
return 'C'
class _Trait:
"""Represents a Trait inside Google Assistant skill."""
commands = []
def __init__(self, hass, state, config):
"""Initialize a trait for a state."""
self.hass = hass
self.state = state
self.config = config
def sync_attributes(self):
"""Return attributes for a sync request."""
raise NotImplementedError
def query_attributes(self):
"""Return the attributes of this trait for this entity."""
raise NotImplementedError
def can_execute(self, command, params):
"""Test if command can be executed."""
return command in self.commands
async def execute(self, command, data, params):
"""Execute a trait command."""
raise NotImplementedError
@register_trait
class BrightnessTrait(_Trait):
"""Trait to control brightness of a device.
https://developers.google.com/actions/smarthome/traits/brightness
"""
name = TRAIT_BRIGHTNESS
commands = [
COMMAND_BRIGHTNESS_ABSOLUTE
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain == light.DOMAIN:
return features & light.SUPPORT_BRIGHTNESS
if domain == cover.DOMAIN:
return features & cover.SUPPORT_SET_POSITION
if domain == media_player.DOMAIN:
return features & media_player.SUPPORT_VOLUME_SET
return False
def sync_attributes(self):
"""Return brightness attributes for a sync request."""
return {}
def query_attributes(self):
"""Return brightness query attributes."""
domain = self.state.domain
response = {}
if domain == light.DOMAIN:
brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS)
if brightness is not None:
response['brightness'] = int(100 * (brightness / 255))
elif domain == cover.DOMAIN:
position = self.state.attributes.get(cover.ATTR_CURRENT_POSITION)
if position is not None:
response['brightness'] = position
elif domain == media_player.DOMAIN:
level = self.state.attributes.get(
media_player.ATTR_MEDIA_VOLUME_LEVEL)
if level is not None:
                # Convert 0.0-1.0 to 0-100
response['brightness'] = int(level * 100)
return response
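    # Scaling examples (hypothetical states): a light brightness of 128/255
    # reports as 50, a cover position passes through unchanged, and a media
    # volume level of 0.5 reports as 50.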
async def execute(self, command, data, params):
"""Execute a brightness command."""
domain = self.state.domain
if domain == light.DOMAIN:
await self.hass.services.async_call(
light.DOMAIN, light.SERVICE_TURN_ON, {
ATTR_ENTITY_ID: self.state.entity_id,
light.ATTR_BRIGHTNESS_PCT: params['brightness']
}, blocking=True, context=data.context)
elif domain == cover.DOMAIN:
await self.hass.services.async_call(
cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION, {
ATTR_ENTITY_ID: self.state.entity_id,
cover.ATTR_POSITION: params['brightness']
}, blocking=True, context=data.context)
elif domain == media_player.DOMAIN:
await self.hass.services.async_call(
media_player.DOMAIN, media_player.SERVICE_VOLUME_SET, {
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_MEDIA_VOLUME_LEVEL:
params['brightness'] / 100
}, blocking=True, context=data.context)
@register_trait
class OnOffTrait(_Trait):
"""Trait to offer basic on and off functionality.
https://developers.google.com/actions/smarthome/traits/onoff
"""
name = TRAIT_ONOFF
commands = [
COMMAND_ONOFF
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain == climate.DOMAIN:
return features & climate.SUPPORT_ON_OFF != 0
return domain in (
group.DOMAIN,
input_boolean.DOMAIN,
switch.DOMAIN,
fan.DOMAIN,
light.DOMAIN,
cover.DOMAIN,
media_player.DOMAIN,
)
def sync_attributes(self):
"""Return OnOff attributes for a sync request."""
return {}
def query_attributes(self):
"""Return OnOff query attributes."""
if self.state.domain == cover.DOMAIN:
return {'on': self.state.state != cover.STATE_CLOSED}
return {'on': self.state.state != STATE_OFF}
async def execute(self, command, data, params):
"""Execute an OnOff command."""
domain = self.state.domain
if domain == cover.DOMAIN:
service_domain = domain
if params['on']:
service = cover.SERVICE_OPEN_COVER
else:
service = cover.SERVICE_CLOSE_COVER
elif domain == group.DOMAIN:
service_domain = HA_DOMAIN
service = SERVICE_TURN_ON if params['on'] else SERVICE_TURN_OFF
else:
service_domain = domain
service = SERVICE_TURN_ON if params['on'] else SERVICE_TURN_OFF
await self.hass.services.async_call(service_domain, service, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
@register_trait
class ColorSpectrumTrait(_Trait):
"""Trait to offer color spectrum functionality.
https://developers.google.com/actions/smarthome/traits/colorspectrum
"""
name = TRAIT_COLOR_SPECTRUM
commands = [
COMMAND_COLOR_ABSOLUTE
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain != light.DOMAIN:
return False
return features & light.SUPPORT_COLOR
def sync_attributes(self):
"""Return color spectrum attributes for a sync request."""
# Other colorModel is hsv
return {'colorModel': 'rgb'}
def query_attributes(self):
"""Return color spectrum query attributes."""
response = {}
color_hs = self.state.attributes.get(light.ATTR_HS_COLOR)
if color_hs is not None:
response['color'] = {
'spectrumRGB': int(color_util.color_rgb_to_hex(
*color_util.color_hs_to_RGB(*color_hs)), 16),
}
return response
def can_execute(self, command, params):
"""Test if command can be executed."""
return (command in self.commands and
'spectrumRGB' in params.get('color', {}))
async def execute(self, command, data, params):
"""Execute a color spectrum command."""
# Convert integer to hex format and left pad with 0's till length 6
hex_value = "{0:06x}".format(params['color']['spectrumRGB'])
color = color_util.color_RGB_to_hs(
*color_util.rgb_hex_to_rgb_list(hex_value))
await self.hass.services.async_call(light.DOMAIN, SERVICE_TURN_ON, {
ATTR_ENTITY_ID: self.state.entity_id,
light.ATTR_HS_COLOR: color
}, blocking=True, context=data.context)
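    # Worked example (hypothetical command): spectrumRGB 16711680 formats to
    # the hex string 'ff0000' (pure red), which color_RGB_to_hs converts to a
    # hue/saturation pair for the light turn_on call above.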
@register_trait
class ColorTemperatureTrait(_Trait):
"""Trait to offer color temperature functionality.
https://developers.google.com/actions/smarthome/traits/colortemperature
"""
name = TRAIT_COLOR_TEMP
commands = [
COMMAND_COLOR_ABSOLUTE
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain != light.DOMAIN:
return False
return features & light.SUPPORT_COLOR_TEMP
def sync_attributes(self):
"""Return color temperature attributes for a sync request."""
attrs = self.state.attributes
        # Max Kelvin is Min Mireds: K = 1000000 / mireds
        # Min Kelvin is Max Mireds: K = 1000000 / mireds
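        # e.g. a hypothetical min_mireds of 153 gives temperatureMaxK of
        # about 6536, and max_mireds of 500 gives temperatureMinK of 2000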
return {
'temperatureMaxK': color_util.color_temperature_mired_to_kelvin(
attrs.get(light.ATTR_MIN_MIREDS)),
'temperatureMinK': color_util.color_temperature_mired_to_kelvin(
attrs.get(light.ATTR_MAX_MIREDS)),
}
def query_attributes(self):
"""Return color temperature query attributes."""
response = {}
temp = self.state.attributes.get(light.ATTR_COLOR_TEMP)
# Some faulty integrations might put 0 in here, raising exception.
if temp == 0:
_LOGGER.warning('Entity %s has incorrect color temperature %s',
self.state.entity_id, temp)
elif temp is not None:
response['color'] = {
'temperature':
color_util.color_temperature_mired_to_kelvin(temp)
}
return response
def can_execute(self, command, params):
"""Test if command can be executed."""
return (command in self.commands and
'temperature' in params.get('color', {}))
async def execute(self, command, data, params):
"""Execute a color temperature command."""
temp = color_util.color_temperature_kelvin_to_mired(
params['color']['temperature'])
min_temp = self.state.attributes[light.ATTR_MIN_MIREDS]
max_temp = self.state.attributes[light.ATTR_MAX_MIREDS]
if temp < min_temp or temp > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
"Temperature should be between {} and {}".format(min_temp,
max_temp))
await self.hass.services.async_call(light.DOMAIN, SERVICE_TURN_ON, {
ATTR_ENTITY_ID: self.state.entity_id,
light.ATTR_COLOR_TEMP: temp,
}, blocking=True, context=data.context)
@register_trait
class SceneTrait(_Trait):
"""Trait to offer scene functionality.
https://developers.google.com/actions/smarthome/traits/scene
"""
name = TRAIT_SCENE
commands = [
COMMAND_ACTIVATE_SCENE
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
return domain in (scene.DOMAIN, script.DOMAIN)
def sync_attributes(self):
"""Return scene attributes for a sync request."""
# Neither supported domain can support sceneReversible
return {}
def query_attributes(self):
"""Return scene query attributes."""
return {}
async def execute(self, command, data, params):
"""Execute a scene command."""
# Don't block for scripts as they can be slow.
await self.hass.services.async_call(
self.state.domain, SERVICE_TURN_ON, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=self.state.domain != script.DOMAIN,
context=data.context)
@register_trait
class DockTrait(_Trait):
"""Trait to offer dock functionality.
https://developers.google.com/actions/smarthome/traits/dock
"""
name = TRAIT_DOCK
commands = [
COMMAND_DOCK
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
return domain == vacuum.DOMAIN
def sync_attributes(self):
"""Return dock attributes for a sync request."""
return {}
def query_attributes(self):
"""Return dock query attributes."""
return {'isDocked': self.state.state == vacuum.STATE_DOCKED}
async def execute(self, command, data, params):
"""Execute a dock command."""
await self.hass.services.async_call(
self.state.domain, vacuum.SERVICE_RETURN_TO_BASE, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
@register_trait
class StartStopTrait(_Trait):
"""Trait to offer StartStop functionality.
https://developers.google.com/actions/smarthome/traits/startstop
"""
name = TRAIT_STARTSTOP
commands = [
COMMAND_STARTSTOP,
COMMAND_PAUSEUNPAUSE
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
return domain == vacuum.DOMAIN
def sync_attributes(self):
"""Return StartStop attributes for a sync request."""
return {'pausable':
self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& vacuum.SUPPORT_PAUSE != 0}
def query_attributes(self):
"""Return StartStop query attributes."""
return {
'isRunning': self.state.state == vacuum.STATE_CLEANING,
'isPaused': self.state.state == vacuum.STATE_PAUSED,
}
async def execute(self, command, data, params):
"""Execute a StartStop command."""
if command == COMMAND_STARTSTOP:
if params['start']:
await self.hass.services.async_call(
self.state.domain, vacuum.SERVICE_START, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
else:
await self.hass.services.async_call(
self.state.domain, vacuum.SERVICE_STOP, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
elif command == COMMAND_PAUSEUNPAUSE:
if params['pause']:
await self.hass.services.async_call(
self.state.domain, vacuum.SERVICE_PAUSE, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
else:
await self.hass.services.async_call(
self.state.domain, vacuum.SERVICE_START, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
@register_trait
class TemperatureSettingTrait(_Trait):
"""Trait to offer handling both temperature point and modes functionality.
https://developers.google.com/actions/smarthome/traits/temperaturesetting
"""
name = TRAIT_TEMPERATURE_SETTING
commands = [
COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE,
COMMAND_THERMOSTAT_SET_MODE,
]
# We do not support "on" as we are unable to know how to restore
# the last mode.
hass_to_google = {
climate.STATE_HEAT: 'heat',
climate.STATE_COOL: 'cool',
STATE_OFF: 'off',
climate.STATE_AUTO: 'heatcool',
climate.STATE_FAN_ONLY: 'fan-only',
climate.STATE_DRY: 'dry',
climate.STATE_ECO: 'eco'
}
google_to_hass = {value: key for key, value in hass_to_google.items()}
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain != climate.DOMAIN:
return False
return features & climate.SUPPORT_OPERATION_MODE
def sync_attributes(self):
"""Return temperature point and modes attributes for a sync request."""
modes = []
for mode in self.state.attributes.get(climate.ATTR_OPERATION_LIST, []):
google_mode = self.hass_to_google.get(mode)
if google_mode is not None:
modes.append(google_mode)
return {
'availableThermostatModes': ','.join(modes),
'thermostatTemperatureUnit': _google_temp_unit(
self.hass.config.units.temperature_unit)
}
def query_attributes(self):
"""Return temperature point and modes query attributes."""
attrs = self.state.attributes
response = {}
operation = attrs.get(climate.ATTR_OPERATION_MODE)
if operation is not None and operation in self.hass_to_google:
response['thermostatMode'] = self.hass_to_google[operation]
unit = self.hass.config.units.temperature_unit
current_temp = attrs.get(climate.ATTR_CURRENT_TEMPERATURE)
if current_temp is not None:
response['thermostatTemperatureAmbient'] = \
round(temp_util.convert(current_temp, unit, TEMP_CELSIUS), 1)
current_humidity = attrs.get(climate.ATTR_CURRENT_HUMIDITY)
if current_humidity is not None:
response['thermostatHumidityAmbient'] = current_humidity
if (operation == climate.STATE_AUTO and
climate.ATTR_TARGET_TEMP_HIGH in attrs and
climate.ATTR_TARGET_TEMP_LOW in attrs):
response['thermostatTemperatureSetpointHigh'] = \
round(temp_util.convert(attrs[climate.ATTR_TARGET_TEMP_HIGH],
unit, TEMP_CELSIUS), 1)
response['thermostatTemperatureSetpointLow'] = \
round(temp_util.convert(attrs[climate.ATTR_TARGET_TEMP_LOW],
unit, TEMP_CELSIUS), 1)
else:
target_temp = attrs.get(ATTR_TEMPERATURE)
if target_temp is not None:
response['thermostatTemperatureSetpoint'] = round(
temp_util.convert(target_temp, unit, TEMP_CELSIUS), 1)
return response
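    # A hypothetical query response for a heating thermostat:
    #   {'thermostatMode': 'heat', 'thermostatTemperatureAmbient': 21.5,
    #    'thermostatTemperatureSetpoint': 23.0}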
async def execute(self, command, data, params):
"""Execute a temperature point or mode command."""
# All sent in temperatures are always in Celsius
unit = self.hass.config.units.temperature_unit
min_temp = self.state.attributes[climate.ATTR_MIN_TEMP]
max_temp = self.state.attributes[climate.ATTR_MAX_TEMP]
if command == COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT:
temp = temp_util.convert(
params['thermostatTemperatureSetpoint'], TEMP_CELSIUS,
unit)
if unit == TEMP_FAHRENHEIT:
temp = round(temp)
if temp < min_temp or temp > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
"Temperature should be between {} and {}".format(min_temp,
max_temp))
await self.hass.services.async_call(
climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, {
ATTR_ENTITY_ID: self.state.entity_id,
ATTR_TEMPERATURE: temp
}, blocking=True, context=data.context)
elif command == COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE:
temp_high = temp_util.convert(
params['thermostatTemperatureSetpointHigh'], TEMP_CELSIUS,
unit)
if unit == TEMP_FAHRENHEIT:
temp_high = round(temp_high)
if temp_high < min_temp or temp_high > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
"Upper bound for temperature range should be between "
"{} and {}".format(min_temp, max_temp))
temp_low = temp_util.convert(
params['thermostatTemperatureSetpointLow'], TEMP_CELSIUS,
unit)
if unit == TEMP_FAHRENHEIT:
temp_low = round(temp_low)
if temp_low < min_temp or temp_low > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
"Lower bound for temperature range should be between "
"{} and {}".format(min_temp, max_temp))
await self.hass.services.async_call(
climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, {
ATTR_ENTITY_ID: self.state.entity_id,
climate.ATTR_TARGET_TEMP_HIGH: temp_high,
climate.ATTR_TARGET_TEMP_LOW: temp_low,
}, blocking=True, context=data.context)
elif command == COMMAND_THERMOSTAT_SET_MODE:
await self.hass.services.async_call(
climate.DOMAIN, climate.SERVICE_SET_OPERATION_MODE, {
ATTR_ENTITY_ID: self.state.entity_id,
climate.ATTR_OPERATION_MODE:
self.google_to_hass[params['thermostatMode']],
}, blocking=True, context=data.context)
@register_trait
class LockUnlockTrait(_Trait):
"""Trait to lock or unlock a lock.
https://developers.google.com/actions/smarthome/traits/lockunlock
"""
name = TRAIT_LOCKUNLOCK
commands = [
COMMAND_LOCKUNLOCK
]
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
return domain == lock.DOMAIN
def sync_attributes(self):
"""Return LockUnlock attributes for a sync request."""
return {}
def query_attributes(self):
"""Return LockUnlock query attributes."""
return {'isLocked': self.state.state == STATE_LOCKED}
def can_execute(self, command, params):
"""Test if command can be executed."""
allowed_unlock = not params['lock'] and self.config.allow_unlock
return params['lock'] or allowed_unlock
async def execute(self, command, data, params):
"""Execute an LockUnlock command."""
if params['lock']:
service = lock.SERVICE_LOCK
else:
service = lock.SERVICE_UNLOCK
await self.hass.services.async_call(lock.DOMAIN, service, {
ATTR_ENTITY_ID: self.state.entity_id
}, blocking=True, context=data.context)
@register_trait
class FanSpeedTrait(_Trait):
"""Trait to control speed of Fan.
https://developers.google.com/actions/smarthome/traits/fanspeed
"""
name = TRAIT_FANSPEED
commands = [
COMMAND_FANSPEED
]
speed_synonyms = {
fan.SPEED_OFF: ['stop', 'off'],
fan.SPEED_LOW: ['slow', 'low', 'slowest', 'lowest'],
fan.SPEED_MEDIUM: ['medium', 'mid', 'middle'],
fan.SPEED_HIGH: [
'high', 'max', 'fast', 'highest', 'fastest', 'maximum'
]
}
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain != fan.DOMAIN:
return False
return features & fan.SUPPORT_SET_SPEED
def sync_attributes(self):
"""Return speed point and modes attributes for a sync request."""
modes = self.state.attributes.get(fan.ATTR_SPEED_LIST, [])
speeds = []
for mode in modes:
if mode not in self.speed_synonyms:
continue
speed = {
"speed_name": mode,
"speed_values": [{
"speed_synonym": self.speed_synonyms.get(mode),
"lang": 'en'
}]
}
speeds.append(speed)
return {
'availableFanSpeeds': {
'speeds': speeds,
'ordered': True
},
"reversible": bool(self.state.attributes.get(
ATTR_SUPPORTED_FEATURES, 0) & fan.SUPPORT_DIRECTION)
}
def query_attributes(self):
"""Return speed point and modes query attributes."""
attrs = self.state.attributes
response = {}
speed = attrs.get(fan.ATTR_SPEED)
if speed is not None:
response['on'] = speed != fan.SPEED_OFF
response['online'] = True
response['currentFanSpeedSetting'] = speed
return response
async def execute(self, command, data, params):
"""Execute an SetFanSpeed command."""
await self.hass.services.async_call(
fan.DOMAIN, fan.SERVICE_SET_SPEED, {
ATTR_ENTITY_ID: self.state.entity_id,
fan.ATTR_SPEED: params['fanSpeed']
}, blocking=True, context=data.context)
@register_trait
class ModesTrait(_Trait):
"""Trait to set modes.
https://developers.google.com/actions/smarthome/traits/modes
"""
name = TRAIT_MODES
commands = [
COMMAND_MODES
]
# Google requires specific mode names and settings. Here is the full list.
# https://developers.google.com/actions/reference/smarthome/traits/modes
# All settings are mapped here as of 2018-11-28 and can be used for other
# entity types.
HA_TO_GOOGLE = {
media_player.ATTR_INPUT_SOURCE: "input source",
}
SUPPORTED_MODE_SETTINGS = {
'xsmall': [
'xsmall', 'extra small', 'min', 'minimum', 'tiny', 'xs'],
'small': ['small', 'half'],
'large': ['large', 'big', 'full'],
'xlarge': ['extra large', 'xlarge', 'xl'],
'Cool': ['cool', 'rapid cool', 'rapid cooling'],
'Heat': ['heat'], 'Low': ['low'],
'Medium': ['medium', 'med', 'mid', 'half'],
'High': ['high'],
'Auto': ['auto', 'automatic'],
'Bake': ['bake'], 'Roast': ['roast'],
'Convection Bake': ['convection bake', 'convect bake'],
'Convection Roast': ['convection roast', 'convect roast'],
'Favorite': ['favorite'],
'Broil': ['broil'],
'Warm': ['warm'],
'Off': ['off'],
'On': ['on'],
'Normal': [
'normal', 'normal mode', 'normal setting', 'standard',
'schedule', 'original', 'default', 'old settings'
],
'None': ['none'],
'Tap Cold': ['tap cold'],
'Cold Warm': ['cold warm'],
'Hot': ['hot'],
'Extra Hot': ['extra hot'],
'Eco': ['eco'],
'Wool': ['wool', 'fleece'],
'Turbo': ['turbo'],
'Rinse': ['rinse', 'rinsing', 'rinse wash'],
'Away': ['away', 'holiday'],
'maximum': ['maximum'],
'media player': ['media player'],
'chromecast': ['chromecast'],
'tv': [
'tv', 'television', 'tv position', 'television position',
'watching tv', 'watching tv position', 'entertainment',
'entertainment position'
],
'am fm': ['am fm', 'am radio', 'fm radio'],
'internet radio': ['internet radio'],
'satellite': ['satellite'],
'game console': ['game console'],
'antifrost': ['antifrost', 'anti-frost'],
'boost': ['boost'],
'Clock': ['clock'],
'Message': ['message'],
'Messages': ['messages'],
'News': ['news'],
'Disco': ['disco'],
'antifreeze': ['antifreeze', 'anti-freeze', 'anti freeze'],
'balanced': ['balanced', 'normal'],
'swing': ['swing'],
'media': ['media', 'media mode'],
'panic': ['panic'],
'ring': ['ring'],
'frozen': ['frozen', 'rapid frozen', 'rapid freeze'],
'cotton': ['cotton', 'cottons'],
'blend': ['blend', 'mix'],
'baby wash': ['baby wash'],
'synthetics': ['synthetic', 'synthetics', 'compose'],
'hygiene': ['hygiene', 'sterilization'],
'smart': ['smart', 'intelligent', 'intelligence'],
'comfortable': ['comfortable', 'comfort'],
'manual': ['manual'],
'energy saving': ['energy saving'],
'sleep': ['sleep'],
'quick wash': ['quick wash', 'fast wash'],
'cold': ['cold'],
'airsupply': ['airsupply', 'air supply'],
        'dehumidification': ['dehumidification', 'dehumidify'],
'game': ['game', 'game mode']
}
@staticmethod
def supported(domain, features):
"""Test if state is supported."""
if domain != media_player.DOMAIN:
return False
return features & media_player.SUPPORT_SELECT_SOURCE
def sync_attributes(self):
"""Return mode attributes for a sync request."""
sources_list = self.state.attributes.get(
media_player.ATTR_INPUT_SOURCE_LIST, [])
modes = []
sources = {}
if sources_list:
sources = {
"name": self.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE),
"name_values": [{
"name_synonym": ['input source'],
"lang": "en"
}],
"settings": [],
"ordered": False
}
for source in sources_list:
if source in self.SUPPORTED_MODE_SETTINGS:
src = source
synonyms = self.SUPPORTED_MODE_SETTINGS.get(src)
elif source.lower() in self.SUPPORTED_MODE_SETTINGS:
src = source.lower()
synonyms = self.SUPPORTED_MODE_SETTINGS.get(src)
else:
continue
sources['settings'].append(
{
"setting_name": src,
"setting_values": [{
"setting_synonym": synonyms,
"lang": "en"
}]
}
)
if sources:
modes.append(sources)
payload = {'availableModes': modes}
return payload
def query_attributes(self):
"""Return current modes."""
attrs = self.state.attributes
response = {}
mode_settings = {}
if attrs.get(media_player.ATTR_INPUT_SOURCE_LIST):
mode_settings.update({
media_player.ATTR_INPUT_SOURCE: attrs.get(
media_player.ATTR_INPUT_SOURCE)
})
if mode_settings:
response['on'] = self.state.state != STATE_OFF
response['online'] = True
response['currentModeSettings'] = mode_settings
return response
async def execute(self, command, data, params):
"""Execute an SetModes command."""
settings = params.get('updateModeSettings')
requested_source = settings.get(
self.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE))
if requested_source:
for src in self.state.attributes.get(
media_player.ATTR_INPUT_SOURCE_LIST):
if src.lower() == requested_source.lower():
source = src
await self.hass.services.async_call(
media_player.DOMAIN,
media_player.SERVICE_SELECT_SOURCE, {
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_INPUT_SOURCE: source
}, blocking=True, context=data.context)
| apache-2.0 | -2,187,925,773,775,859,700 | 33.463874 | 79 | 0.57412 | false |
leviroth/praw | tests/integration/models/test_redditors.py | 1 | 1453 | """Test praw.models.redditors."""
import mock
from praw.models import Redditor, Subreddit
from .. import IntegrationTest
class TestRedditors(IntegrationTest):
def test_new(self):
with self.recorder.use_cassette("TestRedditors.test_new"):
profiles = list(self.reddit.redditors.new(limit=300))
assert len(profiles) == 300
assert all(isinstance(profile, Subreddit) for profile in profiles)
assert all(str(profile).startswith("u_") for profile in profiles)
def test_popular(self):
with self.recorder.use_cassette("TestRedditors.test_popular"):
profiles = list(self.reddit.redditors.popular(limit=15))
assert len(profiles) == 15
assert all(isinstance(profile, Subreddit) for profile in profiles)
assert all(str(profile).startswith("u_") for profile in profiles)
def test_search(self):
with self.recorder.use_cassette("TestRedditors.test_search"):
found = False
for profile in self.reddit.redditors.search("praw"):
assert isinstance(profile, Redditor)
found = True
assert found
@mock.patch("time.sleep", return_value=None)
def test_stream(self, _):
with self.recorder.use_cassette("TestRedditors.test_stream"):
generator = self.reddit.redditors.stream()
for i in range(101):
assert isinstance(next(generator), Subreddit)
| bsd-2-clause | -4,265,739,451,176,297,000 | 39.361111 | 74 | 0.653131 | false |
RCOS-Grading-Server/HWserver | migration/tests/test_main.py | 2 | 1369 | from argparse import Namespace
import unittest
from migrator import main
class TestMain(unittest.TestCase):
def test_noop(self):
"""Test that noop function can take variable amount of parameters."""
test = []
for i in range(5):
main.noop(*test)
test.append(i)
def test_call_func_system(self):
args = Namespace()
args.config = 'a'
def func(*func_args):
self.assertEqual(1, len(func_args))
self.assertEqual('a', func_args[0])
main.call_func(func, None, 'system', args)
def test_call_func_master(self):
args = Namespace()
args.config = 'b'
def func(*func_args):
self.assertEqual(2, len(func_args))
self.assertEqual('b', func_args[0])
self.assertEqual('c', func_args[1])
main.call_func(func, 'c', 'master', args)
def test_call_func_course(self):
args = Namespace()
args.config = 'd'
args.semester = 'e'
args.course = 'f'
def func(*func_args):
self.assertEqual(4, len(func_args))
self.assertEqual('d', func_args[0])
self.assertEqual('g', func_args[1])
self.assertEqual('e', func_args[2])
self.assertEqual('f', func_args[3])
main.call_func(func, 'g', 'course', args)
| bsd-3-clause | -4,302,171,282,327,793,000 | 26.938776 | 77 | 0.551497 | false |
cltrudeau/django-awl | awl/tests/test_utils.py | 1 | 3111 | # awl.tests.test_utils.py
import sys
from io import StringIO
from django.test import TestCase
from awl.tests.models import Link
from awl.utils import (URLTree, refetch, refetch_for_update, render_page,
render_page_to_string, get_field_names, get_obj_attr)
from awl.waelsteng import FakeRequest
# ============================================================================
class UtilsTest(TestCase):
def test_url_tree(self):
# print_tree() exercises everything, so run it and capture stdout
tree = URLTree()
        saved_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
tree.print_tree()
finally:
sys.stdout = saved_stdout
def test_refetch(self):
link = Link.objects.create(url='url', text='text')
link.text = 'foo'
link = refetch(link)
self.assertEqual('url', link.url)
self.assertEqual('text', link.text)
link.text = 'foo'
link = refetch_for_update(link)
self.assertEqual('url', link.url)
self.assertEqual('text', link.text)
def test_renders(self):
request = FakeRequest()
expected = 'Hello World\n'
result = render_page_to_string(request, 'sample.html', {'name':'World'})
self.assertEqual(expected, result)
response = render_page(request, 'sample.html', {'name':'World'})
self.assertEqual(expected, response.content.decode('ascii'))
def test_get_field_names(self):
from awl.tests.models import Person
# test defaults, ignore order
expected = ['name', 'phone']
result = get_field_names(Person)
self.assertEqual(set(result), set(expected))
# test ignore_auto, ignore_relations and exclude
expected.extend(['id', 'building', 'address', 'courses', 'best_friend',
'person'])
expected.remove('phone')
result = get_field_names(Person, ignore_auto=False,
ignore_relations=False, exclude=['phone'])
self.assertEqual(set(result), set(expected))
def test_get_obj_attr(self):
# --- data for testing
class Character(object):
pass
class Cartoon(object):
pass
barney = Character()
barney.name = 'Barney'
betty = Character()
betty.name = 'Betty'
betty.husband = barney
wilma = Character()
wilma.name = 'Wilma'
wilma.friend = betty
cartoon = Cartoon()
        cartoon.name = 'Flintstones'
cartoon.character = wilma
# --- tests
        self.assertEqual('Flintstones', get_obj_attr(cartoon, 'name'))
self.assertEqual(wilma, get_obj_attr(cartoon, 'character'))
self.assertEqual(betty, get_obj_attr(cartoon, 'character__friend'))
self.assertEqual(barney, get_obj_attr(cartoon,
'character__friend__husband'))
with self.assertRaises(AttributeError):
get_obj_attr(cartoon, 'foo')
with self.assertRaises(AttributeError):
get_obj_attr(cartoon, 'character__foo')
| mit | 4,314,477,777,599,757,000 | 30.11 | 80 | 0.58695 | false |
gaganjyot/EXIFGeoLocation | main.py | 1 | 1898 | from PIL import Image
from PIL.ExifTags import GPSTAGS, TAGS
def _get_if_exist(data, key):
if key in data:
return data[key]
return None
def _convert_to_degrees(value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
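# Worked example (hypothetical EXIF value): ((40, 1), (26, 1), (4614, 100))
# is 40 deg 26 min 46.14 sec, i.e. 40 + 26/60 + 46.14/3600 ~= 40.4462.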
def get_exif(fn):
ret = {}
i = Image.open(fn)
info = i._getexif()
for tag, value in info.items():
decoded = TAGS.get(tag, tag)
#print "TAG", decoded, value
if decoded == "GPSInfo":
gps_data = {}
for t in value:
sub_decoded = GPSTAGS.get(t, t)
gps_data[sub_decoded] = value[t]
ret[decoded] = gps_data
lat = None
lon = None
gps_latitude = _get_if_exist(gps_data, "GPSLatitude")
gps_latitude_ref = _get_if_exist(gps_data, 'GPSLatitudeRef')
gps_longitude = _get_if_exist(gps_data, 'GPSLongitude')
gps_longitude_ref = _get_if_exist(gps_data, 'GPSLongitudeRef')
if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
lat = _convert_to_degrees(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = _convert_to_degrees(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
ret['latitude'] = lat
ret['longitude'] = lon
else:
ret[decoded] = value
return ret
info_found = get_exif("/home/gagan/Downloads/a.jpg")
print(info_found["latitude"])
print(info_found["longitude"]) | gpl-3.0 | -757,813,787,278,791,300 | 27.772727 | 102 | 0.535827 | false |
bheinzerling/bpemb | bpemb/available_vocab_sizes.py | 1 | 19854 | vocab_sizes = (
{
"ab": {
1000,
3000,
5000
},
"ace": {
1000,
3000,
5000
},
"ady": {
1000,
3000,
5000
},
"af": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ak": {
1000,
3000,
5000
},
"als": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"am": {
10000,
1000,
25000,
3000,
50000,
5000
},
"an": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"ang": {
10000,
1000,
25000,
3000,
5000
},
"ar": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"arc": {
1000,
3000,
5000
},
"arz": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"as": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"ast": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"atj": {
1000,
3000,
5000
},
"av": {
10000,
1000,
3000,
5000
},
"ay": {
10000,
1000,
3000,
5000
},
"az": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"azb": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"ba": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"bar": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bcl": {
10000,
1000,
25000,
3000,
5000
},
"be": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bg": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bi": {
1000,
3000,
5000
},
"bjn": {
10000,
1000,
3000,
5000
},
"bm": {
1000,
3000,
5000
},
"bn": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bo": {
100000,
10000,
25000,
3000,
50000,
5000
},
"bpy": {
10000,
1000,
25000,
3000,
5000
},
"br": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bs": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"bug": {
1000,
3000,
5000
},
"bxr": {
10000,
1000,
25000,
3000,
5000
},
"ca": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"cdo": {
3000,
5000
},
"ce": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"ceb": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ch": {
1000,
3000,
5000
},
"chr": {
1000,
3000,
5000
},
"chy": {
1000,
3000,
5000
},
"ckb": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"co": {
10000,
1000,
25000,
3000,
5000
},
"cr": {
1000,
3000,
5000
},
"crh": {
10000,
1000,
3000,
5000
},
"cs": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"csb": {
10000,
1000,
25000,
3000,
5000
},
"cu": {
1000,
3000,
5000
},
"cv": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"cy": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"da": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"de": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"din": {
1000,
3000,
5000
},
"diq": {
10000,
1000,
25000,
3000,
50000,
5000
},
"dsb": {
10000,
1000,
25000,
3000,
5000
},
"dty": {
10000,
1000,
3000,
5000
},
"dummy": {},
"dv": {
10000,
1000,
25000,
3000,
5000
},
"dz": {
1000,
3000,
5000
},
"ee": {
1000,
3000,
5000
},
"el": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"en": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"eo": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"es": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"et": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"eu": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ext": {
10000,
1000,
25000,
3000,
5000
},
"fa": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ff": {
1000,
3000,
5000
},
"fi": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"fj": {
1000,
3000,
5000
},
"fo": {
10000,
1000,
25000,
3000,
50000,
5000
},
"fr": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"frp": {
1000,
3000,
5000
},
"frr": {
10000,
1000,
25000,
3000,
5000
},
"fur": {
10000,
1000,
25000,
3000,
5000
},
"fy": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ga": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"gag": {
10000,
1000,
3000,
5000
},
"gan": {
10000
},
"gd": {
10000,
1000,
25000,
3000,
5000
},
"gl": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"glk": {
10000,
1000,
3000,
5000
},
"gn": {
10000,
1000,
25000,
3000,
5000
},
"gom": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"got": {
1000,
3000,
5000
},
"gu": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"gv": {
10000,
1000,
25000,
3000,
5000
},
"ha": {
1000,
3000,
5000
},
"hak": {
10000,
5000
},
"haw": {
1000,
3000,
5000
},
"he": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"hi": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"hif": {
10000,
1000,
3000,
5000
},
"hr": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"hsb": {
10000,
1000,
25000,
3000,
50000,
5000
},
"ht": {
10000,
1000,
25000,
3000,
5000
},
"hu": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"hy": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ia": {
10000,
1000,
25000,
3000,
5000
},
"id": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ie": {
1000,
3000,
5000
},
"ig": {
1000,
3000,
5000
},
"ik": {
1000,
3000,
5000
},
"ilo": {
10000,
1000,
25000,
3000,
5000
},
"io": {
10000,
1000,
25000,
3000,
50000,
5000
},
"is": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"it": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"iu": {
1000,
3000,
5000
},
"ja": {
100000,
10000,
200000,
25000,
50000,
5000
},
"jam": {
1000,
3000,
5000
},
"jbo": {
1000,
3000,
5000
},
"jv": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ka": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"kaa": {
10000,
1000,
3000,
5000
},
"kab": {
10000,
1000,
3000,
5000
},
"kbd": {
10000,
1000,
3000,
5000
},
"kbp": {
10000,
1000,
3000,
5000
},
"kg": {
1000,
3000,
5000
},
"ki": {
1000,
3000,
5000
},
"kk": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"kl": {
1000,
3000,
5000
},
"km": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"kn": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ko": {
100000,
10000,
200000,
25000,
50000,
5000
},
"koi": {
10000,
1000,
3000,
5000
},
"krc": {
10000,
1000,
25000,
3000,
5000
},
"ks": {
1000,
3000,
5000
},
"ksh": {
10000,
1000,
25000,
3000,
5000
},
"ku": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"kv": {
10000,
1000,
25000,
3000,
5000
},
"kw": {
10000,
1000,
3000,
5000
},
"ky": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"la": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"lad": {
10000,
1000,
25000,
3000,
5000
},
"lb": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"lbe": {
1000,
3000,
5000
},
"lez": {
10000,
1000,
25000,
3000,
5000
},
"lg": {
10000,
1000,
25000,
3000,
5000
},
"li": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"lij": {
10000,
1000,
25000,
3000,
5000
},
"lmo": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"ln": {
10000,
1000,
3000,
5000
},
"lo": {
10000,
1000,
25000,
3000,
5000
},
"lrc": {
10000,
1000,
3000,
5000
},
"lt": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ltg": {
10000,
1000,
3000,
5000
},
"lv": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"mai": {
10000,
1000,
25000,
3000,
5000
},
"mdf": {
1000,
3000,
5000
},
"mg": {
10000,
1000,
25000,
3000,
5000
},
"mh": {
1000
},
"mhr": {
10000,
1000,
25000,
3000,
5000
},
"mi": {
1000,
3000,
5000
},
"min": {
10000,
1000,
25000,
3000,
5000
},
"mk": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ml": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"mn": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"mr": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"mrj": {
10000,
1000,
25000,
3000,
5000
},
"ms": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"mt": {
10000,
1000,
25000,
3000,
50000,
5000
},
"multi": {
1000000,
100000,
320000
},
"mwl": {
10000,
1000,
25000,
3000,
50000,
5000
},
"my": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"myv": {
10000,
1000,
3000,
5000
},
"mzn": {
10000,
1000,
25000,
3000,
5000
},
"na": {
1000,
3000,
5000
},
"nap": {
10000,
1000,
25000,
3000,
5000
},
"nds": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ne": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"new": {
10000,
1000,
25000,
3000,
50000,
5000
},
"ng": {
1000,
3000
},
"nl": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"nn": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"no": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"nov": {
1000,
3000,
5000
},
"nrm": {
10000,
1000,
3000,
5000
},
"nso": {
1000,
3000,
5000
},
"nv": {
1000,
3000,
5000
},
"ny": {
1000,
3000,
5000
},
"oc": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"olo": {
10000,
1000,
3000,
5000
},
"om": {
10000,
1000,
3000,
5000
},
"or": {
10000,
1000,
25000,
3000,
50000,
5000
},
"os": {
10000,
1000,
3000,
5000
},
"pa": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"pag": {
10000,
1000,
3000,
5000
},
"pam": {
10000,
1000,
25000,
3000,
5000
},
"pap": {
10000,
1000,
3000,
5000
},
"pcd": {
10000,
1000,
3000,
5000
},
"pdc": {
1000,
3000,
5000
},
"pfl": {
10000,
1000,
25000,
3000,
5000
},
"pi": {
1000,
3000,
5000
},
"pih": {
1000,
3000,
5000
},
"pl": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"pms": {
10000,
1000,
25000,
3000,
5000
},
"pnb": {
10000,
1000,
25000,
3000,
50000,
5000
},
"pnt": {
1000,
3000,
5000
},
"ps": {
10000,
1000,
25000,
3000,
50000,
5000
},
"pt": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"qu": {
10000,
1000,
25000,
3000,
50000,
5000
},
"rm": {
10000,
1000,
25000,
3000,
50000,
5000
},
"rmy": {
1000,
3000,
5000
},
"rn": {
1000,
3000,
5000
},
"ro": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ru": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"rue": {
10000,
1000,
25000,
3000,
5000
},
"rw": {
1000,
3000,
5000
},
"sa": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"sah": {
10000,
1000,
25000,
3000,
50000,
5000
},
"sc": {
10000,
1000,
25000,
3000,
50000,
5000
},
"scn": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"sco": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"sd": {
10000,
1000,
25000,
3000,
5000
},
"se": {
10000,
1000,
3000,
5000
},
"sg": {
1000,
3000,
5000
},
"sh": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"si": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"sk": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"sl": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"sm": {
1000,
3000,
5000
},
"sn": {
10000,
1000,
25000,
3000,
5000
},
"so": {
10000,
1000,
25000,
3000,
50000,
5000
},
"sq": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"sr": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"srn": {
1000,
3000,
5000
},
"ss": {
1000,
3000,
5000
},
"st": {
1000,
3000,
5000
},
"stq": {
10000,
1000,
25000,
3000,
5000
},
"su": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"sv": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"sw": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"szl": {
10000,
1000,
25000,
3000,
5000
},
"ta": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"tcy": {
10000,
1000,
3000,
5000
},
"te": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"tet": {
1000,
3000,
5000
},
"tg": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"th": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ti": {
1000,
3000,
5000
},
"tk": {
10000,
1000,
25000,
3000,
50000,
5000
},
"tl": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"tn": {
1000,
3000,
5000
},
"to": {
1000,
3000,
5000
},
"tpi": {
1000,
3000,
5000
},
"tr": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ts": {
1000,
3000,
5000
},
"tt": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"tum": {
1000,
3000,
5000
},
"tw": {
1000,
3000,
5000
},
"ty": {
1000,
3000,
5000
},
"tyv": {
10000,
1000,
25000,
3000,
5000
},
"udm": {
10000,
1000,
3000,
5000
},
"ug": {
10000,
1000,
25000,
3000,
50000,
5000
},
"uk": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ur": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"uz": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"ve": {
1000,
3000,
5000
},
"vec": {
10000,
1000,
25000,
3000,
50000,
5000
},
"vep": {
10000,
1000,
25000,
3000,
5000
},
"vi": {
100000,
10000,
1000,
200000,
25000,
3000,
50000,
5000
},
"vls": {
10000,
1000,
25000,
3000,
50000,
5000
},
"vo": {
10000,
1000,
25000,
3000,
5000
},
"wa": {
10000,
1000,
25000,
3000,
50000,
5000
},
"war": {
100000,
10000,
1000,
25000,
3000,
50000,
5000
},
"wo": {
10000,
1000,
3000,
5000
},
"wuu": {
10000,
25000,
50000
},
"xal": {
1000,
3000,
5000
},
"xh": {
10000,
1000,
3000,
5000
},
"xmf": {
10000,
1000,
25000,
3000,
50000,
5000
},
"yi": {
10000,
1000,
25000,
3000,
50000,
5000
},
"yo": {
10000,
1000,
25000,
3000,
5000
},
"za": {
3000,
5000
},
"zea": {
10000,
1000,
25000,
3000,
5000
},
"zh": {
100000,
10000,
200000,
25000,
50000
},
"zu": {
1000,
3000,
5000
}
}
)
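# Example lookup: vocab_sizes["en"] holds the eight available BPE vocabulary
# sizes {1000, 3000, 5000, 10000, 25000, 50000, 100000, 200000}, while small
# Wikipedias such as "ab" offer only {1000, 3000, 5000}.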
| mit | 6,636,670,103,671,172,000 | 8.666018 | 15 | 0.379722 | false |
kennethreitz/pipenv | tests/integration/test_project.py | 1 | 8289 | # -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function
import io
import os
import tarfile
import pytest
from pipenv.patched import pipfile
from pipenv.project import Project
from pipenv.utils import temp_environ
from pipenv.vendor.vistir.path import is_in_path, normalize_path
from pipenv.vendor.delegator import run as delegator_run
@pytest.mark.project
@pytest.mark.sources
@pytest.mark.environ
def test_pipfile_envvar_expansion(PipenvInstance):
with PipenvInstance(chdir=True) as p:
with temp_environ():
with open(p.pipfile_path, 'w') as f:
f.write("""
[[source]]
url = 'https://${TEST_HOST}/simple'
verify_ssl = false
name = "pypi"
[packages]
pytz = "*"
""".strip())
os.environ['TEST_HOST'] = 'localhost:5000'
project = Project()
assert project.sources[0]['url'] == 'https://localhost:5000/simple'
assert 'localhost:5000' not in str(pipfile.load(p.pipfile_path))
@pytest.mark.project
@pytest.mark.sources
@pytest.mark.parametrize('lock_first', [True, False])
def test_get_source(PipenvInstance, lock_first):
with PipenvInstance(chdir=True) as p:
with open(p.pipfile_path, 'w') as f:
contents = """
[[source]]
url = "{0}"
verify_ssl = false
name = "testindex"
[[source]]
url = "https://pypi.org/simple"
verify_ssl = "true"
name = "pypi"
[packages]
pytz = "*"
six = {{version = "*", index = "pypi"}}
[dev-packages]
""".format(os.environ['PIPENV_TEST_INDEX']).strip()
f.write(contents)
if lock_first:
# force source to be cached
c = p.pipenv('lock')
assert c.return_code == 0
project = Project()
sources = [
['pypi', 'https://pypi.org/simple'],
['testindex', os.environ.get('PIPENV_TEST_INDEX')]
]
for src in sources:
name, url = src
source = [s for s in project.pipfile_sources if s.get('name') == name]
assert source
source = source[0]
assert source['name'] == name
assert source['url'] == url
assert sorted(source.items()) == sorted(project.get_source(name=name).items())
assert sorted(source.items()) == sorted(project.get_source(url=url).items())
assert sorted(source.items()) == sorted(project.find_source(name).items())
assert sorted(source.items()) == sorted(project.find_source(url).items())
@pytest.mark.install
@pytest.mark.project
@pytest.mark.parametrize('newlines', [u'\n', u'\r\n'])
def test_maintain_file_line_endings(PipenvInstance, newlines):
with PipenvInstance(chdir=True) as p:
# Initial pipfile + lockfile generation
c = p.pipenv('install pytz')
assert c.return_code == 0
# Rewrite each file with parameterized newlines
for fn in [p.pipfile_path, p.lockfile_path]:
with io.open(fn) as f:
contents = f.read()
written_newlines = f.newlines
assert written_newlines == u'\n', '{0!r} != {1!r} for {2}'.format(
written_newlines, u'\n', fn,
)
# message because of https://github.com/pytest-dev/pytest/issues/3443
with io.open(fn, 'w', newline=newlines) as f:
f.write(contents)
        # Run pipenv install to programmatically rewrite
c = p.pipenv('install chardet')
assert c.return_code == 0
# Make sure we kept the right newlines
for fn in [p.pipfile_path, p.lockfile_path]:
with io.open(fn) as f:
f.read() # Consumes the content to detect newlines.
actual_newlines = f.newlines
assert actual_newlines == newlines, '{0!r} != {1!r} for {2}'.format(
actual_newlines, newlines, fn,
)
# message because of https://github.com/pytest-dev/pytest/issues/3443
@pytest.mark.project
@pytest.mark.sources
@pytest.mark.needs_internet
def test_many_indexes(PipenvInstance):
with PipenvInstance(chdir=True) as p:
with open(p.pipfile_path, 'w') as f:
contents = """
[[source]]
url = "{0}"
verify_ssl = false
name = "testindex"
[[source]]
url = "https://pypi.org/simple"
verify_ssl = "true"
name = "pypi"
[[source]]
url = "https://pypi.python.org/simple"
verify_ssl = "true"
name = "legacy"
[packages]
pytz = "*"
six = {{version = "*", index = "pypi"}}
[dev-packages]
""".format(os.environ['PIPENV_TEST_INDEX']).strip()
f.write(contents)
c = p.pipenv('install')
assert c.return_code == 0
@pytest.mark.install
@pytest.mark.project
def test_include_editable_packages(PipenvInstance, testsroot, pathlib_tmpdir):
file_name = "tablib-0.12.1.tar.gz"
package = pathlib_tmpdir.joinpath("tablib-0.12.1")
source_path = os.path.abspath(os.path.join(testsroot, "pypi", "tablib", file_name))
with PipenvInstance(chdir=True) as p:
with tarfile.open(source_path, "r:gz") as tarinfo:
tarinfo.extractall(path=str(pathlib_tmpdir))
c = p.pipenv('install -e {0}'.format(package.as_posix()))
assert c.return_code == 0
project = Project()
assert "tablib" in [
package.project_name
for package in project.environment.get_installed_packages()
]
@pytest.mark.project
@pytest.mark.virtualenv
def test_run_in_virtualenv_with_global_context(PipenvInstance, virtualenv):
with PipenvInstance(chdir=True, venv_root=virtualenv.as_posix(), ignore_virtualenvs=False, venv_in_project=False) as p:
c = delegator_run(
"pipenv run pip freeze", cwd=os.path.abspath(p.path),
env=os.environ.copy()
)
assert c.return_code == 0, (c.out, c.err)
assert 'Creating a virtualenv' not in c.err, c.err
project = Project()
assert project.virtualenv_location == virtualenv.as_posix(), (
project.virtualenv_location, virtualenv.as_posix()
)
c = delegator_run(
"pipenv run pip install -i {} click".format(p.index_url),
cwd=os.path.abspath(p.path),
env=os.environ.copy()
)
assert c.return_code == 0, (c.out, c.err)
assert "Courtesy Notice" in c.err, (c.out, c.err)
c = delegator_run(
"pipenv install -i {} six".format(p.index_url),
cwd=os.path.abspath(p.path), env=os.environ.copy()
)
assert c.return_code == 0, (c.out, c.err)
c = delegator_run(
'pipenv run python -c "import click;print(click.__file__)"',
cwd=os.path.abspath(p.path), env=os.environ.copy()
)
assert c.return_code == 0, (c.out, c.err)
assert is_in_path(c.out.strip(), str(virtualenv)), (c.out.strip(), str(virtualenv))
c = delegator_run(
"pipenv clean --dry-run", cwd=os.path.abspath(p.path),
env=os.environ.copy()
)
assert c.return_code == 0, (c.out, c.err)
assert "click" in c.out, c.out
@pytest.mark.project
@pytest.mark.virtualenv
def test_run_in_virtualenv(PipenvInstance):
with PipenvInstance(chdir=True) as p:
c = p.pipenv('run pip freeze')
assert c.return_code == 0
assert 'Creating a virtualenv' in c.err
project = Project()
c = p.pipenv("run pip install click")
assert c.return_code == 0
c = p.pipenv("install six")
assert c.return_code == 0
c = p.pipenv('run python -c "import click;print(click.__file__)"')
assert c.return_code == 0
assert normalize_path(c.out.strip()).startswith(
normalize_path(str(project.virtualenv_location))
)
c = p.pipenv("clean --dry-run")
assert c.return_code == 0
assert "click" in c.out
@pytest.mark.project
@pytest.mark.sources
def test_no_sources_in_pipfile(PipenvInstance):
with PipenvInstance(chdir=True) as p:
with open(p.pipfile_path, 'w') as f:
contents = """
[packages]
pytest = "*"
""".format(os.environ['PIPENV_TEST_INDEX']).strip()
f.write(contents)
c = p.pipenv('install --skip-lock')
assert c.return_code == 0
| mit | 8,963,331,359,839,735,000 | 32.695122 | 123 | 0.59392 | false |
its-dirg/pefim-proxy | tests/test_ServerConfiguration.py | 1 | 2161 | import unittest
from argparse import Namespace
from pefimproxy.server import WsgiApplication
class ServerConfigurationTestCase(unittest.TestCase):
def setup_class(self):
pass
def test_server_config_files_ok(self):
valid, message = WsgiApplication.validate_server_config(
Namespace(
server_config="pefim_server_conf_default"
)
)
assert valid, "Missing the configuration file pefim_server_conf_default.py"
def test_server_config_missing_file(self):
valid, message = WsgiApplication.validate_server_config(
Namespace(
server_config="pefim_server_conf_missing"
)
)
assert valid is False, "The file pefim_server_conf_missing.py must not exists!"
def test_server_config_missing_parameters(self):
valid, message = WsgiApplication.validate_server_config(
Namespace(
server_config="empty"
)
)
assert valid is False, "No parameter should exist."
for param in WsgiApplication.SERVER_CONF_MANDITORY_PARAMETERS:
assert param in message, "The parameter %s should be in the message." % param
def test_config_files_ok(self):
valid, message = WsgiApplication.validate_config(
Namespace(
config="pefim_proxy_conf_local"
)
)
assert valid, "Missing the configuration file pefim_proxy_conf_local.py"
def test_config_missing_file(self):
valid, message = WsgiApplication.validate_config(
Namespace(
config="pefim_proxy_conf_missing"
)
)
assert valid is False, "The file pefim_proxy_conf_missing.py must not exists!"
def test_missing_parameters(self):
valid, message = WsgiApplication.validate_config(
Namespace(
config="empty"
)
)
assert valid is False, "No parameter should exist."
for param in WsgiApplication.CONF_MANDITORY_PARAMETERS:
assert param in message, "The parameter %s should be in the message." % param | gpl-3.0 | 2,929,277,978,590,305,000 | 35.033333 | 89 | 0.621472 | false |
ababino/networkb | networkb/algorithms/utils.py | 1 | 3266 | # -*- coding: utf-8 -*-
"""
Created on Wed May 15 20:11:59 2013
@author: andres
"""
import numpy
import networkx
from scipy import spatial
from scipy import stats
def find_peaks(th,gc):
peaks=[]
for i in range(1,len(th)-1):
if gc[1][i-1]<gc[1][i] and gc[1][i]>gc[1][i+1]:
peaks.append((th[i],gc[1][i]))
return peaks
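# Illustrative sketch (added note; ``gc`` is assumed to be indexable so that
# gc[1] holds the giant-component sizes aligned with the thresholds in th):
# >>> find_peaks([0.1, 0.2, 0.3, 0.4], (None, [1, 5, 2, 3]))
# [(0.2, 5)]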
def nodedistance(affine,vP,n1,n2):
"""
node distance in cm. (en general)
"""
ind1=vP[n1]
ind2=vP[n2]
if len(ind1)==3:
ind1.append(1)
if len(ind2)==3:
ind2.append(1)
v1=numpy.dot(affine, numpy.transpose(ind1))[0:3]
v2=numpy.dot(affine, numpy.transpose(ind2))[0:3]
d=spatial.distance.euclidean(v1,v2)
return d
def power_law_fit(x,y):
pl = lambda A, d, x: A*x**d
a, b, r_value, p_value, std_err = stats.linregress(numpy.log(x),numpy.log(y))
y_fit=pl(numpy.exp(b),a,x)
return (a,y_fit)
def exp_fit(x,y):
exp_fun = lambda A, x0, x: A*numpy.exp(x/x0)
a, b, r_value, p_value, std_err = stats.linregress(x,numpy.log(y))
A=numpy.exp(b)
x0=1.0/a
y_fit=exp_fun(A,x0,x)
return (A,x0,y_fit)
def gaussian_fit(x,y):
pl = lambda A, x0, s, x: A*numpy.exp(((x-x0)**2)/s)
p = numpy.polyfit(x,numpy.log(y),2)
s=1./p[0]
x0=-p[1]/(2*p[0])
  A=numpy.exp(p[2]-(p[1]**2)/(4*p[0]))
y_fit=pl(A,x0,s,x)
return ((A,x0,s),y_fit)
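# Derivation sketch (added note): writing log(y) = p[0]*x**2 + p[1]*x + p[2]
# and completing the square against log(A) + ((x-x0)**2)/s gives
# s = 1/p[0], x0 = -p[1]/(2*p[0]) and log(A) = p[2] - p[1]**2/(4*p[0]),
# which is what gaussian_fit computes above.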
def window_correlation(x,y,w):
if len(x)!=len(y):
print 'vector x and y must be of the same size'
print 'len(x)='+str(len(x))
print 'len(y)='+str(len(y))
return
if len(x)<w:
    print 'window must be smaller than len(x)'
print 'len(x)='+str(len(x))+' w='+str(w)
N=len(x)-w
return [stats.pearsonr(x[i:i+w],y[i:i+w])[0] for i in range(N)]
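# Quick sanity check (added note): with w = len(x) - 1 there is one window,
# e.g. window_correlation([1, 2, 3, 4], [2, 4, 6, 8], 3) returns [1.0]
# because the two series are perfectly linearly correlated.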
def find_th_jumps(bn,max_clus=2):
"""
Returns the thresholds where a jump occurs. A jump is defined as the
join of the biggest cluster with, up to, the max_clus cluster.
"""
NON=bn.get_non()
node_list=[node for node,dat in NON.nodes(data=True) if dat['order']==0]
subNON=networkx.Graph()
for n1,n2 in NON.edges_iter(nbunch=node_list):
subNON.add_edge(n1,n2)
node_list=networkx.connected_components(subNON)[0]
subNON=NON.subgraph(node_list)
max_th=max([dat['th'] for n,dat in subNON.nodes(data=True)])
N=bn.number_of_nodes()
jumps=[]
first_cluster=(0,[])
for node,data in NON.nodes(data=True):
if NON.degree(node)>=3 and NON.node[node]['order']==0:
for node2 in NON.neighbors(node):
if 0<NON.node[node2]['order']<=max_clus:
if 20*len(NON.node[node2]['cc'])>len(NON.node[node]['cc']) or 200*len(NON.node[node2]['cc'])>N:
if NON.node[node2]['th']<max_th:
jumps.append((NON.node[node2]['th'],NON.node[node2]['cc']))
if NON.node[node2]['th']>first_cluster[0]:
for node3 in NON.neighbors(node):
if NON.node[node3]['order']==0 and NON.node[node3]['th']==NON.node[node2]['th']:
first_cluster=((NON.node[node3]['th'],NON.node[node3]['cc']))
jumps.append(first_cluster)
jumps=sorted(jumps,key=lambda x: x[0],reverse=True)
return jumps
def nodelist2volumen(bn,nodelist,element):
node2voxel=bn.node2voxel
B=numpy.zeros(bn.volume_shape)
for node in nodelist:
(i,j,k)=node2voxel[str(node)]
B[i,j,k]=element
return B
| mit | 3,844,366,845,292,625,000 | 28.963303 | 105 | 0.609002 | false |
pytest-dev/pytest-bdd | pytest_bdd/utils.py | 1 | 1191 | """Various utility functions."""
from inspect import getframeinfo
from inspect import signature as _signature
from sys import _getframe
CONFIG_STACK = []
def get_args(func):
"""Get a list of argument names for a function.
:param func: The function to inspect.
:return: A list of argument names.
:rtype: list
"""
params = _signature(func).parameters.values()
return [param.name for param in params if param.kind == param.POSITIONAL_OR_KEYWORD]
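# Doctest-style sketch (added for illustration; ``step`` is a made-up callable):
# >>> def step(browser, url, *args, **kwargs): pass
# >>> get_args(step)
# ['browser', 'url']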
def get_parametrize_markers_args(node):
return tuple(arg for mark in node.iter_markers("parametrize") for arg in mark.args)
def get_caller_module_locals(depth=2):
"""Get the caller module locals dictionary.
We use sys._getframe instead of inspect.stack(0) because the latter is way slower, since it iterates over
all the frames in the stack.
"""
return _getframe(depth).f_locals
def get_caller_module_path(depth=2):
"""Get the caller module path.
We use sys._getframe instead of inspect.stack(0) because the latter is way slower, since it iterates over
all the frames in the stack.
"""
frame = _getframe(depth)
return getframeinfo(frame, context=0).filename
| mit | -1,642,162,315,754,404,600 | 27.357143 | 109 | 0.706129 | false |
Xelaadryth/Xelabot | quest/quests/monster.py | 1 | 4536 | from random import getrandbits, randint
from ..quest import Quest
from ..quest_segment import QuestSegment
import settings
from utils.command_set import CommandSet
GOLD_SAFE_REWARD = 75
GOLD_VARIANCE_SAFE = 21
EXP_SAFE_REWARD = 2
GOLD_RISKY_PENALTY = 200
GOLD_RISKY_REWARD = 300
GOLD_RISKY_REWARD_BIG = 400
GOLD_VARIANCE_RISKY = GOLD_VARIANCE_SAFE * 2
EXP_RISKY_REWARD = EXP_SAFE_REWARD * 2
EXP_RISKY_REWARD_BIG = EXP_SAFE_REWARD + 1
GOLD_TIMEOUT_PENALTY = 300
MONSTER_LEVEL = 12
LEVEL_VARIANCE = 15
class Monster(Quest):
def __init__(self, quest_manager):
super().__init__(quest_manager)
self.starting_segment = Start
class Start(QuestSegment):
def set_commands(self):
self.commands = CommandSet(exact_match_commands={
'!attack': self.attack,
'!flee': self.flee
})
def play(self):
msg = (
'In the treasure room of an abandoned ruin, a strange Void creature materializes in front of {}. '
'Do you !attack or !flee?'.format(self.quest.party[0]))
self.channel.send_msg(msg)
def attack(self, display_name):
if display_name not in self.quest.party:
return
level = randint(-LEVEL_VARIANCE, LEVEL_VARIANCE) + self.player_manager.get_level(display_name)
if level < -2:
gold = GOLD_RISKY_PENALTY + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
msg = (
'{0} never stood a chance, getting immediately suppressed and mobbed to death by Voidlings without '
'so much as a chance to twitch. Maybe try leveling up! {0} loses {1} gold.'.format(display_name, gold))
self.channel.send_msg(msg)
self.penalize(display_name, gold=gold)
elif level < MONSTER_LEVEL:
gold = GOLD_RISKY_PENALTY + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
msg = (
'{0} charges towards the Void creature and gets immediately vaporized by lazers. Pew Pew! '
'{0} loses {1} gold.'.format(display_name, gold))
self.channel.send_msg(msg)
self.penalize(display_name, gold=gold)
elif level < settings.LEVEL_CAP + LEVEL_VARIANCE / 3:
            gold = GOLD_RISKY_REWARD + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
msg = (
'{0} manages to slay the Void creature after a long struggle and some celebratory crumpets. '
'{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_RISKY_REWARD))
self.channel.send_msg(msg)
self.reward(display_name, gold=gold, exp=EXP_RISKY_REWARD)
else:
            gold = GOLD_RISKY_REWARD_BIG + randint(-GOLD_VARIANCE_RISKY, GOLD_VARIANCE_RISKY)
msg = (
'{0} dismembers the creature with almost surgical precision, and even discovers a new class of '
'organ in the process. Hurrah! '
'{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_RISKY_REWARD_BIG))
self.channel.send_msg(msg)
self.reward(display_name, gold=gold, exp=EXP_RISKY_REWARD_BIG)
self.complete_quest()
def flee(self, display_name):
if display_name not in self.quest.party:
return
gold = GOLD_SAFE_REWARD + randint(-GOLD_VARIANCE_SAFE, GOLD_VARIANCE_SAFE)
if bool(getrandbits(1)):
msg = (
'{0} manages to bravely run away in the face of overwhelming power, '
'and even manages to snatch a few coins on the way out! '
'{0} gains {1} gold and {2} exp.'.format(display_name, gold, EXP_SAFE_REWARD))
self.reward(display_name, gold=gold, exp=EXP_SAFE_REWARD)
self.channel.send_msg(msg)
else:
msg = ('{0} tries to run away but is torn to shreds by blade-like arms. Owie! '
'{0} loses {1} gold.'.format(display_name, gold))
self.channel.send_msg(msg)
self.penalize(display_name, gold=gold)
self.complete_quest()
def timeout(self):
self.channel.send_msg(
'{0} makes no motion to attack or flee, and instead stands motionless in the face of the enemy. '
'{0} becomes covered by caustic spittle, digested alive, and slowly devoured. '
'{0} loses {1} gold.'.format(self.quest.party[0], GOLD_TIMEOUT_PENALTY))
self.penalize(self.quest.party[0], gold=GOLD_TIMEOUT_PENALTY)
self.complete_quest()
| mit | 256,312,372,235,626,660 | 40.614679 | 119 | 0.612434 | false |
epage/The-One-Ring | src/tp/_generated/Channel_Interface_DTMF.py | 1 | 2340 | # -*- coding: utf-8 -*-
# Generated from the Telepathy spec
"""Copyright (C) 2005, 2006 Collabora Limited
Copyright (C) 2005, 2006 Nokia Corporation
Copyright (C) 2006 INdT
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import dbus.service
class ChannelInterfaceDTMF(dbus.service.Interface):
"""\
An interface that gives a Channel the ability to send DTMF events over
audio streams which have been established using the StreamedMedia channel
type. The event codes used are in common with those defined in RFC4733, and are
listed in the DTMF_Event enumeration.
"""
def __init__(self):
self._interfaces.add('org.freedesktop.Telepathy.Channel.Interface.DTMF')
@dbus.service.method('org.freedesktop.Telepathy.Channel.Interface.DTMF', in_signature='uy', out_signature='')
def StartTone(self, Stream_ID, Event):
"""
Start sending a DTMF tone on this stream. Where possible, the tone
will continue until StopTone is called.
On certain protocols, it may
only be possible to send events with a predetermined length. In this
case, the implementation may emit a fixed-length tone, and the StopTone
method call should return NotAvailable.
"""
raise NotImplementedError
@dbus.service.method('org.freedesktop.Telepathy.Channel.Interface.DTMF', in_signature='u', out_signature='')
def StopTone(self, Stream_ID):
"""
Stop sending any DTMF tone which has been started using the
StartTone
method. If there is no current tone, this method will do nothing.
"""
raise NotImplementedError
| lgpl-2.1 | -6,948,686,769,589,709,000 | 38.677966 | 113 | 0.714103 | false |
smallyear/linuxLearn | salt/salt/returners/mongo_future_return.py | 1 | 7113 | # -*- coding: utf-8 -*-
'''
Return data to a mongodb server
Required python modules: pymongo
This returner will send data from the minions to a MongoDB server. To
configure the settings for your MongoDB server, add the following lines
to the minion config files:
.. code-block:: yaml
mongo.db: <database name>
mongo.host: <server ip address>
mongo.user: <MongoDB username>
mongo.password: <MongoDB user password>
mongo.port: 27017
You can also ask for indexes creation on the most common used fields, which
should greatly improve performance. Indexes are not created by default.
.. code-block:: yaml
mongo.indexes: true
Alternative configuration values can be used by prefacing the configuration.
Any values not found in the alternative configuration will be pulled from
the default location:
.. code-block:: yaml
alternative.mongo.db: <database name>
alternative.mongo.host: <server ip address>
alternative.mongo.user: <MongoDB username>
alternative.mongo.password: <MongoDB user password>
alternative.mongo.port: 27017
This mongo returner is being developed to replace the default mongodb returner
in the future and should not be considered API stable yet.
To use the mongo returner, append '--return mongo' to the salt command.
.. code-block:: bash
salt '*' test.ping --return mongo
To use the alternative configuration, append '--return_config alternative' to the salt command.
.. versionadded:: 2015.5.0
.. code-block:: bash
salt '*' test.ping --return mongo --return_config alternative
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import Salt libs
import salt.utils.jid
import salt.returners
import salt.ext.six as six
# Import third party libs
try:
import pymongo
version = pymongo.version
version = '.'.join(version.split('.')[:2])
HAS_PYMONGO = True
except ImportError:
HAS_PYMONGO = False
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'mongo'
def __virtual__():
if not HAS_PYMONGO:
return False
return __virtualname__
def _remove_dots(src):
'''
Remove the dots from the given data structure
'''
output = {}
for key, val in six.iteritems(src):
if isinstance(val, dict):
val = _remove_dots(val)
output[key.replace('.', '-')] = val
return output
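# Illustration (added note): MongoDB document keys may not contain dots,
# hence the substitution, e.g. _remove_dots({'net.if': {'eth0.mtu': 1500}})
# returns {'net-if': {'eth0-mtu': 1500}}.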
def _get_options(ret=None):
'''
Get the mongo options from salt.
'''
attrs = {'host': 'host',
'port': 'port',
'db': 'db',
'username': 'username',
'password': 'password',
'indexes': 'indexes'}
_options = salt.returners.get_returner_options(__virtualname__,
ret,
attrs,
__salt__=__salt__,
__opts__=__opts__)
return _options
def _get_conn(ret):
'''
Return a mongodb connection object
'''
_options = _get_options(ret)
host = _options.get('host')
port = _options.get('port')
db_ = _options.get('db')
    user = _options.get('username')
password = _options.get('password')
indexes = _options.get('indexes', False)
    # At some point we should remove support for pymongo versions < 2.3;
    # until then there are a bunch of these version-dependent sections
    # that need to be supported.
if float(version) > 2.3:
conn = pymongo.MongoClient(host, port)
else:
conn = pymongo.Connection(host, port)
mdb = conn[db_]
if user and password:
mdb.authenticate(user, password)
if indexes:
if float(version) > 2.3:
mdb.saltReturns.create_index('minion')
mdb.saltReturns.create_index('jid')
mdb.jobs.create_index('jid')
else:
mdb.saltReturns.ensure_index('minion')
mdb.saltReturns.ensure_index('jid')
mdb.jobs.ensure_index('jid')
return conn, mdb
def returner(ret):
'''
Return data to a mongodb server
'''
conn, mdb = _get_conn(ret)
if isinstance(ret['return'], dict):
back = _remove_dots(ret['return'])
else:
back = ret['return']
if isinstance(ret, dict):
full_ret = _remove_dots(ret)
else:
full_ret = ret
log.debug(back)
sdata = {'minion': ret['id'], 'jid': ret['jid'], 'return': back, 'fun': ret['fun'], 'full_ret': full_ret}
if 'out' in ret:
sdata['out'] = ret['out']
# save returns in the saltReturns collection in the json format:
# { 'minion': <minion_name>, 'jid': <job_id>, 'return': <return info with dots removed>,
# 'fun': <function>, 'full_ret': <unformatted return with dots removed>}
#
# again we run into the issue with deprecated code from previous versions
if float(version) > 2.3:
#using .copy() to ensure that the original data is not changed, raising issue with pymongo team
mdb.saltReturns.insert_one(sdata.copy())
else:
mdb.saltReturns.insert(sdata.copy())
def save_load(jid, load):
'''
Save the load for a given job id
'''
conn, mdb = _get_conn(ret=None)
if float(version) > 2.3:
#using .copy() to ensure original data for load is unchanged
mdb.jobs.insert_one(load.copy())
else:
mdb.jobs.insert(load.copy())
def save_minions(jid, minions): # pylint: disable=unused-argument
'''
Included for API consistency
'''
pass
def get_load(jid):
'''
Return the load associated with a given job id
'''
conn, mdb = _get_conn(ret=None)
ret = mdb.jobs.find_one({'jid': jid}, {'_id': 0})
return ret['load']
def get_jid(jid):
'''
Return the return information associated with a jid
'''
conn, mdb = _get_conn(ret=None)
ret = {}
rdata = mdb.saltReturns.find({'jid': jid}, {'_id': 0})
if rdata:
for data in rdata:
minion = data['minion']
# return data in the format {<minion>: { <unformatted full return data>}}
ret[minion] = data['full_ret']
return ret
def get_fun(fun):
'''
Return the most recent jobs that have executed the named function
'''
conn, mdb = _get_conn(ret=None)
ret = {}
rdata = mdb.saltReturns.find_one({'fun': fun}, {'_id': 0})
if rdata:
ret = rdata
return ret
def get_minions():
'''
Return a list of minions
'''
conn, mdb = _get_conn(ret=None)
ret = []
name = mdb.saltReturns.distinct('minion')
ret.append(name)
return ret
def get_jids():
'''
Return a list of job ids
'''
conn, mdb = _get_conn(ret=None)
ret = []
name = mdb.jobs.distinct('jid')
ret.append(name)
return ret
def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument
'''
Do any work necessary to prepare a JID, including sending a custom id
'''
return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid()
| apache-2.0 | -3,668,263,459,001,041,000 | 24.959854 | 109 | 0.604246 | false |
SU-ECE-17-7/ibeis | ibeis/viz/viz_name.py | 1 | 15641 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import plottool.draw_func2 as df2
import numpy as np
from ibeis.other import ibsfuncs
from plottool import plot_helpers as ph
import plottool as pt
import utool as ut
from ibeis.viz import viz_chip
(print, print_, printDBG, rrr, profile) = ut.inject(__name__, '[viz]', DEBUG=False)
def show_name_of(ibs, aid, **kwargs):
nid = ibs.get_annot_names(aid)
show_name(ibs, nid, sel_aids=[aid], **kwargs)
def testdata_showname():
import ibeis
ibs = ibeis.opendb(defaultdb='testdb1')
default = None
if ibs.dbname == 'testdb1':
default = 'easy'
name_text = ut.get_argval('--name', type_=str, default=default)
if name_text is None:
nid = 1
else:
nid = ibs.get_name_rowids_from_text(name_text)
in_image = not ut.get_argflag('--no-inimage')
index_list = ut.get_argval('--index_list', type_=list, default=None)
return ibs, nid, in_image, index_list
def testdata_multichips():
import ibeis
ibs = ibeis.opendb(defaultdb='testdb1')
nid = ut.get_argval('--nid', type_=int, default=None)
tags = ut.get_argval('--tags', type_=list, default=None)
if nid is not None:
aid_list = ibs.get_name_aids(nid)
elif tags is not None:
index = ut.get_argval('--index', default=0)
aid_list = ibs.filter_aidpairs_by_tags(any_tags=tags)[index]
else:
#aid_list = ut.get_argval('--aids', type_=list, default=[1, 2, 3])
aid_list = ibeis.testdata_aids(default_aids=[1, 2, 3], ibs=ibs)
in_image = not ut.get_argflag('--no-inimage')
return ibs, aid_list, in_image
#9108 and 9180
def show_multiple_chips(ibs, aid_list, in_image=True, fnum=0, sel_aids=[],
subtitle='', annote=False, **kwargs):
"""
CommandLine:
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --no-inimage
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --db NNP_Master3 --aids=6435,9861,137,6563,9167,12547,9332,12598,13285 --no-inimage --notitle
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --db NNP_Master3 --aids=137,6563,12547,9332,12598,13285 --no-inimage --notitle --adjust=.05
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --db NNP_Master3 --aids=6563,9332,13285,12598 --no-inimage --notitle --adjust=.05 --rc=1,4
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --db PZ_Master0 --aids=1288 --no-inimage --notitle --adjust=.05
python -m ibeis.viz.viz_name --test-show_multiple_chips --show --db PZ_Master0 --aids=4020,4839 --no-inimage --notitle --adjust=.05
python -m ibeis.viz.viz_name --test-show_multiple_chips --db NNP_Master3 --aids=6524,6540,6571,6751 --no-inimage --notitle --adjust=.05 --diskshow
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST -a default:index=0:4 --show
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST --aids=1 --doboth --show --no-inimage
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST --aids=1 --doboth --rc=2,1 --show --no-inimage
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST --aids=1 --doboth --rc=2,1 --show --notitle --trydrawline --no-draw_lbls
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST --aids=1,2 --doboth --show --notitle --trydrawline
python -m ibeis.viz.viz_name --test-show_multiple_chips --db PZ_MTEST --aids=1,2,3,4,5 --doboth --rc=2,5 --show --chrlbl --trydrawline --qualtitle --no-figtitle --notitle
python -m ibeis.viz.viz_name --test-show_multiple_chips --db NNP_Master3 --aids=15419 --doboth --rc=2,1 --show --notitle --trydrawline --no-draw_lbls
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.viz.viz_name import * # NOQA
>>> import ibeis
>>> ibs, aid_list, in_image = testdata_multichips()
>>> fnum = 0
>>> sel_aids = []
>>> subtitle = ''
>>> annote = False
>>> fig = show_multiple_chips(ibs, aid_list, in_image, fnum, sel_aids, subtitle, annote)
>>> ut.quit_if_noshow()
>>> fig.canvas.draw()
>>> ut.show_if_requested()
"""
fnum = pt.ensure_fnum(fnum)
nAids = len(aid_list)
if nAids == 0:
fig = df2.figure(fnum=fnum, pnum=(1, 1, 1), **kwargs)
df2.imshow_null(fnum=fnum, **kwargs)
return fig
# Trigger computation of all chips in parallel
ibsfuncs.ensure_annotation_data(ibs, aid_list, chips=(not in_image or annote), feats=annote)
print('[viz_name] * annot_vuuid=%r' % ((ibs.get_annot_visual_uuids(aid_list),)))
print('[viz_name] * aid_list=%r' % ((aid_list,)))
DOBOTH = ut.get_argflag('--doboth')
rc = ut.get_argval('--rc', type_=list, default=None)
if rc is None:
nRows, nCols = ph.get_square_row_cols(nAids * (2 if DOBOTH else 1))
else:
nRows, nCols = rc
notitle = ut.get_argflag('--notitle')
draw_lbls = not ut.get_argflag('--no-draw_lbls')
show_chip_kw = dict(annote=annote, in_image=in_image, notitle=notitle, draw_lbls=draw_lbls)
#print('[viz_name] * r=%r, c=%r' % (nRows, nCols))
#gs2 = gridspec.GridSpec(nRows, nCols)
pnum_ = df2.get_pnum_func(nRows, nCols)
fig = df2.figure(fnum=fnum, pnum=pnum_(0), **kwargs)
fig.clf()
ax_list1 = []
for px, aid in enumerate(aid_list):
print('px = %r' % (px,))
_fig, _ax1 = viz_chip.show_chip(ibs, aid=aid, pnum=pnum_(px), **show_chip_kw)
print('other_aids = %r' % (ibs.get_annot_contact_aids(aid),))
ax = df2.gca()
ax_list1.append(_ax1)
if aid in sel_aids:
df2.draw_border(ax, df2.GREEN, 4)
if ut.get_argflag('--chrlbl') and not DOBOTH:
ax.set_xlabel('(' + chr(ord('a') - 1 + px) + ')')
elif ut.get_argflag('--numlbl') and not DOBOTH:
ax.set_xlabel('(' + str(px + 1) + ')')
#plot_aid3(ibs, aid)
# HACK to show in image and not in image
if DOBOTH:
#ut.embed()
#ph.get_plotdat_dict(ax_list1[1])
#ph.get_plotdat_dict(ax_list2[1])
ax_list2 = []
show_chip_kw['in_image'] = not show_chip_kw['in_image']
start = px + 1
for px, aid in enumerate(aid_list, start=start):
_fig, _ax2 = viz_chip.show_chip(ibs, aid=aid, pnum=pnum_(px), **show_chip_kw)
ax = df2.gca()
ax_list2.append(_ax2)
if ut.get_argflag('--chrlbl'):
ax.set_xlabel('(' + chr(ord('a') - start + px) + ')')
elif ut.get_argflag('--numlbl'):
ax.set_xlabel('(' + str(px - start + 1) + ')')
if ut.get_argflag('--qualtitle'):
qualtext = ibs.get_annot_quality_texts(aid)
ax.set_title(qualtext)
if aid in sel_aids:
df2.draw_border(ax, df2.GREEN, 4)
if in_image:
ax_list1, ax_list2 = ax_list2, ax_list1
if ut.get_argflag('--trydrawline'):
# Unfinished
#ut.embed()
# Draw lines between corresponding axes
# References:
# http://stackoverflow.com/questions/17543359/drawing-lines-between-two-plots-in-matplotlib
import matplotlib as mpl
import vtool as vt
# !!!
#http://matplotlib.org/users/transforms_tutorial.html
#invTransFigure_fn1 = fig.transFigure.inverted().transform
#invTransFigure_fn2 = fig.transFigure.inverted().transform
#print(ax_list1)
#print(ax_list2)
assert len(ax_list1) == len(ax_list2)
for ax1, ax2 in zip(ax_list1, ax_list2):
#_ = ax1.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
#bbox1 = (0, 0, _.width * fig.dpi, _.height * fig.dpi)
# returns in figure coordinates
#bbox1 = df2.get_axis_bbox(ax=ax1)
#if bbox1[-1] < 0:
# # Weird bug
# bbox1 = bbox1[1]
print('--')
print('ax1 = %r' % (ax1,))
print('ax2 = %r' % (ax2,))
chipshape = ph.get_plotdat(ax1, 'chipshape')
#_bbox1 = ax1.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
#bbox1 = (0, 0, _bbox1.width * fig.dpi, _bbox1.height * fig.dpi)
bbox1 = (0, 0, chipshape[1], chipshape[0])
aid_ = ph.get_plotdat(ax2, 'aid')
aid_list_ = ph.get_plotdat(ax2, 'aid_list')
index = aid_list_.index(aid_)
annotation_bbox_list = ph.get_plotdat(ax2, 'annotation_bbox_list')
bbox2 = annotation_bbox_list[index]
print('bbox1 = %r' % (bbox1,))
print('bbox2 = %r' % (bbox2,))
vert_list1 = np.array(vt.verts_from_bbox(bbox1))
vert_list2 = np.array(vt.verts_from_bbox(bbox2))
print('vert_list1 = %r' % (vert_list1,))
print('vert_list2 = %r' % (vert_list2,))
#for vx in [0, 1, 2, 3]:
for vx in [0, 1]:
vert1 = vert_list1[vx].tolist()
vert2 = vert_list2[vx].tolist()
print(' ***')
print(' * vert1 = %r' % (vert1,))
print(' * vert2 = %r' % (vert2,))
coordsA = coordsB = 'data'
#coords = 'axes points'
#'axes fraction'
#'axes pixels'
#coordsA = 'axes pixels'
#coordsB = 'data'
#'figure fraction'
#'figure pixels'
#'figure pixels'
#'figure points'
#'polar'
#'offset points'
con = mpl.patches.ConnectionPatch(
xyA=vert1, xyB=vert2, coordsA=coordsA,
coordsB=coordsB,
axesA=ax1, axesB=ax2,
linewidth=1, color='k')
#, arrowstyle="-")
#ut.embed()
#con.set_zorder(None)
ax1.add_artist(con)
#ax2.add_artist(con)
#ut.embed()
#verts2.T[1] -= bbox2[-1]
#bottom_left1, bottom_right1 = verts1[1:3].tolist()
#bottom_left2, bottom_right2 = verts2[1:3].tolist()
##transAxes1 = ax1.transData.inverted()
#transAxes1_fn = ax1.transData.transform
#transAxes2_fn = ax2.transData.transform
#transAxes1_fn = ut.identity
#transAxes2_fn = ut.identity
#coord_bl1 = transFigure.transform(transAxes1.transform(bottom_left1))
#coord_br1 = transFigure.transform(transAxes1.transform(bottom_right1))
#coord_bl1 = invTransFigure_fn1(transAxes1_fn(bottom_left1))
#print('bottom_left2 = %r' % (bottom_left2,))
#coord_bl1 = (5, 5)
#coord_bl2 = invTransFigure_fn2(transAxes2_fn(bottom_left2))
#print('coord_bl2 = %r' % (coord_bl2,))
#coord_br1 = invTransFigure_fn1(transAxes1_fn(bottom_right1))
#coord_br2 = invTransFigure_fn2(transAxes2_fn(bottom_right2))
##print('coord_bl1 = %r' % (coord_bl1,))
#line_coords1 = np.vstack([coord_bl1, coord_bl2])
#line_coords2 = np.vstack([coord_br1, coord_br2])
#print('line_coords1 = %r' % (line_coords1,))
#line1 = mpl.lines.Line2D((line_coords1[0]), (line_coords1[1]), transform=fig.transFigure)
#line2 = mpl.lines.Line2D((line_coords2[0]), (line_coords2[1]), transform=fig.transFigure)
#xs1, ys1 = line_coords1.T
#xs2, ys2 = line_coords2.T
#linekw = dict(transform=fig.transFigure)
#linekw = dict()
#print('xs1 = %r' % (xs1,))
#print('ys1 = %r' % (ys1,))
#line1 = mpl.lines.Line2D(xs1, ys1, **linekw)
#line2 = mpl.lines.Line2D(xs2, ys2, **linekw) # NOQA
#shrinkA=5, shrinkB=5, mutation_scale=20, fc="w")
#ax2.add_artist(con)
#fig.lines.append(line1)
#fig.lines.append(line2)
pass
return fig
#@ut.indent_func
def show_name(ibs, nid, in_image=True, fnum=0, sel_aids=[], subtitle='',
annote=False, aid_list=None, index_list=None, **kwargs):
r"""
Args:
ibs (IBEISController): ibeis controller object
nid (?):
in_image (bool):
fnum (int): figure number
sel_aids (list):
subtitle (str):
annote (bool):
CommandLine:
python -m ibeis.viz.viz_name --test-show_name --dpath ~/latex/crall-candidacy-2015 --save 'figures/{name}.jpg' --no-figtitle --notitle --db NNP_Master3 --figsize=9,4 --clipwhite --dpi=180 --adjust=.05 --index_list=[0,1,2,3] --rc=2,4 --append temp_out_figure.tex --name=IBEIS_PZ_0739 --no-draw_lbls --doboth --no-inimage --diskshow
python -m ibeis.viz.viz_name --test-show_name --no-figtitle --notitle --db NNP_Master3 --figsize=9,4 --clipwhite --dpi=180 --adjust=.05 --index_list=[0,1,2,3] --rc=2,4 --append temp_out_figure.tex --name=IBEIS_PZ_0739 --no-draw_lbls --doboth --no-inimage --show
python -m ibeis.viz.viz_name --test-show_name --show
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.viz.viz_name import * # NOQA
>>> ibs, nid, in_image, index_list = testdata_showname()
>>> fnum = 0
>>> sel_aids = []
>>> subtitle = ''
>>> annote = False
>>> # execute function
>>> show_name(ibs, nid, in_image, fnum, sel_aids, subtitle, annote, index_list=index_list)
>>> ut.show_if_requested()
"""
print('[viz_name] show_name nid=%r, index_list=%r, aid_list=%r' % (nid, index_list, aid_list))
if aid_list is None:
aid_list = ibs.get_name_aids(nid)
else:
assert ut.list_all_eq_to(ibs.get_annot_nids(aid_list), nid)
if index_list is not None:
aid_list = ut.take(aid_list, index_list)
name = ibs.get_name_texts((nid,))
print('[viz_name] * name=%r aid_list=%r' % (name, aid_list))
show_multiple_chips(ibs, aid_list, in_image=in_image, fnum=fnum,
sel_aids=sel_aids, annote=annote, **kwargs)
if isinstance(nid, np.ndarray):
nid = nid[0]
if isinstance(name, np.ndarray):
name = name[0]
use_figtitle = not ut.get_argflag('--no-figtitle')
if use_figtitle:
figtitle = 'Name View nid=%r name=%r' % (nid, name)
df2.set_figtitle(figtitle)
#if not annote:
# title += ' noannote'
#gs2.tight_layout(fig)
#gs2.update(top=df2.TOP_SUBPLOT_ADJUST)
#df2.set_figtitle(title, subtitle)
if __name__ == '__main__':
"""
CommandLine:
python -m ibeis.viz.viz_name
python -m ibeis.viz.viz_name --allexamples
python -m ibeis.viz.viz_name --allexamples --noface --nosrc
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()
| apache-2.0 | 8,261,119,530,049,679,000 | 40.378307 | 339 | 0.548239 | false |
Pinyto/cloud | api_prototype/sandbox.py | 1 | 3475 | # coding=utf-8
"""
Pinyto cloud - A secure cloud database for your personal data
Copyright (C) 2015 Johannes Merkert <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import time
from multiprocessing import Process, Queue
from multiprocessing.queues import Empty
from api_prototype.seccomp_process import SecureHost
def sandbox(code, request, real_db, queue):
"""
This function gets executed in a separate subprocess which does not share the memory with the main
Django process. This is done a) for security reasons to minimize the risk that code inside of the
sandbox is able to do anything harmful and b) for cleanly measuring the execution time for the code
because the user may have to pay for it.
:param code: The python code which should be executed in the sandbox
:type code: str
:param request: Django's request object
:type request: HttpRequest
:param real_db: The database connection
:type real_db: service.database.CollectionWrapper
:param queue: Queue for communicating with the main process
:type queue: multiprocessing.Queue
:return: nothing (the queue is used for returning the results)
"""
start_time = time.clock()
secure_host = SecureHost()
secure_host.start_child()
try:
result = secure_host.execute(code, request, real_db)
finally:
secure_host.kill_child()
end_time = time.clock()
queue.put((result, end_time - start_time))
def safely_exec(code, request, db):
"""
If you want to execute something in the sandbox, call this method.
It will setup a process and execute the code there with seccomp. The passed database connections
will used to access the users collection.
:param code: The python code which should be executed in the sandbox
:type code: str
:param request: Django's request object which is passed into the sandbox process
:type request: HttpRequest
:param db: The already opened database connection
:type db: service.database.CollectionWrapper
    :return: A tuple containing the result and the time (in seconds) needed to calculate the result.
    :rtype: (dict, float)
"""
start_time = time.clock()
queue = Queue(1)
sandbox_process = Process(target=sandbox, args=(code, request, db, queue))
sandbox_process.start()
result = ""
child_time = 0
wait_for_data = True
termination = False
while wait_for_data and not termination:
try:
result, child_time = queue.get(True, 0.001)
wait_for_data = False
except Empty:
wait_for_data = True
if not sandbox_process.is_alive():
termination = True
result = {'error': "The code could not be executed because it tried to do something illegal."}
sandbox_process.join()
end_time = time.clock()
return result, end_time - start_time + child_time
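# Minimal usage sketch (added; ``user_code``, ``request`` and ``db`` are
# assumed to come from the caller, and the contract of the code string is
# defined by SecureHost.execute):
# result, elapsed = safely_exec(user_code, request, db)
# if 'error' in result:
#     pass  # the sandbox child was killed for attempting something illegal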
| gpl-3.0 | -6,002,983,267,726,479,000 | 38.488636 | 106 | 0.710216 | false |
jem-gh/STplayer | STplayer.py | 1 | 2502 | #!/usr/bin/python
# STplayer (aka STconverter 2) is under the MIT License
# Copyright (c) 2012 Jean-Etienne Morlighem <[email protected]>
# https://github.com/jem-gh/STplayer
###############################################################################
# STplayer (aka STconverter 2) allows you to execute Python scripts written for
# SimpleGUI on a machine configured with Tkinter GUI instead.
#
# STplayer is the successor of STconverter, and is different in such ways that
# it has been entirely rewritten to handle the "conversion" with a totally
# different approach.
# While STconverter was converting (by this, meaning rewriting) each SimpleGUI
# operation in the user code before the program is executed with Tkinter,
# STplayer is an API between SimpleGUI operations and Tkinter.
#
# "Tkinter is Python's de-facto standard GUI (Graphical User Interface) package"
# (http://wiki.python.org/moin/TkInter)
# "SimpleGUI is a custom Python graphical user interface (GUI) module implemented
# directly in CodeSkulptor that provides an easy to learn interface for building
# interactive programs in Python" (http://www.codeskulptor.org) used for the
# online Coursera course "An Introduction to Interactive Programming in Python"
# by Joe Warren, Scott Rixner, John Greiner, and Stephen Wong (Rice University)
#
# I want to thank Amin Guzman for his valuable comments and suggestions on how
# to improve STconverter, which lead to the development of STplayer
#
# For the latest version of STplayer visit the repository on Github:
# https://github.com/jem-gh/STplayer
#
# STplayer is developed by Jean-Etienne Morlighem (https://github.com/jem-gh)
###############################################################################
import sys
from STplayer_GUI import ST_GUImain
from simplegui2tkinter_API import simplegui2tkinter
sys.modules['simplegui'] = simplegui2tkinter
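# With the adapter registered under the module name 'simplegui', any
# subsequent "import simplegui" inside the user's script resolves to the
# Tkinter-backed API instead of raising an ImportError.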
if __name__ == "__main__":
print "STplayer started!"
try:
# when running STplayer in command line, start executing the
# SimpleGUI program if given as argument
file_simplegui = sys.argv[1]
print "SimpleGUI program is loading... Thanks for your patience!"
execfile( file_simplegui, {} )
except IndexError:
# when launching STplayer in GUI mode or from command line without
# providing a SimpleGUI program, start the GUI
print "STplayer GUI initializing!"
ST_GUImain.Main()
| mit | 9,001,916,947,905,313,000 | 38.09375 | 82 | 0.681855 | false |
SUSE/kiwi | kiwi/bootloader/install/__init__.py | 1 | 2173 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import importlib
from typing import Dict
from abc import (
ABCMeta,
abstractmethod
)
from ...exceptions import (
KiwiBootLoaderInstallSetupError
)
class BootLoaderInstall(metaclass=ABCMeta):
"""
**BootLoaderInstall Factory**
:param string name: bootloader name
:param string root_dir: root directory path name
:param object device_provider: instance of :class:`DeviceProvider`
:param dict custom_args: custom arguments dictionary
"""
@abstractmethod
def __init__(self) -> None:
return None # pragma: no cover
@staticmethod
def new(
name: str, root_dir: str, device_provider: object,
custom_args: Dict = None
):
name_map = {
'grub2': 'BootLoaderInstallGrub2'
if name == 'grub2' or name == 'grub2_s390x_emu' else None
}
for bootloader_namespace, bootloader_name in list(name_map.items()):
if bootloader_name:
break
try:
bootloader_install = importlib.import_module(
'kiwi.bootloader.install.{}'.format(bootloader_namespace)
)
return bootloader_install.__dict__[bootloader_name](
root_dir, device_provider, custom_args
)
except Exception:
raise KiwiBootLoaderInstallSetupError(
'Support for {} bootloader installation '
'not implemented'.format(name)
)
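# Hypothetical call site (sketch; ``provider`` stands in for a real
# DeviceProvider instance supplied by the calling code):
#
#     installer = BootLoaderInstall.new(
#         'grub2', root_dir='/mnt/root', device_provider=provider
#     )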
| gpl-3.0 | -4,317,752,855,747,303,000 | 31.432836 | 76 | 0.651173 | false |
onshape-public/onshape-clients | python/onshape_client/oas/models/bt_configured_values_column_info1025.py | 1 | 10546 | # coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_configured_dimension_column_info2168
except ImportError:
bt_configured_dimension_column_info2168 = sys.modules[
"onshape_client.oas.models.bt_configured_dimension_column_info2168"
]
try:
from onshape_client.oas.models import bt_configured_feature_column_info1014
except ImportError:
bt_configured_feature_column_info1014 = sys.modules[
"onshape_client.oas.models.bt_configured_feature_column_info1014"
]
try:
from onshape_client.oas.models import bt_configured_parameter_column_info2900
except ImportError:
bt_configured_parameter_column_info2900 = sys.modules[
"onshape_client.oas.models.bt_configured_parameter_column_info2900"
]
try:
from onshape_client.oas.models import bt_configured_suppression_column_info2498
except ImportError:
bt_configured_suppression_column_info2498 = sys.modules[
"onshape_client.oas.models.bt_configured_suppression_column_info2498"
]
try:
from onshape_client.oas.models import bt_configured_values_column_info1025_all_of
except ImportError:
bt_configured_values_column_info1025_all_of = sys.modules[
"onshape_client.oas.models.bt_configured_values_column_info1025_all_of"
]
try:
from onshape_client.oas.models import bt_table_column_info1222
except ImportError:
bt_table_column_info1222 = sys.modules[
"onshape_client.oas.models.bt_table_column_info1222"
]
try:
from onshape_client.oas.models import bt_table_column_spec1967
except ImportError:
bt_table_column_spec1967 = sys.modules[
"onshape_client.oas.models.bt_table_column_spec1967"
]
class BTConfiguredValuesColumnInfo1025(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("parent_type",): {
"FEATURE": "FEATURE",
"INSTANCE": "INSTANCE",
"MATE": "MATE",
"MATE_CONNECTOR": "MATE_CONNECTOR",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"parent_id": (str,), # noqa: E501
"parent_name": (str,), # noqa: E501
"parent_type": (str,), # noqa: E501
"id": (str,), # noqa: E501
"node_id": (str,), # noqa: E501
"specification": (
bt_table_column_spec1967.BTTableColumnSpec1967,
), # noqa: E501
}
@staticmethod
def discriminator():
return {
"bt_type": {
"BTConfiguredFeatureColumnInfo-1014": bt_configured_feature_column_info1014.BTConfiguredFeatureColumnInfo1014,
"BTConfiguredDimensionColumnInfo-2168": bt_configured_dimension_column_info2168.BTConfiguredDimensionColumnInfo2168,
"BTConfiguredSuppressionColumnInfo-2498": bt_configured_suppression_column_info2498.BTConfiguredSuppressionColumnInfo2498,
"BTConfiguredParameterColumnInfo-2900": bt_configured_parameter_column_info2900.BTConfiguredParameterColumnInfo2900,
},
}
attribute_map = {
"bt_type": "btType", # noqa: E501
"parent_id": "parentId", # noqa: E501
"parent_name": "parentName", # noqa: E501
"parent_type": "parentType", # noqa: E501
"id": "id", # noqa: E501
"node_id": "nodeId", # noqa: E501
"specification": "specification", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""bt_configured_values_column_info1025.BTConfiguredValuesColumnInfo1025 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
parent_id (str): [optional] # noqa: E501
parent_name (str): [optional] # noqa: E501
parent_type (str): [optional] # noqa: E501
id (str): [optional] # noqa: E501
node_id (str): [optional] # noqa: E501
specification (bt_table_column_spec1967.BTTableColumnSpec1967): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
bt_configured_values_column_info1025_all_of.BTConfiguredValuesColumnInfo1025AllOf,
bt_table_column_info1222.BTTableColumnInfo1222,
],
"oneOf": [],
}
@classmethod
def get_discriminator_class(cls, from_server, data):
"""Returns the child class specified by the discriminator"""
discriminator = cls.discriminator()
discr_propertyname_py = list(discriminator.keys())[0]
discr_propertyname_js = cls.attribute_map[discr_propertyname_py]
if from_server:
class_name = data[discr_propertyname_js]
else:
class_name = data[discr_propertyname_py]
class_name_to_discr_class = discriminator[discr_propertyname_py]
return class_name_to_discr_class.get(class_name)
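# Dispatch sketch (added note): server-side data such as
# {'btType': 'BTConfiguredFeatureColumnInfo-1014', ...} is routed through the
# discriminator table above to BTConfiguredFeatureColumnInfo1014.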
| mit | -8,931,959,705,869,452,000 | 37.489051 | 138 | 0.611132 | false |
sigmunau/nav | python/nav/ipdevpoll/pool.py | 1 | 10770 | #
# Copyright (C) 2017 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Handle sending jobs to worker processes."""
from __future__ import print_function
import os
import sys
from twisted.protocols import amp
from twisted.internet import reactor, protocol
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.endpoints import ProcessEndpoint, StandardIOEndpoint
import twisted.internet.endpoints
from nav.ipdevpoll import ContextLogger
from . import control, jobs
def initialize_worker():
handler = JobHandler()
factory = protocol.Factory()
factory.protocol = lambda: ProcessAMP(is_worker=True, locator=handler)
StandardIOEndpoint(reactor).listen(factory)
return handler
class Cancel(amp.Command):
"""Represent a cancel message for sending to workers"""
arguments = [
('serial', amp.Integer()),
]
response = []
class Shutdown(amp.Command):
"""Represent a shutdown message for sending to workers"""
arguments = []
response = []
class Job(amp.Command):
"""Represent a job for sending to a worker"""
arguments = [
('netbox', amp.Integer()),
('job', amp.String()),
('plugins', amp.ListOf(amp.String())),
('interval', amp.Integer()), # Needs to be included in database record.
# Not used for scheduling
('serial', amp.Integer()), # Serial number needed for cancelling
]
response = [('result', amp.Boolean())]
errors = {
jobs.AbortedJobError: 'AbortedJob',
jobs.SuggestedReschedule: 'SuggestedReschedule',
}
class JobHandler(amp.CommandLocator):
"""Resolve actions for jobs received over AMP"""
_logger = ContextLogger()
def __init__(self):
super(JobHandler, self).__init__()
self.jobs = dict()
self.done = False
def job_done(self, result, serial):
if serial in self.jobs:
del self.jobs[serial]
if self.done and not self.jobs:
reactor.callLater(3, reactor.stop)
return result
@Job.responder
def execute_job(self, netbox, job, plugins, interval, serial):
self._logger.debug("Process {pid} received job {job} for"
" netbox {netbox}"
" with plugins {plugins}".format(
pid=os.getpid(),
job=job,
netbox=netbox,
plugins=",".join(plugins)),)
job = jobs.JobHandler(job, netbox, plugins, interval)
self.jobs[serial] = job
deferred = job.run()
deferred.addBoth(self.job_done, serial)
deferred.addCallback(lambda x: {'result': x})
return deferred
@Cancel.responder
def cancel(self, serial):
if serial in self.jobs:
self.jobs[serial].cancel()
return {}
@Shutdown.responder
def shutdown(self):
self.done = True
return {}
def log_jobs(self):
self._logger.info("Got {jobs} active jobs".format(
jobs=len(self.jobs)))
for job in self.jobs.values():
self._logger.info("{job} {netbox} {plugins}".format(
job=job.name,
netbox=job.netbox,
plugins=", ".join(job.plugins)))
class ProcessAMP(amp.AMP):
"""Modify AMP protocol to allow running over process pipes"""
_logger = ContextLogger()
def __init__(self, is_worker, **kwargs):
super(ProcessAMP, self).__init__(**kwargs)
self.is_worker = is_worker
self.lost_handler = None
def makeConnection(self, transport):
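        # stdio and process-pipe transports lack the getPeer/getHost
        # methods AMP expects from a connected transport, so stub them.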
if not hasattr(transport, 'getPeer'):
setattr(transport, 'getPeer', lambda: "peer")
if not hasattr(transport, 'getHost'):
setattr(transport, 'getHost', lambda: "host")
super(ProcessAMP, self).makeConnection(transport)
def connectionLost(self, reason):
super(ProcessAMP, self).connectionLost(reason)
if self.is_worker:
if reactor.running:
reactor.stop()
else:
if self.lost_handler:
self.lost_handler(self, reason)
class InlinePool(object):
"This is a dummy worker pool that executes all jobs in the current process"
def __init__(self):
self.active_jobs = {}
def job_done(self, result, deferred):
if deferred in self.active_jobs:
del self.active_jobs[deferred]
return result
def execute_job(self, job, netbox, plugins=None, interval=None):
job = jobs.JobHandler(job, netbox, plugins, interval)
deferred = job.run()
self.active_jobs[deferred] = job
deferred.addBoth(self.job_done, deferred)
return deferred
def cancel(self, deferred):
if deferred in self.active_jobs:
self.active_jobs[deferred].cancel()
class Worker(object):
"""This class holds information about one worker process as seen from
the worker pool"""
_logger = ContextLogger()
def __init__(self, pool, threadpoolsize, max_jobs):
self.active_jobs = 0
self.total_jobs = 0
self.max_concurrent_jobs = 0
self.pool = pool
self.threadpoolsize = threadpoolsize
self.max_jobs = max_jobs
@inlineCallbacks
def start(self):
args = [control.get_process_command(), '--worker', '-f', '-s', '-P']
if self.threadpoolsize:
args.append('--threadpoolsize=%d' % self.threadpoolsize)
endpoint = ProcessEndpoint(reactor, control.get_process_command(),
args, os.environ)
factory = protocol.Factory()
factory.protocol = lambda: ProcessAMP(is_worker=False,
locator=JobHandler())
self.process = yield endpoint.connect(factory)
self.process.lost_handler = self._worker_died
returnValue(self)
def done(self):
return self.max_jobs and (self.total_jobs >= self.max_jobs)
def _worker_died(self, worker, reason):
if not self.done():
self._logger.warning("Lost worker {worker} with {jobs} "
"active jobs".format(
worker=worker,
jobs=self.active_jobs))
elif self.active_jobs:
self._logger.warning("Worker {worker} exited with {jobs} "
"active jobs".format(
worker=worker,
jobs=self.active_jobs))
else:
self._logger.debug("Worker {worker} exited normally"
.format(worker=worker))
self.pool.worker_died(self)
def execute(self, serial, command, **kwargs):
self.active_jobs += 1
self.total_jobs += 1
self.max_concurrent_jobs = max(self.active_jobs,
self.max_concurrent_jobs)
deferred = self.process.callRemote(command, serial=serial, **kwargs)
if self.done():
self.process.callRemote(Shutdown)
return deferred
def cancel(self, serial):
return self.process.callRemote(Cancel, serial=serial)
class WorkerPool(object):
"""This class represent a pool of worker processes to which jobs can
be scheduled"""
_logger = ContextLogger()
def __init__(self, workers, max_jobs, threadpoolsize=None):
twisted.internet.endpoints.log = HackLog
self.workers = set()
self.target_count = workers
self.max_jobs = max_jobs
self.threadpoolsize = threadpoolsize
for i in range(self.target_count):
self._spawn_worker()
self.serial = 0
self.jobs = dict()
def worker_died(self, worker):
self.workers.remove(worker)
if not worker.done():
self._spawn_worker()
@inlineCallbacks
def _spawn_worker(self):
worker = yield Worker(self, self.threadpoolsize, self.max_jobs).start()
self.workers.add(worker)
def _cleanup(self, result, deferred):
serial, worker = self.jobs[deferred]
del self.jobs[deferred]
worker.active_jobs -= 1
return result
def _execute(self, command, **kwargs):
ready_workers = [w for w in self.workers if not w.done()]
if not ready_workers:
raise RuntimeError("No ready workers")
worker = min(ready_workers, key=lambda x: x.active_jobs)
self.serial += 1
deferred = worker.execute(self.serial, command, **kwargs)
if worker.done():
self._spawn_worker()
self.jobs[deferred] = (self.serial, worker)
deferred.addBoth(self._cleanup, deferred)
return deferred
def cancel(self, deferred):
if deferred not in self.jobs:
self._logger.debug("Cancelling job that isn't known")
return
serial, worker = self.jobs[deferred]
return worker.cancel(serial)
def execute_job(self, job, netbox, plugins=None, interval=None):
deferred = self._execute(Job, job=job, netbox=netbox,
plugins=plugins, interval=interval)
deferred.addCallback(lambda x: x['result'])
return deferred
def log_summary(self):
self._logger.info("{active} out of {target} workers running".format(
active=len(self.workers),
target=self.target_count))
for worker in self.workers:
self._logger.info(" - ready {ready} active {active}"
" max {max} total {total}".format(
ready=not worker.done(),
active=worker.active_jobs,
max=worker.max_concurrent_jobs,
total=worker.total_jobs))
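def _workerpool_usage_sketch():
    """Illustrative sketch only (never called): shows how a scheduler
    might drive WorkerPool. The job name, netbox id and interval are
    made-up values, not defaults used by ipdevpoll."""
    pool = WorkerPool(workers=2, max_jobs=100, threadpoolsize=10)
    deferred = pool.execute_job('inventory', netbox=1,
                                plugins=['typeoid'], interval=3600)
    # a job still in flight can be aborted through the same deferred:
    pool.cancel(deferred)
    return deferred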
class HackLog(object):
@staticmethod
def msg(data, **kwargs):
"""Used to monkeypatch twisted.endpoints to log worker output the
ipdevpoll way"""
sys.stderr.write(data)
| gpl-2.0 | -5,302,875,455,866,709,000 | 33.630225 | 80 | 0.589694 | false |
mazvv/travelcrm | travelcrm/forms/foodcats.py | 1 | 2079 | # -*- coding: utf-8 -*-
import colander
from . import (
ResourceSchema,
BaseForm,
BaseSearchForm,
BaseAssignForm,
)
from ..resources.foodcats import FoodcatsResource
from ..models.foodcat import Foodcat
from ..models.task import Task
from ..models.note import Note
from ..lib.qb.foodcats import FoodcatsQueryBuilder
from ..lib.utils.common_utils import translate as _
from ..lib.utils.security_utils import get_auth_employee
@colander.deferred
def name_validator(node, kw):
request = kw.get('request')
def validator(node, value):
foodcat = Foodcat.by_name(value)
if (
foodcat
and str(foodcat.id) != request.params.get('id')
):
raise colander.Invalid(
node,
_(u'Food category with the same name exists'),
)
return colander.All(colander.Length(max=32), validator,)
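# name_validator is a colander deferred value, so it is only resolved
# when the schema is bound, e.g. (sketch; assumes a Pyramid request
# object and colander's standard bind() API):
#     schema = _FoodcatSchema().bind(request=request)
# BaseForm is expected to perform this binding when validating controls.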
class _FoodcatSchema(ResourceSchema):
name = colander.SchemaNode(
colander.String(),
validator=name_validator,
)
class FoodcatForm(BaseForm):
_schema = _FoodcatSchema
def submit(self, foodcat=None):
if not foodcat:
foodcat = Foodcat(
resource=FoodcatsResource.create_resource(
get_auth_employee(self.request)
)
)
else:
foodcat.resource.notes = []
foodcat.resource.tasks = []
foodcat.name = self._controls.get('name')
for id in self._controls.get('note_id'):
note = Note.get(id)
foodcat.resource.notes.append(note)
for id in self._controls.get('task_id'):
task = Task.get(id)
foodcat.resource.tasks.append(task)
return foodcat
class FoodcatSearchForm(BaseSearchForm):
_qb = FoodcatsQueryBuilder
class FoodcatAssignForm(BaseAssignForm):
def submit(self, ids):
for id in ids:
foodcat = Foodcat.get(id)
foodcat.resource.maintainer_id = self._controls.get(
'maintainer_id'
)
| gpl-3.0 | -5,314,125,230,378,631,000 | 25.653846 | 64 | 0.601732 | false |
Havate/havate-openstack | proto-build/gui/horizon/Horizon_GUI/settings.py | 1 | 7641 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import sys
import warnings
from django.utils.translation import ugettext_lazy as _ # noqa
from openstack_dashboard import exceptions
warnings.formatwarning = lambda message, category, *args, **kwargs: \
'%s: %s' % (category.__name__, message)
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
BIN_DIR = os.path.abspath(os.path.join(ROOT_PATH, '..', 'bin'))
if ROOT_PATH not in sys.path:
sys.path.append(ROOT_PATH)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SITE_BRANDING = 'OpenStack Dashboard'
LOGIN_URL = '/auth/login/'
LOGOUT_URL = '/auth/logout/'
# LOGIN_REDIRECT_URL can be used as an alternative for
# HORIZON_CONFIG.user_home, if user_home is not set.
# Do not set it to '/home/', as this will cause circular redirect loop
LOGIN_REDIRECT_URL = '/'
MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'media'))
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static'))
STATIC_URL = '/static/'
ROOT_URLCONF = 'openstack_dashboard.urls'
HORIZON_CONFIG = {
'dashboards': ('project', 'admin', 'settings', 'router',),
'default_dashboard': 'project',
'user_home': 'openstack_dashboard.views.get_user_home',
'ajax_queue_limit': 10,
'auto_fade_alerts': {
'delay': 3000,
'fade_duration': 1500,
'types': ['alert-success', 'alert-info']
},
'help_url': "http://docs.openstack.org",
'exceptions': {'recoverable': exceptions.RECOVERABLE,
'not_found': exceptions.NOT_FOUND,
'unauthorized': exceptions.UNAUTHORIZED},
}
# Set to True to allow users to upload images to glance via Horizon server.
# When enabled, a file form field will appear on the create image form.
# See documentation for deployment considerations.
HORIZON_IMAGES_ALLOW_UPLOAD = True
# The OPENSTACK_IMAGE_BACKEND settings can be used to customize features
# in the OpenStack Dashboard related to the Image service, such as the list
# of supported image formats.
OPENSTACK_IMAGE_BACKEND = {
'image_formats': [
('', ''),
('aki', _('AKI - Amazon Kernel Image')),
('ami', _('AMI - Amazon Machine Image')),
('ari', _('ARI - Amazon Ramdisk Image')),
('iso', _('ISO - Optical Disk Image')),
('qcow2', _('QCOW2 - QEMU Emulator')),
('raw', _('Raw')),
('vdi', _('VDI')),
('vhd', _('VHD')),
('vmdk', _('VMDK'))
]
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'horizon.middleware.HorizonMiddleware',
'django.middleware.doc.XViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
'horizon.context_processors.horizon',
'openstack_dashboard.context_processors.openstack',
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'horizon.loaders.TemplateLoader'
)
TEMPLATE_DIRS = (
os.path.join(ROOT_PATH, 'templates'),
)
STATICFILES_FINDERS = (
'compressor.finders.CompressorFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
COMPRESS_PRECOMPILERS = (
('text/less', ('lesscpy {infile}')),
)
COMPRESS_CSS_FILTERS = (
'compressor.filters.css_default.CssAbsoluteFilter',
)
COMPRESS_ENABLED = True
COMPRESS_OUTPUT_DIR = 'dashboard'
COMPRESS_CSS_HASHING_METHOD = 'hash'
COMPRESS_PARSER = 'compressor.parser.HtmlParser'
INSTALLED_APPS = (
'openstack_dashboard',
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'compressor',
'horizon',
'openstack_dashboard.dashboards.project',
'openstack_dashboard.dashboards.admin',
'openstack_dashboard.dashboards.settings',
'openstack_auth',
'openstack_dashboard.dashboards.router',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
AUTHENTICATION_BACKENDS = ('openstack_auth.backend.KeystoneBackend',)
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
SESSION_COOKIE_HTTPONLY = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_SECURE = False
SESSION_TIMEOUT = 1800
gettext_noop = lambda s: s
LANGUAGES = (
('en', gettext_noop('English')),
('en-au', gettext_noop('Australian English')),
('en-gb', gettext_noop('British English')),
('es', gettext_noop('Spanish')),
('fr', gettext_noop('French')),
('ja', gettext_noop('Japanese')),
('ko', gettext_noop('Korean (Korea)')),
('nl', gettext_noop('Dutch (Netherlands)')),
('pl', gettext_noop('Polish')),
('pt-br', gettext_noop('Portuguese (Brazil)')),
('ru', gettext_noop('Russian')),
('zh-cn', gettext_noop('Simplified Chinese')),
('zh-tw', gettext_noop('Traditional Chinese')),
)
LANGUAGE_CODE = 'en'
LANGUAGE_COOKIE_NAME = 'horizon_language'
USE_I18N = True
USE_L10N = True
USE_TZ = True
OPENSTACK_KEYSTONE_DEFAULT_ROLE = 'Member'
DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter'
POLICY_FILES_PATH = os.path.join(ROOT_PATH, "conf")
# Map of local copy of service policy files
POLICY_FILES = {
'identity': 'keystone_policy.json',
'compute': 'nova_policy.json'
}
SECRET_KEY = None
try:
from local.local_settings import * # noqa
except ImportError:
logging.warning("No local_settings file found.")
# Ensure that we always have a SECRET_KEY set, even when no local_settings.py
# file is present. See local_settings.py.example for full documentation on the
# horizon.utils.secret_key module and its use.
if not SECRET_KEY:
from horizon.utils import secret_key
LOCAL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'local')
SECRET_KEY = secret_key.generate_or_read_from_file('/var/lib/openstack-dashboard/secret_key')
from openstack_dashboard import policy
POLICY_CHECK_FUNCTION = policy.check
# Add HORIZON_CONFIG to the context information for offline compression
COMPRESS_OFFLINE_CONTEXT = {
'STATIC_URL': STATIC_URL,
'HORIZON_CONFIG': HORIZON_CONFIG
}
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
| apache-2.0 | 2,743,992,611,349,540,400 | 31.653846 | 97 | 0.693888 | false |
follownjmoney/campaign-server | campaignserver/campaignserver/settings_prod.py | 1 | 2523 | """
Django settings for campaignserver project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'k-(62ls@8owkmo72ipb_x-#9zgt#!59+8^5kw(rf3yatpmou%h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'dataserver',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'campaignserver.urls'
WSGI_APPLICATION = 'campaignserver.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'followthemoney',
'USER': 'root',
'PASSWORD': 'followthemoney',
'HOST': 'followthemoney.chwj19dbxodd.us-east-1.rds.amazonaws.com',
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
STATIC_URL = '/static/'
| apache-2.0 | 5,052,721,809,511,146,000 | 25.010309 | 74 | 0.722553 | false |
xesscorp/skidl | skidl/skidl.py | 1 | 7609 | # -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2016 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import os
from builtins import open, super
from future import standard_library
from . import tools # Import EDA tool-specific stuff.
from .circuit import Circuit
from .common import *
from .defines import *
from .logger import erc_logger, get_script_name, logger
from .part_query import footprint_cache
from .pin import Pin
from .utilities import *
standard_library.install_aliases()
class SkidlCfg(dict):
"""Class for holding SKiDL configuration."""
CFG_FILE_NAME = ".skidlcfg"
def __init__(self, *dirs):
super().__init__()
self.load(*dirs)
def load(self, *dirs):
"""Load SKiDL configuration from JSON files in given dirs."""
for dir in dirs:
path = os.path.join(dir, self.CFG_FILE_NAME)
path = os.path.expanduser(path)
path = os.path.abspath(path)
try:
with open(path) as cfg_fp:
merge_dicts(self, json.load(cfg_fp))
except (FileNotFoundError, IOError):
pass
def store(self, dir="."):
"""Store SKiDL configuration as JSON in directory as .skidlcfg file."""
path = os.path.join(dir, self.CFG_FILE_NAME)
path = os.path.expanduser(path)
path = os.path.abspath(path)
with open(path, "w") as cfg_fp:
json.dump(self, cfg_fp, indent=4)
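# Example .skidlcfg contents (illustrative; the keys mirror the
# skidl_cfg entries populated below, and the library path is only a
# placeholder):
#     {
#         "default_tool": "kicad",
#         "lib_search_paths": {"kicad": ["/usr/share/kicad/library"]}
#     }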
def get_kicad_lib_tbl_dir():
"""Get the path to where the global fp-lib-table file is found."""
paths = (
"$HOME/.config/kicad",
"~/.config/kicad",
"%APPDATA%/kicad",
"$HOME/Library/Preferences/kicad",
"~/Library/Preferences/kicad",
)
for path in paths:
path = os.path.normpath(os.path.expanduser(os.path.expandvars(path)))
if os.path.lexists(path):
return path
return ""
###############################################################################
# Globals that are used by everything else.
###############################################################################
# Get SKiDL configuration.
skidl_cfg = SkidlCfg("/etc", "~", ".")
# If no configuration files were found, set some default lib search paths.
if "lib_search_paths" not in skidl_cfg:
skidl_cfg["lib_search_paths"] = {tool: ["."] for tool in ALL_TOOLS}
# Add the location of the default KiCad part libraries.
try:
skidl_cfg["lib_search_paths"][KICAD].append(os.environ["KICAD_SYMBOL_DIR"])
except KeyError:
logger.warning(
"KICAD_SYMBOL_DIR environment variable is missing, so the default KiCad symbol libraries won't be searched."
)
# Add the location of the default SKiDL part libraries.
default_skidl_libs = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "libs"
)
skidl_cfg["lib_search_paths"][SKIDL].append(default_skidl_libs)
# Shortcut to library search paths.
lib_search_paths = skidl_cfg["lib_search_paths"]
# If no configuration files were found, set some default footprint search paths.
if "footprint_search_paths" not in skidl_cfg:
dir_ = get_kicad_lib_tbl_dir()
skidl_cfg["footprint_search_paths"] = {tool: [dir_] for tool in ALL_TOOLS}
# Cause the footprint cache to be invalidated if the footprint search path changes.
def invalidate_footprint_cache(self, k, v):
footprint_cache.reset()
skidl_cfg["footprint_search_paths"] = TriggerDict(skidl_cfg["footprint_search_paths"])
skidl_cfg["footprint_search_paths"].trigger_funcs[KICAD] = invalidate_footprint_cache
# Shortcut to footprint search paths.
footprint_search_paths = skidl_cfg["footprint_search_paths"]
# Set default toolset being used with SKiDL.
def set_default_tool(tool):
"""Set the ECAD tool that will be used by default."""
skidl_cfg["default_tool"] = tool
def get_default_tool():
return skidl_cfg["default_tool"]
if "default_tool" not in skidl_cfg:
set_default_tool(KICAD)
# Make the various EDA tool library suffixes globally available.
lib_suffixes = tools.lib_suffixes
# Definitions for backup library of circuit parts.
BACKUP_LIB_NAME = get_script_name() + "_lib"
BACKUP_LIB_FILE_NAME = BACKUP_LIB_NAME + lib_suffixes[SKIDL]
# Boolean controls whether backup lib will be searched for missing parts.
QUERY_BACKUP_LIB = INITIAL_QUERY_BACKUP_LIB = True
def set_query_backup_lib(val):
"""Set the boolean that controls searching for the backup library."""
global QUERY_BACKUP_LIB
QUERY_BACKUP_LIB = val
def get_query_backup_lib():
return QUERY_BACKUP_LIB
# Backup lib for storing parts in a Circuit.
backup_lib = None
def set_backup_lib(lib):
"""Set the backup library."""
global backup_lib
backup_lib = lib
def get_backup_lib():
return backup_lib
@norecurse
def load_backup_lib():
"""Load a backup library that stores the parts used in the circuit."""
global backup_lib
# Don't keep reloading the backup library once it's loaded.
if not backup_lib:
try:
# The backup library is a SKiDL lib stored as a Python module.
exec(open(BACKUP_LIB_FILE_NAME).read())
# Copy the backup library in the local storage to the global storage.
backup_lib = locals()[BACKUP_LIB_NAME]
except (FileNotFoundError, ImportError, NameError, IOError):
pass
return backup_lib
# Create the default Circuit object that will be used unless another is explicitly created.
builtins.default_circuit = Circuit()
# NOCONNECT net for attaching pins that are intentionally left open.
builtins.NC = default_circuit.NC # pylint: disable=undefined-variable
# Create calls to functions on whichever Circuit object is the current default.
ERC = default_circuit.ERC
erc_assert = default_circuit.add_erc_assertion
generate_netlist = default_circuit.generate_netlist
generate_xml = default_circuit.generate_xml
generate_schematic = default_circuit.generate_schematic
generate_svg = default_circuit.generate_svg
generate_graph = default_circuit.generate_graph
reset = default_circuit.reset
backup_parts = default_circuit.backup_parts
# Define a tag for nets that convey power (e.g., VCC or GND).
POWER = Pin.drives.POWER
def no_files(circuit=default_circuit):
"""Prevent creation of output files (netlists, ERC, logs) by this Circuit object."""
circuit.no_files = True
erc_logger.stop_file_output()
logger.stop_file_output()
| mit | 5,613,484,846,963,720,000 | 32.372807 | 120 | 0.681036 | false |
iniverno/RnR-LLC | simics-3.0-install/simics-3.0.31/amd64-linux/lib/x86_components.py | 1 | 69880 | # MODULE: x86-components
# CLASS: x86-system
# CLASS: x86-apic-system
# CLASS: x86-separate-mem-io-system
# CLASS: x86-apic-bus-system
# CLASS: x86-e7520-system
# CLASS: i386-cpu
# CLASS: i486sx-cpu
# CLASS: i486dx2-cpu
# CLASS: pentium-cpu
# CLASS: pentium-mmx-cpu
# CLASS: pentium-pro-cpu
# CLASS: pentium-ii-cpu
# CLASS: pentium-iii-cpu
# CLASS: pentium-m-cpu
# CLASS: pentium-4-cpu
# CLASS: xeon-prestonia-cpu
# CLASS: pentium-4e-cpu
# CLASS: pentium-4e-2ht-cpu
# CLASS: pentium-4e-4ht-cpu
# CLASS: x86-hammer-cpu
# CLASS: opteron-cpu
# CLASS: north-bridge-443bx
# CLASS: north-bridge-443bx-agp
# CLASS: north-bridge-875p
# CLASS: north-bridge-e7520
# CLASS: north-bridge-k8
# CLASS: legacy-pc-devices
import os, time
from sim_core import *
from components import *
from base_components import standard_pc_devices_component
from base_components import find_device, get_highest_2exp
from x86_cmos_info import register_cmos_commands
### X86 Legacy System
class x86_system_component(component_object):
classname = 'x86-system'
basename = 'system'
description = ('The "x86-system" component represents a legacy ISA based '
'x86 system with a single processor.')
connectors = {
'cpu0' : {'type' : 'x86-processor', 'direction' : 'down',
'empty_ok' : False, 'hotplug' : False, 'multi' : False},
'chipset' : {'type' : 'x86-chipset', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False},
'reset' : {'type' : 'x86-reset-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}}
bios = None
cpus = []
def __init__(self, parse_obj):
component_object.__init__(self, parse_obj)
self.break_on_reboot = False
self.use_acpi = 0
self.do_create_acpi_tables = 1
self.do_init_cmos = 1
self.use_shadow = 1
self.use_hostfs = 1
self.do_init_mtrrs = 1
self.linux_acpi_bug_workaround = 1
self.map_ram = 1
self.disable_ap_cpus = 1
def get_memory_megs(self, idx):
return self.memory_megs
def set_memory_megs(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
if val < 1:
SIM_attribute_error('Unsupported memory size')
return Sim_Set_Illegal_Value
self.memory_megs = val
return Sim_Set_Ok
def get_rtc_time(self, idx):
return self.tod
def set_rtc_time(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
try:
time.strptime(val, '%Y-%m-%d %H:%M:%S %Z')
except Exception, msg:
SIM_attribute_error(str(msg))
return Sim_Set_Illegal_Value
self.tod = val
return Sim_Set_Ok
def get_bios(self, idx):
return self.bios
def set_bios(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
if self.instantiated:
# allow checkpoint to be loaded, bios isn't used once instantiated
self.bios = val
return Sim_Set_Ok
if not SIM_lookup_file(val):
# TODO: use exception
print 'Could not locate bios file %s' % val
return Sim_Set_Illegal_Value
self.bios_size = os.stat(SIM_lookup_file(val)).st_size
# Default bios contains trailing garbage
if val[:10] == 'rombios-2.':
self.bios_size = 64 * 1024
if self.bios_size > 0x100000:
# TODO: use exception
print 'BIOS size %d is larger than max (%d)' % (
self.bios_size, 0x100000)
return Sim_Set_Illegal_Value
self.bios = val
return Sim_Set_Ok
def add_objects(self):
self.cpus_per_slot = []
# RAM
self.o.dram_space = pre_obj('dram$', 'memory-space')
self.o.ram_image = pre_obj('ram$_image', 'image')
self.o.ram_image.size = self.memory_megs * 1024 * 1024
self.o.ram = pre_obj('ram$', 'ram')
self.o.ram.image = self.o.ram_image
self.o.dram_space.map = [[0, self.o.ram, 0, 0,
self.memory_megs * 1024 * 1024]]
self.o.reset_bus = pre_obj('reset$', 'x86-reset-bus')
self.o.reset_bus.reset_targets = []
self.o.conf = pre_obj('conf$', 'pc-config')
if self.bios:
self.o.rom_image = pre_obj('rom$_image', 'image')
self.o.rom_image.size = self.bios_size
self.o.rom = pre_obj('rom$', 'rom')
self.o.rom.image = self.o.rom_image
if self.use_hostfs:
self.o.hfs = pre_obj('hfs$', 'hostfs')
# Port space
self.o.port_space = pre_obj('port_mem$', 'port-space')
self.o.port_space.map = [
[0xfff0, self.o.conf, 0, 0, 1],
[0xfff1, self.o.conf, 1, 0, 1]]
# Northbridge memory-space
self.o.phys_mem = pre_obj('phys_mem$', 'memory-space')
self.o.phys_mem.map = []
# Southbridge memory-space
self.o.pci_mem = pre_obj('pci_mem$', 'memory-space')
self.o.pci_mem.map = []
self.o.phys_mem.default_target = [self.o.pci_mem, 0, 0, None]
if self.bios:
self.o.pci_mem.map += [
[0x100000000 - self.bios_size, self.o.rom, 0, 0,
self.bios_size]]
if self.use_hostfs:
self.o.pci_mem.map += [
[0x0ffe81000, self.o.hfs, 0, 0, 0x10]]
if self.use_shadow:
self.o.shadow = pre_obj('core$', 'pc-shadow')
self.o.shadow_mem = pre_obj('shadow$', 'memory-space')
self.o.shadow_mem.map = [[0x100000,
self.o.dram_space, 0, 0, 0x100000]]
self.o.port_space.map += [
[0xfff4, self.o.shadow, 0, 0, 1],
[0xfff5, self.o.shadow, 0, 1, 1]]
if self.bios:
self.o.pci_mem.map += [
[0x000f0000, self.o.rom, 0, self.bios_size - 0x10000, 0x10000]]
if self.linux_acpi_bug_workaround:
self.o.rom1_image = pre_obj('rom$_image', 'image')
self.o.rom1_image.size = 0x10000
self.o.rom1 = pre_obj('rom$', 'rom')
self.o.rom1.image = self.o.rom1_image
self.o.pci_mem.map += [
[0x000e0000, self.o.rom1, 0, 0, 0x10000]]
# Map RAM
if self.map_ram and self.bios:
ram_map = [[0x000000000, self.o.dram_space, 0, 0, 0xa0000]]
high_mem = 4096 - 256
if self.memory_megs > high_mem:
high_mem *= 1024 * 1024
highest = (self.memory_megs * 1024 * 1024) - high_mem
ram_map += [
[0x000100000, self.o.dram_space, 0, 0x100000, high_mem,
None, 0],
[0x100000000, self.o.dram_space, 0, high_mem, highest,
None, 0]]
else:
megs = (self.memory_megs - 1) * 1024 * 1024
ram_map += [
[0x000100000, self.o.ram, 0, 0x100000, megs, None, 0]]
self.o.phys_mem.map += ram_map
elif self.map_ram and not self.bios:
ram_map = [[0x00000000, self.o.dram_space, 0, 0, self.memory_megs * 1024 * 1024]]
self.o.phys_mem.map += ram_map
def add_connector_info(self):
self.connector_info['cpu0'] = [0,
self.o.phys_mem,
self.o.port_space]
self.connector_info['chipset'] = [self.o.phys_mem,
self.o.pci_mem,
self.o.port_space]
if "dram_space" in dir(self.o):
self.connector_info['chipset'] += [ self.o.dram_space]
else:
self.connector_info['chipset'] += [None]
self.connector_info['reset'] = [self.o.reset_bus]
def load_bios(self):
# Write the bios into the ROM area, so that checkpoints not
# depend on the BIOS file being available all time.
f = open(SIM_lookup_file(self.bios), "rb")
base = 0x100000000 - self.bios_size
data = map(ord, f.read(self.bios_size))
self.o.pci_mem.iface.memory_space.write(self.o.pci_mem, None,
base, tuple(data), 1)
f.close()
def set_cmos_info(self):
self.rtc = find_device(self.o.port_space, 0x70)
if not self.rtc:
#print "CMOS device not found - can not write information."
return
# set nvram info
eval_cli_line('%s.cmos-init' % self.obj.name)
eval_cli_line('%s.cmos-base-mem 640' % self.obj.name)
eval_cli_line('%s.cmos-extended-mem %d' %
(self.obj.name, self.memory_megs - 1))
m = re.match(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', self.tod)
eval_cli_line(('%s.set-date-time '
+ 'year=%s month=%s mday=%s '
+ 'hour=%s minute=%s second=%s')
% ((self.rtc.name,) + m.groups()))
eval_cli_line('%s.cmos-boot-dev C' % self.obj.name)
flp = find_device(self.o.port_space, 0x3f2)
if flp:
if len(flp.drives):
eval_cli_line('%s.cmos-floppy A "1.44"' % self.obj.name)
if len(flp.drives) > 1:
eval_cli_line('%s.cmos-floppy B "1.44"' % self.obj.name)
ide0 = find_device(self.o.port_space, 0x1f0)
if ide0 and ide0.master:
size = ide0.master.disk_sectors
# Our BIOS does LBA directly: set sectors 63 and heads to x * 16
bios_S = 63
# The following would probably work if our BIOS had support
# for proper 'translation'. Now it seems to fail.
#if size < 504 * 1024 * 1024 / 512:
# bios_H = 16
#elif size < 1008 * 1024 * 1024 / 512:
# bios_H = 32
#elif size < 2016 * 1024 * 1024 / 512:
# bios_H = 64
#elif size < 4032 * 1024 * 1024 / 512:
# bios_H = 128
if size < 4032 * 1024 * 1024 / 512:
bios_H = 16
else:
# 255 is de facto standard since DOS and early Windows can't
# handle 256 heads, this is known as the 4GB limit
bios_H = 255
bios_C = size / (bios_H * bios_S)
#if bios_C * bios_H * bios_S != size:
# print 'Disk size can not be translated to exact BIOS CHS'
# print 'Using CHS: %d %d %d' % (bios_C, bios_H, bios_S)
eval_cli_line('%s.cmos-hd C %d %d %d' % (self.obj.name,
bios_C, bios_H, bios_S))
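        # Worked example (illustrative): a 2 GB disk has 4194304
        # sectors, so with H=16 and S=63 the BIOS geometry becomes
        # C = 4194304 / (16 * 63) = 4161.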
def calc_mtrr_mask(self, classname, size):
return (~(size - 1)
& ((1 << sim.classes[classname].classattrs.physical_bits) - 1))
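    # Worked example for calc_mtrr_mask (illustrative; assumes a CPU
    # class with 36 physical address bits): a 512 MB region has
    # size = 0x20000000, so ~(size - 1) & (2**36 - 1) = 0xfe0000000,
    # which set_mtrr() below ORs with the MTRR valid bit (0x800).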
def set_mtrr(self, cpu):
if not SIM_class_has_attribute(cpu.classname, 'mtrr_def_type'):
return
cpu.mtrr_def_type = 0xc00
megs_remaining = self.memory_megs
next_mtrr = 0
next_base = 0
while megs_remaining:
if next_mtrr > 7:
print ('Warning: %d megabytes of memory not mapped by '
'MTRRs' % megs_remaining)
break
this_size = get_highest_2exp(megs_remaining)
exec 'cpu.mtrr_base%d = next_base | 0x06' % next_mtrr
mask = self.calc_mtrr_mask(cpu.classname, this_size * 1024 * 1024)
exec 'cpu.mtrr_mask%d = mask | 0x800' % next_mtrr
megs_remaining = megs_remaining - this_size
next_base = next_base + this_size * 1024 * 1024
next_mtrr += 1
cpu.mtrr_fix_64k_00000 = 0x0606060606060606
cpu.mtrr_fix_16k_80000 = 0x0606060606060606
cpu.mtrr_fix_16k_a0000 = 0
        # clear all eight IA-32 fixed-range 4K MTRRs (C0000 - F8000)
        cpu.mtrr_fix_4k_c0000 = 0
        cpu.mtrr_fix_4k_c8000 = 0
        cpu.mtrr_fix_4k_d0000 = 0
        cpu.mtrr_fix_4k_d8000 = 0
        cpu.mtrr_fix_4k_e0000 = 0
        cpu.mtrr_fix_4k_e8000 = 0
        cpu.mtrr_fix_4k_f0000 = 0
        cpu.mtrr_fix_4k_f8000 = 0
def instantiation_done(self):
component_object.instantiation_done(self)
for i in range(len(self.cpus)):
self.cpus[i] = SIM_get_object(self.cpus[i].name)
if self.bios:
self.load_bios()
conf.sim.handle_outside_memory = 1
if self.do_init_cmos:
self.set_cmos_info()
self.install_reset_callback(self.cpus[0])
def add_cpu(self, id, cpu):
if len(self.cpus_per_slot) <= id:
self.cpus_per_slot += [None] * (id + 1 - len(self.cpus_per_slot))
if self.cpus_per_slot[id] == None:
self.cpus_per_slot[id] = [cpu]
else:
self.cpus_per_slot[id].append(cpu)
self.cpus = []
for l in self.cpus_per_slot:
if l:
for c in l:
self.cpus.append(c)
self.o.reset_bus.reset_targets = self.cpus
cpu.cpu_group = self.o.reset_bus
def connect_x86_processor(self, connector, cpu):
self.add_cpu(0, cpu)
def connect_x86_chipset(self, connector):
if self.use_shadow:
# move all mappings to shadow memory
self.o.shadow_mem.map += ([x for x in self.o.pci_mem.map
if x[0] >= 0xc0000 and x[0] < 0x100000])
self.o.pci_mem.map = [x for x in self.o.pci_mem.map
if x[0] < 0xc0000 or x[0] >= 0x100000]
self.o.pci_mem.map += ([
[0x0000c0000, self.o.shadow, 0, 0, 0x40000,
self.o.shadow_mem, 1]])
def connect_x86_reset_bus(self, connector):
pass
def get_clock(self):
return self.cpus[0]
def get_processors(self):
return self.cpus
def set_processors(self, cpus):
self.cpus = cpus
def on_reset_hap(self, udata, cpu, hard_reset):
self.on_reboot()
def on_triple_fault(self, udata, cpu, exc_no):
self.on_reboot()
def on_reboot(self):
SIM_log_message(self.obj, 1, 0, Sim_Log_Info,
"System rebooted. Note that the reboot support in"
" Simics is experimental at this point.")
if self.break_on_reboot:
SIM_log_message(self.obj, 1, 0, Sim_Log_Info,
"You will probably want to save the disk content and continue"
" from that in another Simics session.\nTo disable breaking,"
" change the break_on_reboot attribute.")
SIM_break_simulation("Break on system reboot")
def install_reset_callback(self, cpu):
SIM_hap_add_callback_obj("X86_Processor_Reset", cpu, 0,
self.on_reset_hap, None)
excno = cpu.iface.exception.get_number(cpu, "Triple_Fault")
SIM_hap_add_callback_obj_index("Core_Exception", cpu, 0,
self.on_triple_fault, None,
excno)
def set_break_on_reboot(self, val, idx):
self.break_on_reboot = val
return Sim_Set_Ok
def get_break_on_reboot(self, idx):
return self.break_on_reboot
def finalize_instance(self):
component_object.finalize_instance(self)
if self.instantiated:
for connector, cpu, dst_conn in self.connections:
if connector == 'cpu0':
break
else:
SIM_log_message(self.obj, 1, 0, Sim_Log_Error,
"No cpu to install reset callback on!")
return
cpu = cpu.object_list['cpu[0]']
self.install_reset_callback(cpu)
system_attributes = [
['memory_megs', Sim_Attr_Required, 'i',
'The amount of RAM in megabytes on the processor board.'],
['rtc_time', Sim_Attr_Required, 's',
'The date and time of the Real-Time clock.'],
['break_on_reboot', Sim_Attr_Optional, 'b',
'If true, the simulation will stop when machine is rebooted.']]
bios_attribute = [['bios', Sim_Attr_Optional, 's',
'The x86 BIOS file to use.']]
apic_attribute = [['apic_id_list', Sim_Attr_Optional, '[i*]',
'The APIC id to use for each processor']]
register_component_class(x86_system_component,
system_attributes + bios_attribute,
top_level = True)
register_cmos_commands(x86_system_component.classname)
### X86 Apic Bus System
class x86_apic_bus_system_component(x86_system_component):
classname = 'x86-apic-bus-system'
basename = 'system'
description = ('The "x86-apic-bus-system" component represents a '
'multi-processor capable x86 system with up to 255 cpus.')
connectors = x86_system_component.connectors.copy()
del connectors['cpu0']
connectors['io-apic'] = {
'type' : 'apic-bus', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : False, 'multi' : True}
for i in range(255):
connectors['cpu%d' % i] = {
'type' : 'x86-apic-processor', 'direction' : 'down',
'empty_ok' : i != 0, 'hotplug' : False, 'multi' : False}
def __init__(self, parse_obj):
x86_system_component.__init__(self, parse_obj)
self.linux_acpi_bug_workaround = 0
self.use_acpi = 0
self.use_hostfs = 0
self.use_shadow = 0
self.do_create_acpi_tables = 0
self.do_init_cmos = 0
self.do_init_mtrrs = 1
self.map_ram = 0
self.disable_ap_cpus = 1
# default APIC id list that respects more or less Intel's manuals
self.apic_id_list = [ 0, 1, 6, 7, 4, 5, 2, 3,
8, 9,14,15,12,13,10,11]
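        # instantiation_done() maps cpu i to APIC id
        # (i/la)*la + apic_id_list[i % la] where la = len(apic_id_list),
        # so for example cpu 2 -> 6, cpu 16 -> 16 and cpu 18 -> 22.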
def add_connector_info(self):
x86_system_component.add_connector_info(self)
self.connector_info['io-apic'] = [self.o.apic_bus]
if "dram_space" in dir(self.o):
self.connector_info['io-apic'] += [ self.o.dram_space]
for i in range(255):
self.connector_info['cpu%d' % i] = [i,
self.o.phys_mem,
self.o.port_space,
self.o.apic_bus]
def connect_x86_apic_processor(self, connector, cpus):
id = self.connector_info[connector][0]
for i in range(len(cpus)):
self.add_cpu(id, cpus[i])
if self.do_init_mtrrs:
self.set_mtrr(cpus[i])
def connect_apic_bus(self, connector):
pass
def add_objects(self):
x86_system_component.add_objects(self)
self.o.apic_bus = pre_obj('apic_bus$', 'apic-bus')
self.o.apic_bus.apics = []
self.o.apic_bus.ioapic = []
def set_apic_id_list(self, val, idx):
self.apic_id_list = val
return Sim_Set_Ok
def get_apic_id_list(self, idx):
return self.apic_id_list
def instantiation_done(self):
x86_system_component.instantiation_done(self)
apics_list = []
la = len(self.apic_id_list)
for i in range(len(self.cpus)):
apics_list += [self.cpus[i].apic]
a_id = (i/la)*la + self.apic_id_list[i % la]
self.cpus[i].apic.apic_id = a_id
try:
self.cpus[i].cpuid_physical_apic_id = a_id
except:
pass
self.o.apic_bus.apics = apics_list
i = len(self.cpus)
for ioapic in self.o.apic_bus.ioapic:
try:
ioapic.ioapic_id = ((i/la)*la + self.apic_id_list[i % la]) << 24
except:
ioapic.ioapic_id = i << 24
i = i + 1
self.cpus[0].bsp = 1
if self.disable_ap_cpus:
for c in self.cpus[1:]:
c.activity_state = 3 # waiting for SIPI
SIM_disable_processor(c)
register_component_class(
x86_apic_bus_system_component,
system_attributes,
top_level = True)
register_cmos_commands(x86_apic_bus_system_component.classname)
### X86/APIC Multipro System
class x86_apic_system_component(x86_apic_bus_system_component):
classname = 'x86-apic-system'
basename = 'system'
description = ('The "x86-apic-system" component represents a '
'multi-processor capable x86 system with up to 255 cpus.')
connectors = x86_apic_bus_system_component.connectors.copy()
del connectors['io-apic']
connectors['interrupt'] = {
'type' : 'sb-interrupt', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : False, 'multi' : False}
def __init__(self, parse_obj):
x86_apic_bus_system_component.__init__(self, parse_obj)
self.linux_acpi_bug_workaround = 1
self.use_hostfs = 1
self.use_shadow = 1
self.do_create_acpi_tables = 1
self.do_init_cmos = 1
self.do_init_mtrrs = 1
self.map_ram = 1
self.use_acpi = 1
self.disable_ap_cpus = 1
def get_acpi(self, idx):
return self.use_acpi
def set_acpi(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
self.use_acpi = val
return Sim_Set_Ok
def add_objects(self):
x86_apic_bus_system_component.add_objects(self)
self.o.ioapic = pre_obj('ioapic$', 'io-apic')
self.o.ioapic.apic_bus = self.o.apic_bus
self.o.ioapic.ioapic_id = 0
self.o.apic_bus.ioapic = [self.o.ioapic]
self.o.pci_mem.map += [
[0xfec00000, self.o.ioapic, 0, 0, 0x20]]
def add_connector_info(self):
x86_apic_bus_system_component.add_connector_info(self)
for i in range(255):
self.connector_info['cpu%d' % i] = [i,
self.o.phys_mem,
self.o.port_space,
self.o.apic_bus]
self.connector_info['interrupt'] = [self.o.apic_bus,
self.o.ioapic]
def connect_sb_interrupt(self, connector, pic):
pass
def instantiation_done(self):
x86_apic_bus_system_component.instantiation_done(self)
# create the ACPI tables if necessary
if self.do_create_acpi_tables:
import x86_acpi_setup
rsdp_base = iff(self.linux_acpi_bug_workaround, 0xef000, 0)
bios_params = x86_acpi_setup.bios_params_holder()
if self.bios and self.bios.endswith("rombios-2.68"):
# this BIOS have larger data area and other offsets
bios_params.ebda_base = 0x9f800
bios_params.rsdp_offs = 0x400
bios_params.high_desc_offs = 0x424
bios_params.nvr_desc_offs = 0x438
bios_params.reclaim_desc_offs = 0x44c
bios_params.high_desc_offs2 = 0x460
bios_params.smem_offs = 0x488
x86_acpi_setup.create_acpi_tables(
build_acpi_tables = self.use_acpi,
cpu_list = self.cpus,
megs = self.memory_megs,
ioapic_id = self.o.apic_bus.ioapic[0].ioapic_id >> 24,
user_rsdp_address = rsdp_base,
bios_params = bios_params)
register_component_class(
x86_apic_system_component,
system_attributes
+ bios_attribute
+ [['acpi', Sim_Attr_Optional, 'b',
'TRUE if the machine uses ACPI. Default is TRUE.']],
top_level = True)
register_cmos_commands(x86_apic_system_component.classname)
# X86 Separate APIC System
class x86_separate_apic_system_component(x86_apic_system_component):
classname = 'x86-separate-mem-io-system'
basename = 'system'
description = ('The "x86-separate-mem-io-system" component represents a '
'multi-processor capable x86 system with up to 15 cpus.')
connectors = x86_apic_system_component.connectors.copy()
def __init__(self, parse_obj):
x86_apic_system_component.__init__(self, parse_obj)
self.use_acpi = 0
self.use_shadow = 0
self.do_create_acpi_tables = 0
self.do_init_cmos = 0
self.do_init_mtrrs = 1
self.map_ram = 0
def add_objects(self):
x86_apic_system_component.add_objects(self)
self.o.post_log = pre_obj('post$', 'amibios8-post-log')
self.o.port_space.map += [[0x80, self.o.post_log, 0, 0, 1]]
def add_cpu(self, id, cpu):
x86_apic_system_component.add_cpu(self, id, cpu)
cpu.physical_io = self.o.pci_mem
cpu.physical_dram = self.o.dram_space
cpu.physical_memory.default_target = [cpu, 0, 0, self.o.dram_space]
self.o.pci_mem.default_target = [self.o.dram_space, 0, 0,
self.o.dram_space]
register_component_class(
x86_separate_apic_system_component,
system_attributes + bios_attribute,
top_level = True)
register_cmos_commands(x86_separate_apic_system_component.classname)
# E7520 system component
class x86_e7520_system_component(x86_apic_bus_system_component):
classname = 'x86-e7520-system'
basename = 'system'
description = ('The "x86-e7520-system" component represents a E7520-based '
'multi-processor capable system with up to 15 cpus.')
fm2_callback_called = 0
fm2_target = 1
bios_name = ""
def define_bp_targets(self):
if self.memory_megs == 2048:
self.fm2_target = 2
elif self.memory_megs == 4096:
self.fm2_target = 2
elif self.memory_megs == 8192:
self.fm2_target = 4
if self.bios_name == "out_1024":
# p:0x0000f1d76 wrmsr
self.fix_cache_start = 0x0000f1d76
# p:0x0000f1dd2 wrmsr
self.fix_cache_end = 0x0000f1dd2
# p:0x0000f2544 cmp bl,0x0
self.fix_mem_1 = 0x0000f2544
# p:0x0000f290e cmp byte ptr ss:[0x7],0x0
fix_mem_2 = 0x0000f290e
elif self.bios_name == "goose-bios":
# p:0x0000f1f7a wrmsr
self.fix_cache_start = 0x0000f1f7a
# p:0x0000f1fd6 wrmsr
self.fix_cache_end = 0x0000f1fd6
# p:0x0000f2751 cmp bl,0x0
self.fix_mem_1 = 0x0000f2751
# p:0x0000f2b1b cmp byte ptr ss:[0x7],0x0
self.fix_mem_2 = 0x0000f2b1b
else:
print "Unknown BIOS, no patch available"
self.fix_cache_start = -1
self.fix_cache_end = -1
self.fix_mem_1 = -1
self.fix_mem_2 = -1
def fcs_callback(self, dummy, obj, bp, memop):
cp = self.cpus[0].name
print "Setting-up fake cache for %s" % (cp)
eval_cli_line("%s_mem.add-map device = %s_cache_space " % (cp, cp) +
"base = 0xE0000 length = 0x2000")
def fce_callback(self, dummy, obj, bp, memop):
cp = self.cpus[0].name
print "Removing fake cache for %s" % (cp)
eval_cli_line("%s_mem.del-map device = %s_cache_space" % (cp, cp))
def fm1_callback(self, dummy, obj, bp, memop):
cpu = self.cpus[0]
print "Patch memory calibration results (#1) for %s" % (cpu.name)
cpu.bl = 0
def fm2_callback(self, dummy, obj, bp, memop):
cp = self.cpus[0].name
if self.fm2_callback_called < self.fm2_target:
print "Patch memory calibration results (#2) for %s" % (cp)
eval_cli_line("%s_mem.set -l 0xe0007 size = 1 value = 0" % (cp))
self.fm2_callback_called += 1
def get_fm2_callback_called(self, idx):
return self.fm2_callback_called
def set_fm2_callback_called(self, val, idx):
self.fm2_callback_called = val
return Sim_Set_Ok
def set_bp(self):
pm = self.cpus[0].physical_memory
if self.fix_cache_start != -1:
fcs = SIM_breakpoint(pm, 0, 4, self.fix_cache_start, 1, 2)
SIM_hap_add_callback_index("Core_Breakpoint_Memop",
self.fcs_callback, None, fcs)
if self.fix_cache_end != -1:
fce = SIM_breakpoint(pm, 0, 4, self.fix_cache_end, 1, 2)
SIM_hap_add_callback_index("Core_Breakpoint_Memop",
self.fce_callback, None, fce)
if self.fix_mem_1 != -1:
fm1 = SIM_breakpoint(pm, 0, 4, self.fix_mem_1, 1, 2)
SIM_hap_add_callback_index("Core_Breakpoint_Memop",
self.fm1_callback, None, fm1)
if self.fix_mem_2 != -1:
fm2 = SIM_breakpoint(pm, 0, 4, self.fix_mem_2, 1, 2)
SIM_hap_add_callback_index("Core_Breakpoint_Memop",
self.fm2_callback, None, fm2)
def set_bios_name(self, val, idx):
self.bios_name = val
return Sim_Set_Ok
def get_bios_name(self, idx):
return self.bios_name
def finalize_instance(self):
x86_apic_bus_system_component.finalize_instance(self)
if self.instantiated:
self.define_bp_targets()
self.set_bp()
def instantiation_done(self):
x86_apic_bus_system_component.instantiation_done(self)
self.define_bp_targets()
self.set_bp()
register_component_class(
x86_e7520_system_component,
system_attributes
+ [['bios_name', Sim_Attr_Optional, 's',
'The x86 BIOS filename (to set correct breakpoints).']],
[['fm2_callback_called', Sim_Attr_Optional, 'i',
'Internal']],
top_level = True)
register_cmos_commands(x86_e7520_system_component.classname)
### X86 Processor Base Class
class x86_processor_component(component_object):
connectors = {
'cache-cpu' : {'type' : 'timing-model', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : True, 'multi' : False}}
connectors['direct-pins'] = {
'type' : 'x86-pins', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : False, 'multi' : False}
def get_cpu_frequency(self, idx):
return self.freq_mhz
def set_cpu_frequency(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
self.freq_mhz = val
return Sim_Set_Ok
def get_cpi(self, idx):
try:
return self.cpi
except:
return 1
def set_cpi(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
if not val in (1, 2, 4, 8, 16, 32, 64, 128):
return Sim_Set_Illegal_Value
self.cpi = val
return Sim_Set_Ok
def __init__(self, parse_obj, cpu_threads = 1):
component_object.__init__(self, parse_obj)
self.cpu_threads = cpu_threads
self.o.space = [None] * cpu_threads
self.o.cpu = [None] * cpu_threads
self.o.tlb = [None] * cpu_threads
def add_objects(self):
thread_list = []
for i in range(self.cpu_threads):
if self.cpu_threads == 1:
self.o.space[i] = pre_obj('cpu(x)_mem', 'memory-space')
self.o.cpu[i] = pre_obj('cpu(x)', self.cpuclass)
self.o.tlb[i] = pre_obj('cpu(x)_tlb', 'x86-tlb')
else:
self.o.space[i] = pre_obj('cpu(x)_%d_mem'%i, 'memory-space')
self.o.cpu[i] = pre_obj('cpu(x)_%d'%i, self.cpuclass)
self.o.tlb[i] = pre_obj('cpu(x)_%d_tlb'%i, 'x86-tlb')
thread_list.append(self.o.cpu[i])
self.o.space[i].map = []
self.o.cpu[i].processor_number = get_next_cpu_number()
self.o.cpu[i].freq_mhz = self.freq_mhz
try:
self.cpi
self.o.cpu[i].step_rate = [1, self.cpi, 0]
except:
pass
self.o.cpu[i].physical_memory = self.o.space[i]
self.o.tlb[i].cpu = self.o.cpu[i]
self.o.tlb[i].type = "unlimited"
self.o.cpu[i].tlb = self.o.tlb[i]
if self.cpu_threads > 1:
for i in range(self.cpu_threads):
self.o.cpu[i].threads = thread_list
def add_connector_info(self):
if self.cpu_threads == 1:
self.connector_info['cache-cpu'] = [0, self.o.cpu[0]]
else:
for i in range(self.cpu_threads):
self.connector_info['cache-cpu%d' % i] = [i, self.o.cpu[i]]
self.connector_info['direct-pins'] = [self.o.cpu]
def get_cpu_threads(self, idx):
return self.cpu_threads
def connect_x86_processor(self, connector, id, mem_space, port_space):
self.rename_component_objects('%d' % id)
for i in range(self.cpu_threads):
self.o.space[i].default_target = [mem_space, 0, 0, None]
self.o.cpu[i].port_space = port_space
def connect_x86_pins(self, connector):
pass
def connect_timing_model(self, connector, cache):
id = self.connector_info[connector][0]
self.o.space[id].timing_model = cache
def disconnect_timing_model(self, connector):
id = self.connector_info[connector][0]
self.o.space[id].timing_model = None
### Legacy X86 Processor
class x86_legacy_processor_component(x86_processor_component):
connectors = x86_processor_component.connectors.copy()
connectors['system'] = {
'type' : 'x86-processor', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
connectors['interrupt'] = {
'type' : 'sb-interrupt', 'direction' : 'down',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
def __init__(self, parse_obj):
x86_processor_component.__init__(self, parse_obj)
def add_objects(self):
x86_processor_component.add_objects(self)
self.o.x87 = pre_obj('x87_exc$', 'x87_exception')
self.o.x87.irq_level = 13
self.o.x87.ignne_target = self.o.cpu[0]
self.o.cpu[0].ferr_target = self.o.x87
def add_connector_info(self):
x86_processor_component.add_connector_info(self)
self.connector_info['system'] = [self.o.cpu[0]]
self.connector_info['interrupt'] = [self.o.cpu[0], None]
def connect_sb_interrupt(self, connector, pic):
self.o.x87.irq_dev = pic
class i386_processor_component(x86_legacy_processor_component):
classname = 'i386-cpu'
basename = 'cpu'
description = ('The "i386-cpu" component represents an Intel 386 '
'processor.')
cpuclass = 'x86-386'
class i486sx_processor_component(x86_legacy_processor_component):
classname = 'i486sx-cpu'
basename = 'cpu'
description = ('The "i486sx-cpu" component represents an Intel 486sx '
'processor.')
cpuclass = 'x86-486sx'
class i486dx2_processor_component(x86_legacy_processor_component):
classname = 'i486dx2-cpu'
basename = 'cpu'
description = ('The "i486dx2-cpu" component represents an Intel 486dx2 '
'processor.')
cpuclass = 'x86-486dx2'
for model in ['i386', 'i486sx', 'i486dx2']:
register_component_class(
eval('%s_processor_component' % model),
[['cpu_frequency', Sim_Attr_Required, 'i',
'Processor frequency in MHz.'],
['cpi', Sim_Attr_Optional, 'i',
'Cycles per instruction.']])
### X86/APIC Processor
class x86_apic_processor_component(x86_processor_component):
connectors = x86_processor_component.connectors.copy()
connectors['system'] = {
'type' : 'x86-apic-processor', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
# default APIC is a P4-type apic
apic_type = "P4"
def __init__(self, parse_obj, cpu_threads = 1):
x86_processor_component.__init__(self, parse_obj, cpu_threads)
self.o.apic = [None] * cpu_threads
self.apic_freq_mhz = 10
def get_apic_frequency(self, idx):
return self.apic_freq_mhz
def set_apic_frequency(self, val, idx):
if self.obj.configured:
return Sim_Set_Illegal_Value
self.apic_freq_mhz = val
return Sim_Set_Ok
def add_objects(self):
x86_processor_component.add_objects(self)
for i in range(self.cpu_threads):
if self.cpu_threads == 1:
self.o.apic[i] = pre_obj('cpu(x)_apic', 'apic')
else:
self.o.apic[i] = pre_obj('cpu(x)_%d_apic'%i, 'apic')
self.o.apic[i].cpu = self.o.cpu[i]
self.o.apic[i].queue = self.o.cpu[i]
self.o.apic[i].cpu_bus_divisor = (float(self.freq_mhz)
/ self.apic_freq_mhz)
if self.apic_type == "P4":
self.o.apic[i].physical_broadcast_address = 255;
self.o.apic[i].version = 0x14;
self.o.apic[i].apic_type = "P4"
elif self.apic_type == "P6":
self.o.apic[i].physical_broadcast_address = 15;
self.o.apic[i].version = 0x18;
self.o.apic[i].apic_type = "P6"
else:
raise "Unknown APIC type %s" % self.apic_type
self.o.cpu[i].apic = self.o.apic[i]
if self.cpu_threads > 1:
self.o.cpu[i].cpuid_logical_processor_count = self.cpu_threads
else:
self.o.cpu[i].cpuid_logical_processor_count = 0
self.o.space[i].map += [
[0xfee00000, self.o.apic[i], 0, 0, 0x4000]]
def add_connector_info(self):
x86_processor_component.add_connector_info(self)
self.connector_info['system'] = [self.o.cpu]
def connect_x86_apic_processor(self, connector, id,
mem_space, port_space, apic_bus):
self.connect_x86_processor(connector, id, mem_space, port_space)
for i in range(self.cpu_threads):
self.o.cpu[i].bsp = 0
self.o.apic[i].apic_id = 0 # temporary value
if self.cpu_threads > 1:
self.o.cpu[i].cpuid_physical_apic_id = 0
self.o.apic[i].apic_bus = apic_bus
### X86/HyperThreaded Processor, 2 Threads
class x86_2ht_processor_component(x86_apic_processor_component):
connectors = x86_apic_processor_component.connectors.copy()
del connectors['cache-cpu']
for i in range(2):
connectors['cache-cpu%d' % i] = {
'type' : 'timing-model', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : True, 'multi' : False}
def __init__(self, parse_obj):
x86_apic_processor_component.__init__(self, parse_obj, 2)
### X86/HyperThreaded Processor, 4 Threads
class x86_4ht_processor_component(x86_apic_processor_component):
connectors = x86_apic_processor_component.connectors.copy()
del connectors['cache-cpu']
for i in range(4):
connectors['cache-cpu%d' % i] = {
'type' : 'timing-model', 'direction' : 'down',
'empty_ok' : True, 'hotplug' : True, 'multi' : False}
def __init__(self, parse_obj):
x86_apic_processor_component.__init__(self, parse_obj, 4)
class pentium_processor_component(x86_apic_processor_component):
classname = 'pentium-cpu'
basename = 'cpu'
description = ('The "pentium-cpu" component represents an Intel Pentium '
'processor.')
cpuclass = 'x86-pentium'
apic_type = "P6"
class pentium_mmx_processor_component(x86_apic_processor_component):
classname = 'pentium-mmx-cpu'
basename = 'cpu'
description = ('The "pentium-mmx-cpu" component represents an Intel '
'Pentium MMX processor.')
cpuclass = 'x86-pentium-mmx'
apic_type = "P6"
class pentium_pro_processor_component(x86_apic_processor_component):
classname = 'pentium-pro-cpu'
basename = 'cpu'
description = ('The "pentium-pro-cpu" component represents an Intel '
'Pentium Pro (P6) processor.')
cpuclass = 'x86-ppro'
apic_type = "P6"
class pentium_ii_processor_component(x86_apic_processor_component):
classname = 'pentium-ii-cpu'
basename = 'cpu'
description = ('The "pentium-ii-cpu" component represents an Intel '
'Pentium II processor.')
cpuclass = 'x86-p2'
apic_type = "P6"
class pentium_iii_processor_component(x86_apic_processor_component):
classname = 'pentium-iii-cpu'
basename = 'cpu'
description = ('The "pentium-iii-cpu" component represents an Intel '
'Pentium III processor.')
cpuclass = 'x86-p3'
apic_type = "P6"
class pentium_m_processor_component(x86_apic_processor_component):
classname = 'pentium-m-cpu'
basename = 'cpu'
description = ('The "pentium-m-cpu" component represents an Intel '
'Pentium M processor.')
cpuclass = 'x86-pentium-m'
def add_objects(self):
x86_apic_processor_component.add_objects(self)
for i in range(self.cpu_threads):
cpu = self.o.cpu[i]
# 512kb L2 cache
cpu.cpuid_2_eax = 0x03020101;
cpu.cpuid_2_ebx = 0;
cpu.cpuid_2_ecx = 0;
cpu.cpuid_2_edx = 0x430c0804;
cpu.cpuid_family = 6
cpu.cpuid_model = 9
cpu.cpuid_stepping = 5
cpu.cpuid_brand_id = 22
cpu.cpuid_processor_name = "Virtutech Pentium M Processor"
# Sane speedstep frequency/voltage limits. Prevents division by
# zero on some software.
cpu.ia32_perf_sts = 0x01010F0F00000000
class pentium_4_processor_component(x86_apic_processor_component):
classname = 'pentium-4-cpu'
basename = 'cpu'
description = ('The "pentium-4-cpu" component represents an Intel '
'Pentium 4 processor.')
cpuclass = 'x86-p4'
class xeon_prestonia_processor_component(x86_apic_processor_component):
classname = 'xeon-prestonia-cpu'
basename = 'cpu'
description = ('The "xeon-prestonia-cpu" component represents a 32-bit Intel '
'Xeon processor.')
cpuclass = 'x86-p4'
def add_objects(self):
x86_apic_processor_component.add_objects(self)
for i in range(self.cpu_threads):
cpu = self.o.cpu[i]
cpu.cpuid_family = 0xf
cpu.cpuid_model = 2
cpu.cpuid_stepping = 9
cpu.cpuid_brand_id = 0xb
cpu.cpuid_processor_name = "Virtutech Xeon Processor"
class pentium_4e_processor_component(x86_apic_processor_component):
connectors = x86_apic_processor_component.connectors.copy()
classname = 'pentium-4e-cpu'
basename = 'cpu'
description = ('The "pentium-4e-cpu" component represents an Intel '
'64-bit Pentium 4E processor.')
cpuclass = 'x86-p4e'
class pentium_4e_2ht_processor_component(x86_2ht_processor_component):
connectors = x86_2ht_processor_component.connectors.copy()
classname = 'pentium-4e-2ht-cpu'
basename = 'cpu'
description = ('The "pentium-4e-2ht-cpu" component represents an Intel '
'64-bit Pentium 4E processor, with 2 hyper threads.')
cpuclass = 'x86-p4e'
class pentium_4e_4ht_processor_component(x86_4ht_processor_component):
connectors = x86_4ht_processor_component.connectors.copy()
classname = 'pentium-4e-4ht-cpu'
basename = 'cpu'
description = ('The "pentium-4e-4ht-cpu" component represents an Intel '
'64-bit Pentium 4E processor, with 4 hyper threads.')
cpuclass = 'x86-p4e'
class x86_hammer_processor_component(x86_apic_processor_component):
classname = 'x86-hammer-cpu'
basename = 'cpu'
description = ('The "x86-hammer-cpu" component represents a generic '
'64-bit AMD Athlon 64 or Opteron processor without '
'on-chip devices.')
cpuclass = 'x86-hammer'
class opteron_processor_component(x86_apic_processor_component):
connectors = x86_apic_processor_component.connectors.copy()
classname = 'opteron-cpu'
basename = 'cpu'
description = ('The "opteron-cpu" component represents an '
'Opteron 240 processor.')
connectors['pci-bus'] = {
'type' : 'pci-bus', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
cpuclass = 'x86-hammer'
def opteron_brand_id(self, num):
major = num / 100
minor = num % 100
mult = (minor - 38) / 2
if major == 1:
return (0xc << 6) | mult
elif major == 2:
return (0x10 << 6) | mult
elif major == 8:
return (0x14 << 6) | mult
def opteron_vid(self, millivolts):
if millivolts > 1550 or millivolts < 800 or (millivolts % 25) != 0:
            raise Exception("VID undefined")
steps = (millivolts - 800) / 25
return 0x1e - steps
def opteron_fid(self, mhz):
if (mhz % 200) != 0:
            raise Exception("FID undefined")
multiplier = mhz / 200
if multiplier < 4 or multiplier > 25:
            raise Exception("FID undefined")
return (multiplier - 4) << 1
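    # Worked example (illustrative; not part of the original Simics source):
    # opteron_brand_id(240) == (0x10 << 6) | 1 == 0x401,
    # opteron_vid(1500) == 0x1e - 28 == 2, and at 1400 MHz
    # opteron_fid(1400) == (7 - 4) << 1 == 6.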
def instantiation_done(self):
x86_apic_processor_component.instantiation_done(self)
for i in range(self.cpu_threads):
cpu = SIM_get_object(self.o.cpu[i].name)
print "[%s] Setting cpu type to Opteron 240." % cpu.name
# 0xf5a is revision CG
cpu.cpuid_family=0xf
cpu.cpuid_model=5
cpu.cpuid_stepping=0xa
cpu.edx=0xf5a
cpu.cpuid_brand_id=self.opteron_brand_id(240)
cpu.fidvid_status = ((self.opteron_vid(1500) << 48) |
(self.opteron_vid(1500) << 40) |
(self.opteron_vid(1500) << 32) |
(self.opteron_vid(1500) << 24) |
(self.opteron_fid(cpu.freq_mhz) << 16) |
(self.opteron_fid(cpu.freq_mhz) << 8) |
(self.opteron_fid(cpu.freq_mhz) << 0))
def add_objects(self):
x86_apic_processor_component.add_objects(self)
self.o.hypertransport = pre_obj(self.o.cpu[0].name + '_hypertransport$', 'k8_hypertransport')
self.o.address_map = pre_obj(self.o.cpu[0].name + '_address_map$', 'k8_address_map')
self.o.dram = pre_obj(self.o.cpu[0].name + '_dram$', 'k8_dram')
self.o.misc = pre_obj(self.o.cpu[0].name + '_misc$', 'k8_misc')
self.o.hypertransport.misc_function = self.o.misc
self.o.address_map.misc_function = self.o.misc
def add_connector_info(self):
x86_apic_processor_component.add_connector_info(self)
self.connector_info['pci-bus'] = [[[0, self.o.hypertransport],
[1, self.o.address_map],
[2, self.o.dram],
[3, self.o.misc]]]
def connect_pci_bus(self, connector, slot, pci_bus):
self.o.hypertransport.pci_bus = pci_bus
self.o.address_map.pci_bus = pci_bus
self.o.dram.pci_bus = pci_bus
self.o.misc.pci_bus = pci_bus
for model in ['pentium', 'pentium_mmx', 'pentium_pro',
'pentium_ii', 'pentium_iii', 'pentium_m', 'pentium_4',
'pentium_4e', 'pentium_4e_2ht', 'pentium_4e_4ht',
'x86_hammer', 'opteron', 'xeon_prestonia']:
register_component_class(
eval('%s_processor_component' % model),
[['cpu_frequency', Sim_Attr_Required, 'i',
'Processor frequency in MHz.'],
['cpi', Sim_Attr_Optional, 'i',
'Cycles per instruction.'],
['apic_frequency', Sim_Attr_Optional, 'i',
'APIC bus frequency in MHz, default is 10 MHz.'],
['cpu_threads', Sim_Attr_Pseudo, 'i',
'The number of hyper threads in the processor.']])
### North Bridge base class
class north_bridge_component(component_object):
connectors = {
'system' : {'type' : 'x86-chipset', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}}
for i in range(24):
connectors['pci-slot%d' % i] = {
'type' : 'pci-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}
def __init__(self, parse_obj):
component_object.__init__(self, parse_obj)
if not "pci_bus_class" in dir(self):
self.pci_bus_class = 'pci-bus'
def add_objects(self):
self.o.pci_conf = pre_obj('pci_conf$', 'memory-space')
self.o.pci_io = pre_obj('pci_io$', 'memory-space')
self.o.pci_io.map = []
self.o.pci_bus = pre_obj('pci_bus$', self.pci_bus_class)
self.o.pci_bus.conf_space = self.o.pci_conf
self.o.pci_bus.io_space = self.o.pci_io
self.o.pci_bus.pci_devices = []
def add_connector_info(self):
self.connector_info['system'] = []
for i in range(24):
self.connector_info['pci-slot%d' % i] = [i, self.o.pci_bus]
self.connector_info['southbridge'] = [self.o.pci_bus]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
self.o.pci_bus.memory_space = pci_space
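        # PCI configuration mechanism #1: CONFIG_ADDRESS (0xcf8) and
        # CONFIG_DATA (0xcfc), plus their byte/word aliases, are routed to
        # the host bridge so the CPU can issue PCI configuration cycles.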
port_space.map += [
[0xcf8, self.o.bridge, 0, 0xcf8, 4],
[0xcf9, self.o.bridge, 0, 0xcf9, 2],
[0xcfa, self.o.bridge, 0, 0xcfa, 2],
[0xcfb, self.o.bridge, 0, 0xcfb, 1],
[0xcfc, self.o.bridge, 0, 0xcfc, 4],
[0xcfd, self.o.bridge, 0, 0xcfd, 2],
[0xcfe, self.o.bridge, 0, 0xcfe, 2],
[0xcff, self.o.bridge, 0, 0xcff, 1]]
port_space.default_target = [self.o.pci_io, 0, 0, None]
pci_space.default_target = [ram, 0, 0, None]
def connect_pci_bus(self, connector, device_list):
slot = self.connector_info[connector][0]
bus = self.connector_info[connector][1]
devs = bus.pci_devices
for dev in device_list:
devs += [[slot, dev[0], dev[1]]]
bus.pci_devices = devs
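# Note (illustrative): each entry in a bus's pci_devices list has the shape
# [slot, function, device-object]; connect_pci_bus() above appends one such
# triple per [function, device] pair handed over by the connecting component.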
### Intel 443BX North Bridge without AGP
class north_bridge_443bx_component(north_bridge_component):
classname = 'north-bridge-443bx'
basename = 'north_bridge'
description = ('The "north-bridge-443bx" component represents an Intel '
'443BX North Bridge (host-to-PCI bridge) without AGP.')
def add_objects(self):
north_bridge_component.add_objects(self)
self.o.bridge = pre_obj('north_bridge$', 'i82443bx')
self.o.bridge.pci_bus = self.o.pci_bus
self.o.pci_bus.bridge = self.o.bridge
self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge]]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
north_bridge_component.connect_x86_chipset(
self, connector, phys_space, pci_space, port_space, ram)
self.o.bridge.memory = phys_space
port_space.map += [[0x22, self.o.bridge, 0, 0x22, 1]]
register_component_class(north_bridge_443bx_component, [])
### Intel 443BX North Bridge with AGP
class north_bridge_443bx_agp_component(north_bridge_443bx_component):
classname = 'north-bridge-443bx-agp'
basename = 'north_bridge'
    description = ('The "north-bridge-443bx-agp" component represents an Intel '
'443BX North Bridge (host-to-PCI bridge) with AGP.')
connectors = north_bridge_443bx_component.connectors.copy()
connectors['agp-slot0'] = {
'type' : 'agp-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}
def add_objects(self):
north_bridge_443bx_component.add_objects(self)
self.o.agp_conf = pre_obj('agp_conf$', 'memory-space')
self.o.agp_io = pre_obj('agp_io$', 'memory-space')
self.o.agp_io.map = []
self.o.agp_mem = pre_obj('agp_mem$', 'memory-space')
self.o.agp_mem.map = []
self.o.agp_bus = pre_obj('agp_bus$', 'pci-bus')
self.o.agp_bus.conf_space = self.o.agp_conf
self.o.agp_bus.io_space = self.o.agp_io
self.o.agp_bus.memory_space = self.o.agp_mem
self.o.agp_bus.pci_devices = []
self.o.agp = pre_obj('pci_to_agp$', 'i82443bx_agp')
self.o.agp.pci_bus = self.o.pci_bus
self.o.agp.secondary_bus = self.o.agp_bus
self.o.agp_bus.bridge = self.o.agp
self.o.bridge.agp_bridge = self.o.agp
self.o.pci_bus.pci_devices += [[1, 0, self.o.agp]]
def add_connector_info(self):
north_bridge_443bx_component.add_connector_info(self)
self.connector_info['agp-slot0'] = [0, self.o.agp_bus]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
north_bridge_443bx_component.connect_x86_chipset(
self, connector, phys_space, pci_space, port_space, ram)
def connect_agp_bus(self, connector, device_list):
self.connect_pci_bus(connector, device_list)
register_component_class(north_bridge_443bx_agp_component, [])
### Intel 875P North Bridge
class north_bridge_875p_component(north_bridge_component):
classname = 'north-bridge-875p'
basename = 'north_bridge'
description = ('The "north-bridge-875p" component represents an Intel '
'875P North Bridge (host-to-PCI bridge).')
connectors = north_bridge_component.connectors.copy()
connectors['agp-slot0'] = {
'type' : 'agp-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}
def add_objects(self):
north_bridge_component.add_objects(self)
self.o.bridge = pre_obj('north_bridge$', 'i82875P')
self.o.bridge.pci_bus = self.o.pci_bus
self.o.pci_bus.bridge = self.o.bridge
self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge]]
self.o.agp_conf = pre_obj('agp_conf$', 'memory-space')
self.o.agp_io = pre_obj('agp_io$', 'memory-space')
self.o.agp_io.map = []
self.o.agp_mem = pre_obj('agp_mem$', 'memory-space')
self.o.agp_mem.map = []
self.o.agp_bus = pre_obj('agp_bus$', 'pci-bus')
self.o.agp_bus.conf_space = self.o.agp_conf
self.o.agp_bus.io_space = self.o.agp_io
self.o.agp_bus.memory_space = self.o.agp_mem
self.o.agp_bus.pci_devices = []
self.o.agp = pre_obj('pci_to_agp$', 'i82875P_agp')
self.o.agp.pci_bus = self.o.pci_bus
self.o.agp.secondary_bus = self.o.agp_bus
self.o.agp_bus.bridge = self.o.agp
self.o.bridge.agp_bridge = self.o.agp
self.o.pci_bus.pci_devices += [[1, 0, self.o.agp]]
def add_connector_info(self):
north_bridge_component.add_connector_info(self)
self.connector_info['agp-slot0'] = [0, self.o.agp_bus]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
north_bridge_component.connect_x86_chipset(
self, connector, phys_space, pci_space, port_space, ram)
self.o.bridge.memory = phys_space
port_space.map += [[0x22, self.o.bridge, 0, 0x22, 1]]
def connect_agp_bus(self, connector, device_list):
self.connect_pci_bus(connector, device_list)
register_component_class(north_bridge_875p_component, [])
### Intel E7520 North Bridge
class north_bridge_e7520_component(north_bridge_component):
classname = 'north-bridge-e7520'
basename = 'north_bridge'
description = ('The "north-bridge-e7520" component represents an Intel '
'E7520 North Bridge (host-to-PCI bridge).')
connectors = north_bridge_component.connectors.copy()
connectors['southbridge'] = {
'type' : 'hub-link', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}
connectors['pcie-a-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
connectors['pcie-a1-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
connectors['pcie-b-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
connectors['pcie-b1-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
connectors['pcie-c-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
connectors['pcie-c1-slot'] = {
'type' : 'pcie-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
for i in range(24):
del connectors['pci-slot%d' % i]
def add_objects(self):
self.pci_bus_class = 'pcie-switch'
north_bridge_component.add_objects(self)
self.o.bridge = pre_obj('mch$', 'e7520')
self.o.bridge.pci_bus = self.o.pci_bus
self.o.mch_error = pre_obj('mch$_error', 'e7520_error')
self.o.mch_error.pci_bus = self.o.pci_bus
self.o.mch_dma = pre_obj('mch$_dma', 'e7520_dma')
self.o.mch_dma.pci_bus = self.o.pci_bus
self.o.mch_extended = pre_obj('mch$_extended', 'e7520_extended')
self.o.mch_extended.pci_bus = self.o.pci_bus
# PCIE-A
self.o.mch_pcie_a = pre_obj('mch$_pcie_a', 'e7520_pcie_port')
        self.o.mch_pcie_a.pci_config_device_id = 0x3595
self.o.mch_pcie_a.pci_bus = self.o.pci_bus
self.o.mch_pcie_a_bus = pre_obj('mch$_pcie_a_bus', 'pcie-switch')
self.o.mch_pcie_a_bus.pci_devices = []
self.o.mch_pcie_a_conf = pre_obj('mch$_pcie_a_conf', 'memory-space')
self.o.mch_pcie_a_io = pre_obj('mch$_pcie_a_io$', 'memory-space')
self.o.mch_pcie_a_mem = pre_obj('mch$_pcie_a_mem$', 'memory-space')
self.o.mch_pcie_a_bus.conf_space = self.o.mch_pcie_a_conf
self.o.mch_pcie_a_bus.io_space = self.o.mch_pcie_a_io
self.o.mch_pcie_a_bus.memory_space = self.o.mch_pcie_a_mem
self.o.mch_pcie_a_bus.bridge = self.o.mch_pcie_a
self.o.mch_pcie_a.secondary_bus = self.o.mch_pcie_a_bus
# PCIE-A1
self.o.mch_pcie_a1 = pre_obj('mch$_pcie_a1', 'e7520_pcie_port')
        self.o.mch_pcie_a1.pci_config_device_id = 0x3595
self.o.mch_pcie_a1.pci_bus = self.o.pci_bus
self.o.mch_pcie_a1_bus = pre_obj('mch$_pcie_a1_bus', 'pcie-switch')
self.o.mch_pcie_a1_bus.pci_devices = []
self.o.mch_pcie_a1_conf = pre_obj('mch$_pcie_a1_conf', 'memory-space')
self.o.mch_pcie_a1_io = pre_obj('mch$_pcie_a1_io$', 'memory-space')
self.o.mch_pcie_a1_mem = pre_obj('mch$_pcie_a1_mem$', 'memory-space')
self.o.mch_pcie_a1_bus.conf_space = self.o.mch_pcie_a1_conf
self.o.mch_pcie_a1_bus.io_space = self.o.mch_pcie_a1_io
self.o.mch_pcie_a1_bus.memory_space = self.o.mch_pcie_a1_mem
self.o.mch_pcie_a1_bus.bridge = self.o.mch_pcie_a1
self.o.mch_pcie_a1.secondary_bus = self.o.mch_pcie_a1_bus
# PCIE-B
self.o.mch_pcie_b = pre_obj('mch$_pcie_b', 'e7520_pcie_port')
        self.o.mch_pcie_b.pci_config_device_id = 0x3595
self.o.mch_pcie_b.pci_bus = self.o.pci_bus
self.o.mch_pcie_b_bus = pre_obj('mch$_pcie_b_bus', 'pcie-switch')
self.o.mch_pcie_b_bus.pci_devices = []
self.o.mch_pcie_b_conf = pre_obj('mch$_pcie_b_conf', 'memory-space')
self.o.mch_pcie_b_io = pre_obj('mch$_pcie_b_io$', 'memory-space')
self.o.mch_pcie_b_mem = pre_obj('mch$_pcie_b_mem$', 'memory-space')
self.o.mch_pcie_b_bus.conf_space = self.o.mch_pcie_b_conf
self.o.mch_pcie_b_bus.io_space = self.o.mch_pcie_b_io
self.o.mch_pcie_b_bus.memory_space = self.o.mch_pcie_b_mem
self.o.mch_pcie_b_bus.bridge = self.o.mch_pcie_b
self.o.mch_pcie_b.secondary_bus = self.o.mch_pcie_b_bus
# PCIE-B1
self.o.mch_pcie_b1 = pre_obj('mch$_pcie_b1', 'e7520_pcie_port')
        self.o.mch_pcie_b1.pci_config_device_id = 0x3595
self.o.mch_pcie_b1.pci_bus = self.o.pci_bus
self.o.mch_pcie_b1_bus = pre_obj('mch$_pcie_b1_bus', 'pcie-switch')
self.o.mch_pcie_b1_bus.pci_devices = []
self.o.mch_pcie_b1_conf = pre_obj('mch$_pcie_b1_conf', 'memory-space')
self.o.mch_pcie_b1_io = pre_obj('mch$_pcie_b1_io$', 'memory-space')
self.o.mch_pcie_b1_mem = pre_obj('mch$_pcie_b1_mem$', 'memory-space')
self.o.mch_pcie_b1_bus.conf_space = self.o.mch_pcie_b1_conf
self.o.mch_pcie_b1_bus.io_space = self.o.mch_pcie_b1_io
self.o.mch_pcie_b1_bus.memory_space = self.o.mch_pcie_b1_mem
self.o.mch_pcie_b1_bus.bridge = self.o.mch_pcie_b1
self.o.mch_pcie_b1.secondary_bus = self.o.mch_pcie_b1_bus
# PCIE-C
self.o.mch_pcie_c = pre_obj('mch$_pcie_c', 'e7520_pcie_port')
        self.o.mch_pcie_c.pci_config_device_id = 0x3595
self.o.mch_pcie_c.pci_bus = self.o.pci_bus
self.o.mch_pcie_c_bus = pre_obj('mch$_pcie_c_bus', 'pcie-switch')
self.o.mch_pcie_c_bus.pci_devices = []
self.o.mch_pcie_c_conf = pre_obj('mch$_pcie_c_conf', 'memory-space')
self.o.mch_pcie_c_io = pre_obj('mch$_pcie_c_io$', 'memory-space')
self.o.mch_pcie_c_mem = pre_obj('mch$_pcie_c_mem$', 'memory-space')
self.o.mch_pcie_c_bus.conf_space = self.o.mch_pcie_c_conf
self.o.mch_pcie_c_bus.io_space = self.o.mch_pcie_c_io
self.o.mch_pcie_c_bus.memory_space = self.o.mch_pcie_c_mem
self.o.mch_pcie_c_bus.bridge = self.o.mch_pcie_c
self.o.mch_pcie_c.secondary_bus = self.o.mch_pcie_c_bus
# PCIE-C1
self.o.mch_pcie_c1 = pre_obj('mch$_pcie_c1', 'e7520_pcie_port')
        self.o.mch_pcie_c1.pci_config_device_id = 0x3595
self.o.mch_pcie_c1.pci_bus = self.o.pci_bus
self.o.mch_pcie_c1_bus = pre_obj('mch$_pcie_c1_bus', 'pcie-switch')
self.o.mch_pcie_c1_bus.pci_devices = []
self.o.mch_pcie_c1_conf = pre_obj('mch$_pcie_c1_conf', 'memory-space')
self.o.mch_pcie_c1_io = pre_obj('mch$_pcie_c1_io$', 'memory-space')
self.o.mch_pcie_c1_mem = pre_obj('mch$_pcie_c1_mem$', 'memory-space')
self.o.mch_pcie_c1_bus.conf_space = self.o.mch_pcie_c1_conf
self.o.mch_pcie_c1_bus.io_space = self.o.mch_pcie_c1_io
self.o.mch_pcie_c1_bus.memory_space = self.o.mch_pcie_c1_mem
self.o.mch_pcie_c1_bus.bridge = self.o.mch_pcie_c1
self.o.mch_pcie_c1.secondary_bus = self.o.mch_pcie_c1_bus
self.o.bridge.pcie_mem = [self.o.mch_pcie_a_mem,
self.o.mch_pcie_a1_mem,
self.o.mch_pcie_b_mem,
self.o.mch_pcie_b1_mem,
self.o.mch_pcie_c_mem,
self.o.mch_pcie_c1_mem]
self.o.pci_bus.bridge = self.o.bridge
self.o.pci_bus.pci_devices = [[0, 0, self.o.bridge],
[0, 1, self.o.mch_error],
[1, 0, self.o.mch_dma],
[2, 0, self.o.mch_pcie_a],
[3, 0, self.o.mch_pcie_a1],
[4, 0, self.o.mch_pcie_b],
[5, 0, self.o.mch_pcie_b1],
[6, 0, self.o.mch_pcie_c],
[7, 0, self.o.mch_pcie_c1],
[8, 0, self.o.mch_extended]]
def add_connector_info(self):
north_bridge_component.add_connector_info(self)
self.connector_info['southbridge'] = [self.o.pci_bus]
self.connector_info['pcie-a-slot'] = [self.o.mch_pcie_a_bus]
self.connector_info['pcie-a1-slot'] = [self.o.mch_pcie_a1_bus]
self.connector_info['pcie-b-slot'] = [self.o.mch_pcie_b_bus]
self.connector_info['pcie-b1-slot'] = [self.o.mch_pcie_b1_bus]
self.connector_info['pcie-c-slot'] = [self.o.mch_pcie_c_bus]
self.connector_info['pcie-c1-slot'] = [self.o.mch_pcie_c1_bus]
def check_pcie_bus(self, connector, device_list):
bus = self.connector_info[connector][0]
used = {}
for d in bus.pci_devices:
slot, fun, dev = d
used[(slot, fun)] = 1
for d in device_list:
slot, fun, dev = d
            if (slot, fun) in used:
raise Exception, "Slot %d Function %d already in use." % (slot, fun)
def connect_pcie_bus(self, connector, device_list):
bus = self.connector_info[connector][0]
bus.pci_devices += device_list
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
north_bridge_component.connect_x86_chipset(
self, connector, phys_space, pci_space, port_space, ram)
self.o.bridge.ram = ram.map[0][1] # real RAM object
self.o.bridge.dram = ram
self.o.bridge.cpu_memory = phys_space
self.o.bridge.pci_memory = pci_space
# static translator
phys_space.map += [
[0x000C0000, self.o.bridge, 10, 0xC0000, 0x40000, pci_space, 1,0,0]]
def connect_hub_link(self, connector):
pass
register_component_class(north_bridge_e7520_component, [])
### AMD K8 integrated north bridge
class north_bridge_k8_component(north_bridge_component):
connectors = north_bridge_component.connectors.copy()
for i in range(24,32):
connectors['pci-slot%d' % i] = {
'type' : 'pci-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : False}
classname = 'north-bridge-k8'
basename = 'north_bridge'
description = ('The "north-bridge-k8" component represents the '
'integrated north bridge on an AMD Athlon 64 or '
'AMD Opteron chip.')
def add_objects(self):
north_bridge_component.add_objects(self)
self.o.bridge = pre_obj('north_bridge$', 'k8_host_bridge')
self.o.bridge.pci_bus = self.o.pci_bus
self.o.pci_bus.bridge = self.o.bridge
def add_connector_info(self):
north_bridge_component.add_connector_info(self)
for i in range(24,32):
self.connector_info['pci-slot%d' % i] = [i, self.o.pci_bus]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
north_bridge_component.connect_x86_chipset(
self, connector, phys_space, pci_space, port_space, ram)
self.o.bridge.memory = phys_space
def connect_pci_bus(self, connector, device_list):
north_bridge_component.connect_pci_bus(self, connector, device_list)
register_component_class(north_bridge_k8_component, [])
### Legacy PC Devices
class legacy_pc_devices_component(standard_pc_devices_component):
classname = 'legacy-pc-devices'
basename = 'legacy'
description = ('The "legacy-pc-devices" component represents the legacy '
'devices found in PC/AT compatible computers. This '
'component can be used as system chipset in ISA based '
'x86 machines without PCI support.')
connectors = standard_pc_devices_component.connectors.copy()
connectors['system'] = {
'type' : 'x86-chipset', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
connectors['interrupt'] = {
'type' : 'sb-interrupt', 'direction' : 'up',
'empty_ok' : False, 'hotplug' : False, 'multi' : False}
connectors['isa-bus'] = {
'type' : 'isa-bus', 'direction' : 'down',
'empty_ok' : 1, 'hotplug' : False, 'multi' : True}
def add_objects(self):
self.used_ports = []
standard_pc_devices_component.add_objects(self)
self.mem_space = None
def add_connector_info(self):
standard_pc_devices_component.add_connector_info(self)
self.connector_info['system'] = []
self.connector_info['interrupt'] = [self.o.isa]
self.connector_info['isa-bus'] = [self.o.isa_bus, None,
self.o.isa, self.o.dma]
def connect_x86_chipset(self, connector, phys_space, pci_space,
port_space, ram):
self.mem_space = phys_space
self.o.dma.memory = phys_space
port_space.default_target = [self.o.isa_bus, 0, 0, None]
self.connector_info['isa-bus'] = [self.o.isa_bus, phys_space,
self.o.isa, self.o.dma]
def connect_sb_interrupt(self, connector, irq_dst, ioapic):
self.o.pic.irq_dev = irq_dst
def check_isa_bus(self, connector, ports):
for p in ports:
if p in self.used_ports:
# TODO: use specific exception
raise Exception, "Port 0x%x already in use." % p
self.used_ports += ports
def connect_isa_bus(self, connector):
pass
register_component_class(legacy_pc_devices_component, [])
| gpl-2.0 | -5,586,652,861,325,700,000 | 38.931429 | 101 | 0.557026 | false |
bscottm/SublimeHaskell | internals/proc_helper.py | 1 | 9148 | # -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
# ProcHelper: Process execution helper class.
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
import errno
import subprocess
import os
import os.path
import SublimeHaskell.sublime_haskell_common as Common
import SublimeHaskell.internals.logging as Logging
import SublimeHaskell.internals.settings as Settings
import SublimeHaskell.internals.utils as Utils
import SublimeHaskell.internals.which as Which
import SublimeHaskell.internals.cabal_cfgrdr as CabalConfigRdr
import SublimeHaskell.internals.cabal_reader as CabalReader
class ProcHelper(object):
"""Command and tool process execution helper."""
# Augmented PATH for the subprocesses and locating executables.
augmented_path = None
def __init__(self, command, **popen_kwargs):
"""Open a pipe to a command or tool."""
if ProcHelper.augmented_path is None:
ProcHelper.augmented_path = ProcHelper.make_augmented_path()
## Necessary evil: Don't cache the environment, just update the PATH in the current environment.
## Why? Because someone could (like me) change os.environ via the ST console and those changes
        ## would never make it here. Use case: setting $http_proxy so that stack can fetch packages.
proc_env = dict(os.environ)
proc_env['PATH'] = ProcHelper.augmented_path + os.pathsep + proc_env.get('PATH', '')
self.process = None
self.process_err = None
if Utils.is_windows():
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
popen_kwargs['startupinfo'] = startupinfo
# Allow caller to specify something different for stdout or stderr -- provide
# the default here if unspecified.
popen_kwargs['stdout'] = popen_kwargs.get('stdout', subprocess.PIPE)
popen_kwargs['stderr'] = popen_kwargs.get('stderr', subprocess.PIPE)
try:
normcmd = Which.which(command, proc_env['PATH'])
if normcmd is not None:
self.process = subprocess.Popen(normcmd, stdin=subprocess.PIPE, env=proc_env, **popen_kwargs)
else:
self.process = None
self.process_err = "SublimeHaskell.ProcHelper: {0} was not found on PATH!".format(command[0])
except OSError as os_exc:
self.process_err = \
'\n'.join(["SublimeHaskell: Problem executing '{0}'".format(' '.join(command))
, 'Operating system error: {0}'.format(os_exc)
])
            if os_exc.errno == errno.EPIPE and self.process is not None:
# Most likely reason: subprocess output a usage message
stdout, stderr = self.process.communicate()
exit_code = self.process.wait()
self.process_err = self.process_err + \
'\n'.join([''
, 'Process exit code: {0}'.format(exit_code)
, ''
, "output:"
, stdout if stdout else "--no output--"
, ''
, 'error:'
, stderr if stderr else "--no error output--"])
self.process = None
else:
self.process = None
raise os_exc
# 'with' statement support:
def __enter__(self):
return self
def __exit__(self, _type, _value, _traceback):
self.cleanup()
return False
def cleanup(self):
if self.process is not None:
self.process.stdin.close()
self.process.stdout.close()
if self.process.stderr is not None:
# stderr can be None if it is tied to stdout (i.e., 'stderr=subprocess.STDOUT')
self.process.stderr.close()
def wait(self, input_str=None):
"""Wait for subprocess to complete and exit, collect and decode ``stdout`` and ``stderr``,
returning the tuple ``(exit_code, stdout, stderr)```"""
if self.process is not None:
stdout, stderr = self.process.communicate(Utils.encode_bytes(input_str if input_str else ''))
exit_code = self.process.wait()
# Ensure that we reap the file descriptors.
self.cleanup()
return (exit_code, Utils.decode_bytes(stdout), Utils.decode_bytes(stderr))
return (-1, '', self.process_err or "?? unknown error -- no process.")
# Update the augmented environment when `add_to_PATH` or `add_standard_dirs` change.
@staticmethod
def update_environment(_key, _val):
# Reinitialize the tool -> path cache:
Which.reset_cache()
ProcHelper.augmented_path = ProcHelper.make_augmented_path()
@staticmethod
def make_augmented_path():
''' Generate the augmented PATH for subprocesses: adds the appropriate cabal/stack local install directory
($HOME/.local/bin for *nix, %APPDATA%/local/bin for Windows) and updates PATH with `add_to_PATH` extras.
'''
std_places = []
if Settings.PLUGIN.add_standard_dirs:
std_places.append("$HOME/.local/bin" if not Utils.is_windows() else "%APPDATA%/local/bin")
if Utils.is_macosx():
std_places.append('$HOME/Library/Haskell/bin')
std_places += CabalConfigRdr.cabal_config()
        std_places = [d for d in [Utils.normalize_path(path) for path in std_places] if os.path.isdir(d)]
add_to_path = list(filter(os.path.isdir, map(Utils.normalize_path, Settings.PLUGIN.add_to_path)))
Logging.log("std_places = {0}".format(std_places), Logging.LOG_INFO)
Logging.log("add_to_PATH = {0}".format(add_to_path), Logging.LOG_INFO)
return os.pathsep.join(add_to_path + std_places)
@staticmethod
def get_extended_path():
if ProcHelper.augmented_path is None:
ProcHelper.augmented_path = ProcHelper.make_augmented_path()
return ProcHelper.augmented_path + os.pathsep + (os.environ.get('PATH', ''))
@staticmethod
def run_process(command, input_string='', **popen_kwargs):
"""Execute a subprocess, wait for it to complete, returning a ``(exit_code, stdout, stderr)``` tuple."""
with ProcHelper(command, **popen_kwargs) as proc:
return proc.wait(input_string)
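# Minimal usage sketch (illustrative; not part of the original plugin):
#
#   exit_code, stdout, stderr = ProcHelper.run_process(['ghc', '--numeric-version'])
#   if exit_code == 0:
#       print('GHC version:', stdout.strip())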
def exec_wrapper_cmd(exec_with, cmd_list):
wrapper = []
if exec_with == 'cabal':
wrapper = ['cabal', 'exec', cmd_list[0]]
elif exec_with == 'cabal-new-build':
wrapper = ['cabal', 'new-run', 'exe:' + cmd_list[0]]
elif exec_with == 'stack':
wrapper = ['stack', 'exec', cmd_list[0]]
else:
errmsg = 'ProcHelper.exec_wrapper_cmd: Unknown execution prefix \'{0}\''.format(exec_with)
raise RuntimeError(errmsg)
return wrapper + ['--'] + cmd_list[1:] if cmd_list[1:] else wrapper
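# For example (illustrative): exec_wrapper_cmd('stack', ['hlint', 'src'])
# returns ['stack', 'exec', 'hlint', '--', 'src'].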
def exec_with_wrapper(exec_with, install_dir, cmd_list):
'''Wrapper function for inserting the execution wrapper, e.g., 'cabal exec' or 'stack exec'
:returns: Process object from ProcHelper.
'''
proc_args = {}
if exec_with is not None:
cmd_list = exec_wrapper_cmd(exec_with, cmd_list)
if install_dir is not None:
proc_args['cwd'] = Utils.normalize_path(install_dir)
else:
raise RuntimeError('ProcHelper.exec_with_wrapper: invalid install_dir (None)')
else:
cmd = Which.which(cmd_list[0], ProcHelper.get_extended_path())
if cmd is not None:
cmd_list[0] = cmd
Logging.log('ProcHelper.exec_with_wrapper: {0} in {1}'.format(cmd_list, proc_args.get('cwd')), Logging.LOG_DEBUG)
return ProcHelper(cmd_list, **proc_args)
def get_source_dir(filename):
'''Get root of hs-source-dirs for filename in project.
'''
if not filename:
return os.path.expanduser('~')
cabal_dir, cabal_proj = Common.locate_cabal_project(filename)
if not cabal_dir:
        # No cabal file -> punt and assume the file's own directory is the source directory.
return os.path.dirname(filename)
else:
proj_info = CabalReader.CabalProjectReader(cabal_dir, cabal_proj)
cabal_info = proj_info.cabal_info
dirs = ['.']
executables = cabal_info.get('executable', {})
dirs.extend([sdir.strip()
for exe in executables
for sdirs in executables[exe].get('hs-source-dirs', [])
for sdir in sdirs.split(',')])
dirs.extend([sdir.strip()
for sdirs in cabal_info.get('library', {}).get('hs-source-dirs', [])
for sdir in sdirs.split(',')])
paths = [os.path.abspath(os.path.join(cabal_dir, srcdirs)) for srcdirs in set(dirs)]
paths.sort(key=lambda p: -len(p))
for path in paths:
if filename.startswith(path):
return path
return os.path.dirname(filename)
| mit | 4,924,986,076,657,667,000 | 41.351852 | 117 | 0.591277 | false |
Pytlicek/VOBS | app/controllers/supplier/profile.py | 1 | 1457 | # -*- coding: utf-8 -*-
from flask import render_template, session, redirect, url_for
from app import app
from app.models.Forms import PublicProfile
from app.models.Checks import login_required
from app.models.SQL_DB import User, Item_Profile
@app.route('/supplier/settings/public_profile', methods=['GET', 'POST'])
@login_required
def supplier_settings_public_profile():
supplier_data = Item_Profile.query.filter_by(
user_id=User.query.filter_by(username=session['username']).first().id).first()
form = PublicProfile()
return render_template('supplier/profile/index.html', supplier_data=supplier_data, form=form)
@app.route('/supplier/settings/public_profile/public_profile_change', methods=['POST'])
@login_required
def supplier_settings_public_profile_change():
from app.models.Profiles import edit_public_profile
supplier_data = Item_Profile.query.filter_by(
user_id=User.query.filter_by(username=session['username']).first().id).first()
form = PublicProfile()
if form.validate_on_submit():
edit_public_profile(form.company_name.data, form.company_address.data, form.company_logo.data, form.ico.data,
form.dic.data, form.ic_dph.data, form.phone.data, form.email.data, form.website.data)
return redirect(url_for('supplier_settings_public_profile'))
else:
return render_template('supplier/profile/index.html', supplier_data=supplier_data, form=form)
| mit | 9,213,925,581,094,542,000 | 46 | 117 | 0.720659 | false |
sanjaymandadi/mozu-python-sdk | test/functional_test/security_test/userauthenticator_test.py | 2 | 1389 | import unittest
from mozurestsdk import mozuclient;
from mozurestsdk.security.userauthenticator import UserAuthenticator;
from mozurestsdk import util;
class UserAuthenticator_Test(unittest.TestCase):
def setUp(self):
self.config = util.readConfigFile("c:\projects\mozuconfig.txt");
mozuclient.configure(config="c:\projects\mozuconfig.txt");
def test_tenantAuth(self):
userName = self.config.get("userName", None);
password = self.config.get("password", None);
tenantId = self.config.get("tenantId", None);
userAuth = UserAuthenticator(userName, password, tenantId=tenantId);
userAuth.authenticate();
userAuth.refreshAuth();
self.assertIsNotNone(userAuth.auth);
self.assertIsNotNone(userAuth.auth["accessToken"]);
self.assertEqual(str(userAuth.auth["tenant"]["id"]), tenantId);
def test_devAccountAuth(self):
userName = self.config.get("userName", None);
password = self.config.get("password", None);
devAccountId = self.config.get("devAccountId", None);
authUrl = self.config.get("baseAuthUrl", None);
userAuth = UserAuthenticator(userName, password, devAccountId=devAccountId);
userAuth.authenticate();
userAuth.refreshAuth();
self.assertIsNotNone(userAuth.auth);
self.assertIsNotNone(userAuth.auth["accessToken"]);
self.assertEqual(str(userAuth.auth["account"]["id"]), devAccountId);
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -2,910,828,063,572,994,600 | 36.540541 | 79 | 0.74586 | false |
tundish/turberfield-utils | turberfield/utils/db.py | 1 | 7983 | #!/usr/bin/env python3
# encoding: UTF-8
# This file is part of turberfield.
#
# Turberfield is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Turberfield is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with turberfield. If not, see <http://www.gnu.org/licenses/>.
from collections import namedtuple
from collections import OrderedDict
from collections.abc import Mapping
import datetime
import enum
import logging
import sqlite3
class Table:
Column = namedtuple(
"Column",
["name", "type", "isPK", "isNullable", "isUnique", "default", "fk"]
)
lookup = OrderedDict()
@staticmethod
def declare_type(col):
if isinstance(col.type, str):
return "INTEGER" if "int" in col.type.lower() and col.isPK else col.type
elif col.type is int:
return "INTEGER"
elif col.type is str:
return "TEXT"
elif col.type is bool:
return ""
elif col.type is bytes:
return "BLOB"
elif col.type is datetime.date:
return "date"
elif col.type is datetime.datetime:
return "timestamp"
elif "__conform__" in dir(col.type):
return "BLOB"
else:
return ""
def __init__(self, name, cols=[], lookup=None):
self.name = name
self.cols = cols
if lookup is not None:
self.lookup = lookup
self.lookup[name] = self
def sql_lines(self):
yield "create table if not exists {0}(".format(self.name)
pks = [col for col in self.cols if col.isPK]
fks = OrderedDict()
uqs = [col for col in self.cols if col.isUnique]
constraints = len(pks) >= 2 or len(uqs) >= 2
for col in self.cols:
ref = self.lookup.get(col.fk)
if ref is not None:
fks[col] = ref
yield " ".join((
col.name, self.declare_type(col),
"PRIMARY KEY" if col.isPK and len(pks) == 1 else "",
"NOT NULL" if not col.isNullable else "",
"UNIQUE" if col.isUnique and len(uqs) == 1 else "",
"DEFAULT {0}".format(col.default) if col.default else ""
)).rstrip() + (
"," if constraints or fks or col is not self.cols[-1]
else ""
)
if len(pks) >= 2:
yield "PRIMARY KEY({0})".format(", ".join([i.name for i in pks]))
if len(uqs) >= 2:
yield "UNIQUE({0})".format(", ".join([i.name for i in uqs]))
for col, refs in fks.items():
yield "FOREIGN KEY ({0.name}) REFERENCES {1.name}({2})".format(
col, refs, ", ".join([col.name for col in refs.cols if col.isPK])
)
yield(")")
class SQLOperation:
@property
def sql(self):
raise NotImplementedError
def __init__(self, *args, data=[]):
self.tables = args
self.data = data
def run(self, con, log=None):
"""
Execute the SQL defined by this class.
Returns the cursor for data extraction.
"""
cur = con.cursor()
sql, data = self.sql
try:
if data is None:
cur.executescript(sql)
else:
statements = sql.split(";")
for s in statements:
if isinstance(data, Mapping):
cur.execute(s, data)
else:
cur.executemany(s, data)
except (sqlite3.OperationalError, sqlite3.ProgrammingError) as e:
if log is not None:
log.error(self.sql)
con.rollback()
raise e
else:
con.commit()
return cur
class Creation(SQLOperation):
@property
def sql(self):
return (
";\n".join("\n".join(table.sql_lines()) for table in self.tables),
None
)
def run(self, con, log=None):
cur = super().run(con)
if cur is not None:
cur.close()
return self.tables
class Insertion(SQLOperation):
@property
def sql(self):
lines = []
for table in self.tables:
if isinstance(self.data, Mapping):
params = [i for i in table.cols if i.name in self.data]
elif self.data:
params = [i for i in table.cols if i.name in self.data[0]]
lines.append(
"insert into {table.name} ({columns}) values ({values})".format(
table=table,
columns=", ".join(i.name for i in params),
values=", ".join(":{col.name}".format(col=col) for col in params)
)
)
return (";\n".join(lines), self.data)
class Connection:
"""
* Find target database files
* Load extensions
* Attach databases (readonly?)
* Attach in-memory database
* Execute pragmas
* Discover state tables
* Create state tables
"""
class CacheOptions(enum.Enum):
shared = "cache=shared"
private = "cache=private"
class ImmutableOptions(enum.Enum):
immutable = "immutable=1"
mutable = "immutable=0"
class ModeOptions(enum.Enum):
read = "mode=ro"
read_write = "mode=rw"
read_write_create = "mode=rwc"
memory = "mode=memory"
@staticmethod
def url(conn, options):
return "file:{0}?{1}".format(
conn, "&".join(i.value for i in options)
)
@staticmethod
def options(name=None, paths=[]):
version = tuple(int(i) for i in sqlite3.sqlite_version.split("."))
if version < (3, 7, 13):
raise UserWarning(
"Your sqlite3 library is too old. Version 3.7.13 required at least."
)
if not paths:
if name is None:
dbs = {
":memory:": [
Connection.CacheOptions.shared,
]
}
else:
dbs = {
name: [
Connection.CacheOptions.shared,
Connection.ModeOptions.memory
]
}
elif len(paths) == 1:
dbs = {
paths[0]: [Connection.ModeOptions.read_write_create]
}
else:
dbs = OrderedDict({
":memory:": [
Connection.CacheOptions.private,
Connection.ModeOptions.memory
]
})
dbs.update(
{i: [Connection.ModeOptions.read] for i in paths}
)
return {
"attach": dbs
}
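    # Illustrative example (assumed): Connection.options(paths=["app.db"])
    # yields {"attach": {"app.db": [Connection.ModeOptions.read_write_create]}},
    # and Connection.url("app.db", [Connection.ModeOptions.read_write_create])
    # == "file:app.db?mode=rwc".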
def __init__(self, attach=[], log=None):
self.log = log or logging.getLogger("Connection")
self.attach = attach
self.db = None
for conn, options in self.attach.items():
self.log.debug(Connection.url(conn, options))
def __enter__(self):
conn, options = list(self.attach.items())[0]
self.db = sqlite3.connect(
self.url(conn, options), uri=True,
detect_types=sqlite3.PARSE_DECLTYPES
)
self.db.row_factory = sqlite3.Row
self.db.execute("pragma foreign_keys=ON")
# states = list(gather_installed("turberfield.utils.states"))
return self.db
def __exit__(self, exc_type, exc_value, traceback):
return False
| gpl-3.0 | 8,149,039,837,599,711,000 | 29.469466 | 85 | 0.525742 | false |
fnl/pymonad | pymonad/Either.py | 1 | 3330 | # --------------------------------------------------------
# (c) Copyright 2014 by Jason DeLaat.
# Licensed under BSD 3-clause licence.
# --------------------------------------------------------
from pymonad.Monad import *
class Either(Monad):
"""
Represents a calculation that may either fail or succeed.
An alternative to using exceptions. `Either` is an abstract type and should not
be instantiated directly. Instead use `Right` (or its alias `Result`) and
`Left` (or its alias `Error`)
"""
def __init__(self, value):
""" Raises a `NotImplementedError`. Use `Right` or `Left` instead. """
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, Either): raise TypeError("Can't compare different types.")
@classmethod
def unit(cls, value):
return Right(value)
class Left(Either):
"""
Represents a calculation which has failed and contains an error code or message.
To help with readability you may alternatively use the alias `Error`.
"""
def __init__(self, errorMsg):
"""
Creates a `Left` "calculation failed" object.
        `errorMsg` can be anything which gives information about what went wrong.
"""
super(Either, self).__init__(errorMsg)
def __eq__(self, other):
super(Left, self).__eq__(other)
if not isinstance(other, Left): return False
elif (self.getValue() == other.getValue()): return True
else: return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return "Left: " + str(self.getValue())
def fmap(self, _):
""" Returns the `Left` instance that was used to call the method. """
return self
def amap(self, _):
""" Returns the `Left` instance that was used to call the method. """
return self
def bind(self, _):
""" Returns the `Left` instance that was used to call the method. """
return self
class Right(Either):
"""
Represents a calculation which has succeeded and contains the result of that calculation.
To help with readability you may alternatively use the alias `Result`.
"""
def __init__(self, value):
"""
Creates a `Right` "calculation succeeded" object.
`value` is the actual calculated value of whatever operation was being performed
and can be any type.
"""
super(Either, self).__init__(value)
def __eq__(self, other):
super(Right, self).__eq__(other)
if not isinstance(other, Right): return False
elif (self.getValue() == other.getValue()): return True
else: return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return "Right: " + str(self.getValue())
def fmap(self, function):
"""
Applies `function` to the contents of the `Right` instance and returns a
new `Right` object containing the result.
`function` should accept a single "normal" (non-monad) argument and return
a non-monad result.
"""
return Right(function(self.getValue()))
def amap(self, functorValue):
""" Applies the function stored in the functor to `functorValue` returning a new Either value. """
return self.getValue() << functorValue
def bind(self, function):
"""
Applies `function` to the result of a previous calculation.
`function` should accept a single "normal" (non-monad) argument and return
either a `Left` or `Right` type object.
"""
return function(self.getValue())
Error = Left
Result = Right
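# Illustrative usage (not part of the original module):
#
#   def safe_div(a, b):
#       return Error("division by zero") if b == 0 else Result(a / b)
#
#   safe_div(10, 2).bind(lambda x: Result(x + 1))   # a Right containing 6
#   safe_div(10, 0).bind(lambda x: Result(x + 1))   # Left: division by zero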
| bsd-3-clause | 8,568,344,353,019,406,000 | 28.732143 | 100 | 0.664264 | false |
h-qub/wordeater-web | we-web/tests/unit/rest/users_test.py | 1 | 7620 | # coding=utf-8
from rest_test import *
import api.resources.users.users
__author__ = 'Glebov Boris'
class UserListTest(RestBaseTest):
"""
    Test all cases for GET: /api/v1/users/
"""
def test_users_list_success(self):
client_app = self.get_app_client()
r = client_app.get('/api/v1/users/')
data = json.loads(r.data)
self.assertEqual(data[u'status'], 200)
class UserSignInTest(RestBaseTest):
"""
    Test all cases for POST: /api/v1/user/signin/
"""
def test_users_signin_fail(self):
"""
        Check that sign-in fails when:
        1. The user's login is not found,
        OR
        2. The password is incorrect
"""
client_app = self.get_app_client()
data = {
u'login': u'123',
u'password': u'123'
}
r = client_app.post('/api/v1/user/signin/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
self.assertEqual(4001, response_data[u'status'])
def test_users_signin_ok(self):
"""
        Check that sign-in succeeds when:
        1. A user with the given login exists
        2. The password is correct
"""
us = ServiceLocator.resolve(ServiceLocator.USERS)
us.create(u'user1', u'[email protected]', u'123', first_name=u'demo', last_name=u'demo')
client_app = self.get_app_client()
data = {
u'login': u'user1',
u'password': u'123'
}
r = client_app.post('/api/v1/user/signin/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
self.assertEqual(response_data[u'status'], 200)
self.assertIsNotNone(response_data[u'data'][u'auth_token'])
class UserSignUpTest(RestBaseTest):
"""
    Test all cases for POST: /api/v1/user/signup/
"""
    def test_users_signup_fail_login_is_exists(self):
        """
        Check that signup fails when the login already exists.
        """
self.clear_db()
us = ServiceLocator.resolve(ServiceLocator.USERS)
us.create(u'user1', u'[email protected]', u'123', first_name=u'demo', last_name=u'demo')
client_app = self.get_app_client()
data = {
u'login': u'user1',
u'email': u'[email protected]',
u'password': u'123',
u'first_name': u'aa',
u'last_name': u'aa'
}
r = client_app.post('/api/v1/user/signup/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
self.assertEqual(response_data[u'status'], 4001)
self.assertEqual(response_data[u'errors'][u'error_type'], u'user_already_exists_error', u'Login already exists')
    def test_users_signup_fail_email_is_exists(self):
        """
        Check that signup fails when the email already exists.
        """
self.clear_db()
us = ServiceLocator.resolve(ServiceLocator.USERS)
us.create(u'user1', u'[email protected]', u'123', first_name=u'demo', last_name=u'demo')
client_app = self.get_app_client()
data = {
u'login': u'user2',
u'email': u'[email protected]',
u'password': u'123',
u'first_name': u'aa',
u'last_name': u'aa'
}
r = client_app.post('/api/v1/user/signup/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
self.assertEqual(response_data[u'status'], 4001)
self.assertEqual(response_data[u'errors'][u'error_type'], u'email_already_exists', u'Email already exists')
def test_users_signup_ok(self):
"""
        User signup succeeds.
"""
self.clear_db()
client_app = self.get_app_client()
data = {
u'login': u'user1',
u'email': u'[email protected]',
u'password': u'123',
u'first_name': u'aa',
u'last_name': u'aa'
}
r = client_app.post('/api/v1/user/signup/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
self.assertEqual(response_data[u'status'], 201)
us = ServiceLocator.resolve(ServiceLocator.USERS)
user = us.single(u'user1')
self.assertIsNotNone(user)
class UserCheckTest(RestBaseTest):
"""
    Test all cases for POST: /api/v1/users/check/
"""
def test_user_check_login_is_exists(self):
"""
Test case:
        Login already exists
"""
data = {
u'login': u'user1',
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'login'], False)
def test_user_check_email_is_exists(self):
"""
Test case:
        Email already exists
"""
data = {
u'email': u'[email protected]',
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'email'], False)
def test_user_check_login_ok(self):
"""
Test case:
        Login does not exist
"""
data = {
u'login': u'user2'
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'login'], True)
def test_user_check_email_ok(self):
"""
Test case:
        Email does not exist
"""
data = {
u'email': u'[email protected]'
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'email'], True)
def test_user_check_login_email_ok(self):
"""
Test case:
        Neither login nor email exists
"""
data = {
u'login': u'user2',
u'email': u'[email protected]'
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'login'], True)
self.assertEqual(response_data[u'data'][u'email'], True)
def test_user_check_login_email_fail(self):
"""
Test case:
        Both login and email already exist
"""
data = {
u'login': u'user1',
u'email': u'[email protected]'
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'login'], False)
self.assertEqual(response_data[u'data'][u'email'], False)
def test_user_check_login_email_none(self):
"""
Test case:
        Neither login nor email was sent
"""
data = {
}
response_data = self._test_check(data)
self.assertEqual(response_data[u'status'], 200)
self.assertEqual(response_data[u'data'][u'login'], None)
self.assertEqual(response_data[u'data'][u'email'], None)
def _test_check(self, data):
self.clear_db()
us = ServiceLocator.resolve(ServiceLocator.USERS)
us.create(u'user1', u'[email protected]', u'123', first_name=u'demo', last_name=u'demo')
client_app = self.get_app_client()
r = client_app.post('/api/v1/users/check/', headers=self.headers, data=json.dumps(data))
response_data = json.loads(r.data)
return response_data
| mit | 3,243,931,358,639,282,700 | 26.509025 | 120 | 0.555249 | false |
TraceContext/tracecontext-spec | test/tracecontext/tracestate.py | 1 | 2749 | from collections import OrderedDict
import re
class Tracestate(object):
_KEY_WITHOUT_VENDOR_FORMAT = r'[a-z][_0-9a-z\-\*\/]{0,255}'
_KEY_WITH_VENDOR_FORMAT = r'[0-9a-z][_0-9a-z\-\*\/]{0,240}@[a-z][_0-9a-z\-\*\/]{0,13}'
_KEY_FORMAT = _KEY_WITHOUT_VENDOR_FORMAT + '|' + _KEY_WITH_VENDOR_FORMAT
_VALUE_FORMAT = r'[\x20-\x2b\x2d-\x3c\x3e-\x7e]{0,255}[\x21-\x2b\x2d-\x3c\x3e-\x7e]'
_DELIMITER_FORMAT_RE = re.compile('[ \t]*,[ \t]*')
_KEY_VALIDATION_RE = re.compile('^(' + _KEY_FORMAT + ')$')
_VALUE_VALIDATION_RE = re.compile('^(' + _VALUE_FORMAT + ')$')
_MEMBER_FORMAT_RE = re.compile('^(%s)(=)(%s)$' % (_KEY_FORMAT, _VALUE_FORMAT))
def __init__(self, *args, **kwds):
if len(args) == 1 and not kwds:
if isinstance(args[0], str):
self._traits = OrderedDict()
self.from_string(args[0])
return
if isinstance(args[0], Tracestate):
self._traits = OrderedDict(args[0]._traits)
return
self._traits = OrderedDict(*args, **kwds)
def __contains__(self, key):
return key in self._traits
def __len__(self):
return len(self._traits)
def __repr__(self):
return '{}({!r})'.format(type(self).__name__, str(self))
def __getitem__(self, key):
return self._traits[key]
def __setitem__(self, key, value):
if not isinstance(key, str):
raise ValueError('key must be an instance of str')
if not re.match(self._KEY_VALIDATION_RE, key):
raise ValueError('illegal key provided')
if not isinstance(value, str):
raise ValueError('value must be an instance of str')
if not re.match(self._VALUE_VALIDATION_RE, value):
raise ValueError('illegal value provided')
self._traits[key] = value
        self._traits.move_to_end(key, last=False)
def __str__(self):
return self.to_string()
def from_string(self, string):
for member in re.split(self._DELIMITER_FORMAT_RE, string):
if member:
match = self._MEMBER_FORMAT_RE.match(member)
if not match:
raise ValueError('illegal key-value format {!r}'.format(member))
key, eq, value = match.groups()
if key in self._traits:
raise ValueError('conflict key {!r}'.format(key))
self._traits[key] = value
return self
def to_string(self):
return ','.join(map(lambda key: key + '=' + self[key], self._traits))
# make this an optional choice instead of enforcement during put/update
# if the tracestate value size is bigger than 512 characters, the tracer
# CAN decide to forward the tracestate
def is_valid(self):
        if len(self) == 0:
return False
# combined header length MUST be less than or equal to 512 bytes
if len(self.to_string()) > 512:
return False
# there can be a maximum of 32 list-members in a list
if len(self) > 32:
return False
return True
def pop(self):
return self._traits.popitem()
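# Illustrative usage (assumed; not part of the original test helper):
#
#   ts = Tracestate('congo=t61rcWkgMzE,rojo=00f067aa0ba902b7')
#   ts['sapphire'] = 'new'   # new/updated keys move to the front of the list
#   ts.to_string()           # 'sapphire=new,congo=t61rcWkgMzE,rojo=00f067aa0ba902b7'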
| apache-2.0 | 5,575,316,123,618,295,000 | 32.52439 | 87 | 0.650782 | false |
nick41496/Beatnik | beatnik/api_manager/api_manager.py | 1 | 3520 | import logging
import os
import spotipy
import sys
from beatnik.api_manager.clients import AppleMusicApi, SoundcloudApi
from beatnik.api_manager.link_converter import LinkConverter
from beatnik.api_manager.link_parser import LinkParser
from beatnik.api_manager.search_handler import SearchHandler
from gmusicapi import Mobileclient
from spotipy.oauth2 import SpotifyClientCredentials
from tidalapi import Session
class ApiManager:
def __init__(self):
self.logger = logging.getLogger(__name__)
self.apple_api = self.get_apple_api()
self.gpm_api = self.get_gpm_api()
self.soundcloud_api = self.get_soundcloud_api()
self.spotify_api = self.get_spotify_api()
self.tidal_api = self.get_tidal_api()
self.link_parser = LinkParser(
self.apple_api,
self.gpm_api,
self.soundcloud_api,
self.spotify_api,
self.tidal_api)
self.link_converter = LinkConverter(
self.apple_api,
self.gpm_api,
self.soundcloud_api,
self.spotify_api,
self.tidal_api,
self.link_parser)
self.search_handler = SearchHandler(self.spotify_api, self.link_converter)
def get_apple_api(self):
try:
key_id = os.environ['APPLE_KEY_ID']
issuer = os.environ['APPLE_KEY_ISSUER']
key = os.environ['APPLE_KEY']
return AppleMusicApi(key_id=key_id, issuer=issuer, key=key)
except Exception as e:
self.logger.error("Something went wrong getting Apple Music API")
self.logger.error(e)
return None
def get_gpm_api(self):
try:
gpm_api = Mobileclient()
username = os.environ['GPM_USERNAME']
password = os.environ['GPM_PASSWORD']
if (not gpm_api.login(username, password, Mobileclient.FROM_MAC_ADDRESS, 'en_US')):
self.logger.error("Unable to login to Google Play Music.")
return None
return gpm_api
except Exception as e:
self.logger.error("Something went wrong getting Google Play Music API")
self.logger.error(e)
return None
def get_soundcloud_api(self):
try:
return SoundcloudApi()
except Exception as e:
self.logger.error("Something went wrong getting Soundcloud API")
self.logger.error(e)
return None
def get_spotify_api(self):
try:
client_credentials_manager = SpotifyClientCredentials()
return spotipy.Spotify(client_credentials_manager=client_credentials_manager)
except Exception as e:
self.logger.error("Something went wrong getting Spotify API")
self.logger.error(e)
return None
def get_tidal_api(self):
try:
session = Session()
username = os.environ['TIDAL_USERNAME']
password = os.environ['TIDAL_PASSWORD']
if (not session.login(username, password)):
self.logger.error("Unable to login to Tidal")
return None
return session
except Exception as e:
self.logger.error("Something went wrong getting Tidal API")
self.logger.error(e)
return None
def convert_link(self, music):
music = self.link_converter.convert_link(music)
return music
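# Illustrative usage (assumed; credentials for each streaming service must be
# present in the environment for the corresponding client to initialize):
#
#   manager = ApiManager()
#   converted = manager.convert_link(music)  # 'music' as produced by the link parser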
| gpl-3.0 | 1,703,179,569,399,725,300 | 34.2 | 95 | 0.598295 | false |
acshi/osf.io | framework/mongo/utils.py | 1 | 5732 | # -*- coding: utf-8 -*-
import functools
import httplib as http
from django.core.paginator import Paginator
from django.db.models import QuerySet
import markupsafe
import pymongo
from modularodm.query import QueryBase
from modularodm.exceptions import NoResultsFound, MultipleResultsFound
from framework.exceptions import HTTPError
# MongoDB forbids field names that begin with "$" or contain ".". These
# utilities map to and from Mongo field names.
mongo_map = {
'.': '__!dot!__',
'$': '__!dollar!__',
}
def to_mongo(item):
for key, value in mongo_map.items():
item = item.replace(key, value)
return item
def to_mongo_key(item):
return to_mongo(item).strip().lower()
def from_mongo(item):
for key, value in mongo_map.items():
item = item.replace(value, key)
return item
def unique_on(*groups):
"""Decorator for subclasses of `StoredObject`. Add a unique index on each
group of keys provided.
:param *groups: List of lists of keys to be indexed
"""
def wrapper(cls):
cls.__indices__ = getattr(cls, '__indices__', [])
cls.__indices__.extend([
{
'key_or_list': [
(key, pymongo.ASCENDING)
for key in group
],
'unique': True,
}
for group in groups
])
return cls
return wrapper
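# Illustrative usage (assumed): enforce a compound unique index on two keys.
#
#   @unique_on(['node', 'name'])
#   class Tag(StoredObject):
#       ...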
def get_or_http_error(Model, pk_or_query, allow_deleted=False, display_name=None):
"""Load an instance of Model by primary key or modularodm.Q query. Raise an appropriate
HTTPError if no record is found or if the query fails to find a unique record
:param type Model: StoredObject subclass to query
:param pk_or_query:
:type pk_or_query: either
- a <basestring> representation of the record's primary key, e.g. 'abcdef'
- a <QueryBase> subclass query to uniquely select a record, e.g.
Q('title', 'eq', 'Entitled') & Q('version', 'eq', 1)
    :param bool allow_deleted: allow deleted records?
:param basestring display_name:
:raises: HTTPError(404) if the record does not exist
:raises: HTTPError(400) if no unique record is found
:raises: HTTPError(410) if the resource is deleted and allow_deleted = False
:return: Model instance
"""
display_name = display_name or ''
# FIXME: Not everything that uses this decorator needs to be markupsafe, but OsfWebRenderer error.mako does...
safe_name = markupsafe.escape(display_name)
if isinstance(pk_or_query, QueryBase):
try:
instance = Model.find_one(pk_or_query)
except NoResultsFound:
raise HTTPError(http.NOT_FOUND, data=dict(
message_long='No {name} record matching that query could be found'.format(name=safe_name)
))
except MultipleResultsFound:
raise HTTPError(http.BAD_REQUEST, data=dict(
message_long='The query must match exactly one {name} record'.format(name=safe_name)
))
else:
instance = Model.load(pk_or_query)
if not instance:
raise HTTPError(http.NOT_FOUND, data=dict(
message_long='No {name} record with that primary key could be found'.format(name=safe_name)
))
if getattr(instance, 'is_deleted', False) and getattr(instance, 'suspended', False):
raise HTTPError(451, data=dict( # 451 - Unavailable For Legal Reasons
message_short='Content removed',
message_long='This content has been removed'
))
if not allow_deleted and getattr(instance, 'is_deleted', False):
raise HTTPError(http.GONE)
return instance
def autoload(Model, extract_key, inject_key, func):
"""Decorator to autoload a StoredObject instance by primary key and inject into kwargs. Raises
an appropriate HTTPError (see #get_or_http_error)
:param type Model: database collection model to query (should be a subclass of StoredObject)
:param basestring extract_key: named URL field containing the desired primary key to be fetched
from the database
:param basestring inject_key: name the instance will be accessible as when it's injected as an
argument to the function
Example usage: ::
def get_node(node_id):
node = Node.load(node_id)
...
becomes
import functools
autoload_node = functools.partial(autoload, Node, 'node_id', 'node')
@autoload_node
def get_node(node):
...
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
primary_key = kwargs.get(extract_key)
instance = get_or_http_error(Model, primary_key)
kwargs[inject_key] = instance
return func(*args, **kwargs)
return wrapper
def paginated(model, query=None, increment=200, each=True):
"""Paginate a MODM query.
:param StoredObject model: Model to query.
:param Q query: Optional query object.
:param int increment: Page size
:param bool each: If True, each record is yielded. If False, pages
are yielded.
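    Example usage (hypothetical model and query)::
        for node in paginated(Node, Q('is_deleted', 'eq', False), increment=500):
            archive(node)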
"""
queryset = model.find(query)
# Pagination requires an order by clause, especially when using Postgres.
# see: https://docs.djangoproject.com/en/1.10/topics/pagination/#required-arguments
if isinstance(queryset, QuerySet) and not queryset.ordered:
queryset = queryset.order_by(queryset.model._meta.pk.name)
paginator = Paginator(queryset.all(), increment)
for page_num in paginator.page_range:
page = paginator.page(page_num)
if each:
for item in page.object_list:
yield item
else:
yield page.object_list
| apache-2.0 | -1,111,238,550,963,007,200 | 33.323353 | 114 | 0.643057 | false |
simphony/simphony-openfoam | foam_controlwrapper/tests/test_run_time.py | 1 | 4123 | import unittest
import os
import shutil
import tempfile
from foam_controlwrapper.blockmesh_utils import create_quad_mesh
from simphony.api import CUDS, Simulation
from simphony.core.cuba import CUBA
from simphony.cuds.meta import api
from simphony.engine import EngineInterface
class WrapperRunTestCase(unittest.TestCase):
def setUp(self):
case_name = "simplemeshIO"
mesh_name = "simplemeshIO_mesh"
cuds = CUDS(name=case_name)
# physics model
cfd = api.Cfd(name='default model')
cuds.add([cfd])
self.sim_time = api.IntegrationTime(name='simulation_time',
current=0.0,
final=1.0,
size=0.5)
cuds.add([self.sim_time])
mat = api.Material(name='a_material')
mat._data[CUBA.DENSITY] = 1.0
mat._data[CUBA.DYNAMIC_VISCOSITY] = 1.0
cuds.add([mat])
vel_inlet = api.Dirichlet(mat, name='vel_inlet')
vel_inlet._data[CUBA.VARIABLE] = CUBA.VELOCITY
vel_inlet._data[CUBA.VELOCITY] = (0.1, 0, 0)
pres_inlet = api.Neumann(mat, name='pres_inlet')
pres_inlet._data[CUBA.VARIABLE] = CUBA.PRESSURE
vel_outlet = api.Neumann(mat, name='vel_outlet')
vel_outlet._data[CUBA.VARIABLE] = CUBA.VELOCITY
pres_outlet = api.Dirichlet(mat, name='pres_outlet')
pres_outlet._data[CUBA.VARIABLE] = CUBA.PRESSURE
pres_outlet._data[CUBA.PRESSURE] = 0.0
vel_walls = api.Dirichlet(mat, name='vel_walls')
vel_walls._data[CUBA.VARIABLE] = CUBA.VELOCITY
vel_walls._data[CUBA.VELOCITY] = (0, 0, 0)
pres_walls = api.Neumann(mat, name='pres_walls')
pres_walls._data[CUBA.VARIABLE] = CUBA.PRESSURE
vel_frontAndBack = api.EmptyCondition(name='vel_frontAndBack')
vel_frontAndBack._data[CUBA.VARIABLE] = CUBA.VELOCITY
pres_frontAndBack = api.EmptyCondition(name='pres_frontAndBack')
pres_frontAndBack._data[CUBA.VARIABLE] = CUBA.PRESSURE
inlet = api.Boundary(name='inlet', condition=[vel_inlet, pres_inlet])
walls = api.Boundary(name='walls', condition=[vel_walls, pres_walls])
outlet = api.Boundary(name='outlet', condition=[vel_outlet,
pres_outlet])
frontAndBack = api.Boundary(name='frontAndBack',
condition=[vel_frontAndBack,
pres_frontAndBack])
cuds.add([inlet, walls, outlet, frontAndBack])
corner_points = [(0.0, 0.0, 0.0), (5.0, 0.0, 0.0),
(5.0, 5.0, 0.0), (0.0, 5.0, 0.0),
(0.0, 0.0, 1.0), (5.0, 0.0, 1.0),
(5.0, 5.0, 1.0), (0.0, 5.0, 1.0)]
self.mesh_path = tempfile.mkdtemp()
mesh = create_quad_mesh(self.mesh_path, mesh_name,
corner_points, 5, 5, 5)
cuds.add([mesh])
self.cuds = cuds
self.sim = Simulation(cuds, 'OpenFOAM',
engine_interface=EngineInterface.FileIO)
self.mesh_in_cuds = self.cuds.get_by_name(mesh_name)
def tearDown(self):
if os.path.exists(self.mesh_in_cuds.path):
shutil.rmtree(self.mesh_in_cuds.path)
if os.path.exists(self.mesh_path):
shutil.rmtree(self.mesh_path)
def test_run_time(self):
"""Test that field variable value is changed after
consecutive calls of run method
"""
self.sim.run()
for cell in self.mesh_in_cuds.iter(item_type=CUBA.CELL):
old_vel = cell.data[CUBA.VELOCITY]
old_pres = cell.data[CUBA.PRESSURE]
cell_uid = cell.uid
self.sim.run()
cell = self.mesh_in_cuds.get(cell_uid)
new_vel = cell.data[CUBA.VELOCITY]
new_pres = cell.data[CUBA.PRESSURE]
self.assertNotEqual(old_vel, new_vel)
self.assertNotEqual(old_pres, new_pres)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -8,620,174,854,293,740,000 | 35.8125 | 77 | 0.564152 | false |
project-owner/Peppy | player/client/vlcclient.py | 1 | 10907 | # Copyright 2016-2021 Peppy Player [email protected]
#
# This file is part of Peppy Player.
#
# Peppy Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Peppy Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Peppy Player. If not, see <http://www.gnu.org/licenses/>.
import threading
import time
import urllib
from player.client.baseplayer import BasePlayer
from vlc import Meta
from vlc import EventType
from queue import Queue
from util.fileutil import FILE_PLAYLIST, FILE_AUDIO
class Vlcclient(BasePlayer):
""" This class extends base player and provides communication with VLC player
using Python binding for 'libvlc' library """
def __init__(self):
""" Initializer. Starts separate threads for handling VLC events """
self.RADIO_MODE = "radio"
BasePlayer.__init__(self)
self.mode = self.RADIO_MODE
self.instance = None
self.player = None
self.media = None
self.current_track = ""
self.seek_time = "0"
self.cd_track_id = None
self.cd_drive_name = None
self.END_REACHED = "end reached"
self.TRACK_CHANGED = "track changed"
self.PAUSED = "paused"
self.player_queue = Queue()
self.threads_running = False
self.changing_volume = False
def start_client(self):
""" Start threads. """
self.threads_running = True
thread_1 = threading.Thread(target = self.radio_stream_event_listener)
thread_1.start()
thread_2 = threading.Thread(target = self.handle_event_queue)
thread_2.start()
def stop_client(self):
""" Stop threads """
with self.lock:
self.threads_running = False
def set_proxy(self, proxy_process, proxy=None):
""" Create new VLC player """
self.instance = proxy_process
self.proxy = proxy
self.player = self.instance.media_player_new()
player_mgr = self.player.event_manager()
player_mgr.event_attach(EventType.MediaPlayerEndReached, self.player_callback, [self.END_REACHED])
player_mgr.event_attach(EventType.MediaPlayerPlaying, self.player_callback, [self.TRACK_CHANGED])
def player_callback(self, event, data):
""" Player callback method
:param event: event to handle
:param data: event data
"""
if data:
self.player_queue.put(data[0])
def radio_stream_event_listener(self):
""" Starts the loop for listening VLC events for radio track change """
while self.threads_running:
with self.lock:
if self.media and self.mode == self.RADIO_MODE:
t = self.media.get_meta(Meta.NowPlaying)
if t and t != self.current_track:
self.current_track = t
if self.enabled:
self.notify_player_listeners({"current_title": t})
time.sleep(1)
def handle_event_queue(self):
""" Handling player event queue """
if not self.enabled:
return
while self.threads_running:
d = self.player_queue.get() # blocking line
if d == self.END_REACHED:
self.notify_end_of_track_listeners()
self.player_queue.task_done()
elif d == self.TRACK_CHANGED:
self.track_changed()
self.player_queue.task_done()
def track_changed(self):
""" Handle track change event """
if not self.enabled:
return
if self.mode == self.RADIO_MODE:
return
current = {"source": "player"}
current["state"] = "playing"
t = self.media.get_meta(Meta.Title)
if t == ".":
return
if self.cd_track_id and t.startswith("cdda:"):
current["cd_track_id"] = self.cd_track_id
if self.cd_tracks:
t = self.cd_tracks[int(self.cd_track_id) - 1].name
else:
t = self.cd_drive_name + self.cd_track_title + " " + self.cd_track_id
m = self.media.get_mrl()
m = m[m.rfind("/") + 1:]
m = urllib.parse.unquote(m)
current["file_name"] = m
current["current_title"] = t
current["Time"] = str(self.player.get_length()/1000)
if not self.seek_time:
self.seek_time = "0"
current["seek_time"] = self.seek_time
self.notify_player_listeners(current)
def set_player_volume_control(self, flag):
""" Player Volume Control type setter
        :param flag: True - player volume control type, False - amixer or hardware volume control type
"""
BasePlayer.set_player_volume_control(self, flag)
if not self.player_volume_control:
self.set_volume(100)
def play(self, state):
""" Start playing specified track/station. First it cleans the playlist
then adds new track/station to the list and then starts playback
syntax for CD:
self.media = self.instance.media_new("cdda:///E:/", (":cdda-track=7"))
:param state: button state which contains the track/station info
"""
url = None
self.enabled = True
if state == None:
if self.state != None:
url = getattr(self.state, "url", None)
else:
url = None
else:
url = getattr(state, "url", None)
self.state = state
if url == None:
return
url = url.replace("\\", "/").replace("\"", "")
track_time = getattr(self.state, "track_time", None)
if track_time == None:
track_time = "0"
else:
track_time = str(track_time)
if ":" in track_time:
track_time = track_time.replace(":", ".")
self.seek_time = track_time
s = getattr(self.state, "playback_mode", None)
if s and s == FILE_PLAYLIST:
self.stop()
self.mode = FILE_PLAYLIST
self.enabled = True
elif s and s == FILE_AUDIO:
self.mode = FILE_AUDIO
else:
self.mode = self.RADIO_MODE
if url.startswith("http") and self.mode != self.RADIO_MODE:
url = self.encode_url(url)
with self.lock:
file_name = getattr(self.state, "file_name", None)
if file_name and file_name.startswith("cdda://"):
parts = file_name.split()
self.cd_track_id = parts[1].split("=")[1]
self.cd_drive_name = parts[0][len("cdda:///"):]
self.media = self.instance.media_new(parts[0], parts[1])
else:
self.media = self.instance.media_new(url)
self.player.set_media(self.media)
self.player.play()
try:
self.player.set_time(int(float(self.seek_time)) * 1000)
except:
pass
if self.player_volume_control and getattr(self.state, "volume", None) != None:
self.set_volume(int(self.state.volume))
def stop(self, state=None):
""" Stop playback """
with self.lock:
self.enabled = False
self.player.stop()
def seek(self, time):
""" Jump to the specified position in the track
:param time: time position in track
"""
if ":" in time:
self.seek_time = self.get_seconds_from_string(time)
else:
self.seek_time = time
with self.lock:
msec = int(float(self.seek_time) * 1000)
t = threading.Thread(target=self.seek_method, args=[msec])
t.start()
def seek_method(self, msec):
""" Seek track thread method
:param msec: milliseconds for new position
"""
self.player.set_time(msec)
def play_pause(self, pause_flag=None):
""" Play/Pause playback
:param pause_flag: play/pause flag
"""
with self.lock:
self.seek_time = self.get_current_track_time()
self.player.pause()
def set_volume(self, level):
""" Set volume.
:param level: new volume level
"""
self.player.audio_set_volume(int(level))
if getattr(self, "state", None) != None:
if self.state.volume == level:
return
self.state.volume = level
v = self.get_volume()
if v != int(level): # usually initial volume setting
if hasattr(self, "volume_thread"):
self.volume_thread.join()
self.volume_thread = threading.Thread(target=self.set_volume_level, args=[level])
self.volume_thread.start()
def set_volume_level(self, level):
""" Set volume level in separate thread
:param level: volume level
"""
n = 0
max_n = 20
vol = -2
while n < max_n and level != vol:
self.player.audio_set_volume(int(level))
time.sleep(0.1)
vol = self.get_volume()
n += 1
def get_volume(self):
""" Return current volume level
:return: volume level or -1 if not available
"""
with self.lock:
return self.player.audio_get_volume()
def mute(self):
""" Mute """
with self.lock:
self.player.audio_toggle_mute()
def current(self):
""" Return the current song """
pass
def shutdown(self):
""" Shutdown the player """
with self.lock:
self.player.stop()
def get_current_track_time(self):
""" Return current track time
:return: current track time
"""
t = self.player.get_time()/1000
return str(t)
def get_current_playlist(self):
""" Return current playlist
:return: current playlist
"""
return self.playlist
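# Minimal usage sketch (hedged: Peppy normally wires the VLC instance and the
# state object itself, so the calls below are illustrative only):
#     client = Vlcclient()
#     client.start_client()
#     client.set_proxy(vlc.Instance())      # assumes python-vlc is available
#     client.play(state)                    # 'state' must expose .url, .volume, ...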
| gpl-3.0 | -9,174,182,375,290,047,000 | 31.85241 | 106 | 0.539103 | false |
googleapis/googleapis-gen | google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/errors/types/user_data_error.py | 1 | 1227 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v6.errors',
marshal='google.ads.googleads.v6',
manifest={
'UserDataErrorEnum',
},
)
class UserDataErrorEnum(proto.Message):
r"""Container for enum describing possible user data errors. """
class UserDataError(proto.Enum):
r"""Enum describing possible request errors."""
UNSPECIFIED = 0
UNKNOWN = 1
OPERATIONS_FOR_CUSTOMER_MATCH_NOT_ALLOWED = 2
TOO_MANY_USER_IDENTIFIERS = 3
USER_LIST_NOT_APPLICABLE = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | -305,280,532,206,740,100 | 30.461538 | 74 | 0.693562 | false |
jasonleaster/LeetCode | Largest_Number/ln.py | 1 | 1325 | # LeetCode "Largest Number": arrange a list of non-negative
# integers so that their concatenation forms the largest possible number.
from functools import cmp_to_key
def lgst_num(array):
    if not array:
        return ""
    # Place a before b when the concatenation a+b beats b+a.
    def compare(a, b):
        if a + b > b + a:
            return -1
        if a + b < b + a:
            return 1
        return 0
    digits = sorted((str(n) for n in array), key=cmp_to_key(compare))
    result = "".join(digits)
    # Collapse an all-zero result such as "00" down to a single "0".
    return result.lstrip("0") or "0"
print(lgst_num([5, 2, 6, 3, 1, 4]))  # -> 654321
| gpl-2.0 | -8,309,436,790,888,531,000 | 19.075758 | 56 | 0.392453 | false |
Connexions/draft-transforms | drafttransform/cli.py | 1 | 3331 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
"""The command-line interface for transforming cnxml files directly in workspace."""
import os
import sys
import argparse
from . import transforms
from workgroup import Workgroup, listWorkspaces
DESCRIPTION = __doc__
DEFAULT_HOST = "qa.cnx.org"
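# Example invocation (illustrative; the transform subcommand names are
# registered by transforms.load_cli and are not shown here):
#   python cli.py -H qa.cnx.org -a user:password -w wg_id --upload <transform>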
def main(argv=None):
"""Main functions used to interface directly with the user."""
parser = argparse.ArgumentParser(description = DESCRIPTION)
parser.add_argument('-H', '--host',
default = DEFAULT_HOST,
help = "A legacy connexions authoring site")
parser.add_argument('-a', '--auth', required = True,
help = "authentication info [userid:password]")
parser.add_argument('-w', '--workgroup',
help = "Id of workgroup: defaults to user's private workspace")
parser.add_argument('-l', '--list',
action = "store_true",
help = "List all workspaces")
parser.add_argument('-p', '--publish', metavar = 'message',
help = "Publish after transform")
parser.add_argument('-P', '--publish_only', metavar = 'message',
help = "Publish all drafts, no download or transform")
parser.add_argument('-u', '--upload',
action = "store_true",
help="Upload transformed doc back to draft")
parser.add_argument('-s', '--save-dir',
help = "Directory to save transformed output to, as <moduleid>.xml")
subparsers = parser.add_subparsers(help = "transform step")
transforms.load_cli(subparsers)
if len(sys.argv) < 5 or sys.argv[0].startswith('-'):
sys.argv.append(transforms.get_default_cli_command_name())
print sys.argv
args = parser.parse_args(argv)
if hasattr(args,'save_dir') and args.save_dir or hasattr(args,'save_dir_d') and args.save_dir_d:
save_dir = args.save_dir or args.save_dir_d
else:
save_dir = None
cmmd = args.cmmd
if args.list:
print '\n'.join(listWorkspaces(**vars(args)))
return
# walk workgroup, look over and retrieve cnxmls, transform, then save and
# optionally publish.
workgroup = Workgroup(**vars(args))
print workgroup.url
for mod in workgroup.modules():
if args.publish_only:
mod.publish(args.publish_only)
else:
cnxml = mod.cnxml()
new_cnxml = cmmd(cnxml,**vars(args))
if cnxml and new_cnxml:
print '%s: %s %s' % (mod.moduleid,len(cnxml),len(new_cnxml))
if save_dir:
if not os.path.exists(save_dir):
os.mkdir(save_dir)
with open(os.path.join(save_dir,'%s.xml' % (mod.moduleid)), 'w') as m:
m.write(new_cnxml)
if args.upload:
mod.save(new_cnxml)
if args.publish:
mod.publish(args.publish)
return # cmmd(**vars(args))
if __name__ == '__main__':
sys.exit(main())
| agpl-3.0 | 7,051,750,747,463,857,000 | 36.426966 | 100 | 0.567998 | false |
mjenrungrot/competitive_programming | UVa Online Judge/12394.py | 1 | 1476 | # =============================================================================
# Author: Teerapat Jenrungrot - https://github.com/mjenrungrot/
# FileName: 12394.py
# Description: UVa Online Judge - 12394
# =============================================================================
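# Each of the N input lines gives a reviewer's name followed by the K paper
# ids that reviewer handles.  A paper counts as a problem if it is not
# reviewed exactly K times, is reviewed by someone sharing its author's name,
# or appears twice in a single reviewer's list.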
while True:
K, N = list(map(int, input().split()))
if K == 0 and N == 0: break
data = []
for i in range(N):
tmp = input().split()
name = tmp[0]
papers = list(map(int, tmp[1:]))
data.append((name, papers))
checked = [True for i in range(N)]
# Check number of reviews
n_reviews = [0 for i in range(N)]
for i in range(N):
for j in range(K):
n_reviews[data[i][1][j] - 1] += 1
for i in range(N):
if n_reviews[i] != K:
checked[i] = False
# Check collaborator
for i in range(N):
for j in range(K):
if data[i][0] == data[data[i][1][j] - 1][0]:
checked[data[i][1][j] - 1] = False
# Check same paper
for i in range(N):
for j in range(K):
for k in range(j+1, K):
if data[i][1][j] == data[i][1][k]:
checked[data[i][1][j] - 1] = False
ans = 0
for i in range(N):
if not checked[i]:
ans += 1
if ans == 0: print("NO PROBLEMS FOUND")
elif ans == 1: print("1 PROBLEM FOUND")
else: print("{} PROBLEMS FOUND".format(ans))
| mit | -6,404,102,092,147,442,000 | 27.941176 | 79 | 0.436314 | false |
hellysmile/django-iron-sessions | tests/tests.py | 1 | 1986 | # tests stolen from https://github.com/martinrusev/django-redis-sessions
from django.utils.importlib import import_module
from django.conf import settings
import time
from nose.tools import eq_
session_engine = import_module(settings.SESSION_ENGINE).SessionStore()
def test_modify_and_keys():
eq_(session_engine.modified, False)
session_engine['test'] = 'test_me'
eq_(session_engine.modified, True)
eq_(session_engine['test'], 'test_me')
def test_save_and_delete():
session_engine['key'] = 'value'
session_engine.save()
eq_(session_engine.exists(session_engine.session_key), True)
session_engine.delete(session_engine.session_key)
eq_(session_engine.exists(session_engine.session_key), False)
def test_flush():
session_engine['key'] = 'another_value'
session_engine.save()
key = session_engine.session_key
session_engine.flush()
eq_(session_engine.exists(key), False)
def test_items():
session_engine['item1'], session_engine['item2'] = 1, 2
session_engine.save()
# Python 3.*
eq_(sorted(list(session_engine.items())), [('item1', 1), ('item2', 2)])
def test_expiry():
session_engine.set_expiry(1)
# Test if the expiry age is set correctly
eq_(session_engine.get_expiry_age(), 1)
session_engine['key'] = 'expiring_value'
session_engine.save()
key = session_engine.session_key
eq_(session_engine.exists(key), True)
time.sleep(2)
eq_(session_engine.exists(key), False)
def test_save_and_load():
session_engine.set_expiry(60)
session_engine.setdefault('item_test', 8)
session_engine.save()
session_data = session_engine.load()
eq_(session_data.get('item_test'), 8)
# def test_load():
# session_engine.set_expiry(60)
# session_engine['item1'], session_engine['item2'] = 1,2
# session_engine.save()
# session_data = session_engine.server.get(session_engine.session_key)
# expiry, data = int(session_data[:15]), session_data[15:]
| apache-2.0 | 1,653,861,135,497,624,000 | 29.553846 | 75 | 0.678751 | false |
asicontech/software-for-equipment | bfgminer/usbtest.py | 2 | 3439 | #!/usr/bin/env python
# Copyright 2012-2013 Xiangfu
# Copyright 2012-2013 Andrew Smith
# Copyright 2013 Luke Dashjr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version. See COPYING for more details.
# Linux usage: ./usbtest.py /dev/ttyUSB0 0xhexcodes|string|icarus
# OR python usbtest.py /dev/ttyUSB0 0xhexcodes|string|icarus
#
# Windows usage: ./usbtest.py COM1 0xhexcodes|string|icarus
#
# sends the data specified to the USB device and waits
# for a reply then displays it
#
# the data can be:
# 0xhexcodes: e.g. 0x68656c6c6f20776f726c640a
# would send "hello world\n"
#
# string: e.g. sendsometext
#
# icarus: sends 2 known block payloads for an icarus device
# and shows the expected and actual answers if it's
# a working V3 icarus
import sys
import serial
import binascii
if len(sys.argv) < 2:
sys.stderr.write("Usage: " + sys.argv[0] + " device strings...\n")
sys.stderr.write(" where device is either like /dev/ttyUSB0 or COM1\n")
sys.stderr.write(" and strings are either '0xXXXX' or 'text'\n")
sys.stderr.write(" if the first string is 'icarus' the rest are ignored\n")
sys.stderr.write(" and 2 valid icarus test payloads are sent with results displayed\n")
sys.stderr.write("\nAfter any command is sent it waits up to 30 seconds for a reply\n");
sys.exit("Aborting")
# Open with a 10 second timeout - just to be sure
ser = serial.Serial(sys.argv[1], 115200, serial.EIGHTBITS, serial.PARITY_NONE, serial.STOPBITS_ONE, 10, False, False, 5)
if sys.argv[2] == "icarus":
# This show how Icarus use the block and midstate data
# This will produce nonce 063c5e01
block = "0000000120c8222d0497a7ab44a1a2c7bf39de941c9970b1dc7cdc400000079700000000e88aabe1f353238c668d8a4df9318e614c10c474f8cdf8bc5f6397b946c33d7c4e7242c31a098ea500000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
midstate = "33c5bf5751ec7f7e056443b5aee3800331432c83f404d9de38b94ecbf907b92d"
rdata2 = binascii.a2b_hex(block.encode('ascii'))[95:63:-1]
rmid = binascii.a2b_hex(midstate.encode('ascii'))[::-1]
payload = rmid + rdata2
print("Push payload to icarus: " + binascii.hexlify(payload).decode('ascii'))
ser.write(payload)
b=ser.read(4)
print("Result:(should be: 063c5e01): " + binascii.hexlify(b).decode('ascii'))
# Just another test
payload2 = "ce92099c5a80bb81c52990d5c0924c625fd25a535640607d5a4bdf8174e2c8d500000000000000000000000080000000000000000b290c1a42313b4f21b5bcb8"
print("Push payload to icarus: " + payload2)
ser.write(binascii.a2b_hex(payload2.encode('ascii')))
b=ser.read(4)
print("Result:(should be: 8e0b31c5): " + binascii.hexlify(b).decode('ascii'))
else:
data = b""
for arg in sys.argv[2::]:
if arg[0:2:] == '0x':
data += binascii.a2b_hex(arg[2::].encode('ascii'))
else:
data += arg.encode('latin-1')
print("Sending: 0x" + binascii.hexlify(data).decode('ascii'))
ser.write(data)
# If you're expecting more than one linefeed terminated reply,
# you'll only see the first one
# AND with no linefeed, this will wait the 10 seconds before returning
print("Waiting up to 10 seconds ...")
b=ser.readline()
print("Result: hex 0x" + binascii.hexlify(b).decode('ascii'))
print("Result: asc %s" % (repr(b),))
ser.close()
| apache-2.0 | 6,122,057,605,471,330,000 | 37.640449 | 267 | 0.742658 | false |
skoolkid/pyskool | pyskool/lesson.py | 1 | 21776 | # -*- coding: utf-8 -*-
# Copyright 2008, 2010, 2014, 2015 Richard Dymond ([email protected])
#
# This file is part of Pyskool.
#
# Pyskool is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Pyskool is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Pyskool. If not, see <http://www.gnu.org/licenses/>.
"""
Classes concerned with controlling what goes on during a lesson.
"""
import random
import re
from . import lines
from . import ai
class Lesson:
"""Controls the interaction between the teacher, the swot and Eric during a
lesson. The various actions required by the teacher and the swot during a
lesson - such as grassing on Eric for being absent, writing on the board,
and asking and answering questions - are defined by individual methods on
this class.
A new lesson is created by the swot when he sits down after being told to
by the teacher at the classroom doorway.
:type cast: :class:`~pyskool.cast.Cast`
:param cast: The cast.
:type swot: :class:`~pyskool.character.Character`
:param swot: The swot.
:type room: :class:`~pyskool.room.Room`
:param room: The classroom in which the lesson is taking place.
:type config: dict
:param config: Configuration parameters from the ini file.
"""
def __init__(self, cast, swot, room, config):
self.cast = cast
self.swot = swot
self.room = room
self.hitter_id = None
self.writer_id = None
self.teacher = None
self.qa_generator = None
self.qa_group = None
self.answer = None
self.asked_special = False
self.actor = None
self.swot_action = 'check_eric_initial'
self.teacher_action = None
self.base_action = 'tell_class_what_to_do'
self.base_location = None
self.base_direction = None
self.grassed = False
self.absence_message_ids = (lines.BE_PUNCTUAL, lines.STAY_IN_CLASS)
self.absence_index = 0
self.p_grass_for_hitting = config.get('GrassForHittingProbability', 0.140625)
self.p_lines_for_tales = config.get('LinesForTalesProbability', 0.328125)
self.p_write_on_board = config.get('EricsTeacherWriteOnBoardProbability', 0.28125)
self.p_qa_session = config.get('QASessionProbability', 0.90625)
def join(self, teacher, qa_generator, qa_group):
"""Make a teacher join the lesson. This method is called by the teacher
when he notices that the swot has sat down.
:type teacher: :class:`~pyskool.character.Character`
:param teacher: The teacher.
:type qa_generator: :class:`~pyskool.lesson.QAGenerator`
:param qa_generator: The question-and-answer generator to use.
:param qa_group: The Q&A group from which to choose questions and
answers for the teacher and the swot; if `None`, the
Q&A group will be chosen at random from those
available each time a question and answer is
generated.
"""
self.teacher = teacher
self.qa_generator = None
if random.random() < self.p_qa_session:
self.qa_generator = qa_generator
self.qa_group = qa_group
self.base_action = 'ask_question'
self.actor = self.swot
self.base_location = (teacher.x, teacher.y)
self.base_direction = teacher.direction
def next_swot_action(self):
"""Complete any actions required of the swot, and return the next
command to be executed by him, or `None` if it's not his turn to act.
"""
while self.actor is self.swot:
method = getattr(self, self.swot_action)
next_action = method()
if next_action:
return next_action
def check_eric_initial(self):
"""Make the swot tell the teacher that Eric is absent (if he is). This
method defines the swot's first action during a lesson. If Eric is
absent, the teacher's next action will be :meth:`fetch_eric`. The
swot's next action is set to :meth:`grass_for_hitting`.
:return: A :class:`~pyskool.ai.Say` command if Eric is absent,
otherwise `None`.
"""
self.teacher.set_home_room()
self.swot_action = 'grass_for_hitting'
if self.is_eric_absent():
self.teacher_action = 'fetch_eric'
return ai.Say(self.cast.get_absent_tale(self.teacher), True)
def grass_for_hitting(self):
"""Make the swot tell a tale about someone hitting him (possibly). This
method defines the swot's second action during a lesson. The teacher's
next action is set to :meth:`give_lines_for_hitting`. The swot's next
action is set to :meth:`grass_for_writing`.
:return: A :class:`~pyskool.ai.Say` command, or `None` if the swot
decides not to tell a tale.
"""
self.swot_action = 'grass_for_writing'
self.teacher_action = 'give_lines_for_hitting'
if random.random() < self.p_grass_for_hitting:
self.hitter_id, tale = self.cast.get_hit_tale(self.teacher)
return ai.Say(tale, True)
self.switch()
def grass_for_writing(self):
"""Make the swot tell a tale about someone writing on the blackboard
(if it was written on by Eric or the tearaway). This method defines the
swot's third action during a lesson. The teacher's next action is set
to :meth:`give_lines_for_writing`.
:return: A :class:`~pyskool.ai.Say` command, or `None` if the swot
decides not to tell a tale.
"""
self.grassed = True
self.teacher_action = 'give_lines_for_writing'
writer = self.room.get_blackboard_writer()
if writer:
self.writer_id, tale = self.cast.get_write_tale(writer.character_id, self.teacher)
if tale:
return ai.Say(tale, True)
self.switch()
def check_eric(self):
"""Make the swot tell the teacher that Eric is absent (if he is). If
Eric is absent, the teacher's next action will be :meth:`fetch_eric`.
:return: A :class:`~pyskool.ai.Say` command if Eric is absent,
otherwise `None`.
"""
if self.is_eric_absent():
self.teacher_action = 'fetch_eric'
return ai.Say(self.cast.get_absent_tale(self.teacher), True)
self.switch(self.base_action)
def answer_question(self):
"""Make the swot answer the teacher's question. The swot's next action
will be :meth:`check_eric`.
"""
self.swot_action = 'check_eric'
return ai.Say(self.cast.expand_title(self.answer, self.teacher))
def next_teacher_action(self):
"""Complete any actions required of the teacher, and return the next
command to be executed by him, or `None` if it's not his turn to act.
"""
while self.actor is self.teacher:
method = getattr(self, self.teacher_action)
next_action = method()
if next_action:
return next_action
def fetch_eric(self):
"""Make the teacher track down Eric if he is absent. The teacher may
first give lines to the swot for telling tales. If Eric is present by
the time this method is called (after the swot has finished telling the
teacher that Eric is not in class), the teacher will give lines to Eric
for being late (or for leaving early).
:return: A :class:`~pyskool.ai.FetchEric` command if Eric is still
absent after the swot has finished speaking, otherwise `None`.
"""
if random.random() < self.p_lines_for_tales:
self.teacher.give_lines(self.swot.character_id, lines.NO_TALES, True)
if self.is_eric_absent():
self.teacher_action = 'return_to_base'
self.teacher.reset_come_along_index()
self.absence_index = 1
return ai.FetchEric()
lines_message_id = self.absence_message_ids[self.absence_index]
self.absence_index = 1
self.teacher.give_lines(self.cast.eric.character_id, lines_message_id, True)
self.switch()
def return_to_base(self):
"""Make the teacher return to the classroom after fetching Eric. The
teacher's next action will be :meth:`ask_question` (if a
question-and-answer session was interrupted) or
:meth:`walk_up_or_down`.
:return: A :class:`~pyskool.ai.GoToXY` command.
"""
if (self.teacher.x, self.teacher.y) != self.base_location:
return ai.GoToXY(*self.base_location)
if self.room.has_blackboard() and not self.grassed:
if self.teacher.direction != self.base_direction:
# Turn teacher round before continuing
return ai.GoTowardsXY(self.teacher.x - self.teacher.direction, self.teacher.y)
else:
self.switch()
return
if self.qa_generator:
if self.teacher.direction != self.base_direction:
# Turn teacher round before continuing
return ai.GoTowardsXY(self.teacher.x - self.teacher.direction, self.teacher.y)
else:
self.teacher_action = 'ask_question'
else:
self.teacher_action = 'walk_up_or_down'
if self.teacher.direction != self.base_direction:
# Instead of turning round to face in the base direction only
# to turn around again immediately, start walking up and down
# now
return ai.GoToXY(self.teacher.x + self.teacher.get_blackboard_pace_distance() * self.teacher.direction, self.teacher.y)
def give_lines(self, victim_id, message_id):
"""Make the teacher give lines to the swot for telling a tale, or give
lines to the subject of the swot's tale.
:param victim_id: The ID of the subject (may be `None`, in which case
no lines will be given).
:param message_id: The ID of the lines message.
"""
if victim_id:
victim_present = self.room.contains(self.cast.get(victim_id))
punish_swot = random.random() < self.p_lines_for_tales
if punish_swot or not victim_present:
victim_id, message_id = self.swot.character_id, lines.NO_TALES
self.teacher.give_lines(victim_id, message_id, True)
def give_lines_for_hitting(self):
"""Make the teacher give lines to the swot for telling a tale about
being hit, or give lines to the subject of the tale. If the swot has
not told such a tale, nothing happens.
"""
self.give_lines(self.hitter_id, lines.NO_HITTING)
self.switch()
def give_lines_for_writing(self):
"""Make the teacher give lines to the swot for telling a tale about the
blackboard being written on, or give lines to the subject of the tale.
If the swot has not told such a tale, nothing happens. The teacher's
next action is set to :meth:`wipe_board`.
"""
self.give_lines(self.writer_id, lines.NO_WRITING)
self.teacher_action = 'wipe_board'
def wipe_board(self):
"""Make the teacher wipe the board (if there is one). The teacher's
next action will be :meth:`walk_to_board`.
:return: A :class:`~pyskool.ai.WipeBoard` command if there is a
blackboard, `None` otherwise.
"""
self.absence_index = 1
if self.room.has_blackboard():
self.teacher_action = 'walk_to_board'
return ai.WipeBoard()
self.teacher_action = self.base_action
def walk_to_board(self):
"""Make the teacher walk to the middle of the blackboard (after having
wiped it). The teacher's next action will be :meth:`write_on_board`.
:return: A :class:`~pyskool.ai.GoToXY` command.
"""
self.teacher_action = 'write_on_board'
return ai.GoToXY(self.teacher.x - self.teacher.get_blackboard_backtrack() * self.teacher.direction, self.teacher.y)
def write_on_board(self):
"""Make the teacher write on the blackboard (possibly). The teacher's
next action will be the base action for this lesson (either
:meth:`tell_class_what_to_do` or :meth:`ask_question`).
:return: A :class:`~pyskool.ai.WriteOnBoard` command if the teacher
chooses to write, otherwise `None`.
"""
self.base_location = (self.teacher.x, self.teacher.y)
self.base_direction = self.teacher.direction
self.teacher_action = self.base_action
if random.random() < self.p_write_on_board:
return ai.WriteOnBoard(self.teacher.get_blackboard_message())
def ask_question(self):
"""Make the teacher ask a question. The swot's next action is set to
:meth:`answer_question`.
:return: A :class:`~pyskool.ai.Say` command.
"""
self.swot_action = 'answer_question'
return ai.Say(self.get_question(), True)
def tell_class_what_to_do(self):
"""Make the teacher tell the class what to do (as opposed to starting a
question-and-answer session with the swot). The teacher's next action
(and base action for the remainder of the lesson) will be
:meth:`walk_up_or_down`.
:return: A :class:`~pyskool.ai.TellClassWhatToDo` command.
"""
self.base_action = 'walk_up_or_down'
self.teacher_action = 'walk_up_or_down'
return ai.TellClassWhatToDo()
def walk_up_or_down(self):
"""Make the teacher walk up or down in front of the blackboard. This
action is used during a lesson with no question-and-answer session.
The swot's next action is set to :meth:`check_eric`.
:return: A :class:`~pyskool.ai.WalkUpOrDown` command.
"""
self.switch('check_eric')
return ai.WalkUpOrDown()
def get_question(self):
"""Return the next question for the teacher to ask in a
question-and-answer session with the swot.
"""
if not self.asked_special and self.qa_generator.has_special_question():
self.asked_special = True
question, self.answer = self.qa_generator.prepare_special_qa()
else:
question, self.answer = self.qa_generator.prepare_qa(self.qa_group)
return question
def switch(self, action=None):
"""Switch turns between the actors in this lesson (the teacher and the
swot).
:param action: The next action (method to execute) for the next actor;
if `None`, the next action (which may have already been
set) is unchanged.
"""
if self.actor is self.swot:
self.actor = self.teacher
self.teacher_action = action or self.teacher_action
else:
self.actor = self.swot
self.swot_action = action or self.swot_action
def finished_speaking(self):
"""Indicate that the current actor (teacher or swot) has finished
speaking.
"""
self.switch()
def is_eric_absent(self):
"""Return whether Eric is absent from the classroom in which this
lesson is taking place.
"""
return not self.room.contains(self.cast.eric)
class QAGenerator:
"""Generates questions and answers for the teacher and swot to use during a
lesson. Every teacher gets his own generator to keep; it is built before
the game starts.
"""
def __init__(self):
self.questions = {}
self.answers = {}
self.qa_pairs = {}
self.special_qa_group = None
self.remaining = {}
def set_special_group(self, qa_group, index):
"""Set the Q&A group to use for the teacher's special question (if
there is one).
:param qa_group: The name of the Q&A group.
:param index: The index (0 or 1) of the special answer in the Q&A pair.
"""
self.special_qa_group = qa_group
self.special_qa_pair_index = index
def initialise_special_answer(self):
"""Initialise the answer to the teacher's special question (if there is
one). The special answer is chosen at random from the Q&A pairs in the
Q&A group of the special question.
"""
if self.special_qa_group:
self.special_answer_index = random.randrange(len(self.qa_pairs[self.special_qa_group]))
return self.qa_pairs[self.special_qa_group][self.special_answer_index][self.special_qa_pair_index]
def has_special_question(self):
"""Return whether the teacher has a special question. A special
question is one to which the answer must be seen written on a
blackboard by the teacher to make him reveal his safe combination
letter.
"""
return self.special_qa_group is not None
def add_question(self, question_id, qa_group, text):
"""Add a question to a Q&A group.
:param question_id: The ID of the question.
:param qa_group: The name of the Q&A group to add the question to.
:param text: The text of the question.
"""
q = self.questions.setdefault(qa_group, [])
q.append((question_id, text))
def add_answer(self, question_id, text):
"""Add an answer to a question.
:param question_id: The ID of the question.
:param text: The text of the answer.
"""
self.answers[question_id] = text
def add_qa_pair(self, qa_group, word1, word2):
"""Add a Q&A pair to a Q&A group.
:param qa_group: The name of the Q&A group.
:param word1: The first word of the pair.
:param word2: The second word of the pair.
"""
if qa_group not in self.qa_pairs:
self.qa_pairs[qa_group] = []
self.remaining[qa_group] = []
self.qa_pairs[qa_group].append((word1, word2))
def _expand(self, template, word1, word2):
"""Return a message template with any Q&A pair macro expanded to the
appropriate word from the pair.
:param template: The message template.
:param word1: The first word of the pair.
:param word2: The second word of the pair.
"""
if template:
return template.replace('$1', word1).replace('$2', word2)
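        # e.g. (illustrative) _expand('Whose $1 is called $2?', 'dog', 'Rover')
        # returns 'Whose dog is called Rover?'.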
def prepare_special_qa(self):
"""Prepare the teacher's special question and answer (if any).
:return: A 2-tuple containing the question and the answer.
"""
word1, word2 = self.qa_pairs[self.special_qa_group][self.special_answer_index]
return self.special_question, self._expand(self.special_answer, word1, word2)
def prepare_qa(self, qa_group=None):
"""Prepare a randomly chosen question and answer.
:param qa_group: The Q&A group from which to choose the question and
answer; if `None`, the Q&A group will be chosen at
random from those available.
:return: A 2-tuple containing the question and the answer.
"""
if not qa_group:
qa_group = random.choice(list(self.questions.keys()))
question_id, question = random.choice(self.questions.get(qa_group, [(None, None)]))
answer = self.answers.get(question_id)
if not self.remaining[qa_group]:
self.remaining[qa_group] = list(range(len(self.qa_pairs[qa_group])))
random_index = self.remaining[qa_group].pop(random.randrange(len(self.remaining[qa_group])))
word1, word2 = self.qa_pairs[qa_group][random_index]
return self._expand(question, word1, word2), self._expand(answer, word1, word2)
class AssemblyMessageGenerator:
"""Generates messages to be delivered by whoever is conducting assembly.
There is only one assembly message generator, shared by the whole skool.
"""
def __init__(self):
self.templates = []
self.groups = {}
def add_message_template(self, template):
"""Add `template` to the generator's collection of message templates.
"""
self.templates.append(template)
def add_word(self, group_id, word):
"""Add a word to the generator's collection.
:param group_id: The name of the group to add the word to.
:param word: The word.
"""
group = self.groups.setdefault(group_id, [])
group.append(word)
def generate_message(self):
"""Return a message based on a randomly chosen template and containing
randomly chosen phrases.
"""
message = random.choice(self.templates)
while True:
            search = re.search(r'\$[A-Z0-9]+', message)
if not search:
break
marker = search.group()
group_id = marker[1:]
if group_id in self.groups:
rep = random.choice(self.groups[group_id])
message = message.replace(marker, rep)
else:
message = message.replace(marker, group_id)
return message
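# e.g. (illustrative) a template 'NOW $GROUP WILL $ACTION' has each $-marker
# replaced by a random word from the matching group; an unregistered marker
# such as '$FOO' degrades to the literal text 'FOO'.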
| gpl-3.0 | 4,834,191,393,522,992,000 | 41.03861 | 135 | 0.618204 | false |
RoboPython/Pykemon | game.py | 1 | 12823 | # Copyright (c) 2010 Brian Gordon
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys, pygame, config, maps, sprites, item
from pygame.locals import *
from xml import sax
tilemap = "map2.tmx"
game = None
class Game:
def main(self):
global tilemap
def draw_layer():
"""This block of code is used twice, the first time to draw each tile of each background
layer, and then (after the foreground has been drawn) to draw each tile of each occluding
layer. Rather than have duplicate code, I put it in a local function definition and let the many
variables fall through. The foreground uses its own code because it deals with multi-frame
sprites rather static tiles and it only needs to loop through the list of items."""
#The player's position determines which tiles to draw. Additionally, if the player is still
#moving, the trailing row/column one unit behind him should also be drawn.
for row in range(0 - (y_tile_sliding and player.facing[1] == 1), config.tiles_visible_y + (y_tile_sliding and player.facing[1] == -1)):
for col in range(0 - (x_tile_sliding and player.facing[0] == 1), config.tiles_visible_x + (x_tile_sliding and player.facing[0] == -1)):
#Blit the appropriate area from a given layer of the internal representation of the
#game world onto the screen surface buffer.
self.screen.blit(self.tmxhandler.image[layer][row + clamped.top][col + clamped.left],
(col * config.tiles_size + (x_tile_sliding * player.sliding * player.facing[0]), row * config.tiles_size + (y_tile_sliding * player.sliding * player.facing[1]) - 16))
pygame.init()
self.screen = pygame.display.set_mode((config.tiles_visible_x * config.tiles_size, config.tiles_visible_y * config.tiles_size - 16))
pygame.display.set_caption("Pokemon Ginger")
#We use the sax library to parse tile maps, paths, and metadata out of the game world XML files.
parser = sax.make_parser()
self.tmxhandler = maps.TMXHandler()
parser.setContentHandler(self.tmxhandler)
parser.parse(tilemap)
#Special handling for active objects in the game world. All people must have a path property.
for i in self.tmxhandler.items:
if(i.type == "person"):
#Build the path out of the parsed properties list for the map
i.path = [int(n) for n in i.properties["path"].split(',')]
#These parameters of regular tilesets are specified in the XML world files, but there are no XML files for
#player sprites so they have to be hard coded somewhere.
player = item.Item(sprites.Sprite("sprites/boy.png", 32, 64, Color("0x00c6c6")), Rect(11, 34, 1, 1), "player")
#Add the player to the items list so that he collides properly and is painted as part of the foreground.
self.tmxhandler.items.append(player)
#Everything works even with an even number of visible rows/columns. The player simply isn't centered on the screen surface.
mid_x, mid_y = (config.tiles_visible_x - 1) / 2, (config.tiles_visible_y - 1) / 2
self.map_edges = pygame.Rect(0, 0, self.tmxhandler.columns, self.tmxhandler.lines)
#Calculate which tiles should be visible around the player. The clamped view makes sure that the camera doesn't
#go outside of the world borders.
viewport = Rect(player.position.left - mid_x, player.position.top - mid_y, config.tiles_visible_x, config.tiles_visible_y)
clamped = viewport.clamp(self.map_edges)
#moving represents the direction key pressed down right now
#If you want to know if the player is moving, player.facing and player.sliding should be
#used instead, because the direction key could be released or changed during movement.
moving = (0, 0)
player.facing = (0, 1)
#Maps keyboard constants to movement vectors
moves = {K_UP : (0, -1), K_DOWN : (0, 1), K_LEFT : (-1, 0) , K_RIGHT : (1, 0)}
#Is the frame limiter currently disabled?
turbo = 0
#These take on values of 0, -1, or 1 depending on whether the tiles are currently sliding in that
#direction. This is different from moving because it takes into account the window clamping around the edges.
x_tile_sliding, y_tile_sliding = 0, 0
#There is a single frame for standing still. While sliding, the item alternates between the standing still
#frame and the current walking frame (which gets toggled every step so that the legs alternate)
animation_cutoffs = (config.tiles_size / 2)
clk = pygame.time.Clock()
#Are we currently recording?
recording = False
frame = 0
capture_run = 0
#Main game loop
while 1:
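            # Map-transition hack: standing on tile (11, 33) re-parses the
            # world from map3.tmx; the matching check at the bottom of the
            # loop resets tilemap to map.tmx afterwards.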
print "x position is:" + str(player.position.x) +" y position is:" +str(player.position.y)
if player.position.x == 11 and player.position.y == 33:
print "Yay"
tilemap = "map3.tmx"
print tilemap
parser = sax.make_parser()
self.tmxhandler = maps.TMXHandler()
parser.setContentHandler(self.tmxhandler)
parser.parse(tilemap)
else:
print tilemap
self.screen.fill(Color(255,0,255))
for event in pygame.event.get():
if event.type == QUIT:
return
if event.type == KEYDOWN:
if event.key in moves.keys():
moving = moves[event.key]
if event.key is K_SPACE:
turbo = True #Hold space to disable frame limiter
if event.key is K_r:
recording = True #Hold r to record screen output to lots of PNGs
elif event.type == KEYUP:
#We don't want to stop if the player presses and releases a movement key while actually
#moving in a different direction, so the movement direction is checked against the pressed key.
if event.key in moves.keys() and moves[event.key] == moving:
moving = (0, 0)
if event.key is K_SPACE:
turbo = False #Restores frame limiter when space is released
if event.key is K_r:
recording = False
capture_run += 1
#Note that the player's movement is being handled here rather than below with the rest of the items.
if player.sliding == 0 and moving != (0,0):
if(player.move(moving)): #This will return false if the player runs into an obstacle.
viewport.move_ip(moving) #Naively move the viewport with the player
clamped = viewport.clamp(self.map_edges) #Move the viewport back into the game world if it has just left it.
#These calculations determine whether the player should move freely near the borders or be fixed in
#the center of a scrolling background when distant from the borders. Note that, for example, the player
#can be close to the top of the world and able to move freely vertically, but still be fixed in the
#horizontal direction.
x_tile_sliding, y_tile_sliding = 0, 0
if viewport.left == clamped.left and viewport.move(-1 * moving[0],0).left == viewport.move(-1 * moving[0],0).clamp(self.map_edges).left:
x_tile_sliding = 1
if viewport.top == clamped.top and viewport.move(0,-1 * moving[1]).top == viewport.move(0,-1 * moving[1]).clamp(self.map_edges).top:
y_tile_sliding = 1
#Handles movement for all persons every frame by changing direction as necessary to match the path in the XML file.
for i in self.tmxhandler.items:
if(i.type == "person"): #Note that boulders are never called to go(), only bump_notify()
i.go()
#First we need to pick out all layers not marked as occluding and draw them in order. This creates the
#background which items move on top of.
occluding = []
for layer in range(0, self.tmxhandler.layers):
if "occlude" in self.tmxhandler.properties[layer+1]: #+1 because 0 is the map props
occluding.append(layer)
else:
draw_layer() #Lots and lots of free variables fall through here
#Now draw each item (including the player) depending on whether it is visible in the camera viewport.
for i in self.tmxhandler.items:
#An item's sliding is set to tiles_size every time it moves, and decremented by 4 pixels per frame until
#it reaches 0, at which point it has reached its new position. Note that player's sliding value isn't
#changed until after the occluding layer has been drawn. This is necessary because if the viewport is changed
#before the items are drawn, they will jump back 4 pixels at the end of the sliding motion.
if i is not player and i.sliding > 0:
i.sliding -= 4
#Check if the item is visible within 3 tiles around the viewport and if so draw it. The view must be expanded
#by three tiles because of the worst case: while an item is sliding to the right away from the player, the player
#moves left. In the future I might add a check that allows inflating only by 2 tiles and a directional inflate
#depending on which way the player is moving.
if clamped.inflate(3,3).contains(i.position):
self.screen.blit(i.sprite.facing[i.facing][0 if i.sliding < animation_cutoffs else 1 + i.toggle],
((i.position.left - clamped.left) * config.tiles_size - (i.sliding * i.facing[0]) + (x_tile_sliding * player.sliding * player.facing[0]),
(i.position.top - clamped.top - 1) * config.tiles_size - (i.sliding * i.facing[1]) + (y_tile_sliding * player.sliding * player.facing[1]) - 16))
#Finally, draw each occluding layer that was skipped before. This layer will draw on top of items.
for layer in occluding:
draw_layer()
#And now that the drawing operations are finished, update the player's sliding value.
if player.sliding > 0:
player.sliding -= 4
#Swap display buffers, and wait if it's been less than 1/30 of a second since the last frame.
pygame.display.update()
if not turbo:
clk.tick(30)
if(recording):
#Export the screen surface to a png file for making videos. This can use a lot of disk space if you record for more
#than a few seconds. PNG compression kills the frame rate, but the file sizes are much more manageable.
pygame.image.save(self.screen, "cap/" + "run" + str(capture_run).zfill(2) + "_f" + str(frame).zfill(5) + ".png")
frame += 1
if player.position.x == 11 and player.position.y == 33:
tilemap = "map.tmx"
def run():
global game
game = Game()
game.main() | gpl-2.0 | 1,011,627,586,971,395,000 | 56.765766 | 188 | 0.613507 | false |
pwwang/bioprocs | bioprocs/scripts/imtherapy/pNetMHC.py | 1 | 5476 | from pathlib import Path
from diot import Diot
from bioprocs.utils import shell2 as shell, logger
from bioprocs.utils.parallel import Parallel, distributeList
{%from os import path%}
{%from pyppl.utils import always_list%}
infile = {{i.infile | quote}}
afile = {{i.afile | ?path.isfile | =readlines | !always_list | repr}}
outfile = Path({{o.outfile | quote}})
allfile = {{o.outfile | prefix | @append: '.all' | @append: ext(o.outfile) | quote}}
netmhc = {{args.netmhc | quote}}
isfa = {{args.isfa | repr}}
nthread = {{args.nthread | repr}}
params = {{args.params | repr}}
tmpdir = {{args.tmpdir | repr}}
lens = {{args.lens | ?isinstance: list | =@join: ',' | quote}}
shell.load_config(netmhc = netmhc)
# support HLA-A*03:79 -> HLA-A0379
alleles = [allele.strip().replace('*', '').replace(':', '') for allele in afile if 'HLA-' in allele]
valid_alleles = shell.netmhc(listMHC = True).splitlines()
for i in range(nthread):
shell.mkdir(p = outfile.parent.joinpath('threads', str(i+1)))
# split infile
if isfa:
seqs = [line.strip() for line in shell.grep('>', infile).splitlines() if line.strip()]
seqs_to_threads = distributeList(seqs, nthread)
seqs = {}
for i, tseqs in enumerate(seqs_to_threads):
for tseq in tseqs:
seqs[tseq] = i
handlers = {}
lastindex = None
with open(infile) as fin:
for line in fin:
if line.startswith('>'):
seq = line.strip()
index = seqs[seq]
if index not in handlers:
handlers[index] = open(outfile.parent.joinpath('threads', str(index+1), 'peptides.txt'), 'w')
handlers[index].write(line)
lastindex = index
elif lastindex is None:
raise IndexError('Sequence tag not found!')
else:
handlers[lastindex].write(line)
for handler in handlers.values():
if not handler.closed:
handler.close()
else:
with open(infile) as fin:
peptides = fin.readlines()
	pep_to_threads = distributeList(peptides, nthread)
for i, pep in enumerate(pep_to_threads):
with open(outfile.parent.joinpath('threads', str(i+1), 'peptides.txt'), 'w') as fpep:
fpep.write(''.join(pep))
"""
PARAMETER DEFAULT VALUE DESCRIPTION
[-a filename] HLA-A0201 HLA allele name
[-f filename] Input file (by default in FASTA format)
[-p] 0 Switch on if input is a list of peptides (Peptide format)
[-l string] 9 Peptide length (multiple lengths separated by comma e.g. 8,9,10)
[-s] 0 Sort output on decreasing affinity
[-rth float] 0.500000 Threshold for high binding peptides (%Rank)
[-rlt float] 2.000000 Threshold for low binding peptides (%Rank)
[-listMHC] 0 Print list of alleles included in netMHC
[-xls] 0 Save output to xls file
[-xlsfile filename] NetMHC_out.xls File name for xls output
[-t float] -99.900002 Threshold for output
[-thrfmt filename] $NETMHC/data/threshold/%s.thr Format for threshold filenames
[-hlalist filename] $NETMHC/data/allelelist File with covered HLA names
[-rdir filename] $NETMHC Home directory for NetMHC
[-tdir filename] $TMPDIR Temporary directory (Default $$)
[-syn filename] $NETMHC/data/synlists/%s.synlist Format of synlist file
[-v] 0 Verbose mode
[-dirty] 0 Dirty mode, leave tmp dir+files
[-inptype int] 0 Input type [0] FASTA [1] Peptide
[-version filename] $NETMHC/data/version File with version information
[-w] 0 w option for webface
"""
# common options
params.tdir = tmpdir
params.l = lens
def do_one(allele, ifile, ithread):
ps = params.copy()
ps.p = not isfa
ps.f = ifile
ps.a = allele
ps._out = outfile.parent.joinpath('threads', str(ithread+1), allele + '.out.txt')
ps._debug = True
shell.netmhc(**ps)
args = []
for allele in alleles:
	if allele not in valid_alleles:
		logger.warning('Not a valid allele: %s', allele)
		continue
	for i in range(nthread):
if outfile.parent.joinpath('threads', str(i+1), 'peptides.txt').is_file():
args.append((allele, outfile.parent.joinpath('threads', str(i+1), 'peptides.txt'), i))
if not args:
raise ValueError('No valid alleles found.')
para = Parallel(nthread = nthread)
para.run(do_one, args)
# merge results
with open(outfile, 'w') as fout, open(allfile, 'w') as fall:
header_written = False
pos_written = False
for i, ofile in enumerate(outfile.parent.joinpath('threads').glob('*/*.out.txt')):
with open(ofile) as fo:
for line in fo:
line = line.strip()
if not line or line.startswith('-'):
continue
if header_written and line.startswith('#'):
continue
if i == 0 and line.startswith('#'):
fout.write(line + '\n')
fall.write(line + '\n')
else:
header_written = True
parts = line.split()
if parts and parts[0] == 'pos' and i == 0 and not pos_written:
fout.write('\t'.join(parts) + '\n')
fall.write('\t'.join(parts) + '\n')
pos_written = True
elif not parts or parts[0] in ('pos', 'Protein'):
continue
elif len(parts) > 14:
del parts[-2]
fout.write('\t'.join(parts) + '\n')
fall.write('\t'.join(parts) + '\n')
else:
fall.write('\t'.join(parts) + '\n')
| mit | -5,011,356,954,891,710,000 | 37.56338 | 107 | 0.60756 | false |
minogame/bilinear_tensorflow | src/model.py | 1 | 14668 | import tensorflow as tf
import tensorflow.contrib.layers as cl
from gridconv_v3 import gridconv2d
from deformconv import deformconv2d
from utils import log_weights
# # # # # # # # # CIFAR # # # # # # # # #
# The network is built based on 'NCHW'.
def normal_cnn_cifar(name, reuse=False):
@log_weights
def normal_cnn(x, is_training):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
with tf.variable_scope(name, reuse=reuse):
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_1')
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_2')
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv2_1')
x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv2_2')
x = cl.conv2d(x, num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv2_3')
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv3_1')
x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv3_2')
x = cl.conv2d(x, num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv3_3')
x = tf.reduce_mean(x, [2, 3])
x = cl.fully_connected(x, num_outputs=10, activation_fn=None)
return x
return normal_cnn
# The network is built based on 'NCHW'.
def trash_cnn_cifar(name, reuse=False):
@log_weights
def trash_cnn(x, is_training):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
with tf.variable_scope(name, reuse=reuse):
# x = gridconv2d(x, scope='Conv1_1', num_outputs=32, kernel_size=[3, 3], stride=1,
# activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
# normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
# x = gridconv2d(x, scope='Conv1_2', num_outputs=32, kernel_size=[3, 3], stride=1,
# activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
# normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
# x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW')
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_1')
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_2')
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = gridconv2d(x, scope='Conv2_1', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = gridconv2d(x, scope='Conv2_2', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = gridconv2d(x, scope='Conv2_3', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = gridconv2d(x, scope='Conv3_1', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = gridconv2d(x, scope='Conv3_2', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = gridconv2d(x, scope='Conv3_3', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = tf.reduce_mean(x, [2, 3])
x = cl.fully_connected(x, num_outputs=10, activation_fn=None)
return x
return trash_cnn
# The network is built based on 'NCHW'.
def deform_cnn_cifar(name, reuse=False):
@log_weights
def deform_cnn(x, is_training):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
with tf.variable_scope(name, reuse=reuse):
# x = gridconv2d(x, scope='Conv1_1', num_outputs=32, kernel_size=[3, 3], stride=1,
# activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
# normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
# x = gridconv2d(x, scope='Conv1_2', num_outputs=32, kernel_size=[3, 3], stride=1,
# activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
# normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
# x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW')
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_1')
x = cl.conv2d(x, num_outputs=32, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='Conv1_2')
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = deformconv2d(x, scope='Conv2_1', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = deformconv2d(x, scope='Conv2_2', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = deformconv2d(x, scope='Conv2_3', num_outputs=64, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = cl.max_pool2d(x, kernel_size=3, stride=2, data_format='NCHW', padding='SAME')
x = deformconv2d(x, scope='Conv3_1', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = deformconv2d(x, scope='Conv3_2', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = deformconv2d(x, scope='Conv3_3', num_outputs=128, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
x = tf.reduce_mean(x, [2, 3])
x = cl.fully_connected(x, num_outputs=10, activation_fn=None)
return x
return deform_cnn
# # # # # # # # # CIFAR RESNET # # # # # # # # #
def residual(name, l, is_training, increase_dim=False, first=False):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
shape = l.get_shape().as_list()
in_channel = shape[1]
if increase_dim:
out_channel = in_channel * 2
stride1 = 2
else:
out_channel = in_channel
stride1 = 1
with tf.variable_scope(name) as scope:
b1 = l if first else tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
c1 = cl.conv2d(b1, num_outputs=out_channel, kernel_size=[3, 3], stride=stride1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='conv1')
c2 = cl.conv2d(c1, num_outputs=out_channel, kernel_size=[3, 3], stride=1,
activation_fn=None, padding='SAME', data_format='NCHW',
scope='conv2')
if increase_dim:
l = cl.avg_pool2d(l, kernel_size=2, stride=2, data_format='NCHW')
l = tf.pad(l, [[0, 0], [in_channel // 2, in_channel // 2], [0, 0], [0, 0]])
l = c2 + l
return l
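# Shape sketch (assuming an NCHW input of (B, 16, 32, 32)): with increase_dim=True the
# conv branch uses stride 2 and 32 output channels, giving (B, 32, 16, 16); the shortcut
# is average-pooled to (B, 16, 16, 16) and zero-padded by in_channel // 2 = 8 channels
# on each side so both branches match before the addition.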
def grid_residual(name, l, is_training, increase_dim=False, first=False, one_c=False):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
shape = l.get_shape().as_list()
in_channel = shape[1]
if increase_dim:
out_channel = in_channel * 2
stride1 = 2
else:
out_channel = in_channel
stride1 = 1
with tf.variable_scope(name) as scope:
b1 = l if first else tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
c1 = gridconv2d(b1, scope='conv1', num_outputs=out_channel, kernel_size=[3, 3], stride=stride1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW', one_c=one_c,
normalizer_fn=cl.batch_norm, normalizer_params=bn_params)
c2 = gridconv2d(c1, scope='conv2', num_outputs=out_channel, kernel_size=[3, 3], stride=1,
activation_fn=None, padding='SAME', data_format='NCHW', one_c=one_c,
normalizer_fn=None, normalizer_params=None)
if increase_dim:
l = cl.avg_pool2d(l, kernel_size=2, stride=2, data_format='NCHW')
l = tf.pad(l, [[0, 0], [in_channel // 2, in_channel // 2], [0, 0], [0, 0]])
l = c2 + l
return l
# def resnet(name, n):
# def cnn(x, is_training):
# bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
# with tf.variable_scope(name) as scope:
# l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1,
# activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
# normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
# scope='conv0')
# l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
# l = residual('res1.0', l, is_training, first=True)
# for k in range(1, n):
# l = residual('res1.{}'.format(k), l, is_training)
# # 32,c=16
# l = grid_residual('res2.0', l, is_training, increase_dim=True)
# for k in range(1, n):
# l = grid_residual('res2.{}'.format(k), l, is_training)
# # 16,c=32
# l = grid_residual('res3.0', l, is_training, increase_dim=True)
# for k in range(1, n):
# l = grid_residual('res3.' + str(k), l, is_training)
# l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
# # 8,c=64
# l = tf.reduce_mean(l, [2, 3])
# l = cl.fully_connected(l, num_outputs=10, activation_fn=None)
# return l
# return cnn
def resnet(name, n, grid=False):
def cnn(x, is_training):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
with tf.variable_scope(name) as scope:
l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='conv0')
l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
l = residual('res1.0', l, is_training, first=True)
for k in range(1, n):
l = residual('res1.{}'.format(k), l, is_training)
# 32,c=16
l = residual('res2.0', l, is_training, increase_dim=True)
for k in range(1, n):
l = residual('res2.{}'.format(k), l, is_training)
# 16,c=32
l = residual('res3.0', l, is_training, increase_dim=True)
for k in range(1, n):
l = residual('res3.' + str(k), l, is_training)
l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
# 8,c=64
l = tf.reduce_mean(l, [2, 3])
l = cl.fully_connected(l, num_outputs=10, activation_fn=None)
return l
def gridcnn_c5(x, is_training):
bn_params = {'is_training':is_training, 'fused': True, 'data_format': 'NCHW'}
with tf.variable_scope(name) as scope:
l = cl.conv2d(x, num_outputs=16, kernel_size=[3, 3], stride=1,
activation_fn=tf.nn.relu, padding='SAME', data_format='NCHW',
normalizer_fn=cl.batch_norm, normalizer_params=bn_params,
scope='conv0')
l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
l = residual('res1.0', l, is_training, first=True)
for k in range(1, n):
l = residual('res1.{}'.format(k), l, is_training)
# 32,c=16
l = residual('res2.0', l, is_training, increase_dim=True)
for k in range(1, n):
l = grid_residual('res2.{}'.format(k), l, is_training, one_c=False)
# 16,c=32
l = residual('res3.0', l, is_training, increase_dim=True)
for k in range(1, n):
l = grid_residual('res3.{}'.format(k), l, is_training, one_c=False)
l = tf.nn.relu(cl.batch_norm(l, is_training=is_training, fused=True, data_format='NCHW'))
# 8,c=64
l = tf.reduce_mean(l, [2, 3])
l = cl.fully_connected(l, num_outputs=10, activation_fn=None)
return l
return gridcnn_c5 if grid else cnn
| gpl-3.0 | -2,439,001,560,730,617,300 | 45.469256 | 108 | 0.619239 | false |
jandebleser/django-wiki | src/wiki/conf/settings.py | 1 | 10199 | from __future__ import absolute_import, unicode_literals
import bleach
from django.conf import settings as django_settings
from django.core.files.storage import default_storage
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
#: Should urls be case sensitive?
URL_CASE_SENSITIVE = getattr(django_settings, 'WIKI_URL_CASE_SENSITIVE', False)
# Non-configurable (at the moment)
WIKI_LANGUAGE = 'markdown'
#: The editor class to use -- maybe a 3rd party or your own...? You can always
#: extend the built-in editor and customize it!
EDITOR = getattr(
django_settings,
'WIKI_EDITOR',
'wiki.editors.markitup.MarkItUp')
#: Whether to use Bleach or not. It's not recommended to turn this off unless
#: you know what you're doing and you don't want to use the other options.
MARKDOWN_SANITIZE_HTML = getattr(
django_settings,
'WIKI_MARKDOWN_SANITIZE_HTML',
True)
#: Arguments for the Markdown instance, for instance a list of extensions to
#: use.
#: See: https://pythonhosted.org/Markdown/extensions/index.html
#:
#: To set a custom title for TOCs::
#:
#:     WIKI_MARKDOWN_KWARGS = {'extension_configs': {'toc': {'title': _('Contents of this article')}}}
MARKDOWN_KWARGS = {
'extensions': [
'footnotes',
'attr_list',
'smart_strong',
'def_list',
'tables',
'abbr',
'sane_lists',
],
'extension_configs': {
'toc': {
'title': _('Table of Contents')}},
}
MARKDOWN_KWARGS.update(getattr(django_settings, 'WIKI_MARKDOWN_KWARGS', {}))
_default_tag_whitelists = bleach.ALLOWED_TAGS + [
'figure',
'figcaption',
'br',
'hr',
'p',
'div',
'img',
'pre',
'span',
'table',
'thead',
'tbody',
'th',
'tr',
'td',
'dl',
'dt',
'dd',
] + ['h{}'.format(n) for n in range(8)]
#: List of allowed tags in Markdown article contents.
MARKDOWN_HTML_WHITELIST = _default_tag_whitelists
MARKDOWN_HTML_WHITELIST += (
getattr(
django_settings,
'WIKI_MARKDOWN_HTML_WHITELIST',
[]
)
)
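# Example project-level override (illustrative; the tag names are hypothetical):
#
#     WIKI_MARKDOWN_HTML_WHITELIST = ['video', 'source']
#
# Tags listed there are appended to the default Bleach whitelist built above.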
_default_attribute_whitelist = bleach.ALLOWED_ATTRIBUTES
for tag in MARKDOWN_HTML_WHITELIST:
if tag not in _default_attribute_whitelist:
_default_attribute_whitelist[tag] = []
_default_attribute_whitelist[tag].append('class')
_default_attribute_whitelist[tag].append('id')
_default_attribute_whitelist['img'].append('src')
_default_attribute_whitelist['img'].append('alt')
#: Dictionary of allowed attributes in Markdown article contents.
MARKDOWN_HTML_ATTRIBUTES = _default_attribute_whitelist
MARKDOWN_HTML_ATTRIBUTES.update(
getattr(
django_settings,
'WIKI_MARKDOWN_HTML_ATTRIBUTE_WHITELIST',
{}
)
)
#: Allowed inline styles in Markdown article contents, default is no styles
#: (empty list)
MARKDOWN_HTML_STYLES = (
getattr(
django_settings,
'WIKI_MARKDOWN_HTML_STYLES',
[]
)
)
_project_defined_attrs = getattr(
django_settings,
'WIKI_MARKDOWN_HTML_ATTRIBUTE_WHITELIST',
False)
# If styles are allowed but no custom attributes are defined, we allow styles
# for all kinds of tags
if MARKDOWN_HTML_STYLES and not _project_defined_attrs:
MARKDOWN_HTML_ATTRIBUTES['*'] = 'style'
#: This slug is used in URLPath if an article has been deleted. The children of the
#: URLPath of that article are moved to lost and found. They keep their permissions
#: and all their content.
LOST_AND_FOUND_SLUG = getattr(
django_settings,
'WIKI_LOST_AND_FOUND_SLUG',
'lost-and-found')
#: When True, this blocks new slugs that resolve to non-wiki views, stopping
#: users creating articles that conflict with overlapping URLs from other apps.
CHECK_SLUG_URL_AVAILABLE = getattr(
django_settings,
'WIKI_CHECK_SLUG_URL_AVAILABLE',
True)
#: Do we want to log IPs of anonymous users?
LOG_IPS_ANONYMOUS = getattr(django_settings, 'WIKI_LOG_IPS_ANONYMOUS', True)
#: Do we want to log IPs of logged in users?
LOG_IPS_USERS = getattr(django_settings, 'WIKI_LOG_IPS_USERS', False)
####################################
# PERMISSIONS AND ACCOUNT HANDLING #
####################################
# NB! None of these callables need to handle anonymous users as they are treated
# in separate settings...
#: A function returning True/False if a user has permission to
#: read contents of an article + plugins
#: Relevance: viewing articles and plugins
CAN_READ = getattr(django_settings, 'WIKI_CAN_READ', None)
#: A function returning True/False if a user has permission to
#: change contents, ie add new revisions to an article
#: Often, plugins also use this
#: Relevance: editing articles, changing revisions, editing plugins
CAN_WRITE = getattr(django_settings, 'WIKI_CAN_WRITE', None)
#: A function returning True/False if a user has permission to assign
#: permissions on an article
#: Relevance: changing owner and group membership
CAN_ASSIGN = getattr(django_settings, 'WIKI_CAN_ASSIGN', None)
#: A function returning True/False if the owner of an article has permission to change
#: the group to a user's own groups
#: Relevance: changing group membership
CAN_ASSIGN_OWNER = getattr(django_settings, 'WIKI_ASSIGN_OWNER', None)
#: A function returning True/False if a user has permission to change
#: read/write access for groups and others
CAN_CHANGE_PERMISSIONS = getattr(
django_settings,
'WIKI_CAN_CHANGE_PERMISSIONS',
None)
#: Specifies if a user has access to soft deletion of articles
CAN_DELETE = getattr(django_settings, 'WIKI_CAN_DELETE', None)
#: A function returning True/False if a user has permission to change
#: moderate, ie. lock articles and permanently delete content.
CAN_MODERATE = getattr(django_settings, 'WIKI_CAN_MODERATE', None)
#: A function returning True/False if a user has permission to create
#: new groups and users for the wiki.
CAN_ADMIN = getattr(django_settings, 'WIKI_CAN_ADMIN', None)
#: Treat anonymous (non logged in) users as the "other" user group
ANONYMOUS = getattr(django_settings, 'WIKI_ANONYMOUS', True)
#: Globally enable write access for anonymous users, if true anonymous users will be treated
#: as the others_write boolean field on models.Article.
ANONYMOUS_WRITE = getattr(django_settings, 'WIKI_ANONYMOUS_WRITE', False)
#: Globally enable create access for anonymous users
#: Defaults to ANONYMOUS_WRITE.
ANONYMOUS_CREATE = getattr(
django_settings,
'WIKI_ANONYMOUS_CREATE',
ANONYMOUS_WRITE)
#: Default setting to allow anonymous users upload access (used in
#: plugins.attachments and plugins.images).
ANONYMOUS_UPLOAD = getattr(django_settings, 'WIKI_ANONYMOUS_UPLOAD', False)
#: Sign up, login and logout views should be accessible
ACCOUNT_HANDLING = getattr(django_settings, 'WIKI_ACCOUNT_HANDLING', True)
#: Signup allowed? If it's not allowed, logged in superusers can still access
#: the signup page to create new users.
ACCOUNT_SIGNUP_ALLOWED = ACCOUNT_HANDLING and getattr(
django_settings, 'WIKI_ACCOUNT_SIGNUP_ALLOWED', True
)
if ACCOUNT_HANDLING:
LOGIN_URL = reverse_lazy("wiki:login")
LOGOUT_URL = reverse_lazy("wiki:logout")
SIGNUP_URL = reverse_lazy("wiki:signup")
else:
LOGIN_URL = getattr(django_settings, "LOGIN_URL", "/")
LOGOUT_URL = getattr(django_settings, "LOGOUT_URL", "/")
SIGNUP_URL = getattr(django_settings, "WIKI_SIGNUP_URL", "/")
##################
# OTHER SETTINGS #
##################
#: Maximum amount of children to display in a menu before going "+more"
#: NEVER set this to 0 as it will wrongly inform the user that there are no
#: children and for instance that an article can be safely deleted.
SHOW_MAX_CHILDREN = getattr(django_settings, 'WIKI_SHOW_MAX_CHILDREN', 20)
#: User Bootstrap's select widget. Switch off if you're not using Bootstrap!
USE_BOOTSTRAP_SELECT_WIDGET = getattr(
django_settings,
'WIKI_USE_BOOTSTRAP_SELECT_WIDGET',
True)
#: dottedname of class used to construct urlpatterns for wiki.
#:
#: Default is wiki.urls.WikiURLPatterns. To customize urls or view handlers,
#: you can derive from this.
URL_CONFIG_CLASS = getattr(
django_settings,
'WIKI_URL_CONFIG_CLASS',
'wiki.urls.WikiURLPatterns')
#: Search view - dotted path denoting where the search view Class is located
SEARCH_VIEW = getattr(
django_settings,
'WIKI_SEARCH_VIEW',
'wiki.views.article.SearchView'
if 'wiki.plugins.haystack' not in django_settings.INSTALLED_APPS
else
'wiki.plugins.haystack.views.HaystackSearchView'
)
#: Seconds of timeout before renewing article cache. Articles are automatically
#: renewed whenever an edit occurs but article content may be generated from
#: other objects that are changed.
CACHE_TIMEOUT = getattr(django_settings, 'WIKI_CACHE_TIMEOUT', 600)
#: Choose the Group model to use. Defaults to django's auth.Group
GROUP_MODEL = getattr(django_settings, 'WIKI_GROUP_MODEL', 'auth.Group')
###################
# SPAM PROTECTION #
###################
#: Maximum allowed revisions per hour for any given user or IP
REVISIONS_PER_HOUR = getattr(django_settings, 'WIKI_REVISIONS_PER_HOUR', 60)
#: Maximum allowed revisions per minute for any given user or IP
REVISIONS_PER_MINUTES = getattr(
django_settings,
'WIKI_REVISIONS_PER_MINUTES',
5)
#: Maximum allowed revisions per hour for any given user or IP
REVISIONS_PER_HOUR_ANONYMOUS = getattr(
django_settings,
'WIKI_REVISIONS_PER_HOUR_ANONYMOUS',
10)
#: Maximum allowed revisions per hour for any given user or IP
REVISIONS_PER_MINUTES_ANONYMOUS = getattr(
django_settings,
'WIKI_REVISIONS_PER_MINUTES_ANONYMOUS',
2)
#: Number of minutes for looking up REVISIONS_PER_MINUTES and
#: REVISIONS_PER_MINUTES_ANONYMOUS
REVISIONS_MINUTES_LOOKBACK = getattr(
django_settings,
'WIKI_REVISIONS_MINUTES_LOOKBACK',
2)
###########
# STORAGE #
###########
#: Django Storage backend to use for images, attachments etc.
STORAGE_BACKEND = getattr(
django_settings,
'WIKI_STORAGE_BACKEND',
default_storage)
#: Use Sendfile
USE_SENDFILE = getattr(django_settings, 'WIKI_ATTACHMENTS_USE_SENDFILE', False)
| gpl-3.0 | 8,362,595,489,785,991,000 | 31.275316 | 92 | 0.702226 | false |
EricsonWillians/PyGameWidgets | examples/long_text_label.py | 1 | 1651 | import sys
sys.path.append("..")
import pygame
import core
import widgets
# Text label example.
WINDOW_WIDTH = 1024
WINDOW_HEIGHT = 728
pygame.init()
pygame.font.init()
screen = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
clock = pygame.time.Clock()
FPS = 60
running = True
if __name__ == "__main__":
panel = widgets.Panel(core.Grid((3, 10), (WINDOW_WIDTH, WINDOW_HEIGHT)), None, None, (0, 0))
panel.set_color((155, 155, 155, 255))
text = widgets.TextLabel(panel, (1, 2), core.Text(
"""
Lorem ipsum dolor sit amet,
consectetur adipiscing elit,
sed do eiusmod tempor incididunt
ut labore et dolore magna aliqua.
Ut enim ad minim veniam, quis
nostrud exercitation ullamco laboris
nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in
reprehenderit in voluptate velit
esse cillum dolore eu fugiat
nulla pariatur. Excepteur sint
occaecat cupidatat non proident,
sunt in culpa qui officia deserunt
mollit anim id est laborum.""", 13, core.BLACK)
)
text.set_color(core.WHITE) # This is the color of the widget, not to be confused with the color of its text.
text.set_span((0, 5))
text.set_border(core.BLACK, 8)
text.set_margin(10) # Altering the margin because of the border.
def redraw():
pygame.display.flip()
screen.fill((0, 0, 0))
panel.draw(screen)
text.draw(screen)
while (running):
clock.tick(FPS)
redraw()
for e in pygame.event.get():
if e.type == pygame.QUIT:
sys.exit()
| gpl-3.0 | -231,955,241,589,300,640 | 29.018182 | 112 | 0.625681 | false |
hchauvet/beampy | beampy/statics/default_theme.py | 1 | 4858 | # -*- coding: utf-8 -*-
# Default theme of Beampy
# Main keys of the dict should be the name of the beampy modules or class
# Each modules default options need to be defined here!
THEME = {}
THEME['document'] = {
'format': 'html5', #could be svg // pdf // html5
'width': 800,
'height': 600,
'optimize': True,
'resize_raster':True,
'cache': True,
'guide': False,
'text_box': False,
'html': {
'background_color': 'black'
},
'external_app': {"inkscape": "auto",
"dvisvgm": "auto",
"pdfjoin": "auto",
"video_encoder": 'auto',
"pdf2svg": "auto",
"epstopdf": "auto"}
}
THEME['slide'] = {
'background': "white",
'layout': None, #Could be changed to a function that will decorate the current slide with elements
#this can be used to create a specific layout for a theme
#Could also be a string that refer to the key of the LAYOUT[key] dict if you need several layouts
#for a presentation
}
THEME['text'] = {
'size':20,
'font':'CMR',
'color':'#000000',
'align':'',
'x':'center',
'y':'auto',
'width':None,
'usetex':True,
'va': '',
'opacity':1,
'extra_packages': []
}
THEME['title'] = {
'size': 28,
'font': 'CMR',
'color': 'ForestGreen',
'x': {'shift':0.5, 'unit':'cm'},
'y': {'shift':1.25, 'unit':'cm'},
'reserved_y': '1.5cm',
'align': '',
'va': 'baseline',
'opacity': 1
}
THEME['link'] = {
'fill':THEME['title']['color']
}
THEME['maketitle'] = {
'title_size':30,
'title_color':THEME['title']['color'],
'author_size':THEME['text']['size'],
'author_color':'black',
'date_size':15,
'date_color':'#888888',
'subtitle_color':'#888888',
'subtitle_size':20,
    'template': None # here you can point to a function "def mytitle(titlein, author, subtitle, date, args)" that is executed in maketitle to replace the default template
}
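# Sketch of a custom title template matching the signature mentioned above
# (hypothetical; adapt to your own presentation):
#   def mytitle(titlein, author, subtitle, date, args):
#       ...  # place beampy elements for the title slide here
#   THEME['maketitle']['template'] = mytitle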
THEME['tableofcontents'] = {
'width': None,
'height': None,
'x': 25,
'y': 'center',
'section_yoffset': 50,
'subsection_xoffset': 20,
'subsection_yoffset': 10,
'section_style': 'round',
'subsection_style': None,
'section_decoration_color': THEME['title']['color'],
'section_decoration_size': 13,
'section_number_color': 'white',
'section_text_color': THEME['title']['color'],
'subsection_text_color': THEME['text']['color'],
'subsection_decoration_color': 'gray',
'subsection_decoration_size': 13/2,
'hidden_opacity': 0.2
}
THEME['video'] = {
'width': None,
'height': None,
'x': 'center',
'y': 'auto',
'autoplay': False,
'loop' : False,
'control': True,
'still_image_time': 0.0,
'embedded': True
}
THEME['animatesvg'] = {
'start': 0,
'end': 'end',
'x': 'center',
'y': 'auto',
'width': None,
'fps': 25,
'autoplay': False
}
THEME['tikz'] = {
'x': 0,
'y': 0,
'tikz_header': None,
'tex_packages': None,
'latex_pre_tikzpicture': None,
'figure_options': None,
'figure_anchor': 'top_left'
}
THEME['figure'] = {
'x':'center',
'y':'auto',
'width':None,
'height':None
}
THEME['cite'] = {
'x':'center',
'y':'auto',
'color':THEME['title']['color'],
'size':16,
'reference_delimiter' : ';',
'brackets' : ('[',']'),
}
THEME['bibliography'] = {
"max_author" : 3,
"initials" : False,
"journal" : False,
"and" : r'\&',
'et_al' : 'et al.',
'initial_delimiter' : '.',
}
THEME['itemize'] = {
'x':'center',
'y':'auto',
'item_style':'bullet',
'item_spacing':'+1cm',
'item_indent':'0cm',
'item_color':THEME['title']['color'],
'text_color':THEME['text']['color'],
'width':None,
'item_layers': None
}
THEME['line'] = {
'x':'center',
'y':'auto',
'color': THEME['title']['color'],
'linewidth': '2px',
'opacity': 1
}
THEME['rectangle'] = {
'x':'center',
'y':'auto',
'color': THEME['title']['color'],
'linewidth': '2px',
'opacity': 1,
'edgecolor': THEME['text']['color'],
'height': '10px',
'width': '%spx'%(THEME['document']['width']),
'rx':0,
'ry':0,
'svgfilter': None,
'svgclip': None
}
THEME['circle'] = {
'x':'center',
'y':'auto',
'color': THEME['title']['color'],
'linewidth': '1px',
'opacity': 1,
'edgecolor': THEME['title']['color'],
'r': '3px'
}
THEME['box'] = {
'rounded': 10,
'linewidth': 1,
'color': THEME['title']['color'],
'head_height': None,
'shadow': False,
'background_color': 'white',
'title_color': 'white',
'title_align': 'left',
'title_xoffset': 10,
'title_size': THEME['text']['size'],
'auto_height_margin': 15,
'title_height_margin': 10
}
| gpl-3.0 | -2,950,168,247,424,549,000 | 21.490741 | 176 | 0.527995 | false |
bolkedebruin/airflow | airflow/providers/google/cloud/operators/local_to_gcs.py | 1 | 3617 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains operator for uploading local file to GCS.
"""
import warnings
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.utils.decorators import apply_defaults
class LocalFilesystemToGCSOperator(BaseOperator):
"""
Uploads a file to Google Cloud Storage.
Optionally can compress the file for upload.
:param src: Path to the local file. (templated)
:type src: str
:param dst: Destination path within the specified bucket, it must be the full file path
to destination object on GCS, including GCS object (ex. `path/to/file.txt`) (templated)
:type dst: str
:param bucket: The bucket to upload to. (templated)
:type bucket: str
:param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud
Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead.
:type google_cloud_storage_conn_id: str
:param mime_type: The mime-type string
:type mime_type: str
:param delegate_to: The account to impersonate, if any
:type delegate_to: str
:param gzip: Allows for file to be compressed and uploaded as gzip
:type gzip: bool
"""
template_fields = ('src', 'dst', 'bucket')
@apply_defaults
def __init__(self,
src,
dst,
bucket,
gcp_conn_id='google_cloud_default',
google_cloud_storage_conn_id=None,
mime_type='application/octet-stream',
delegate_to=None,
gzip=False,
*args,
**kwargs):
super().__init__(*args, **kwargs)
if google_cloud_storage_conn_id:
warnings.warn(
"The google_cloud_storage_conn_id parameter has been deprecated. You should pass "
"the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3)
gcp_conn_id = google_cloud_storage_conn_id
self.src = src
self.dst = dst
self.bucket = bucket
self.gcp_conn_id = gcp_conn_id
self.mime_type = mime_type
self.delegate_to = delegate_to
self.gzip = gzip
def execute(self, context):
"""
Uploads the file to Google Cloud Storage
"""
hook = GCSHook(
google_cloud_storage_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to)
hook.upload(
bucket_name=self.bucket,
object_name=self.dst,
mime_type=self.mime_type,
filename=self.src,
gzip=self.gzip,
)
| apache-2.0 | -7,390,544,169,797,167,000 | 36.28866 | 104 | 0.646945 | false |
robertz23/code-samples | python scripts and tools/list_intersection.py | 1 | 1200 | """
List intersection: Finds intersections between various lists
"""
def check_intersection(first_list, second_list):
#We use set builtin function to find the intersection between lists
return set(first_list).intersection(second_list)
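# Illustrative example (not from the original script):
#   check_intersection(['1', '2', '3'], ['3', '4', '5']) -> {'3'}
# set.intersection keeps only the elements present in both lists.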
def create_lists(line):
#receive a line from the file containing ascending numbers
#each line is of the form 'n,n,n;n,n,n' where n is a number
#and the semi-colon separates the lists
first, second = line.split(';')
#Make sure that we have a list of numbers and not numbers and commas
first = [x for x in first.split(',')]
second = [x for x in second.split(',')]
#look for the intersection
intersected_number = check_intersection(first, second)
if intersected_number:
        #sort numerically; a plain string sort would order e.g. '10' before '2'
        intersected_numbers_sorted = sorted(intersected_number, key=int)
        print(','.join(intersected_numbers_sorted))
    else:
        print("")
if __name__ == '__main__':
#l = ["1,2,3;3,4,5", "1,2,3;0,4,5", "7,8,9;8,9,10,11,12"]
l = ["1,2,3,4;4,5,6", "20,21,22;45,46,47", "7,8,9;8,9,10,11,12"]
for eachLine in l:
create_lists(eachLine)
| mit | 7,565,693,730,159,266,000 | 32.333333 | 86 | 0.645 | false |
emanuil-tolev/yugi-sync | yugisync.py | 1 | 1574 | """
Usage: python yugisync.py pull|push
Backs up everything needed to restore your current game options,
deck and card list to a git repository. If the repository is uploaded
to a service like GitHub.com, this will also upload the changes there.
Example: python yugisync.py push on your own laptop and then python
yugisync.py pull on a friend's laptop will give you your card list,
decks, replays and game options to play with. Yugi-sync is always
careful, so it will never overwrite your friend's data. Obviously in
this case you and your friend can't have different decks and options.
Yugi-sync allows you both to play with and contribute to the same
deck.
"""
# later...
'''Watches all important files for changes and backs everything needed
to restore your deck whenever the game files change (e.g. you export
a deck, you win cards, you change the game options).'''
__author__ = 'Emanuil Tolev'
def export():
# Copy the whole Common dir to the backup git repo
# copy yugi, kaiba and joey deck to backup repo
# export registry keys (be mindful of 64 vs 32 bit systems) to
# backup repo
pass
def restore():
backup_local()
# delete current Common dir
# restore Common dir from backup repo
# restore decks from backup repo
# import registry keys
pass
def push():
# git push the backup repo
pass
def pull():
# git pull the backup repo
pass
def backup_local():
# copy current Common dir to "OLD "
# copy yugi,kaiba,joey decks to .old
pass | mit | 7,542,078,908,345,114,000 | 27.735849 | 70 | 0.69568 | false |
hjoliver/cylc | cylc/flow/xtriggers/echo.py | 1 | 1143 | # THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def echo(*args, **kwargs):
"""An xtrigger function that prints its arguments and never succeeds.
This may be a useful aid to understanding how xtriggers work. Try returning
True (success) and some results dict to pass on to dependent tasks.
    Returns:
        tuple: (False, {})
"""
print("echo: ARGS:", args)
print("echo: KWARGS:", kwargs)
return False, {}
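# A succeeding variant, as the docstring suggests (hypothetical sketch, not part of Cylc):
#   def echo_true(*args, **kwargs):
#       return True, {'echoed': list(args)}
# The results dict is what gets passed on to dependent tasks.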
| gpl-3.0 | -1,211,451,900,558,556,000 | 37.1 | 79 | 0.72091 | false |
joseguerrero/sembrando | src/presentacion/librerias/popups.py | 1 | 13668 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pygame
from .texto import texto
from .textoci import texto2
from .imgfondo import fondo
TEXT_COLOR2 = (0,0,0)
COLOR_RED = (213, 0, 0)
BAR_COLOR = (151, 0, 172)
TEXT_COLOR = (255, 255, 255)
class Button(pygame.sprite.Sprite):
def __init__(self, identificador, parent, text, fondo = 0, ancho = 500):
"""
Método inicializador de la clase.
@param identificador: Variable usada para identificar al botón.
@type identificador: str
@param parent: Instancia del gestor de pantallas.
@type parent: Manejador
@param text: Variable que indica el texto que tendrá el botón.
@type text: str
@param fondo: Indica si el fondo del botón sera con imagen o sin imagen (en desarrollo).
@type fondo: bool
@param ancho: Indica el ancho del botón. Es usado para cuadrar el texto centrado.
@type ancho: int
"""
pygame.sprite.Sprite.__init__(self)
self.ancho = ancho
self.parent =parent
tipografia = pygame.font.match_font("FreeSans", False, False)
font = pygame.font.Font(tipografia, parent.config.t_fuente)
self.identificador = identificador
varios = "../imagenes/png/varios/"
if fondo == 0:
texto1 = font.render(text, 1, TEXT_COLOR)
textorect = texto1.get_rect()
texto2 = font.render(text, 1, COLOR_RED)
self.img_fondo = pygame.image.load(varios + "img-boton.png")
self.img_fondo2 = pygame.image.load(varios + "img-boton.png")
imgrect = self.img_fondo.get_rect()
textorect.center = imgrect.center[0],imgrect.center[1]+imgrect.center[1]/3
self.img_fondo.blit (texto1, textorect)
self.img_fondo2.blit (texto2,textorect)
self.rect = self.img_fondo.get_rect()
self.image= self.img_fondo
if fondo == 1:
txt = texto(0,0,text,parent.config.t_fuente,"texto_act",self.ancho)
self.rect = pygame.Rect(0,0,self.ancho,txt.ancho_final)
image_texto = pygame.Surface((self.ancho,txt.ancho_final))
image_texto.fill((255,255,255))
image_texto.set_colorkey((255,255,255))
for i in txt.img_palabras:
image_texto.blit(i.image, i.rect)
self.image = image_texto
self.img_fondo = image_texto
self.img_fondo2 = image_texto
def cambiar_status(self, status):
"""
Dibuja un efecto en los botones cambiando la imagen de fondo (descontinuado)
"""
if status:
self.image = self.img_fondo2
else:
self.image = self.img_fondo
def mover_boton(self,x,y):
"""
Cambia la posición del botón.
"""
self.rect.center = (x,y)
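# Illustrative usage (sketch; `manager` stands for an initialized Manejador instance
# and `group` for any pygame.sprite.Group -- neither name comes from this module):
#   boton = Button('ok', manager, 'Aceptar')
#   boton.mover_boton(400, 300)   # center the button at (400, 300)
#   group.add(boton)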
class PopUp(pygame.sprite.Sprite):
def __init__(self, parent , texto1, palabra_boton , imagenes , grupo, tipo = 0 ,px=512,py=281,tam =0):
"""
Método inicializador de la clase.
@param parent: Instancia del gestor de pantallas.
@type parent: Manejador
@param texto1: Indica el texto que será mostrado en la ventana emergente.
@type texto1: str
@param palabra_boton: Indica la palabra que tendrá el botón. Solo es usado en caso de ser tipo = 0,
de ser distinto de 0 toma el valor de una cadena y no será asignado.
@type palabra_boton: str
@param imagenes: Indica la(s) imagen(es) que mostrará la ventana emergente. En caso de tipo = 2
no es un campo necesario, en caso de tipo = 1 debe ser una superficie y en caso de tipo = 0
el parámetro debe ser una tupla con dos imágenes, la posición 0 sera la imagen
que estará al lado del texto, mientras que la posición 1 sera la imagen que estará debajo del texto.
@type imagenes: pygame.Surface, tuple
@param grupo: Representa un grupo de Sprites donde será agregado el sprite con la imagen y su rectángulo.
@type grupo: pygame.sprite.Group
@param tipo: Determina el tipo de ventana emergente, si toma el valor de 2 la ventana emergente solo
tomara el parámetro texto1 (los demás parámetros deben ser introducidos), en caso de tomar el valor 1
la ventana emergente tomara los parámetros texto1, imagenes y palabra_boton, mostrando una ventana
ordenada con texto justificado, un espacio a derecha donde sera ubicada la imagen (dicha imagen debe
tener máximo 1/3 del tamaño de la ventana emergente) y un botón centrado debajo del texto. En caso de
tomar valor 0 la ventana emergente sera similar a la anterior, con la diferencia que tendrá una imagen
más ubicada en la parte izquierda del botón y debajo del texto.
@type tipo: int
"""
pygame.sprite.Sprite.__init__(self)
self.parent = parent
self.sprite = pygame.sprite.Sprite()
varios = "../imagenes/png/varios/"
self.texto = pygame.Surface
self.tipo = tipo
self.arreglo_botones=[]
self.grupo = grupo
self.click = -1
self.activo = 0
self.tam = 0
if tipo == 0:
self.img_fondo = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
self.sprite.rect = self.sprite.image.get_rect()
x=30
y=30
self.texto = texto(x, y,texto1[0], parent.config.t_fuente , "texto_act" ,(self.sprite.rect.width*2/3 ))
self.area_texto = pygame.Rect(x,y,self.sprite.rect.w*2/3,self.texto.ancho_final)
self.area_imagenes = pygame.Rect((self.sprite.rect.w*2/3)+80, y, self.sprite.rect.w/3, self.texto.ancho_final)
self.parent = parent
self.boton = Button(0,self.parent,palabra_boton)
self.boton.mover_boton( self.sprite.rect.width/2, self.area_texto.h + x*2 + self.boton.rect.h / 2 )
self.boton_rect = pygame.Rect(self.boton.rect.x , self.boton.rect.y , self.boton.rect.width , self.boton.rect.height)
self.sprite.image = fondo(self.sprite.rect.w, self.boton.rect.y+self.boton.rect.h+x,5 ).return_imagen()
self.imagen = pygame.sprite.Sprite()
if type (imagenes)!= pygame.Surface :
self.imagen2 = pygame.sprite.Sprite()
self.imagen.image = imagenes[0]
self.imagen.rect =self.imagen.image.get_rect()
self.imagen.rect.center =(self.sprite.rect.w*2/3 +(self.sprite.rect.w/3)/2 , self.area_imagenes.h/2 + self.boton_rect.h/2 )
self.imagen2.image = imagenes[1]
self.imagen2.rect = self.imagen.image.get_rect()
self.imagen2.rect.left = x
self.imagen2.rect.y = self.area_texto.h+40
self.sprite.image.blit(self.imagen2.image , self.imagen2.rect)
else:
self.imagen.image = imagenes
self.imagen.rect =self.imagen.image.get_rect()
self.imagen.rect.center =(self.sprite.rect.w*2/3 +(self.sprite.rect.w/3)/2 , self.area_imagenes.h/2 + self.boton_rect.h/2 )
if self.imagen.rect.y < 5:
self.imagen.rect.y = 6
for i in self.texto.img_palabras:
self.sprite.image.blit(i.image, i.rect)
self.sprite.image.blit(self.boton.image , self.boton.rect)
self.sprite.image.blit(self.imagen.image , self.imagen.rect)
self.sprite.rect.center = (px, py)
self.boton_rect.center = (self.sprite.rect.x + self.sprite.rect.width/2, self.sprite.rect.y + self.area_texto.h + x*2 + self.boton.rect.h / 2)
if tipo == 1:
self.img_fondo = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
self.sprite.rect = self.sprite.image.get_rect()
x = 15
y = 15
o = 0
separacion = 30
tabulacion = 30
self.sprite.rect.w += tam
for i in texto1:
if o ==0:
self.texto = texto(x, y,i, parent.config.t_fuente , "texto_act" ,(self.sprite.rect.width )-x)
if o>0:
self.arreglo_botones.append(Button(o-1,self.parent,i,1,self.sprite.rect.w - x*2 -tabulacion))
o+=1
self.texto.rect = pygame.Rect(x,y,self.sprite.rect.w - 80, self.texto.ancho_final)
y+= self.texto.ancho_final + separacion
for i in self.arreglo_botones:
i.rect.x = x+tabulacion/2
i.rect.y = y
y+=i.rect.h + separacion/2
self.img_fondo = fondo(self.sprite.rect.w, y).return_imagen()
self.sprite.image = fondo(self.sprite.rect.w, y).return_imagen()
for i in self.texto.img_palabras:
self.sprite.image.blit(i.image,i.rect)
self.img_fondo.blit(i.image,i.rect)
self.sprite.rect.center = (px, py)
for i in self.arreglo_botones:
self.sprite.image.blit(i.image,i.rect)
self.img_fondo.blit(i.image,i.rect)
i.rect.x = self.sprite.rect.x + i.rect.x
i.rect.y = self.sprite.rect.y + i.rect.y
if tipo == 2:
self.sprite.image = pygame.image.load(varios + "cuadropop-up.png").convert_alpha()
self.sprite.rect = self.sprite.image.get_rect()
self.sprite.rect.w += tam
self.texto = texto2(15,15, texto1,parent.config.t_fuente, "intercalado", self.sprite.rect.w -15, imagenes)
self.sprite.image = fondo(self.sprite.rect.w, self.texto.ancho_final+30).return_imagen()
self.sprite.rect.h = self.texto.ancho_final+30
self.tam = self.texto.ancho_final+60
for i in self.texto.img_palabras:
self.sprite.image.blit(i.image, i.rect)
self.sprite.rect.center=(px,py)
def popup_estatus(self):
"""
Define cuando esta activa la ventana emergente.
@return: En caso de ser True la ventana esta activa, en caso contrario no estará activa.
@rtype: bool
"""
if self.activo:
return True
else:
return False
def redibujar_boton(self):
"""
Define el efecto de los botones en las ventanas emergentes (descontinuado)
"""
if self.tipo ==0:
self.sprite.image.blit(self.img_fondo,(self.boton.rect.x,self.boton.rect.y), self.boton.rect)
            self.sprite.image.blit(self.boton.image, self.boton.rect)
if self.tipo == 1:
self.sprite.image.blit(self.img_fondo,(0,0))
def agregar_grupo (self):
"""
Agrega el sprite de la ventana emergente al grupo de sprite pasado por parámetros al crear el objeto.
"""
self.activo=1
self.grupo.add(self.sprite)
def eliminar_grupo(self):
"""
Elimina el sprite de la ventana emergente del grupo de sprite pasado por parámetros al crear el objeto.
"""
self.activo = 0
self.grupo.remove(self.sprite)
def evaluar_click(self):
"""
Retorna el resultado del método manejador_eventos().
@return: True si se hizo click, de lo contrario False.
@rtype: bool
"""
return self.click
def manejador_eventos(self, eventos):
"""
Determina cuando se hace click al botón
(solo en caso de ser una ventana emergente de tipo 0 o 1)
@param eventos: Ultimo evento recibido.
@rtype: pygame.event.Event
"""
teclasPulsadas = pygame.key.get_pressed()
if self.tipo == 0:
if self.boton_rect.collidepoint(pygame.mouse.get_pos()):
if (eventos.type == pygame.MOUSEBUTTONDOWN and eventos.button == 1):
self.eliminar_grupo()
self.click = 0
return True
else:
self.click= -1
if teclasPulsadas[pygame.K_RETURN]:
self.eliminar_grupo()
self.click = 0
else:
self.click= -1
if self.tipo == 1:
for i in self.arreglo_botones:
if i.rect.collidepoint(pygame.mouse.get_pos()):
if eventos.type == pygame.MOUSEBUTTONDOWN and eventos.button == 1:
self.click = i.identificador
else:
self.click = -1
| gpl-3.0 | -4,537,053,745,970,449,400 | 47.508897 | 166 | 0.548823 | false |
SDAquaponics/Software | Arduino/PythonDriver/NotifyAQPServer.py | 1 | 2261 | #! /usr/bin/python
# Import dependencies
import serial, sys, sendmail, decimal
import socket
HOST = "raspberrypi"
PORT = 1981
DEBUG = False;
# Serial port settings
AT_PORT = "/dev/ttyATH0"
AT_BAUD = 115200
# Serial packet structure
MAGIC_1 = 0xA9
MAGIC_2 = 0xBD
MSG_TYPE_REQUEST = 0x00
MSG_TYPE_RESPONSE = 0x01
MSG_LEN_READ_SENSOR_VALUES = 0x01
OPAQUE_DATA_REQ = 0x00
EOD = 0xCB
CK_1 = 0xFD
CK_2 = 0xFF
ATMEGA_REQ_SENSOR_VALUES = [MAGIC_1, \
MAGIC_2, \
MSG_TYPE_REQUEST, \
MSG_LEN_READ_SENSOR_VALUES, \
OPAQUE_DATA_REQ,\
EOD,\
CK_1,\
CK_2]
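# For reference: the request frame above serializes on the wire as the byte
# sequence A9 BD 00 01 00 CB FD FF (magic, type, length, payload, EOD, checksum).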
# Sensor type definitions
SENSOR_TYPE_DISTANCE = 0x00
SENSOR_TYPE_TEMP = 0x01
SENSOR_TYPE_HUMIDITY = 0x02
CRITICAL_DISTANCE = 1.5
MSG_COUNTER = 0
def myprint(arg):
    if DEBUG:
        print(arg)
def aqp_get_sensor_values(port):
results = {}
nw_packet = ""
port.write(ATMEGA_REQ_SENSOR_VALUES)
myprint("Sent serial data to AT, waiting for response now...")
magic = port.read(2)
nw_packet = nw_packet + magic
if ord(magic[0]) == 0xA9 and ord(magic[1]) == 0xBD:
myprint("Magic numbers in response alright\n")
msg_type = port.read(1)
nw_packet = nw_packet + msg_type
if (ord(msg_type) == 0x01): # Check for response
msg_len = port.read(1)
nw_packet = nw_packet + msg_len
myprint ("Payload size is: %d" % ord(msg_len))
payload = port.read(ord(msg_len))
nw_packet = nw_packet + payload
ck1_ck2 = port.read(2)
nw_packet = nw_packet + ck1_ck2
myprint ("Ck1 = %X, Ck2 = %X" % (ord(ck1_ck2[0]), ord(ck1_ck2[1])))
else:
myprint("Invalid response packet\n")
else:
myprint ("Bad Magic, aborting...%X, %X\n" %(ord(magic[0]), ord(magic[1])))
return nw_packet
def init_serial_port():
ser = serial.Serial(AT_PORT, AT_BAUD)
return ser
if __name__ == "__main__":
ser = init_serial_port()
nw_packet = aqp_get_sensor_values(ser)
# Send the network packet to the AQPServer running on the RasPI
client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_sock.connect((HOST, PORT))
#print ord(nw_packet[0]), ord(nw_packet[1]), ord(nw_packet[2]), ord(nw_packet[3])
client_sock.send(nw_packet)
resp = client_sock.recv(1024)
client_sock.close()
myprint("Response from Server: %s\n" % resp)
| gpl-2.0 | 8,152,229,391,004,637,000 | 20.130841 | 82 | 0.655462 | false |
pave/pylint_flask_ext | flask_ext_clean.py | 1 | 5123 | """
Plugin for pylint that tells it about flask's extension classes.
"""
from pylint.utils import PyLintASTWalker
from logilab.astng import MANAGER
from logilab.astng import node_classes
def copy_node_info(src, dest):
"""Copy information from src to dest
Every node in the AST has to have line number information. Get
the information from the old stmt."""
for attr in ['lineno', 'fromlineno', 'tolineno',
'col_offset', 'parent']:
if hasattr(src, attr):
setattr(dest, attr, getattr(src, attr))
def splice(stmt, new_stmt):
"""Replace stmt with new_stmt in the AST
Also, copy useful information from stmt to new_stmt.
This assumes that stmt and new_stmt are of the same type and
define the same names.
"""
copy_node_info(stmt, new_stmt)
# Replace stmt with new_stmt in the sequence of statements that
# included stmt.
body = stmt.parent.child_sequence(stmt)
i = body.index(stmt)
stmt.parent.body[i] = new_stmt
# The names defined by an import statement are kept in stmt.names
# as a pair of (exported_name, as_name). For example, "import foo,
# bar as baz" corresponds to an import statement with
# names=[("foo", None), ("bar", "baz")].
#
# All names that stmt defined should now be defined by new_stmt.
for (name, as_name) in stmt.names:
stmt.parent.set_local(as_name or name, new_stmt)
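# Concrete effect (sketch): after visit_from() below calls splice(), a statement like
#   from flask.ext import login
# is replaced in the AST by
#   import flask_login as login
# with line numbers copied over and 'login' re-bound in the enclosing scope.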
class ImportRewriterVisitor(object):
"""AST Visitor that looks for flask.ext imports and rewrites them
This is something like the Visitor Pattern. For every Foo node in
the AST, PyLintASTWalker will call visit_foo."""
def __init__(self):
self.flask_ext_imported = {}
def visit_from(self, stmt):
"""Visit 'from foo import bar' statements"""
if stmt.modname == 'flask.ext':
# Replace 'from flask.ext import login' with
# 'import flask_login as login'.
new_stmt = node_classes.Import()
new_stmt.names = []
for pair in stmt.names:
(name, as_name) = pair
new_stmt.names.append(('flask_'+name, as_name or name))
splice(stmt, new_stmt)
if stmt.modname.startswith('flask.ext.'):
# Replace 'from flask.ext.wtf import Foo' with 'from
# flask_wtf import Foo'.
ext_name = stmt.modname[10:]
new_stmt = node_classes.From('flask_'+ext_name,
stmt.names, stmt.level)
splice(stmt, new_stmt)
def visit_import(self, stmt):
"""Visit 'import flask.ext.login' statements
Pretend that flask.ext did "import flask_login as login"."""
flask_ext_names = []
for (name, as_name) in stmt.names:
if name.startswith('flask.ext.'):
flask_ext_names.append(name[10:])
if not flask_ext_names:
# We visited an import that doesn't import any flask.ext stuff.
# Not our problem.
return
module = stmt.root()
if not self.flask_ext_imported.get(module):
# Make sure flask.ext is imported already at least once.
import_stmt = node_classes.Import()
import_stmt.names = [('flask.ext', None)]
import_stmt.fromlineno = import_stmt.tolineno = -1
import_stmt.parent = module
body = stmt.parent.child_sequence(stmt)
body.insert(0, import_stmt)
self.flask_ext_imported[module] = True
# Mark this as the first definition of flask
module.locals.setdefault('flask', []).insert(0, import_stmt)
# Change all names in this statement in-place.
for i, (modname, as_name) in enumerate(stmt.names):
if modname.startswith('flask.ext.'):
newmodname = modname.replace('flask.ext.', 'flask_')
stmt.names[i] = (newmodname, as_name)
# This import statement no longer defines "flask" (since it
# imports flask_foo), so remove it from locals
module.locals['flask'].remove(stmt)
# Fool the inference engine by pretending that flask.ext does
# an "import flask_foo as foo".
for name in flask_ext_names:
# Get real flask_ext
flask_ext_module = module.import_module('flask.ext')
values = flask_ext_module.locals.setdefault(name, [])
if values:
# We're fine, it's already been "imported"
continue
new_import = node_classes.Import()
new_import.tolineno = new_import.fromlineno = -1
new_import.parent = flask_ext_module
new_import.names = [('flask_'+name, name)]
# We don't actually need to be in the AST. We just want
# the inference engine to find us.
values.append(new_import)
def register(linter): #pylint: disable=W0613
"""Pylint calls this hook to actually activate the plugin"""
walker = PyLintASTWalker(linter)
walker.add_checker(ImportRewriterVisitor())
MANAGER.register_transformer(walker.walk)
| mit | 5,791,808,267,114,807,000 | 36.669118 | 75 | 0.608823 | false |
VRaviTheja/SDN-policy | flowgenerator/random_ports.py | 1 | 1532 | import random
def port_generator():
lim=1000
port_src_start = []
port_src_end = []
port_dst_start = []
port_dst_end = []
for i in range (0,lim):
m = random.randint(1, 200)
n = random.randint(1, 200)
if (m<n and m!=n):
port_src_start.append(m)
port_src_end.append(n)
elif (n<m and m!=n):
port_src_start.append(n)
port_src_end.append(m)
while(lim!=len(port_src_start)):
m = random.randint(1, 200)
n = random.randint(1, 200)
if (m<n and m!=n):
port_src_start.append(m)
port_src_end.append(n)
elif (n<m and m!=n):
port_src_start.append(n)
port_src_end.append(m)
for i in range (0,lim):
k = random.randint(1, 200)
p = random.randint(1, 200)
if (k<p and k!=p):
port_dst_start.append(k)
port_dst_end.append(p)
elif (p<k and k!=p):
port_dst_start.append(p)
port_dst_end.append(k)
while(lim!=len(port_dst_start)):
        k = random.randint(1, 200)
        p = random.randint(1, 200)
if (k<p and k!=p):
port_dst_start.append(k)
port_dst_end.append(p)
elif (p<k and k!=p):
port_dst_start.append(p)
port_dst_end.append(k)
return (port_src_start, port_src_end, port_dst_start, port_dst_end)
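# Illustrative call (sketch): each of the four returned lists has `lim` (1000)
# entries, paired so that every start port is strictly less than its end port:
#   src_lo, src_hi, dst_lo, dst_hi = port_generator()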
| apache-2.0 | 9,052,464,247,601,924,000 | 28.64 | 71 | 0.498695 | false |