repo_name (string, len 5-92) | path (string, len 4-232) | copies (string, 19 classes) | size (string, len 4-7) | content (string, len 721-1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
ospalh/kajongg-fork | src/hand.py | 1 | 30524 | # -*- coding: utf-8 -*-
"""Copyright (C) 2009-2012 Wolfgang Rohdewald <[email protected]>
kajongg is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
Read the user manual for a description of the interface to this scoring engine
"""
from util import logDebug
from meld import Meld, meldKey, meldsContent, Pairs, CONCEALED
from rule import Score, Ruleset
from common import elements, Debug
class UsedRule(object):
"""use this in scoring, never change class Rule.
If the rule has been used for a meld, pass it"""
def __init__(self, rule, meld=None):
self.rule = rule
self.meld = meld
def __str__(self):
result = self.rule.name
if self.meld:
result += ' ' + str(self.meld)
return result
def __repr__(self):
return 'UsedRule(%s)' % str(self)
class Hand(object):
"""represent the hand to be evaluated"""
# pylint: disable=R0902
# pylint we need more than 10 instance attributes
cache = dict()
misses = 0
hits = 0
@staticmethod
def clearCache(game):
"""clears the cache with Hands"""
if Debug.handCache and Hand.cache:
game.debug('cache hits:%d misses:%d' % (Hand.hits, Hand.misses))
Hand.cache.clear()
Hand.hits = 0
Hand.misses = 0
@staticmethod
def cached(ruleset, string, computedRules=None, robbedTile=None):
"""since a Hand instance is never changed, we can use a cache"""
if computedRules is not None and not isinstance(computedRules, list):
computedRules = list([computedRules])
cRuleHash = '&&'.join([rule.name for rule in computedRules]) if computedRules else 'None'
if isinstance(ruleset, Hand):
cacheId = id(ruleset.player or ruleset.ruleset)
else:
cacheId = id(ruleset)
cacheKey = hash((cacheId, string, robbedTile, cRuleHash))
cache = Hand.cache
if cacheKey in cache:
if cache[cacheKey] is None:
raise Exception('recursion: Hand calls itself for same content')
Hand.hits += 1
return cache[cacheKey]
Hand.misses += 1
cache[cacheKey] = None
result = Hand(ruleset, string,
computedRules=computedRules, robbedTile=robbedTile)
cache[cacheKey] = result
return result
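# Illustrative sketch (not part of the original file; names are placeholders):
# because results are keyed on (ruleset id, string, robbedTile, computedRules),
# repeating a call with identical arguments returns the cached Hand instance
# instead of re-evaluating the string:
#   h1 = Hand.cached(ruleset, handString)
#   h2 = Hand.cached(ruleset, handString)
#   assert h1 is h2  # second call is a cache hit and increments Hand.hits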
def __init__(self, ruleset, string, computedRules=None, robbedTile=None):
"""evaluate string using ruleset. rules are to be applied in any case.
ruleset can be Hand, Game or Ruleset."""
# silence pylint. This method is time critical, so do not split it into smaller methods
# pylint: disable=R0902,R0914,R0912,R0915
if isinstance(ruleset, Hand):
self.ruleset = ruleset.ruleset
self.player = ruleset.player
self.computedRules = ruleset.computedRules
elif isinstance(ruleset, Ruleset):
self.ruleset = ruleset
self.player = None
else:
self.player = ruleset
self.ruleset = self.player.game.ruleset
self.string = string
self.robbedTile = robbedTile
if computedRules is not None and not isinstance(computedRules, list):
computedRules = list([computedRules])
self.computedRules = computedRules or []
self.__won = False
self.mjStr = ''
self.mjRule = None
self.ownWind = None
self.roundWind = None
tileStrings = []
mjStrings = []
haveM = False
splits = self.string.split()
for part in splits:
partId = part[0]
if partId in 'Mmx':
haveM = True
self.ownWind = part[1]
self.roundWind = part[2]
mjStrings.append(part)
self.__won = partId == 'M'
elif partId == 'L':
if len(part[1:]) > 8:
raise Exception('last tile cannot complete a kong:' + self.string)
mjStrings.append(part)
else:
tileStrings.append(part)
if not haveM:
raise Exception('Hand got string without mMx: %s' % self.string)
self.mjStr = ' '.join(mjStrings)
self.__lastTile = self.__lastSource = self.__announcements = ''
self.__lastMeld = 0
self.__lastMelds = []
self.hiddenMelds = []
self.declaredMelds = []
self.melds = []
tileString = ' '.join(tileStrings)
self.bonusMelds, tileString = self.__separateBonusMelds(tileString)
self.tileNames = Pairs(tileString.replace(' ','').replace('R', ''))
self.tileNames.sort()
self.values = ''.join(x[1] for x in self.tileNames)
self.suits = set(x[0].lower() for x in self.tileNames)
self.lenOffset = self.__computeLenOffset(tileString)
self.dragonMelds, self.windMelds = self.__computeDragonWindMelds(tileString)
self.__separateMelds(tileString)
self.hiddenMelds = sorted(self.hiddenMelds, key=meldKey)
self.tileNamesInHand = sum((x.pairs for x in self.hiddenMelds), [])
self.sortedMeldsContent = meldsContent(self.melds)
if self.bonusMelds:
self.sortedMeldsContent += ' ' + meldsContent(self.bonusMelds)
self.usedRules = []
self.score = None
oldWon = self.won
self.__applyRules()
if len(self.lastMelds) > 1:
self.__applyBestLastMeld()
if self.won != oldWon:
# if not won after all, this might be a long hand.
# So we might even have to unapply meld rules and
# bonus points. Instead just recompute all again.
# This should only happen with scoring manual games
# and with scoringtest - normally kajongg would not
# let you declare an invalid mah jongg
self.__applyRules()
@property
def lastTile(self):
"""compute and cache, readonly"""
if self.__lastTile == '':
self.__setLastTile()
return self.__lastTile
@property
def lastSource(self):
"""compute and cache, readonly"""
if self.__lastTile == '':
self.__setLastTile()
return self.__lastSource
@property
def announcements(self):
"""compute and cache, readonly"""
if self.__lastTile == '':
self.__setLastTile()
return self.__announcements
@property
def lastMeld(self):
"""compute and cache, readonly"""
if self.__lastMeld == 0:
self.__setLastMeld()
return self.__lastMeld
@property
def lastMelds(self):
"""compute and cache, readonly"""
if self.__lastMeld == 0:
self.__setLastMeld()
return self.__lastMelds
@property
def won(self):
"""have we been modified since load or last save?
The "won" value is set to True when instantiating the hand,
according to the mMx in the init string. Later on, it may
only be cleared."""
return self.__won
@won.setter
def won(self, value):
"""must never change to True"""
value = bool(value)
assert not value
self.__won = value
self.string = self.string.replace(' M', ' m')
self.mjStr = self.mjStr.replace(' M', ' m')
def debug(self, msg, btIndent=None):
"""try to use Game.debug so we get a nice prefix"""
if self.player:
self.player.game.debug(msg, btIndent=btIndent)
else:
logDebug(msg, btIndent=btIndent)
def __applyRules(self):
"""find out which rules apply, collect in self.usedRules.
This may change self.won"""
self.usedRules = list([UsedRule(rule) for rule in self.computedRules])
if self.__hasExclusiveRules():
return
self.__applyMeldRules()
self.__applyHandRules()
if self.__hasExclusiveRules():
return
self.score = self.__totalScore()
# do the rest only if we know all tiles of the hand
if 'Xy' in self.string:
self.won = False # we do not know better
return
if self.won:
matchingMJRules = self.__maybeMahjongg()
if not matchingMJRules:
self.won = False
self.score = self.__totalScore()
return
self.mjRule = matchingMJRules[0]
self.usedRules.append(UsedRule(self.mjRule))
if self.__hasExclusiveRules():
return
self.usedRules.extend(self.matchingWinnerRules())
self.score = self.__totalScore()
else: # not self.won
assert self.mjRule is None
loserRules = self.__matchingRules(self.ruleset.loserRules)
if loserRules:
self.usedRules.extend(list(UsedRule(x) for x in loserRules))
self.score = self.__totalScore()
def matchingWinnerRules(self):
"""returns a list of matching winner rules"""
matching = self.__matchingRules(self.ruleset.winnerRules)
for rule in matching:
if (self.ruleset.limit and rule.score.limits >= 1) or 'absolute' in rule.options:
return [UsedRule(rule)]
return list(UsedRule(x) for x in matching)
def __hasExclusiveRules(self):
"""if we have one, remove all others"""
exclusive = list(x for x in self.usedRules if 'absolute' in x.rule.options)
if exclusive:
self.usedRules = exclusive
self.score = self.__totalScore()
self.won = self.__maybeMahjongg()
return bool(exclusive)
def __setLastTile(self):
"""sets lastTile, lastSource, announcements"""
self.__announcements = ''
self.__lastTile = None
self.__lastSource = None
parts = self.mjStr.split()
for part in parts:
if part[0] == 'L':
part = part[1:]
if len(part) > 2:
self.__lastMeld = Meld(part[2:])
self.__lastTile = part[:2]
elif part[0] == 'M':
if len(part) > 3:
self.__lastSource = part[3]
if len(part) > 4:
self.__announcements = part[4:]
if self.__lastTile:
assert self.__lastTile in self.tileNames, 'lastTile %s is not in tiles %s, mjStr=%s' % (
self.__lastTile, ' '.join(self.tileNames), self.mjStr)
if self.__lastSource == 'k':
assert self.tileNames.count(self.__lastTile.lower()) + \
self.tileNames.count(self.__lastTile.capitalize()) == 1, \
'Robbing kong: I cannot have lastTile %s more than once in %s' % (
self.__lastTile, ' '.join(self.tileNames))
def __setLastMeld(self):
"""sets the shortest possible last meld. This is
not yet the final choice, see __applyBestLastMeld"""
self.__lastMeld = None
if self.lastTile and self.won:
if hasattr(self.mjRule.function, 'computeLastMelds'):
self.__lastMelds = self.mjRule.function.computeLastMelds(self)
if self.__lastMelds:
# syncHandBoard may return nothing
if len(self.__lastMelds) == 1:
self.__lastMeld = self.__lastMelds[0]
else:
totals = sorted((len(x), idx) for idx, x in enumerate(self.__lastMelds))
self.__lastMeld = self.__lastMelds[totals[0][1]]
if not self.__lastMeld:
self.__lastMeld = Meld([self.lastTile])
self.__lastMelds = [self.__lastMeld]
def __applyBestLastMeld(self):
"""select the last meld giving the highest score (only winning variants)"""
assert len(self.lastMelds) > 1
totals = []
prev = self.lastMeld
for lastMeld in self.lastMelds:
self.__lastMeld = lastMeld
self.__applyRules()
totals.append((self.won, self.__totalScore().total(), lastMeld))
if any(x[0] for x in totals): # if any won
totals = list(x[1:] for x in totals if x[0]) # remove lost variants
totals = sorted(totals) # sort by totalScore
maxScore = totals[-1][0]
totals = list(x[1] for x in totals if x[0] == maxScore)
# now we have a list of only lastMelds reaching maximum score
if prev not in totals or self.__lastMeld not in totals:
if Debug.explain and prev not in totals:
if not self.player or not self.player.game.belongsToRobotPlayer():
self.debug('replaced last meld %s with %s' % (prev, totals[0]))
self.__lastMeld = totals[0]
self.__applyRules()
def __sub__(self, tiles):
"""returns a copy of self minus tiles. Case of tiles (hidden
or exposed) is ignored. If the tile is not hidden
but found in an exposed meld, this meld will be hidden with
the tile removed from it. Exposed melds of length<3 will also
be hidden."""
# pylint: disable=R0912
# pylint says too many branches
if not isinstance(tiles, list):
tiles = list([tiles])
hidden = 'R' + ''.join(self.tileNamesInHand)
# exposed is a deep copy of declaredMelds. If lastMeld is given, it
# must be first in the list.
exposed = (Meld(x) for x in self.declaredMelds)
if self.lastMeld:
exposed = sorted(exposed, key=lambda x: (x.pairs != self.lastMeld.pairs, meldKey(x)))
else:
exposed = sorted(exposed, key=meldKey)
bonus = sorted(Meld(x) for x in self.bonusMelds)
for tile in tiles:
assert isinstance(tile, str) and len(tile) == 2, 'Hand.__sub__:%s' % tiles
if tile.capitalize() in hidden:
hidden = hidden.replace(tile.capitalize(), '', 1)
elif tile[0] in 'fy': # bonus tile
for idx, meld in enumerate(bonus):
if tile == meld.pairs[0]:
del bonus[idx]
break
else:
for idx, meld in enumerate(exposed):
if tile.lower() in meld.pairs:
del meld.pairs[meld.pairs.index(tile.lower())]
del exposed[idx]
meld.conceal()
hidden += meld.joined
break
for idx, meld in enumerate(exposed):
if len(meld.pairs) < 3:
del exposed[idx]
meld.conceal()
hidden += meld.joined
mjStr = self.mjStr
if self.lastTile in tiles:
parts = mjStr.split()
newParts = []
for idx, part in enumerate(parts):
if part[0] == 'M':
part = 'm' + part[1:]
if len(part) > 3 and part[3] == 'k':
part = part[:3]
elif part[0] == 'L':
continue
newParts.append(part)
mjStr = ' '.join(newParts)
newString = ' '.join([hidden, meldsContent(exposed), meldsContent(bonus), mjStr])
return Hand.cached(self, newString, self.computedRules)
def manualRuleMayApply(self, rule):
"""returns True if rule has selectable() and applies to this hand"""
if self.won and rule in self.ruleset.loserRules:
return False
if not self.won and rule in self.ruleset.winnerRules:
return False
return rule.selectable(self) or rule.appliesToHand(self) # needed for activated rules
def callingHands(self, wanted=1, excludeTile=None, mustBeAvailable=False):
"""the hand is calling if it only needs one tile for mah jongg.
Returns up to 'wanted' hands which would only need one tile.
If mustBeAvailable is True, make sure the missing tile might still
be available.
"""
result = []
string = self.string
if ' x' in string or self.lenOffset:
return result
for rule in self.ruleset.mjRules:
# sort only for reproducibility
if not hasattr(rule, 'winningTileCandidates'):
raise Exception('rule %s, code=%s has no winningTileCandidates' % (
rule.name, rule.function))
candidates = sorted(x.capitalize() for x in rule.winningTileCandidates(self))
for tileName in candidates:
if excludeTile and tileName == excludeTile.capitalize():
continue
if mustBeAvailable and not self.player.tileAvailable(tileName, self):
continue
hand = self.picking(tileName)
if hand.won:
result.append(hand)
if len(result) == wanted:
break
if len(result) == wanted:
break
return result
def __maybeMahjongg(self):
"""check if this is a mah jongg hand.
Return a sorted list of matching MJ rules, highest
total first"""
if not self.won:
return []
if self.lenOffset != 1:
return []
matchingMJRules = [x for x in self.ruleset.mjRules if x.appliesToHand(self)]
if self.robbedTile and self.robbedTile.istitle():
# Millington 58: robbing hidden kong is only allowed for 13 orphans
matchingMJRules = [x for x in matchingMJRules if 'mayrobhiddenkong' in x.options]
return sorted(matchingMJRules, key=lambda x: -x.score.total())
def splitRegex(self, rest):
"""split rest into melds as good as possible"""
rest = ''.join(rest)
melds = []
for rule in self.ruleset.splitRules:
splits = rule.apply(rest)
while len(splits) >1:
for split in splits[:-1]:
melds.append(Meld(split))
rest = splits[-1]
splits = rule.apply(rest)
if len(splits) == 0:
break
if len(splits) == 1 :
assert Meld(splits[0]).isValid() # or the splitRules are wrong
return melds
def __recurse(self, cVariants, foundMelds, rest, maxPairs, color):
"""build the variants recursively"""
melds = []
for value in set(rest):
intValue = int(value)
if rest.count(value) == 3:
melds.append([value] * 3)
elif rest.count(value) == 2:
melds.append([value] * 2)
if rest.count(str(intValue + 1)) and rest.count(str(intValue + 2)):
melds.append([value, str(intValue+1), str(intValue+2)])
pairsFound = sum(len(x) == 2 for x in foundMelds)
for meld in (m for m in melds if len(m) !=2 or pairsFound < maxPairs):
restCopy = rest[:]
for value in meld:
restCopy.remove(value)
newMelds = foundMelds[:]
newMelds.append(meld)
if restCopy:
self.__recurse(cVariants, newMelds, restCopy, maxPairs, color)
else:
for idx, newMeld in enumerate(newMelds):
newMelds[idx] = ''.join(color+x for x in newMeld)
cVariants.append(' '.join(sorted(newMelds )))
def genVariants(self, original0, maxPairs=1):
"""generates all possible meld variants out of original
where original is a list of tile values like ['1','1','2']"""
color = original0[0][0]
original = [x[1] for x in original0]
cVariants = []
self.__recurse(cVariants, [], original, maxPairs, color)
gVariants = []
for cVariant in set(cVariants):
melds = [Meld(x) for x in cVariant.split()]
gVariants.append(melds)
if not gVariants:
gVariants.append(self.splitRegex(original0)) # fallback: nothing useful found
return gVariants
# TODO: get rid of __split, the mjRules should do that if they need it at all
# only __split at end of Hand.__init__, now we do it twice for winning hands
def __split(self, rest):
"""work hard to always return the variant with the highest Mah Jongg value.
Adds melds to self.melds.
only one special mjRule may try to rearrange melds.
A rest will be rearranged by standard rules."""
if 'Xy' in rest:
# hidden tiles of other players:
self.melds.extend(self.splitRegex(rest))
return
arrangements = []
for mjRule in self.ruleset.mjRules:
func = mjRule.function
if func.__class__.__name__ == 'StandardMahJongg':
stdMJ = func
if self.mjRule:
rules = [self.mjRule]
else:
rules = self.ruleset.mjRules
for mjRule in rules:
func = mjRule.function
if func != stdMJ and hasattr(func, 'rearrange'):
if ((self.lenOffset == 1 and func.appliesToHand(self))
or (self.lenOffset < 1 and func.shouldTry(self))):
melds, pairs = func.rearrange(self, rest[:])
if melds:
arrangements.append((mjRule, melds, pairs))
if arrangements:
# TODO: we should know for each arrangement how many tiles for MJ are still needed.
# If len(pairs) == 4, one or up to three might be needed. That would allow for better AI.
# TODO: if hand just completed and we did not win, only try stdmj
arrangement = sorted(arrangements, key=lambda x: len(x[2]))[0]
self.melds.extend(arrangement[1])
self.melds.extend([Meld(x) for x in arrangement[2]])
assert len(''.join(x.joined for x in self.melds)) == len(self.tileNames) * 2, '%s != %s' % (
meldsContent(self.melds), self.tileNames)
else:
# stdMJ is special because it might build more than one pair
# the other special hands would put that into the rest
# if the above TODO is done, stdMJ does not have to be special anymore
melds, _ = stdMJ.rearrange(self, rest[:])
self.melds.extend(melds)
assert len(''.join(x.joined for x in self.melds)) == len(self.tileNames) * 2, '%s != %s' % (
meldsContent(self.melds), self.tileNames)
def countMelds(self, key):
"""count melds having key"""
result = 0
if isinstance(key, str):
for meld in self.melds:
if meld.tileType() in key:
result += 1
else:
for meld in self.melds:
if key(meld):
result += 1
return result
def __matchingRules(self, rules):
"""return all matching rules for this hand"""
return list(rule for rule in rules if rule.appliesToHand(self))
def __applyMeldRules(self):
"""apply all rules for single melds"""
for rule in self.ruleset.meldRules:
for meld in self.melds + self.bonusMelds:
if rule.appliesToMeld(self, meld):
self.usedRules.append(UsedRule(rule, meld))
def __applyHandRules(self):
"""apply all hand rules for both winners and losers"""
for rule in self.ruleset.handRules:
if rule.appliesToHand(self):
self.usedRules.append(UsedRule(rule))
def __totalScore(self):
"""use all used rules to compute the score"""
pointsTotal = Score(ruleset=self.ruleset)
maxLimit = 0.0
maxRule = None
for usedRule in self.usedRules:
score = usedRule.rule.score
if score.limits:
# we assume that a hand never gets different limits combined
maxLimit = max(maxLimit, score.limits)
maxRule = usedRule
else:
pointsTotal += score
if maxLimit:
if maxLimit >= 1.0 or maxLimit * self.ruleset.limit > pointsTotal.total():
self.usedRules = [maxRule]
return Score(ruleset=self.ruleset, limits=maxLimit)
return pointsTotal
def total(self):
"""total points of hand"""
return self.score.total()
def __computeLenOffset(self, tileString):
"""lenOffset is <0 for short hand, 0 for correct calling hand, >0 for long hand.
Of course ignoring bonus tiles.
if there are no kongs, 13 tiles will return 0"""
result = len(self.tileNames) - 13
for split in tileString.split():
if split[0] != 'R':
if Meld(split).isKong():
result -= 1
return result
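# Example (sketch, derived from the docstring above): with no kongs, 13 tiles
# give lenOffset 0 (a correct calling hand) and 14 tiles give 1 (a completed or
# long hand); each declared kong lowers the result by one to offset its 4th tile.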
@staticmethod
def __computeDragonWindMelds(tileString):
"""returns lists with melds containing all (even single)
dragons respective winds"""
dragonMelds = []
windMelds = []
for split in tileString.split():
if split[0] == 'R':
pairs = Pairs(split[1:])
for lst, tiles in ((windMelds, elements.wINDS), (dragonMelds, elements.dRAGONS)):
for tile in tiles:
count = pairs.count(tile)
if count:
lst.append(Meld([tile] * count))
elif split[0] in 'dD':
dragonMelds.append(Meld(split))
elif split[0] in 'wW':
windMelds.append(Meld(split))
return dragonMelds, windMelds
@staticmethod
def __separateBonusMelds(tileString):
"""keep them separate. One meld per bonus tile. Others depend on that."""
result = []
if 'f' in tileString or 'y' in tileString:
for pair in Pairs(tileString.replace(' ','').replace('R', '')):
if pair[0] in 'fy':
result.append(Meld(pair))
tileString = tileString.replace(pair, '', 1)
return result, tileString
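# Example (sketch; the concrete tile code is hypothetical): a two-character pair
# whose first character is 'f' or 'y' (say 'fe') becomes its own single-tile Meld
# and is removed once from the tile string, so bonus tiles never merge with
# ordinary melds.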
def __separateMelds(self, tileString):
"""build a meld list from the hand string"""
# no matter how the tiles are grouped make a single
# meld for every bonus tile
# we need to remove spaces from the hand string first
# for building only pairs with length 2
splits = tileString.split()
rest = ''
for split in splits:
if split[0] == 'R':
rest = split[1:]
else:
meld = Meld(split)
self.melds.append(meld)
self.declaredMelds.append(meld)
if rest:
rest = sorted([rest[x:x+2] for x in range(0, len(rest), 2)])
self.__split(rest)
self.melds = sorted(self.melds, key=meldKey)
for meld in self.melds:
if not meld.isValid():
raise Exception('%s has an invalid meld: %s' % (self.string, meld.joined))
self.__categorizeMelds()
def picking(self, tileName):
"""returns a new Hand built from this one plus tileName"""
assert tileName.istitle(), 'tileName %s should be title:' % tileName
parts = self.string.split()
mPart = ''
rPart = 'R%s' % tileName
unchanged = []
for part in parts:
if part[0] in 'SBCDW':
rPart += part
elif part[0] == 'R':
rPart += part[1:]
elif part[0].lower() == 'm':
mPart = part
elif part[0] == 'L':
pass
else:
unchanged.append(part)
# combine all parts about hidden tiles plus the new one to one part
# because something like DrDrS8S9 plus S7 will have to be reordered
# anyway
# set the "won" flag M
parts = unchanged
parts.extend([rPart, mPart.capitalize(), 'L%s' % tileName])
return Hand.cached(self, ' '.join(parts))
def __categorizeMelds(self):
"""categorize: hidden, declared"""
self.hiddenMelds = []
self.declaredMelds = []
for meld in self.melds:
if meld.state == CONCEALED and not meld.isKong():
self.hiddenMelds.append(meld)
else:
self.declaredMelds.append(meld)
def explain(self):
"""explain what rules were used for this hand"""
result = [x.rule.explain() for x in self.usedRules
if x.rule.score.points]
result.extend([x.rule.explain() for x in self.usedRules
if x.rule.score.doubles])
result.extend([x.rule.explain() for x in self.usedRules
if not x.rule.score.points and not x.rule.score.doubles])
if any(x.rule.debug for x in self.usedRules):
result.append(str(self))
return result
def doublesEstimate(self):
"""this is only an estimate because it only uses meldRules and handRules,
but not things like mjRules, winnerRules, loserRules"""
result = 0
for meld in self.dragonMelds + self.windMelds:
for rule in self.ruleset.doublingMeldRules:
if rule.appliesToMeld(self, meld):
result += rule.score.doubles
for rule in self.ruleset.doublingHandRules:
if rule.appliesToHand(self):
result += rule.score.doubles
return result
def __str__(self):
"""hand as a string"""
return u' '.join([self.sortedMeldsContent, self.mjStr])
def __repr__(self):
"""the default representation"""
return 'Hand(%s)' % str(self)
| gpl-2.0 | -5,353,195,466,614,206,000 | 39.862115 | 104 | 0.56146 | false |
kcsry/wurst | wurst/core/migrations/0001_initial.py | 1 | 4493 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-30 19:14
from __future__ import unicode_literals
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import enumfields.fields
import wurst.core.consts
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Issue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(db_index=True, max_length=32, unique=True)),
('title', models.CharField(max_length=140)),
('description', models.TextField(blank=True)),
('start_date', models.DateField(blank=True, null=True)),
('due_date', models.DateField(blank=True, null=True)),
('created', models.DateTimeField(db_index=True, default=django.utils.timezone.now, editable=False)),
('assignee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issues_assigned', to=settings.AUTH_USER_MODEL)),
('creator', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issues_created', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='IssueType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique=True)),
('nouns', models.TextField(blank=True)),
],
),
migrations.CreateModel(
name='Priority',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique=True)),
('nouns', models.TextField(blank=True)),
('value', models.IntegerField(db_index=True, default=0)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique=True)),
('prefix', models.CharField(max_length=10, unique=True)),
],
),
migrations.CreateModel(
name='Status',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique=True)),
('category', enumfields.fields.EnumIntegerField(db_index=True, default=0, enum=wurst.core.consts.StatusCategory)),
('value', models.IntegerField(db_index=True, default=0)),
],
),
migrations.AddField(
model_name='issue',
name='priority',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to='wurst.Priority'),
),
migrations.AddField(
model_name='issue',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to='wurst.Project'),
),
migrations.AddField(
model_name='issue',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to='wurst.Status'),
),
migrations.AddField(
model_name='issue',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issues', to='wurst.IssueType'),
),
]
| mit | -5,385,830,359,492,303,000 | 46.294737 | 191 | 0.594258 | false |
sqlboy/fileseq | test/test_fuzz.py | 1 | 57574 | #!/usr/bin/env python
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from future.utils import string_types, text_type, native_str
import unittest
import pickle
import re
import types
from fileseq.utils import *
from fileseq import FrameSet, framesToFrameRange, ParseException
def _yrange(first, last=None, incr=1):
"""
Simple value generator for the 1-20y5 syntax.
:param first: as per xrange
:param last: as per xrange
:param incr: as per xrange
:return: generator
"""
if last is None:
first, last = 0, first
whole = list(range(first, last, 1 if incr >= 0 else -1))
filt = set(whole[::abs(incr)])
for i in whole:
if i not in filt:
yield i
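# Example (sketch): _yrange yields exactly the values skipped by an x-style step,
# so list(_yrange(1, 21, 5)) == [2, 3, 4, 5, 7, 8, 9, 10, 12, 13, 14, 15, 17, 18, 19, 20],
# mirroring the "1-20y5" fill cases used below.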
def _srange(first, last=None, incr=1):
"""
Simple value generator for the 1-20:5 syntax.
:param first: as per xrange
:param last: as per xrange
:param incr: as per xrange
:return: generator
"""
if last is None:
first, last = 0, first
whole = list(range(first, last, 1 if incr >= 0 else -1))
sent = set()
for stagger in range(abs(incr), 0, -1):
for i in whole[::stagger]:
if i not in sent:
sent.add(i)
yield i
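# Example (sketch): _srange staggers through the range, coarsest stride first,
# yielding each value only once, so list(_srange(1, 9, 3)) == [1, 4, 7, 3, 5, 2, 6, 8],
# mirroring the "1-8:3" stagger syntax used below.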
def _uchain(*args):
"""
As per itertools.chain, but will only yield items not previously yielded.
:param args: one or more iterables to chain
:return: generator
"""
sent = set()
for i in chain(*args):
if i not in sent:
yield i
sent.add(i)
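# Example (sketch): values already yielded are suppressed on later iterables,
# so list(_uchain([1, 2, 3], [2, 3, 4])) == [1, 2, 3, 4].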
FRAME_SET_SHOULD_SUCCEED = [
# the null value
("Empty", '', []),
# individual frames
('Zero', '0', [0]),
('NegZero', '-0', [0]),
('Pos', '1', [1]),
('Neg', '-1', [-1]),
# permutations on comma separated individual frames
('DupePos', '1,1,1', [1]),
('DupeNeg', '-1,-1,-1', [-1]),
('DupeMix', '-1,1,-1,1', [-1,1]),
('CommaSepPos', '1,3,17', [1,3,17]),
('CommaSepNeg', '-1,-3,-17', [-1,-3,-17]),
('CommaSepMix', '1,-3,17', [1,-3,17]),
('CommaSepPosInv', '17,3,1', [17,3,1]),
('CommaSepNegInv', '-17,-3,-1', [-17,-3,-1]),
('CommaSepMixInv', '17,-3,1', [17,-3,1]),
("CommaTrailing", "1,", [1]),
("CommaLeading", ",1", [1]),
("CommaDupes", "1,,,,,,2,,,,,3,,,", [1,2,3]),
# args that str(arg) cast to a valid FrameSet
('PosInt', 1, [1]),
('NegInt', -1, [-1]),
('FrameSet', FrameSet("1-20"), list(range(1,21))),
# unicode args that are the equivalent of a valid FrameSet
('UnicodeEquivalentRange', u'-1--20', list(range(-1,-21,-1))),
('UnicodeEquivalentRangeChunk', u'-1--20x5', list(range(-1,-21,-5))),
('UnicodeEquivalentRangeFill', u'-1--20y5', list(_yrange(-1,-21,-5))),
('UnicodeEquivalentRangeStagger', u'-1--20:5', list(_srange(-1,-21,-5))),
]
LO_RANGES = [
# low value permutations of signed integer ranges, these will all be individually tested
('PosToPos', '1-20', list(range(1,21,1))),
('NegToPos', '-1-20', list(range(-1,21,1))),
('NegToNeg', '-1--20', list(range(-1,-21,-1))),
('PosToNeg', '1--20', list(range(1,-21,-1))),
('PosToPosInv', '20-1', list(range(20,0,-1))),
('NegToPosInv', '-20-1', list(range(-20,2,1))),
('NegToNegInv', '-20--1', list(range(-20,0,1))),
('PosToNegInv', '20--1', list(range(20,-2,-1))),
('PosToPosChunk', '1-20x5', list(range(1,21,5))),
('NegToPosChunk', '-1-20x5', list(range(-1,21,5))),
('NegToNegChunk', '-1--20x5', list(range(-1,-21,-5))),
('PosToNegChunk', '1--20x5', list(range(1,-21,-5))),
('PosToPosChunkInv', '20-1x5', list(range(20,0,-5))),
('NegToPosChunkInv', '-20-1x5', list(range(-20,2,5))),
('NegToNegChunkInv', '-20--1x5', list(range(-20,0,5))),
('PosToNegChunkInv', '20--1x5', list(range(20,-2,-5))),
('PosToPosNegChunkInv', '20-1x-1', list(range(20,0,-1))),
('PosToPosFill', '1-20y5', list(_yrange(1,21,5))),
('NegToPosFill', '-1-20y5', list(_yrange(-1,21,5))),
('NegToNegFill', '-1--20y5', list(_yrange(-1,-21,-5))),
('PosToNegFill', '1--20y5', list(_yrange(1,-21,-5))),
('PosToPosFillInv', '20-1y5', list(_yrange(20,0,-5))),
('NegToPosFillInv', '-20-1y5', list(_yrange(-20,2,5))),
('NegToNegFillInv', '-20--1y5', list(_yrange(-20,0,5))),
('PosToNegFillInv', '20--1y5', list(_yrange(20,-2,-5))),
('PosToPosStagger', '1-20:5', list(_srange(1,21,5))),
('NegToPosStagger', '-1-20:5', list(_srange(-1,21,5))),
('NegToNegStagger', '-1--20:5', list(_srange(-1,-21,-5))),
('PosToNegStagger', '1--20:5', list(_srange(1,-21,-5))),
('PosToPosStaggerInv', '20-1:5', list(_srange(20,0,-5))),
('NegToPosStaggerInv', '-20-1:5', list(_srange(-20,2,5))),
('NegToNegStaggerInv', '-20--1:5', list(_srange(-20,0,5))),
('PosToNegStaggerInv', '20--1:5', list(_srange(20,-2,-5)))]
HI_RANGES = [
# high value permutations of signed integer ranges, these will be permuted with the LO_RANGES for testing
('PosToPos', '21-30', list(range(21,31,1))),
('NegToPos', '-21-30', list(range(-21,31,1))),
('NegToNeg', '-21--30', list(range(-21,-31,-1))),
('PosToNeg', '21--30', list(range(21,-31,-1))),
('PosToPosInv', '30-21', list(range(30,20,-1))),
('NegToPosInv', '-30-21', list(range(-30,22,1))),
('NegToNegInv', '-30--21', list(range(-30,-20,1))),
('PosToNegInv', '30--21', list(range(30,-22,-1))),
('PosToPosChunk', '21-30x5', list(range(21,31,5))),
('NegToPosChunk', '-21-30x5', list(range(-21,31,5))),
('NegToNegChunk', '-21--30x5', list(range(-21,-31,-5))),
('PosToNegChunk', '21--30x5', list(range(21,-31,-5))),
('PosToPosChunkInv', '30-21x5', list(range(30,20,-5))),
('NegToPosChunkInv', '-30-21x5', list(range(-30,22,5))),
('NegToNegChunkInv', '-30--21x5', list(range(-30,-20,5))),
('PosToNegChunkInv', '30--21x5', list(range(30,-22,-5))),
('PosToPosFill', '21-30y5', list(_yrange(21,31,5))),
('NegToPosFill', '-21-30y5', list(_yrange(-21,31,5))),
('NegToNegFill', '-21--30y5', list(_yrange(-21,-31,-5))),
('PosToNegFill', '21--30y5', list(_yrange(21,-31,-5))),
('PosToPosFillInv', '30-21y5', list(_yrange(30,20,-5))),
('NegToPosFillInv', '-30-21y5', list(_yrange(-30,22,5))),
('NegToNegFillInv', '-30--21y5', list(_yrange(-30,-20,5))),
('PosToNegFillInv', '30--21y5', list(_yrange(30,-22,-5))),
('PosToPosStagger', '21-30:5', list(_srange(21,31,5))),
('NegToPosStagger', '-21-30:5', list(_srange(-21,31,5))),
('NegToNegStagger', '-21--30:5', list(_srange(-21,-31,-5))),
('PosToNegStagger', '21--30:5', list(_srange(21,-31,-5))),
('PosToPosStaggerInv', '30-21:5', list(_srange(30,20,-5))),
('NegToPosStaggerInv', '-30-21:5', list(_srange(-30,22,5))),
('NegToNegStaggerInv', '-30--21:5', list(_srange(-30,-20,5))),
('PosToNegStaggerInv', '30--21:5', list(_srange(30,-22,-5)))]
for lo in LO_RANGES:
FRAME_SET_SHOULD_SUCCEED.append(lo)
for hi in HI_RANGES:
name = 'CommaSep{0}To{1}'.format(lo[0], hi[0])
test = ','.join([lo[1], hi[1]])
expect = list(_uchain(lo[2], hi[2]))
FRAME_SET_SHOULD_SUCCEED.append((name, test, expect))
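# Example (sketch): the first generated combination pairs LO 'PosToPos' ('1-20')
# with HI 'PosToPos' ('21-30'), appending the case
# ('CommaSepPosToPosToPos', '1-20,21-30', [1, 2, ..., 30]) to FRAME_SET_SHOULD_SUCCEED.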
FRAME_SET_SHOULD_FAIL = [
("PosWChunkChar", "1x5"),
("NegWChunkChar", "-1x5"),
("PosWFillChar", "1y5"),
("NegWFillChar", "-1y5"),
("PosWStaggerChar", "1:5"),
("NegWStaggerChar", "-1:5"),
("PosWSepChar", "1-"),
("NegWSepChar", "-1-"),
("BadAlphaChars", "bilbo"),
("RangeWDupeSpecialChar", "1-20x:y5"),
("RangeWBadCaseChunkChar", "1-20X5"),
("RangeWBadCaseFillChar", "1-20Y5"),
("RangeWChunkZero", "1-20x0"),
("RangeWFillZero", "1-20y0"),
("RangeWStaggerZero", "1-20:0"),
("RangeWNegChunk", "1-20x-5"),
("RangeWNegFill", "1-20y-5"),
("RangeWNegStagger", "1-20:-5"),
("ActualNone", None),
]
FRAME_SET_FROM_RANGE_SHOULD_SUCCEED = [
# individual frames
('Zero', 0, 0, 1, '0'),
('Pos', 1, 1, 1, '1'),
# ranges
('PosToPos', 1, 20, 1, '1-20'),
('NegToPos', -1, 1, 1, '-1-1'),
('PosToNeg', 1, -1, 1, '1--1'),
('PosToPosInv', 20, 1, 1, '20-1'),
('NegToPosInv', -20, 1, 1, '-20-1'),
('NegToNegInv', -20, -1, 1, '-20--1'),
('PosToNegInv', 20, -1, 1, '20--1'),
('PosToPosChunk', 1, 20, 5, '1-20x5'),
('NegToPosChunk', -1, 20, 5, '-1-20x5'),
('NegToNegChunk', -1, -20, 5, '-1--20x5'),
('PosToNegChunk', 1, -20, 5, '1--20x5'),
('PosToPosChunkInv', 20, 1, 5, '20-1x5'),
('NegToPosChunkInv', -20, 1, 5, '-20-1x5'),
('NegToNegChunkInv', -20, -1, 5, '-20--1x5'),
('PosToNegChunkInv', 20, -1, 5, '20--1x5'),
('PosToPosNegChunkInv', 20, 1, -1, '20-1x-1'),
]
class TestFrameSet(unittest.TestCase):
"""
Exercise the FrameSet object. Due to the sheer number of permutations, we'll add most tests dynamically.
"""
def _check___init___range(self, test, expect):
"""
Harness to test if the FrameSet.__init__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}")._frange != {0}: got {1}'
r = f._frange
self.assertEqual(r, native_str(test), m.format(test, r))
m = u'FrameSet("{0}")._frange returns {1}: got {2}'
self.assertIsInstance(r, native_str, m.format(test, native_str, type(r)))
def _check___init___items(self, test, expect):
"""
Harness to test if the FrameSet.__init__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}")._items != {1}: got {2}'
r = f._items
self.assertEqual(r, set(expect), m.format(test, set(expect), r))
m = u'FrameSet("{0}")._FrameSet__items returns {1}: got {2}'
self.assertIsInstance(r, frozenset, m.format(test, frozenset, type(r)))
def _check___init___order(self, test, expect):
"""
Harness to test if the FrameSet.__init__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}")._order != {1}: got {2}'
r = f._order
self.assertEqual(r, tuple(expect), m.format(test, tuple(expect), r))
m = u'FrameSet("{0}")._order returns {1}: got {2}'
self.assertIsInstance(r, tuple, m.format(test, tuple, type(r)))
def _check___init____malformed(self, test):
"""
Harness to test if the FrameSet.__init__ call properly handles malformed strings.
:param test: the string to pass to FrameSet
:return: None
"""
try:
r = FrameSet(test)
except ParseException as err:
r = err
except Exception as err:
r = err
m = u'FrameSet("{0}") should fail: got {1}'
self.assertIsInstance(r, ParseException, m.format(test, r))
def _check___str__(self, test, expect):
"""
Harness to test if the FrameSet.__str__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'str(FrameSet("{0}")) != {0}: got {1}'
r = str(f)
self.assertEqual(r, native_str(test), m.format(test, r))
m = u'str(FrameSet("{0}")) returns {1}: got {2}'
self.assertIsInstance(r, native_str, m.format(test, native_str, type(r)))
def _check___len__(self, test, expect):
"""
Harness to test if the FrameSet.__len__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'len(FrameSet("{0}")) != {1}: got {2}'
r = len(f)
self.assertEqual(r, len(expect), m.format(test, len(expect), r))
m = u'len(FrameSet("{0}")) returns {1}: got {2}'
self.assertIsInstance(r, int, m.format(test, int, type(r)))
def _check___getitem__(self, test, expect):
"""
Harness to test if the FrameSet.__getitem__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
i = len(expect) // 2
m = u'FrameSet("{0}")[{1}] != {2}: got {3}'
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertRaises(IndexError, f.__getitem__, i)
return
try:
r = f[i]
except Exception as err:
r = repr(err)
self.assertEqual(r, expect[i], m.format(test, i, expect[i], r))
m = u'FrameSet("{0}")[{1}] returns {2}: got {3}'
self.assertIsInstance(r, int, m.format(test, i, int, type(r)))
try:
r = f[:-1:2]
except Exception as err:
r = repr(err)
e = tuple(expect[:-1:2])
m = u'FrameSet("{0}")[:1:2] != {1}: got {2}'
self.assertEqual(r, e, m.format(test, e, r))
def _check_start(self, test, expect):
"""
Harness to test if the FrameSet.start call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}").start() != {1}: got {2}'
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertRaises(IndexError, f.start)
return
try:
r = f.start()
except Exception as err:
r = repr(err)
self.assertEqual(r, expect[0], m.format(test, expect[0], r))
m = u'FrameSet("{0}").start() returns {1}: got {2}'
self.assertIsInstance(r, int, m.format(test, int, type(r)))
def _check_end(self, test, expect):
"""
Harness to test if the FrameSet.end call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}").end() != {1}: got {2}'
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertRaises(IndexError, f.end)
return
try:
r = f.end()
except Exception as err:
r = repr(err)
self.assertEqual(r, expect[-1], m.format(test, expect[-1], r))
m = u'FrameSet("{0}").end() returns {1}: got {2}'
self.assertIsInstance(r, int, m.format(test, int, type(r)))
def _check_index(self, test, expect):
"""
Harness to test if the FrameSet.index call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertRaises(IndexError, f.frame, 0)
return
i = expect[len(expect) // 2]
m = u'FrameSet("{0}").index({1}) != {2}: got {3}'
try:
r = f.index(i)
except Exception as err:
r = repr(err)
self.assertEqual(r, expect.index(i), m.format(test, i, expect.index(i), r))
m = u'FrameSet("{0}").index({1}) returns {2}: got {3}'
self.assertIsInstance(r, int, m.format(test, i, int, type(r)))
def _check_frame(self, test, expect):
"""
Harness to test if the FrameSet.frame call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertRaises(IndexError, f.frame, 0)
return
i = len(expect) // 2
m = u'FrameSet("{0}").frame({1}) != {2}: got {3}'
try:
r = f.frame(i)
except Exception as err:
r = repr(err)
self.assertEqual(r, expect[i], m.format(test, i, expect[i], r))
m = u'FrameSet("{0}").frame({1}) returns {2}: got {3}'
self.assertIsInstance(r, int, m.format(test, i, int, type(r)))
def _check_hasFrameTrue(self, test, expect):
"""
Harness to test if the FrameSet.hasFrame call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertFalse(f.hasFrame(1))
return
i = max(expect)
m = u'FrameSet("{0}").hasFrame({1}) != {2}: got {3}'
r = f.hasFrame(i)
self.assertTrue(r, m.format(test, i, i in expect, r))
m = u'FrameSet("{0}").frame({1}) returns {2}: got {3}'
self.assertIsInstance(r, bool, m.format(test, i, bool, type(r)))
def _check_hasFrameFalse(self, test, expect):
"""
Harness to test if the FrameSet.hasFrame call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is expected to always fail
if not test and not expect:
self.assertFalse(f.hasFrame(1))
return
i = max(expect) + 1
m = u'FrameSet("{0}").hasFrame({1}) != {2}: got {3}'
r = f.hasFrame(i)
self.assertFalse(r, m.format(test, i, i in expect, r))
m = u'FrameSet("{0}").frame({1}) returns {2}: got {3}'
self.assertIsInstance(r, bool, m.format(test, i, bool, type(r)))
def _check___iter__(self, test, expect):
"""
Harness to test if the FrameSet.__iter__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'list(FrameSet("{0}")) != {1}: got {2}'
r = f.__iter__()
self.assertEqual(list(r), expect, m.format(test, expect, list(r)))
m = u'FrameSet("{0}").__iter__ returns {1}: got {2}'
self.assertIsInstance(r, types.GeneratorType, m.format(test, types.GeneratorType, type(r)))
def _check_canSerialize(self, test, expect):
"""
Harness to test if the FrameSet.__getstate__ and FrameSet.__setstate__ calls allowing pickling.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
f2 = pickle.loads(pickle.dumps(f))
m = u'FrameSet("{0}") does not pickle correctly'
self.assertIsInstance(f2, FrameSet, m.format(test))
self.assertTrue(str(f) == str(f2) and list(f) == list(f2), m.format(test))
# test old objects being unpickled through new lib
state = {'__frange': f._frange, '__set': set(f._items), '__list': list(f._order)}
f2 = FrameSet.__new__(FrameSet)
f2.__setstate__(state)
self.assertTrue(str(f) == str(f2) and list(f) == list(f2), m.format(test))
def _check_frameRange(self, test, expect):
"""
Harness to test if the FrameSet.frameRange call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet always has a frameRange of ''
if not test and not expect:
self.assertEqual(f.frameRange(), '')
return
p1 = r'((?<![xy:-])-?\d+)'
l = max([max([len(i) for i in re.findall(p1, str(f))]) + 1, 4])
p2 = r'(-?\d+)(?:(-)(-?\d+)([xy:]\d+)?)?'
def replace(match):
start, sep, end, step = match.groups()
if start:
start = start.zfill(l)
if end:
end = end.zfill(l)
return ''.join(o for o in [start, sep, end, step] if o)
expect = re.sub(p2, replace, str(f))
try:
r = f.frameRange(l)
except Exception as err:
r = repr(err)
m = u'FrameSet("{0}").frameRange({1}) != "{2}": got "{3}"'
self.assertEqual(r, expect, m.format(test, l, expect, r))
m = u'FrameSet("{0}").frameRange({1}) returns {2}: got {3}'
self.assertIsInstance(r, native_str, m.format(test, l, native_str, type(r)))
def _check_invertedFrameRange(self, test, expect):
"""
Harness to test if the FrameSet.invertedFrameRange call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'FrameSet("{0}").invertedFrameRange() != "{1}": got "{2}"'
r = f.invertedFrameRange()
t = sorted(f)
c = sorted(FrameSet(r) if r else [])
# the empty FrameSet will always return '' for inverted and normal
# FrameRange
if not test and not expect:
self.assertEqual(r, '')
else:
e = [i for i in range(t[0], t[-1]) if i not in t]
self.assertEqual(c, e, m.format(test, e, c))
m = u'FrameSet("{0}").invertedFrameRange() returns {1}: got {2}'
self.assertIsInstance(r, native_str, m.format(test, native_str, type(r)))
def _check_normalize(self, test, expect):
"""
Harness to test if the FrameSet.normalize call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
m = u'set(FrameSet("{0}").normalize()) != {1}: got {2}'
r = f.normalize()
self.assertEqual(set(f), set(r), m.format(test, set(expect), set(r)))
m = u'FrameSet("{0}").normalize() returns {1}: got {2}'
self.assertIsInstance(r, FrameSet, m.format(test, FrameSet, type(r)))
def _check_isFrameRange(self, test, expect):
"""
Harness to test if the FrameSet.isFrameRange call works properly.
:param test: the string to pass to FrameSet.isFrameRange
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
r = FrameSet.isFrameRange(test)
m = u'FrameSet.isFrameRange("{0}") != {1}: got {2}'
self.assertEqual(r, expect, m.format(test, expect, r))
m = u'FrameSet.isFrameRange("{0}") returns {1}: got {2}'
self.assertIsInstance(r, bool, m.format(test, bool, type(r)))
def _check_fromIterable(self, expect, iterable):
"""
Harness to test if the FrameSet.fromIterable call works properly.
:param expect: the string to use to build the expected FrameRange, which will be normalized for comparison
:param iterable: the iterable to test
:return: None
"""
e = FrameSet(expect)
r = FrameSet.from_iterable(iterable)
m = u'FrameSet.fromIterable({0}) != {1!r}: got {2!r}'
self.assertEqual(r, e, m.format(iterable, e, r))
m = u'FrameSet.fromIterable({0}) returns {1}: got {2}'
self.assertIsInstance(r, FrameSet, m.format(expect, FrameSet, type(r)))
def _check___repr__(self, test, expect):
"""
Harness to test if the FrameSet.__repr__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
e = 'FrameSet("{0}")'.format(test)
m = u'repr(FrameSet("{0}")) != {1}: got {2}'
self.assertEqual(repr(f), e, m.format(test, e, repr(f)))
m = u'repr(FrameSet("{0}")) returns {1}: got {2}'
self.assertIsInstance(repr(f), native_str, m.format(test, native_str, type(repr(f))))
def _check___reversed__(self, test, expect):
"""
Harness to test if the FrameSet.__reversed__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
e = list(reversed(expect))
r = reversed(f)
m = u'reversed(FrameSet("{0}")) != {1}: got {2}'
self.assertEqual(list(r), e, m.format(test, e, r))
m = u'reversed(FrameSet("{0}")) returns {1}: got {2}'
self.assertIsInstance(r, types.GeneratorType, m.format(test, types.GeneratorType, type(r)))
def _check___contains__(self, test, expect):
"""
Harness to test if the FrameSet.__contains__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
e = expect[-1] if len(expect) else None
should_succeed = e in f
e = (max(expect) + 1) if len(expect) else None
should_fail = e in f
m = u'{0} in FrameSet("{1}"))'
# the empty FrameSet contains nothing
if not test and not expect:
self.assertFalse(should_succeed, m.format(e, test))
self.assertFalse(should_fail, m.format(e, test))
else:
self.assertTrue(should_succeed, m.format(e, test))
self.assertFalse(should_fail, m.format(e, test))
m = u'FrameSet("{0}").__contains__ returns {1}: got {2}'
self.assertIsInstance(should_succeed, bool, m.format(test, bool, type(should_succeed)))
self.assertIsInstance(should_fail, bool, m.format(test, bool, type(should_fail)))
def _check___hash__(self, test, expect):
"""
Harness to test if the FrameSet.__hash__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
try:
r = hash(f)
except Exception as err:
r = err
m = u'hash(FrameSet("{0}")) returns {1}: got {2}'
self.assertIsInstance(r, int, m.format(test, int, type(r)))
def _check___lt__(self, test, expect):
"""
Harness to test if the FrameSet.__lt__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is less than everything, except for itself
if not test and not expect:
self.assertTrue(f < FrameSet('1'))
self.assertTrue(f < FrameSet('-1'))
self.assertFalse(f < expect)
return
r = FrameSet.from_iterable(expect + [max(expect) + 1])
should_succeed = f < r
should_fail = r < f
m = u'FrameSet("{0}") < FrameSet("{1}")'
self.assertTrue(should_succeed, m.format(test, r))
self.assertFalse(should_fail, m.format(r, test))
m = u'FrameSet("{0}") < FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
self.assertIsInstance(should_fail, bool, m.format(r, test, bool, type(should_fail)))
def _check___le__(self, test, expect):
"""
Harness to test if the FrameSet.__le__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is less than everything, equal only to itself
if not test and not expect:
self.assertTrue(f <= FrameSet('1'))
self.assertTrue(f <= FrameSet('-1'))
self.assertTrue(f <= expect)
return
for i in [expect, expect + [max(expect) + 1]]:
r = FrameSet.from_iterable(i)
should_succeed = f <= r
m = u'FrameSet("{0}") <= FrameSet("{1}")'
self.assertTrue(should_succeed, m.format(test, r))
m = u'FrameSet("{0}") <= FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
def _check___eq__(self, test, expect):
"""
Harness to test if the FrameSet.__eq__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
r = FrameSet(','.join((str(i) for i in expect)))
should_succeed = f == r
m = u'FrameSet("{0}") == FrameSet("{1}")'
self.assertTrue(should_succeed, m.format(test, r))
m = u'FrameSet("{0}") == FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
def _check___ne__(self, test, expect):
"""
Harness to test if the FrameSet.__ne__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is not equal to anything, except for itself
if not test and not expect:
self.assertTrue(f != FrameSet('1'))
self.assertTrue(f != FrameSet('-1'))
self.assertFalse(f != expect)
return
r = FrameSet(','.join((str(i) for i in (expect + [max(expect) + 1]))))
should_succeed = f != r
m = u'FrameSet("{0}") != FrameSet("{1}")'
self.assertTrue(should_succeed, m.format(test, r))
m = u'FrameSet("{0}") != FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
def _check___ge__(self, test, expect):
"""
Harness to test if the FrameSet.__ge__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is greater than nothing, except for itself
if not test and not expect:
self.assertFalse(f >= FrameSet('1'))
self.assertFalse(f >= FrameSet('-1'))
self.assertTrue(f >= expect)
return
for i in [expect, expect[:-1]]:
try:
r = FrameSet.from_iterable(i)
except ParseException:
# this will happen if len(expect) == 1
continue
should_succeed = f >= r
m = u'FrameSet("{0}") >= FrameSet("{1}"'
self.assertTrue(should_succeed, m.format(test, r))
m = u'FrameSet("{0}") >= FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
def _check___gt__(self, test, expect):
"""
Harness to test if the FrameSet.__gt__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is greater than nothing, except for itself
if not test and not expect:
self.assertFalse(f > FrameSet('1'))
self.assertFalse(f > FrameSet('-1'))
self.assertFalse(f > expect)
return
try:
r = FrameSet.from_iterable(expect[:-1])
except ParseException:
# this will happen if len(expect) == 1
return
should_succeed = f > r
should_fail = r > f
m = u'FrameSet("{0}") > FrameSet("{1}")'
self.assertTrue(should_succeed, m.format(test, r))
self.assertFalse(should_fail, m.format(r, test))
m = u'FrameSet("{0}") > FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(should_succeed, bool, m.format(test, r, bool, type(should_succeed)))
self.assertIsInstance(should_fail, bool, m.format(r, test, bool, type(should_fail)))
def _check___and__(self, test, expect):
"""
Harness to test if the FrameSet.__and__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = f & t
e = FrameSet.from_iterable(set(expect) & set(v), sort=True)
m = u'FrameSet("{0}") & FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(f, t, e))
m = u'FrameSet("{0}") & FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check___rand__(self, test, expect):
"""
Harness to test if the FrameSet.__rand__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = t & f
e = FrameSet.from_iterable(set(v) & set(expect), sort=True)
m = u'FrameSet("{0}") & FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}") & FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(t, test, FrameSet, type(r)))
def _check___sub__(self, test, expect):
"""
Harness to test if the FrameSet.__sub__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = f - t
e = FrameSet.from_iterable(set(expect) - set(v), sort=True)
m = u'FrameSet("{0}") - FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(f, t, e))
m = u'FrameSet("{0}") - FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check___rsub__(self, test, expect):
"""
Harness to test if the FrameSet.__rsub__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = t - f
e = FrameSet.from_iterable(set(v) - set(expect), sort=True)
m = u'FrameSet("{0}") - FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}") - FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(t, test, FrameSet, type(r)))
def _check___or__(self, test, expect):
"""
Harness to test if the FrameSet.__or__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = f | t
e = FrameSet.from_iterable(set(expect) | set(v), sort=True)
m = u'FrameSet("{0}") | FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(f, t, e))
m = u'FrameSet("{0}") | FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check___ror__(self, test, expect):
"""
Harness to test if the FrameSet.__ror__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = t | f
e = FrameSet.from_iterable(set(v) | set(expect), sort=True)
m = u'FrameSet("{0}") | FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}") | FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(t, test, FrameSet, type(r)))
def _check___xor__(self, test, expect):
"""
Harness to test if the FrameSet.__xor__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = f ^ t
e = FrameSet.from_iterable(set(expect) ^ set(v), sort=True)
m = u'FrameSet("{0}") ^ FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(f, t, e))
m = u'FrameSet("{0}") ^ FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check___rxor__(self, test, expect):
"""
Harness to test if the FrameSet.__rxor__ call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
v = [i + max(expect) + 1 for i in expect] or list(range(999, 1999))
t = FrameSet.from_iterable(v)
r = t ^ f
e = FrameSet.from_iterable(set(v) ^ set(expect), sort=True)
m = u'FrameSet("{0}") ^ FrameSet("{1}") != FrameSet("{2}")'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}") ^ FrameSet("{1}") returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(t, test, FrameSet, type(r)))
def _check_isdisjoint(self, test, expect):
"""
Harness to test if the FrameSet.isdisjoint call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
        # the empty FrameSet is disjoint from everything, including itself
if not test and not expect:
self.assertTrue(f.isdisjoint(FrameSet('1')))
self.assertTrue(f.isdisjoint(FrameSet('-1')))
self.assertTrue(f.isdisjoint(expect))
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.isdisjoint(t)
e = set(expect).isdisjoint(v)
m = u'FrameSet("{0}").isdisjoint(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").isdisjoint(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, bool, m.format(test, t, bool, type(r)))
def _check_issubset(self, test, expect):
"""
Harness to test if the FrameSet.issubset call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the empty FrameSet is the subset of everything, including itself
if not test and not expect:
self.assertTrue(f.issubset(FrameSet('1')))
self.assertTrue(f.issubset(FrameSet('-1')))
self.assertTrue(f.issubset(expect))
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.issubset(t)
e = set(expect).issubset(v)
m = u'FrameSet("{0}").issubset(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").issubset(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, bool, m.format(test, t, bool, type(r)))
def _check_issuperset(self, test, expect):
"""
Harness to test if the FrameSet.issuperset call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
        # the empty FrameSet is a superset of nothing, except itself
if not test and not expect:
self.assertFalse(f.issuperset(FrameSet('1')))
self.assertFalse(f.issuperset(FrameSet('-1')))
self.assertTrue(f.issuperset(expect))
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.issuperset(t)
e = set(expect).issuperset(v)
m = u'FrameSet("{0}").issuperset(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").issuperset(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, bool, m.format(test, t, bool, type(r)))
def _check_union(self, test, expect):
"""
Harness to test if the FrameSet.union call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the union of the empty FrameSet with any other is always the other
if not test and not expect:
self.assertEqual(f.union(FrameSet('1')), FrameSet('1'))
self.assertEqual(f.union(FrameSet('-1')), FrameSet('-1'))
self.assertEqual(f.union(expect), FrameSet.from_iterable(expect, sort=True))
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.union(t)
e = FrameSet.from_iterable(set(expect).union(v), sort=True)
m = u'FrameSet("{0}").union(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").union(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check_intersection(self, test, expect):
"""
Harness to test if the FrameSet.intersection call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the intersection of the empty FrameSet with any other is always the empty FrameSet
if not test and not expect:
self.assertEqual(f.intersection(FrameSet('1')), f)
self.assertEqual(f.intersection(FrameSet('-1')), f)
self.assertEqual(f.intersection(expect), f)
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.intersection(t)
e = FrameSet.from_iterable(set(expect).intersection(v), sort=True)
m = u'FrameSet("{0}").intersection(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").intersection(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check_difference(self, test, expect):
"""
Harness to test if the FrameSet.difference call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
# the difference of the empty FrameSet with any other is always the empty FrameSet
if not test and not expect:
            self.assertEqual(f.difference(FrameSet('1')), f)
            self.assertEqual(f.difference(FrameSet('-1')), f)
            self.assertEqual(f.difference(expect), f)
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.difference(t)
e = FrameSet.from_iterable(set(expect).difference(v), sort=True)
m = u'FrameSet("{0}").difference(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").difference(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t, FrameSet, type(r)))
def _check_symmetric_difference(self, test, expect):
"""
Harness to test if the FrameSet.symmetric_difference call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
        # the symmetric_difference of the empty FrameSet with any other is always the other
        if not test and not expect:
            self.assertEqual(f.symmetric_difference(FrameSet('1')), FrameSet('1'))
            self.assertEqual(f.symmetric_difference(FrameSet('-1')), FrameSet('-1'))
            self.assertEqual(f.symmetric_difference(expect), f)
return
for v in [[expect[0]], expect, expect + [max(expect)+1], [i + max(expect) + 1 for i in expect]]:
t = FrameSet.from_iterable(v)
r = f.symmetric_difference(t)
e = FrameSet.from_iterable(set(expect).symmetric_difference(v), sort=True)
m = u'FrameSet("{0}").symmetric_difference(FrameSet("{1}")) != {2}'
self.assertEqual(r, e, m.format(t, f, e))
m = u'FrameSet("{0}").symmetric_difference(FrameSet("{1}")) returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(test, t,
FrameSet, type(r)))
def _check_copy(self, test, expect):
"""
Harness to test if the FrameSet.copy call works properly.
:param test: the string to pass to FrameSet
:param expect: the expected list of values that FrameSet will hold
:return: None
"""
f = FrameSet(test)
r = f.copy()
self.assertIsNot(f, r)
self.assertEqual(f, r)
# due to the sheer number of combinations, we build the bulk of our tests on to TestFrameSet dynamically
for name, tst, exp in FRAME_SET_SHOULD_SUCCEED:
setattr(
TestFrameSet, 'testFrameSet%sInitSetsRange' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___init___range(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sInitSetsItems' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___init___items(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sInitSetsOrder' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___init___order(self, t, e))
setattr(
TestFrameSet, 'testFromIterable%s' % name,
lambda self, e=tst, i=exp: TestFrameSet._check_fromIterable(self, e, i))
setattr(
TestFrameSet, 'testFrameSet%sIndex' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_index(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sFrame' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_frame(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sHasFrameTrue' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_hasFrameTrue(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sHasFrameFalse' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_hasFrameTrue(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sStart' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_start(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sEnd' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_end(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sFrameRange' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_frameRange(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sInvertedFrameRange' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_invertedFrameRange(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sNormalize' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_normalize(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sSerialize' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_canSerialize(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sGetItem' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___getitem__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sLen' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___len__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sStr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___str__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sRepr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___repr__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sIter' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___iter__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sReversed' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___reversed__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sContains' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___contains__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sHash' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___hash__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sLessThan' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___lt__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sLessEqual' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___le__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sEqual' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___eq__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sNotEqual' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___ne__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sGreaterEqual' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___ge__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sGreaterThan' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___gt__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sAnd' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___and__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sRightAnd' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___rand__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sSub' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___sub__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sRightSub' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___rsub__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sOr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___or__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sRightOr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___ror__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sExclusiveOr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___xor__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sRightExclusiveOr' % name,
lambda self, t=tst, e=exp: TestFrameSet._check___rxor__(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sIsDisjoint' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_isdisjoint(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sIsSubset' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_issubset(self, t, e))
setattr(
        TestFrameSet, 'testFrameSet%sIsSuperset' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_issuperset(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sUnion' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_union(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sIntersection' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_intersection(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sDifference' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_difference(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sSymmetricDifference' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_symmetric_difference(self, t, e))
setattr(
TestFrameSet, 'testFrameSet%sCopy' % name,
lambda self, t=tst, e=exp: TestFrameSet._check_copy(self, t, e))
setattr(
TestFrameSet, 'testIsFrameRange%sShouldSucceed' % name,
lambda self, t=tst: TestFrameSet._check_isFrameRange(self, t, True))
for name, tst in FRAME_SET_SHOULD_FAIL:
setattr(
TestFrameSet, 'testFrameSet%sInitHandlesMalformed' % name,
lambda self, t=tst: TestFrameSet._check___init____malformed(self, t))
setattr(
TestFrameSet, 'testIsFrameRange%sShouldFail' % name,
lambda self, t=tst: TestFrameSet._check_isFrameRange(self, t, False))
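# Note: the default arguments (t=tst, e=exp) in the lambdas above are essential;
# they bind the current loop values at definition time.  A bare closure such as
#     lambda self: TestFrameSet._check_index(self, tst, exp)
# would late-bind tst/exp, and every generated test would then run with the
# values from the final loop iteration only.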
class TestFramesToFrameRange(unittest.TestCase):
"""
    Exercise the framesToFrameRange func. Due to the sheer number of permutations, we'll add most tests dynamically.
"""
def _check_frameToRangeEquivalence(self, test, expect):
f = FrameSet(test)
frange = framesToFrameRange(expect, sort=False)
r = FrameSet(frange)
m = '{0!r} != {1!r}'
self.assertEqual(f, r, m.format(f, r))
m = '{0!r} != {1!r} ; got type {2!r}'
self.assertIsInstance(frange, native_str, m.format(frange, native_str, type(frange)))
# due to the sheer number of combinations, we build the bulk of our tests on to TestFramesToFrameRange dynamically
for name, tst, exp in FRAME_SET_SHOULD_SUCCEED:
setattr(
TestFramesToFrameRange, 'testFramesToRangeEquivalence%s' % name,
lambda self, t=tst, e=exp: TestFramesToFrameRange._check_frameToRangeEquivalence(self, t, e))
class TestFrameSetFromRangeConstructor(unittest.TestCase):
"""
    Exercise the FrameSet.from_range() constructor. Due to the sheer number of permutations, we'll add most tests dynamically.
"""
def _check_fromRange(self, start, end, step, expect):
"""
        Harness to test if the FrameSet.from_range call works properly.
        :param start: the start frame
        :param end: the end frame
        :param step: the step between frames
        :param expect: the string to use to build the expected FrameSet, which will be normalized for comparison
        :return: None
"""
e = FrameSet(expect)
r = FrameSet.from_range(start, end, step)
m = u'FrameSet.fromRange({0}, {1}) != {2!r}: got {3!r}'
self.assertEqual(r, e, m.format(start, end, e, r))
m = u'FrameSet.fromRange({0}, {1}) returns {2}: got {3}'
self.assertIsInstance(r, FrameSet, m.format(start, end, FrameSet, type(r)))
# add tests dynamically
for name, start, end, step_, exp in FRAME_SET_FROM_RANGE_SHOULD_SUCCEED:
setattr(
TestFrameSetFromRangeConstructor, 'testFromRange%s' % name,
lambda self, s=start, e=end, step=step_, exp=exp: TestFrameSetFromRangeConstructor._check_fromRange(self, s, e, step, exp))
if __name__ == '__main__':
unittest.main(verbosity=1)
| mit | 3,327,696,484,198,525,400 | 42.81583 | 131 | 0.574026 | false |
kukushdi3981/sel-1_test-project | task14_check_handling_new_windows.py | 1 | 3198 | import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
@pytest.fixture
def driver(request):
wd = webdriver.Chrome() # Optional argument, if not specified will search path.
# wd = webdriver.Ie()
print(wd.capabilities)
# wd.implicitly_wait(10)
request.addfinalizer(wd.quit)
return wd
def login(driver, username, password):
driver.find_element_by_name("username").send_keys(username)
driver.find_element_by_name("password").send_keys(password)
driver.find_element_by_name("login").click()
def logout(driver):
WebDriverWait(driver, 5).until(lambda driver : driver.find_element_by_css_selector("div.header"))
driver.find_element_by_css_selector("a[title='Logout']").click()
def open_add_new_country_page(driver):
WebDriverWait(driver, 5).until(lambda driver : driver.find_element_by_css_selector("div#box-apps-menu-wrapper"))
driver.find_element_by_css_selector("div#box-apps-menu-wrapper a[href$=countries]").click()
    # check that the page heading appears after the click
WebDriverWait(driver, 5).until(lambda driver : driver.find_element_by_css_selector("h1"))
driver.find_element_by_css_selector("#content a.button").click()
def open_and_close_new_windows(webdriver, element):
wait = WebDriverWait(webdriver, 10)
    # remember the handle of the current window
main_window = webdriver.current_window_handle
    # remember the handles of the windows that are already open
exist_windows = webdriver.window_handles
    # open the new window
element.click()
    # wait for a new window to appear,
    # i.e. one whose handle is not in the exist_windows list
wait.until(lambda webdriver: len(exist_windows) != len(webdriver.window_handles))
handles = webdriver.window_handles
handles.remove(main_window)
    # switch to the new window
webdriver.switch_to_window(handles[0])
# webdriver.switch_to_window(webdriver.window_handles[-1])
    # wait for the page in the new window to load
wait.until(lambda webdriver : webdriver.find_element_by_css_selector("h1"))
webdriver.close()
    # switch back to the original window
webdriver.switch_to_window(main_window)
def click_links_to_open_windows(driver):
WebDriverWait(driver, 5).until(lambda driver : driver.find_element_by_css_selector("td#content"))
links = driver.find_elements_by_css_selector("form a[target='_blank']")
for link in links:
open_and_close_new_windows(driver, link)
driver.find_element_by_css_selector("span.button-set button[name='cancel']").click()
def test_check_handle_new_windows(driver):
driver.get('http://localhost/litecart/admin/')
login(driver, "admin", "admin")
open_add_new_country_page(driver)
click_links_to_open_windows(driver)
logout(driver)
| apache-2.0 | 3,625,667,480,234,511,400 | 37.853333 | 116 | 0.731984 | false |
mathandy/Classifiers2LearnWith | classifiers/tensorflow/vgg16_pre-trained.py | 1 | 9534 | """A pre-trained implementation of VGG16 with weights trained on ImageNet."""
##########################################################################
# Special thanks to
# http://www.cs.toronto.edu/~frossard/post/vgg16/
# for converting the caffe VGG16 pre-trained weights to TensorFlow
# this file is essentially just a restylized version of his vgg16.py
##########################################################################
from __future__ import print_function, absolute_import, division
import os
import numpy as np
from scipy.misc import imread, imresize
import tensorflow as tf
_debug = True
def conv_layer(input_tensor, diameter, in_dim, out_dim, name=None):
    """Creates a 2-D convolutional layer with ReLU activation.
Args:
input_tensor: A `Tensor`.
diameter: An `int`, the width and also height of the filter.
in_dim: An `int`, the number of input channels.
out_dim: An `int`, the number of output channels.
name: A `str`, the name for the operation defined by this function.
"""
with tf.name_scope(name):
filter_shape = (diameter, diameter, in_dim, out_dim)
initial_weights = tf.truncated_normal(filter_shape, stddev=0.1)
weights = tf.Variable(initial_weights, name='weights')
conv = tf.nn.conv2d(input=input_tensor,
filter=weights,
strides=[1, 1, 1, 1],
padding='SAME',
name='convolution')
initial_biases = tf.constant(1.0, shape=[out_dim], dtype=tf.float32)
biases = tf.Variable(initial_biases, name='biases')
preactivations = tf.nn.bias_add(conv, biases, name='bias_addition')
activations = tf.nn.relu(preactivations, name='activation')
return activations, weights, biases
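# Usage sketch for conv_layer (illustrative only; it mirrors how the VGG16
# graph below wires its first convolution, the tensor names are hypothetical):
#
#     images = tf.placeholder(tf.float32, (None, 224, 224, 3))
#     act, w, b = conv_layer(images, diameter=3, in_dim=3, out_dim=64,
#                            name='conv1_1')   # act has shape (?, 224, 224, 64)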
def fc_layer(in_tensor, in_dim, out_dim, sigmoid=tf.nn.relu, name=None):
    """Creates a fully-connected layer (ReLU activation by default).
Args:
in_tensor: A `Tensor`.
in_dim: An `int`, the number of input channels.
out_dim: An `int`, the number of output channels.
sigmoid: A `function`, the activation operation, defaults to tf.nn.relu.
name: A `str`, the name for the operation defined by this function.
"""
with tf.name_scope(name):
initial_weights = tf.truncated_normal((in_dim, out_dim), stddev=0.1)
weights = tf.Variable(initial_weights, name='weights')
initial_biases = tf.constant(0.0, shape=[out_dim], dtype=tf.float32)
biases = tf.Variable(initial_biases, name='biases')
preactivations = tf.nn.bias_add(tf.matmul(in_tensor, weights), biases)
activations = sigmoid(preactivations, name='activation')
return activations, weights, biases
class PreTrainedVGG16:
def __init__(self, weights=None, session=None):
self.input_images = tf.placeholder(tf.float32, (None, 224, 224, 3))
self.activations, self.parameters = self._build_graph()
        self.output = self.activations['fc3']
        self.session = session
        if weights is not None and session is not None:
            self.load_weights(weights, session)
def load_weights(self, weight_file, session):
weights = np.load(weight_file)
keys = sorted(weights.keys())
for i, k in enumerate(keys):
session.run(self.parameters[i].assign(weights[k]))
@staticmethod
def get_class_names():
with open('ImageNet_Classes.txt') as names_file:
return [l.replace('\n', '') for l in names_file]
    def get_output(self, images, auto_resize=True):
        """Takes in a list of images and returns softmax probabilities."""
if auto_resize:
images_ = [imresize(im, (224, 224)) for im in images]
else:
images_ = images
feed_dict = {self.input_images: images_}
        return self.session.run(self.output, feed_dict)[0]
    def get_activations(self, images, auto_resize=True):
        """Takes in a list of images and returns the activation dictionary."""
if auto_resize:
images_ = np.array([imresize(im, (224, 224)) for im in images])
else:
images_ = np.array(images)
feed_dict = {self.input_images: images_}
        return self.session.run(self.activations, feed_dict)
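    # Example (sketch): pull one layer's activations for a batch of images,
    # e.g. the 4096-d fc2 features of a PreTrainedVGG16 instance `vgg`:
    #     fc2_features = vgg.get_activations(images)['fc2']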
def _build_graph(self):
parameters = [] # storage for trainable parameters
# pooling arguments
_ksize = [1, 2, 2, 1]
_strides = [1, 2, 2, 1]
# center the input images
with tf.name_scope('preprocess_centering'):
mean = tf.constant([123.68, 116.779, 103.939], dtype=tf.float32,
shape=[1, 1, 1, 3], name='img_mean')
c_images = self.input_images - mean
# images --> conv1_1 --> conv1_2 --> pool1
        if _debug:
            print("centered input shape:", c_images.get_shape())
conv1_1, weights1, biases1 = conv_layer(c_images, 3, 3, 64, 'conv1_1')
conv1_2, weights2, biases2 = conv_layer(conv1_1, 3, 64, 64, 'conv1_2')
pool1 = tf.nn.max_pool(conv1_2, _ksize, _strides, 'SAME', name='pool1')
parameters += [weights1, biases1, weights2, biases2]
# pool1 --> conv2_1 --> conv2_2 --> pool2
conv2_1, weights1, biases1 = conv_layer(pool1, 3, 64, 128, 'conv2_1')
conv2_2, weights2, biases2 = conv_layer(conv2_1, 3, 128, 128, 'conv2_2')
pool2 = tf.nn.max_pool(conv2_2, _ksize, _strides, 'SAME', name='pool2')
parameters += [weights1, biases1, weights2, biases2]
# pool2 --> conv3_1 --> conv3_2 --> conv3_3 --> pool3
conv3_1, weights1, biases1 = conv_layer(pool2, 3, 128, 256, 'conv3_1')
conv3_2, weights2, biases2 = conv_layer(conv3_1, 3, 256, 256, 'conv3_2')
conv3_3, weights3, biases3 = conv_layer(conv3_2, 3, 256, 256, 'conv3_3')
pool3 = tf.nn.max_pool(conv3_3, _ksize, _strides, 'SAME', name='pool3')
parameters += [weights1, biases1, weights2, biases2, weights3, biases3]
# pool3 --> conv4_1 --> conv4_2 --> conv4_3 --> pool4
conv4_1, weights1, biases1 = conv_layer(pool3, 3, 256, 512, 'conv4_1')
conv4_2, weights2, biases2 = conv_layer(conv4_1, 3, 512, 512, 'conv4_2')
conv4_3, weights3, biases3 = conv_layer(conv4_2, 3, 512, 512, 'conv4_3')
pool4 = tf.nn.max_pool(conv4_3, _ksize, _strides, 'SAME', name='pool4')
parameters += [weights1, biases1, weights2, biases2, weights3, biases3]
# pool4 --> conv5_1 --> conv5_2 --> conv5_3 --> pool5
conv5_1, weights1, biases1 = conv_layer(pool4, 3, 512, 512, 'conv5_1')
conv5_2, weights2, biases2 = conv_layer(conv5_1, 3, 512, 512, 'conv5_2')
conv5_3, weights3, biases3 = conv_layer(conv5_2, 3, 512, 512, 'conv5_3')
pool5 = tf.nn.max_pool(conv5_3, _ksize, _strides, 'SAME', name='pool5')
parameters += [weights1, biases1, weights2, biases2, weights3, biases3]
# pool5 --> flatten --> fc1 --> fc2 --> fc3
shape = int(np.prod(pool5.get_shape()[1:]))
pool5_flat = tf.reshape(pool5, [-1, shape])
fc1, weights1, biases1 = fc_layer(pool5_flat, shape, 4096, name='fc1')
fc2, weights2, biases2 = fc_layer(fc1, 4096, 4096, name='fc2')
fc3, weights3, biases3 = fc_layer(fc2, 4096, 1000, tf.nn.softmax, 'fc3')
parameters += [weights1, biases1, weights2, biases2, weights3, biases3]
activations = {
'conv1_1': conv1_1, 'conv1_2': conv1_2, 'pool1': pool1,
'conv2_1': conv2_1, 'conv2_2': conv2_2, 'pool2': pool2,
'conv3_1': conv3_1, 'conv3_2': conv3_2, 'conv3_3': conv3_3, 'pool3': pool3,
'conv4_1': conv4_1, 'conv4_2': conv4_2, 'conv4_3': conv4_3, 'pool4': pool4,
'conv5_1': conv5_1, 'conv5_2': conv5_2, 'conv5_3': conv5_3, 'pool5': pool5,
'fc1': fc1, 'fc2': fc2, 'fc3': fc3
}
return activations, parameters
if __name__ == '__main__':
# Get input
os.chdir("../../experiments/vgg16_pre-trained/")
imlist = ['testflash.jpg', 'testme.jpg']
im_names = [os.path.splitext(os.path.basename(imf))[0] for imf in imlist]
input_images = [imread(f, mode='RGB') for f in imlist]
# Check 'vgg16_weights.npz exists
if not os.path.isfile('vgg16_weights.npz'):
raise Exception(
"The weights I use here were converted from the Caffe Model Zoo "
"weights by Davi Frossard. He didn't include a license so I'm "
"hesistant to re-post them. Please download them from his "
"website:\nhttp://www.cs.toronto.edu/~frossard/post/vgg16/")
# Build VGG16
if _debug:
sess = tf.InteractiveSession()
tf.summary.FileWriter('TensorBoard', sess.graph)
else:
sess = tf.Session()
vgg = PreTrainedVGG16('vgg16_weights.npz', sess)
# Run images through network, return softmax probabilities
class_probabilities = vgg.get_output(input_images)
print(class_probabilities.shape)
# Get Class Names
class_names = vgg.get_class_names()
#NOTE: only one file at a time is working... must fix
# Report results
# for imf, cps in zip(imlist, class_probabilities_list):
imf = im_names[0]
print("Top Five Results for", imf + ':')
top5 = (np.argsort(class_probabilities)[::-1])[0:5]
with open(imf + '_results.txt', 'w') as fout:
for p in np.argsort(class_probabilities)[::-1]:
fout.write(str(class_probabilities[p]) + ' : ' + class_names[p] + '\n')
for p in top5:
print(class_probabilities[p], ' : ', class_names[p])
| mit | 1,991,808,772,521,995,800 | 43.344186 | 87 | 0.595133 | false |
zarr-developers/numcodecs | numcodecs/tests/test_shuffle.py | 1 | 4387 | from multiprocessing import Pool
from multiprocessing.pool import ThreadPool
import numpy as np
import pytest
try:
from numcodecs.shuffle import Shuffle
except ImportError: # pragma: no cover
pytest.skip(
"numcodecs.shuffle not available", allow_module_level=True
)
from numcodecs.tests.common import (check_encode_decode,
check_config,
check_backwards_compatibility)
codecs = [
Shuffle(),
Shuffle(elementsize=0),
Shuffle(elementsize=4),
Shuffle(elementsize=8)
]
# mix of dtypes: integer, float, bool, string
# mix of shapes: 1D, 2D, 3D
# mix of orders: C, F
arrays = [
np.arange(1000, dtype='i4'),
np.linspace(1000, 1001, 1000, dtype='f8'),
np.random.normal(loc=1000, scale=1, size=(100, 10)),
np.random.randint(0, 2, size=1000, dtype=bool).reshape(100, 10, order='F'),
np.random.choice([b'a', b'bb', b'ccc'], size=1000).reshape(10, 10, 10),
np.random.randint(0, 2**60, size=1000, dtype='u8').view('M8[ns]'),
np.random.randint(0, 2**60, size=1000, dtype='u8').view('m8[ns]'),
np.random.randint(0, 2**25, size=1000, dtype='u8').view('M8[m]'),
np.random.randint(0, 2**25, size=1000, dtype='u8').view('m8[m]'),
np.random.randint(-2**63, -2**63 + 20, size=1000, dtype='i8').view('M8[ns]'),
np.random.randint(-2**63, -2**63 + 20, size=1000, dtype='i8').view('m8[ns]'),
np.random.randint(-2**63, -2**63 + 20, size=1000, dtype='i8').view('M8[m]'),
np.random.randint(-2**63, -2**63 + 20, size=1000, dtype='i8').view('m8[m]'),
]
@pytest.mark.parametrize('array', arrays)
@pytest.mark.parametrize('codec', codecs)
def test_encode_decode(array, codec):
check_encode_decode(array, codec)
def test_config():
codec = Shuffle()
check_config(codec)
codec = Shuffle(elementsize=8)
check_config(codec)
def test_repr():
expect = "Shuffle(elementsize=0)"
actual = repr(Shuffle(elementsize=0))
assert expect == actual
expect = "Shuffle(elementsize=4)"
actual = repr(Shuffle(elementsize=4))
assert expect == actual
expect = "Shuffle(elementsize=8)"
actual = repr(Shuffle(elementsize=8))
assert expect == actual
expect = "Shuffle(elementsize=16)"
actual = repr(Shuffle(elementsize=16))
assert expect == actual
def test_eq():
assert Shuffle() == Shuffle()
assert Shuffle(elementsize=16) != Shuffle()
def _encode_worker(data):
compressor = Shuffle()
enc = compressor.encode(data)
return enc
def _decode_worker(enc):
compressor = Shuffle()
data = compressor.decode(enc)
return data
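# Note: the two worker helpers above are module-level on purpose:
# multiprocessing.Pool pickles the callable it maps, and only importable
# top-level functions pickle reliably (lambdas and locally defined functions
# do not), so defining them inside the test would break the process-pool case.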
@pytest.mark.parametrize('pool', (Pool, ThreadPool))
def test_multiprocessing(pool):
data = np.arange(1000000)
enc = _encode_worker(data)
pool = pool(5)
# test with process pool and thread pool
# test encoding
enc_results = pool.map(_encode_worker, [data] * 5)
assert all([len(enc) == len(e) for e in enc_results])
# test decoding
dec_results = pool.map(_decode_worker, [enc] * 5)
assert all([data.nbytes == len(d) for d in dec_results])
# tidy up
pool.close()
pool.join()
def test_backwards_compatibility():
check_backwards_compatibility(Shuffle.codec_id, arrays, codecs)
# def test_err_decode_object_buffer():
# check_err_decode_object_buffer(Shuffle())
# def test_err_encode_object_buffer():
# check_err_encode_object_buffer(Shuffle())
# def test_decompression_error_handling():
# for codec in codecs:
# with pytest.raises(RuntimeError):
# codec.decode(bytearray())
# with pytest.raises(RuntimeError):
# codec.decode(bytearray(0))
def test_expected_result():
# Each byte of the 4 byte uint64 is shuffled in such a way
# that for an array of length 4, the last byte of the last
# element becomes the first byte of the first element
# therefore [0, 0, 0, 1] becomes [2**((len-1)*8), 0, 0, 0]
# (where 8 = bits in a byte)
arr = np.array([0, 0, 0, 1], dtype='uint64')
codec = Shuffle(elementsize=arr.data.itemsize)
enc = codec.encode(arr)
assert np.frombuffer(enc.data, arr.dtype)[0] == 2**((len(arr)-1)*8)
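# Worked example of the assertion above (on a little-endian machine):
# byte-shuffling the four uint64 values [0, 0, 0, 1] groups the 0th byte of
# every element first, then the 1st bytes, and so on, so the first eight
# output bytes are [0, 0, 0, 1, 0, 0, 0, 0]; read back as a little-endian
# uint64 that equals 1 << 24 == 2**((4-1)*8).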
def test_incompatible_elementsize():
with pytest.raises(ValueError):
arr = np.arange(1001, dtype='u1')
codec = Shuffle(elementsize=4)
codec.encode(arr)
| mit | 2,365,699,436,646,874,600 | 28.05298 | 81 | 0.635742 | false |
SorenSeeberg/MrDatabase | mr_database/column.py | 1 | 1505 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
class DataTypes:
@staticmethod
def char(num_chars) -> str:
return f'CHAR({num_chars})'
@staticmethod
def varchar(num_chars=None) -> str:
if num_chars:
return f'VARCHAR({num_chars})'
else:
return 'VARCHAR'
smallint = 'SMALLINT'
integer = 'INTEGER'
datetime = 'DATETIME'
blob = 'BLOB'
class Column:
data_types: DataTypes = DataTypes
def __init__(self,
data_type: str,
data_type_var=None,
default=None,
pk: bool=False,
fk: 'Table.__subclasses__'=None,
unique: bool=False,
not_null: bool=False,
display_name: str=None):
self.data_type = data_type
self.data_type_var = data_type_var
self.default = default
self.pk = pk
if not fk:
self.fk = False
elif type(fk[0]) == str:
self.fk_table_name = fk[0]
self.fk_property = fk[1]
self.fk = True
else:
self.fk_table = fk[0]
self.fk_property = fk[1]
self.fk = True
self.unique = unique
self.not_null = not_null
self.display_name = display_name
def __len__(self):
pass
def __repr__(self) -> str:
return f'Column({self.data_type})'
def __eq__(self, other: 'Column') -> bool:
pass
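# Usage sketch (illustrative only; the table and column names below are made up):
#
#     id_column = Column(DataTypes.integer, pk=True)
#     name_column = Column(DataTypes.varchar(50), not_null=True, display_name='Name')
#     city_fk = Column(DataTypes.integer, fk=('City', 'id'))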
| mit | -5,761,736,747,812,785,000 | 19.337838 | 49 | 0.483721 | false |
rickshinners/blinkenlights | app/plugins/plugin_loader.py | 1 | 2974 | from TestPlugin import TestPlugin
from JenkinsPlugin import JenkinsPlugin, JenkinsHistoryPlugin
import logging
def load_plugins(config, scheduler, set_pixel):
logger = logging.getLogger(__name__)
logger.info("Stopping any existing jobs")
scheduler.remove_all_jobs()
if config is None or len(config) == 0:
logger.info("No plugins configured")
return
for plugin_name in config:
logger.info("Loading plugin: %s" % plugin_name)
try:
plugin_config = config[plugin_name]
plugin = _load_plugin_type(plugin_config, set_pixel)
schedule_config = plugin_config['schedule']
schedule_type = schedule_config['type']
if schedule_type == 'cron':
scheduler.add_job(plugin.run, 'cron', id=plugin_name,
second=schedule_config.get('second', '*'),
minute=schedule_config.get('minute', '*'),
hour=schedule_config.get('hour', '*'),
day_of_week=schedule_config.get('day_of_week', '*'),
week=schedule_config.get('week', '*'),
day=schedule_config.get('day', '*'),
month=schedule_config.get('month', '*'),
year=schedule_config.get('year', '*'),
start_date=schedule_config.get('start_date', None),
end_date=schedule_config.get('end_date', None))
elif schedule_type == 'interval':
scheduler.add_job(plugin.run, 'interval', id=plugin_name,
seconds=schedule_config.get('seconds', 0),
minutes=schedule_config.get('minutes', 0),
hours=schedule_config.get('hours', 0),
days=schedule_config.get('days', 0),
weeks=schedule_config.get('weeks', 0),
start_date=schedule_config.get('start_date', None),
end_date=schedule_config.get('end_date', None))
elif schedule_type == 'immediate':
scheduler.add_job(plugin.run, 'date', id=plugin_name)
else:
raise Exception("Unknown schedule type: %s" % schedule_type)
        except Exception:
logger.exception("Could not load plugin: %s" % plugin_name)
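# Sketch of the configuration shape this loader expects, inferred from the
# lookups above (plugin names and any plugin-specific options are hypothetical):
#
#     config = {
#         'build_status': {
#             'plugin_type': 'jenkins',
#             'schedule': {'type': 'interval', 'seconds': 30},
#             # ... options consumed by JenkinsPlugin itself ...
#         },
#         'self_test': {
#             'plugin_type': 'TestPlugin',
#             'schedule': {'type': 'immediate'},
#         },
#     }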
def _load_plugin_type(config, set_pixel):
type_name = config['plugin_type']
if type_name == 'TestPlugin':
return TestPlugin(config, set_pixel)
elif type_name == 'jenkins':
return JenkinsPlugin(config, set_pixel)
elif type_name == 'jenkins_history':
return JenkinsHistoryPlugin(config, set_pixel)
else:
raise Exception("Unknown plugin type: %s" % type_name)
| mit | -5,793,354,938,301,005,000 | 47.754098 | 86 | 0.520511 | false |
sserrot/champion_relationships | venv/Lib/site-packages/networkx/tests/test_convert.py | 1 | 11765 | #!/usr/bin/env python
import pytest
import networkx as nx
from networkx.testing import assert_nodes_equal, assert_edges_equal, assert_graphs_equal
from networkx.convert import (to_networkx_graph,
to_dict_of_dicts,
from_dict_of_dicts,
to_dict_of_lists,
from_dict_of_lists)
from networkx.generators.classic import barbell_graph, cycle_graph
class TestConvert():
def edgelists_equal(self, e1, e2):
return sorted(sorted(e) for e in e1) == sorted(sorted(e) for e in e2)
def test_simple_graphs(self):
for dest, source in [(to_dict_of_dicts, from_dict_of_dicts),
(to_dict_of_lists, from_dict_of_lists)]:
G = barbell_graph(10, 3)
G.graph = {}
dod = dest(G)
# Dict of [dicts, lists]
GG = source(dod)
assert_graphs_equal(G, GG)
GW = to_networkx_graph(dod)
assert_graphs_equal(G, GW)
GI = nx.Graph(dod)
assert_graphs_equal(G, GI)
# With nodelist keyword
P4 = nx.path_graph(4)
P3 = nx.path_graph(3)
P4.graph = {}
P3.graph = {}
dod = dest(P4, nodelist=[0, 1, 2])
Gdod = nx.Graph(dod)
assert_graphs_equal(Gdod, P3)
def test_exceptions(self):
# NX graph
class G(object):
adj = None
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
# pygraphviz agraph
class G(object):
is_strict = None
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
# Dict of [dicts, lists]
G = {"a": 0}
pytest.raises(TypeError, to_networkx_graph, G)
# list or generator of edges
class G(object):
next = None
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
# no match
pytest.raises(nx.NetworkXError, to_networkx_graph, "a")
def test_digraphs(self):
for dest, source in [(to_dict_of_dicts, from_dict_of_dicts),
(to_dict_of_lists, from_dict_of_lists)]:
G = cycle_graph(10)
# Dict of [dicts, lists]
dod = dest(G)
GG = source(dod)
assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GG.edges()))
GW = to_networkx_graph(dod)
assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GW.edges()))
GI = nx.Graph(dod)
assert_nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GI.edges()))
G = cycle_graph(10, create_using=nx.DiGraph)
dod = dest(G)
GG = source(dod, create_using=nx.DiGraph)
assert sorted(G.nodes()) == sorted(GG.nodes())
assert sorted(G.edges()) == sorted(GG.edges())
GW = to_networkx_graph(dod, create_using=nx.DiGraph)
assert sorted(G.nodes()) == sorted(GW.nodes())
assert sorted(G.edges()) == sorted(GW.edges())
GI = nx.DiGraph(dod)
assert sorted(G.nodes()) == sorted(GI.nodes())
assert sorted(G.edges()) == sorted(GI.edges())
def test_graph(self):
g = nx.cycle_graph(10)
G = nx.Graph()
G.add_nodes_from(g)
G.add_weighted_edges_from((u, v, u) for u, v in g.edges())
# Dict of dicts
dod = to_dict_of_dicts(G)
GG = from_dict_of_dicts(dod, create_using=nx.Graph)
assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GG.edges()))
GW = to_networkx_graph(dod, create_using=nx.Graph)
assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GW.edges()))
GI = nx.Graph(dod)
assert sorted(G.nodes()) == sorted(GI.nodes())
assert sorted(G.edges()) == sorted(GI.edges())
# Dict of lists
dol = to_dict_of_lists(G)
GG = from_dict_of_lists(dol, create_using=nx.Graph)
# dict of lists throws away edge data so set it to none
enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)]
assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
assert_edges_equal(enone, sorted(GG.edges(data=True)))
GW = to_networkx_graph(dol, create_using=nx.Graph)
assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
assert_edges_equal(enone, sorted(GW.edges(data=True)))
GI = nx.Graph(dol)
assert_nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
assert_edges_equal(enone, sorted(GI.edges(data=True)))
def test_with_multiedges_self_loops(self):
G = cycle_graph(10)
XG = nx.Graph()
XG.add_nodes_from(G)
XG.add_weighted_edges_from((u, v, u) for u, v in G.edges())
XGM = nx.MultiGraph()
XGM.add_nodes_from(G)
XGM.add_weighted_edges_from((u, v, u) for u, v in G.edges())
XGM.add_edge(0, 1, weight=2) # multiedge
XGS = nx.Graph()
XGS.add_nodes_from(G)
XGS.add_weighted_edges_from((u, v, u) for u, v in G.edges())
XGS.add_edge(0, 0, weight=100) # self loop
# Dict of dicts
# with self loops, OK
dod = to_dict_of_dicts(XGS)
GG = from_dict_of_dicts(dod, create_using=nx.Graph)
assert_nodes_equal(XGS.nodes(), GG.nodes())
assert_edges_equal(XGS.edges(), GG.edges())
GW = to_networkx_graph(dod, create_using=nx.Graph)
assert_nodes_equal(XGS.nodes(), GW.nodes())
assert_edges_equal(XGS.edges(), GW.edges())
GI = nx.Graph(dod)
assert_nodes_equal(XGS.nodes(), GI.nodes())
assert_edges_equal(XGS.edges(), GI.edges())
# Dict of lists
# with self loops, OK
dol = to_dict_of_lists(XGS)
GG = from_dict_of_lists(dol, create_using=nx.Graph)
# dict of lists throws away edge data so set it to none
enone = [(u, v, {}) for (u, v, d) in XGS.edges(data=True)]
assert_nodes_equal(sorted(XGS.nodes()), sorted(GG.nodes()))
assert_edges_equal(enone, sorted(GG.edges(data=True)))
GW = to_networkx_graph(dol, create_using=nx.Graph)
assert_nodes_equal(sorted(XGS.nodes()), sorted(GW.nodes()))
assert_edges_equal(enone, sorted(GW.edges(data=True)))
GI = nx.Graph(dol)
assert_nodes_equal(sorted(XGS.nodes()), sorted(GI.nodes()))
assert_edges_equal(enone, sorted(GI.edges(data=True)))
# Dict of dicts
# with multiedges, OK
dod = to_dict_of_dicts(XGM)
GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph,
multigraph_input=True)
assert_nodes_equal(sorted(XGM.nodes()), sorted(GG.nodes()))
assert_edges_equal(sorted(XGM.edges()), sorted(GG.edges()))
GW = to_networkx_graph(dod, create_using=nx.MultiGraph, multigraph_input=True)
assert_nodes_equal(sorted(XGM.nodes()), sorted(GW.nodes()))
assert_edges_equal(sorted(XGM.edges()), sorted(GW.edges()))
GI = nx.MultiGraph(dod) # convert can't tell whether to duplicate edges!
assert_nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
#assert_not_equal(sorted(XGM.edges()), sorted(GI.edges()))
assert not sorted(XGM.edges()) == sorted(GI.edges())
GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph,
multigraph_input=False)
assert_nodes_equal(sorted(XGM.nodes()), sorted(GE.nodes()))
assert sorted(XGM.edges()) != sorted(GE.edges())
GI = nx.MultiGraph(XGM)
assert_nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
assert_edges_equal(sorted(XGM.edges()), sorted(GI.edges()))
GM = nx.MultiGraph(G)
assert_nodes_equal(sorted(GM.nodes()), sorted(G.nodes()))
assert_edges_equal(sorted(GM.edges()), sorted(G.edges()))
# Dict of lists
# with multiedges, OK, but better write as DiGraph else you'll
# get double edges
dol = to_dict_of_lists(G)
GG = from_dict_of_lists(dol, create_using=nx.MultiGraph)
assert_nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GG.edges()))
GW = to_networkx_graph(dol, create_using=nx.MultiGraph)
assert_nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GW.edges()))
GI = nx.MultiGraph(dol)
assert_nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(GI.edges()))
def test_edgelists(self):
P = nx.path_graph(4)
e = [(0, 1), (1, 2), (2, 3)]
G = nx.Graph(e)
assert_nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(P.edges()))
assert_edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
e = [(0, 1, {}), (1, 2, {}), (2, 3, {})]
G = nx.Graph(e)
assert_nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(P.edges()))
assert_edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
e = ((n, n + 1) for n in range(3))
G = nx.Graph(e)
assert_nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
assert_edges_equal(sorted(G.edges()), sorted(P.edges()))
assert_edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
def test_directed_to_undirected(self):
edges1 = [(0, 1), (1, 2), (2, 0)]
edges2 = [(0, 1), (1, 2), (0, 2)]
assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1)
assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1)
assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1)
assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1)
assert self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(),
edges1)
assert self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(),
edges1)
assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1)
assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1)
def test_attribute_dict_integrity(self):
# we must not replace dict-like graph data structures with dicts
G = nx.OrderedGraph()
G.add_nodes_from("abc")
H = to_networkx_graph(G, create_using=nx.OrderedGraph)
assert list(H.nodes) == list(G.nodes)
H = nx.OrderedDiGraph(G)
assert list(H.nodes) == list(G.nodes)
def test_to_edgelist(self):
G = nx.Graph([(1, 1)])
elist = nx.to_edgelist(G, nodelist=list(G))
assert_edges_equal(G.edges(data=True), elist)
def test_custom_node_attr_dict_safekeeping(self):
class custom_dict(dict):
pass
class Custom(nx.Graph):
node_attr_dict_factory = custom_dict
g = nx.Graph()
g.add_node(1, weight=1)
h = Custom(g)
assert isinstance(g._node[1], dict)
assert isinstance(h._node[1], custom_dict)
# this raise exception
# h._node.update((n, dd.copy()) for n, dd in g.nodes.items())
# assert isinstance(h._node[1], custom_dict)
| mit | -8,935,147,732,230,737,000 | 41.626812 | 88 | 0.571101 | false |
anthonynguyen/UrTSB | urtsb_src/ui/adv_filter_window.py | 1 | 27886 | #
# Copyright (C) 2010 Sorcerer
#
# This file is part of UrTSB.
#
# UrTSB is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UrTSB is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with UrTSB. If not, see <http://www.gnu.org/licenses/>.
#
from threading import Thread
from urtsb_src.filemanager import FileManager, filterkey, cfgvalues
from urtsb_src.globals import Globals
from urtsb_src.ui.gametypes_filter import GametypesFilter
import gtk
class AdvancedFilterWindow(gtk.Dialog):
    """
    Dialog window that provides the advanced server filter settings.
    """
def __init__(self, filter):
"""
Constructor
"""
gtk.Dialog.__init__(self, 'Advanced Filter Settings', None,\
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
self.set_icon_from_file(Globals.icon_dir +'/logo.png')
self.set_default_size(700, 500)
self.filter = filter
#buttons
applybutton = gtk.Button('Apply')
cancelbutton = gtk.Button('Cancel')
defaultbutton = gtk.Button('Defaults')
resetbutton = gtk.Button('Reset')
applybutton.connect("clicked", self.on_apply_clicked)
cancelbutton.connect("clicked", self.on_cancel_clicked)
defaultbutton.connect("clicked", self.on_default_clicked)
resetbutton.connect("clicked", self.on_reset_clicked)
self.action_area.pack_start(defaultbutton, False, False)
self.action_area.pack_start(resetbutton, False, False)
self.action_area.pack_start(cancelbutton, False, False)
self.action_area.pack_start(applybutton, False, False)
self.setup_filter_elements()
self.set_default_values(False)
self.show_all()
def setup_filter_elements(self):
"""
setup the filter elements
"""
basic_filter_box = gtk.HBox()
self.vbox.pack_start(basic_filter_box, False, False)
queryframe = gtk.Frame('Query Parameters')
queryframe.set_border_width(2)
filterframe = gtk.Frame('Basic Filter')
filterframe.set_border_width(2)
basic_filter_box.pack_start(queryframe, False, False)
basic_filter_box.pack_start(filterframe, True, True)
#query parameters, empty and full
querybox = gtk.VBox()
querybox.set_border_width(5)
self.checkbox_showfull = gtk.CheckButton('show full')
self.checkbox_showfull.show()
self.checkbox_showempty = gtk.CheckButton('show empty')
self.checkbox_showempty.show()
#filterframe content
filtertable = gtk.Table(2,5)
filtertable.set_border_width(5)
filterframe.add(filtertable)
self.checkbox_hide_non_responsive = gtk.CheckButton('hide non responsive')
self.checkbox_hide_passworded = gtk.CheckButton('hide passworded')
minplayerlabel = gtk.Label('min. players:')
maxplayerlabel = gtk.Label('max. players:')
self.minplayerentry = gtk.SpinButton()
self.maxplayerentry = gtk.SpinButton()
self.minplayerentry.set_increments(1, 10)
self.maxplayerentry.set_increments(1, 10)
self.minplayerentry.set_range(0,99)
self.maxplayerentry.set_range(0,99)
map_label = gtk.Label('Mapname contains:')
server_label = gtk.Label('Servername contains:')
self.mapnameentry = gtk.Entry()
self.servernameentry = gtk.Entry()
filtertable.attach(self.checkbox_hide_non_responsive, 0,1,0,1 )
filtertable.attach(self.checkbox_hide_passworded, 0,1,1,2 )
filtertable.attach(minplayerlabel, 1,2,0,1 )
filtertable.attach(maxplayerlabel, 1,2,1,2 )
filtertable.attach(self.minplayerentry, 2,3,0,1 )
filtertable.attach(self.maxplayerentry, 2,3,1,2 )
filtertable.attach(map_label, 3,4,0,1)
filtertable.attach(self.mapnameentry, 4,5,0,1)
filtertable.attach(server_label, 3,4,1,2)
filtertable.attach(self.servernameentry, 4,5,1,2)
querybox.pack_start(self.checkbox_showfull)
querybox.pack_start(self.checkbox_showempty)
queryframe.add(querybox)
self.gametypesfilter = GametypesFilter()
self.vbox.pack_start(self.gametypesfilter, False, False)
self.create_gear_chooser()
self.create_cvar_filter()
def create_gear_chooser(self):
"""
Creates the ui elements to choose a g_gear configuration
"""
gear_frame = gtk.Frame('Gear Settings Filter')
gear_type_box = gtk.HBox()
#the include exclude chooser
self.radio_gear_disable = gtk.RadioButton(None, 'Disabled')
self.radio_gear_include = gtk.RadioButton(self.radio_gear_disable, \
'Include (equals)')
self.radio_gear_exclude = gtk.RadioButton(self.radio_gear_disable, \
'Exclude (not equals)')
gear_type_box.pack_start(self.radio_gear_disable)
gear_type_box.pack_start(self.radio_gear_include)
gear_type_box.pack_start(self.radio_gear_exclude)
gear_type_box.set_border_width(5)
gearhbox = gtk.HBox()
gear_frame.add(gearhbox)
gear_choose_area_vbox = gtk.VBox()
gear_table = gtk.Table(4,2)
gear_table.set_border_width(15)
gearhbox.pack_start(gear_choose_area_vbox)
gear_choose_area_vbox.pack_start(gear_type_box)
gear_choose_area_vbox.pack_start(gear_table)
#the checkboxes
self.checkbox_grenades = gtk.CheckButton('Grenades')
self.checkbox_snipers = gtk.CheckButton('Snipers')
self.checkbox_spas = gtk.CheckButton('Spas')
self.checkbox_pistols = gtk.CheckButton('Pistols')
self.checkbox_automatics = gtk.CheckButton('Automatic Guns')
self.checkbox_negev = gtk.CheckButton('Negev')
#connect to the toggled signal
self.checkbox_grenades.connect('toggled', self.on_gear_checkbox_changed)
self.checkbox_snipers.connect('toggled', self.on_gear_checkbox_changed)
self.checkbox_spas.connect('toggled', self.on_gear_checkbox_changed)
self.checkbox_pistols.connect('toggled', self.on_gear_checkbox_changed)
self.checkbox_automatics.connect('toggled', \
self.on_gear_checkbox_changed)
self.checkbox_negev.connect('toggled', self.on_gear_checkbox_changed)
#the value textfield
self.gearvalue = gtk.Entry()
self.gearvalue.set_width_chars(4)
self.gearvalue.set_editable(False)
#the add button
add_button = gtk.Button('Add')
add_button.set_border_width(5)
add_button.connect('clicked', self.on_add_gear_value_clicked)
#now put all into the table
gear_table.attach(self.checkbox_grenades, 0,1,0,1 )
gear_table.attach(self.checkbox_snipers, 0,1,1,2 )
gear_table.attach(self.checkbox_spas, 0,1,2,3 )
gear_table.attach(self.gearvalue, 0,1,3,4 )
gear_table.attach(self.checkbox_pistols, 1,2,0,1 )
gear_table.attach(self.checkbox_automatics, 1,2,1,2 )
gear_table.attach(self.checkbox_negev, 1,2,2,3 )
gear_table.attach(add_button, 1,2,3,4 )
#gear settings treeview area
gear_values_vbox = gtk.VBox()
gearhbox.pack_start(gear_values_vbox)
gear_scrolled_window = gtk.ScrolledWindow()
gear_scrolled_window.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
gear_values_vbox.pack_start(gear_scrolled_window)
self.gearliststore = gtk.ListStore(str)
gear_set_treeview = gtk.TreeView(model=self.gearliststore)
self.gearlistview = gear_set_treeview
gear_scrolled_window.add(gear_set_treeview)
self.column_gear_value = gtk.TreeViewColumn("Gear Value")
gear_set_treeview.append_column(self.column_gear_value)
var_cell0=gtk.CellRendererText()
self.column_gear_value.pack_start(var_cell0, expand=True)
self.column_gear_value.add_attribute(var_cell0, 'text', 0)
self.column_gear_value.set_reorderable(True)
btn_hbox = gtk.HBox()
gear_values_vbox.pack_start(btn_hbox, False, False)
clear_button = gtk.Button('Clear')
clear_button.set_border_width(5)
btn_hbox.pack_start(clear_button, True, True)
clear_button.connect('clicked', self.on_clear_gear_list_clicked)
remove_button = gtk.Button('Remove Selected')
remove_button.set_border_width(5)
btn_hbox.pack_start(remove_button, True, True)
remove_button.connect('clicked', self.on_remove_selected_gear_value)
self.vbox.pack_start(gear_frame, False, False)
def create_cvar_filter(self):
"""
Creates the ui-elements for the custom server cvars filtering
"""
        cvar_frame = gtk.Frame('Custom Server CVARS Filtering')
cvar_main_hbox = gtk.HBox()
cvar_frame.add(cvar_main_hbox)
#settings editing area
cvar_set_vbox = gtk.VBox()
cvar_main_hbox.pack_start(cvar_set_vbox)
variable_label = gtk.Label('Variable:')
value_label = gtk.Label('Value:')
self.variable_entry = gtk.Entry()
self.value_entry = gtk.Entry()
editing_table = gtk.Table(5,2)
editing_table.attach(variable_label, 0,1,0,1)
editing_table.attach(self.variable_entry,1,2,0,1)
editing_table.attach(value_label, 0,1,1,2)
editing_table.attach(self.value_entry, 1,2,1,2)
editing_table.set_border_width(10)
cvar_set_vbox.pack_start(editing_table)
self.radio_cvar_include = gtk.RadioButton(None, 'Include (equals)')
self.radio_cvar_include.set_border_width(5)
self.radio_cvar_exclude = gtk.RadioButton(self.radio_cvar_include, \
'Exclude (not equals)')
self.radio_cvar_exclude.set_border_width(5)
editing_table.attach(self.radio_cvar_include, 1,2,2,3)
editing_table.attach(self.radio_cvar_exclude, 1,2,3,4)
add_button = gtk.Button('Add')
editing_table.attach(add_button, 1,2,4,5)
add_button.connect('clicked', self.on_add_var_filter_clicked)
#the treeview displaying current CVAR filter settings
cvar_values_vbox = gtk.VBox()
cvar_main_hbox.pack_start(cvar_values_vbox)
cvar_scrolled_window = gtk.ScrolledWindow()
cvar_scrolled_window.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
cvar_values_vbox.pack_start(cvar_scrolled_window)
self.cvarliststore = gtk.ListStore(str, str, str, object)
cvar_set_treeview = gtk.TreeView(model=self.cvarliststore)
self.varfilterview = cvar_set_treeview
cvar_scrolled_window.add(cvar_set_treeview)
self.column_cvar_variable = gtk.TreeViewColumn('Variable')
self.column_cvar_value = gtk.TreeViewColumn('Value')
self.column_cvar_type = gtk.TreeViewColumn('Type')
cvar_set_treeview.append_column(self.column_cvar_variable)
cvar_set_treeview.append_column(self.column_cvar_value)
cvar_set_treeview.append_column(self.column_cvar_type)
var_cell0=gtk.CellRendererText()
var_cell1=gtk.CellRendererText()
var_cell2=gtk.CellRendererText()
self.column_cvar_variable.pack_start(var_cell0, expand=True)
self.column_cvar_value.pack_start(var_cell1, expand=False)
self.column_cvar_type.pack_start(var_cell2, expand=False)
self.column_cvar_variable.add_attribute(var_cell0, 'text', 0)
self.column_cvar_value.add_attribute(var_cell1, 'text', 1)
self.column_cvar_type.add_attribute(var_cell2, 'text', 2)
btn_hbox = gtk.HBox()
cvar_values_vbox.pack_start(btn_hbox, False, False)
clear_button = gtk.Button('Clear')
clear_button.set_border_width(5)
btn_hbox.pack_start(clear_button, True, True)
clear_button.connect('clicked', self.on_clear_var_list_clicked)
remove_button = gtk.Button('Remove Selected')
remove_button.set_border_width(5)
btn_hbox.pack_start(remove_button, True, True)
remove_button.connect('clicked', self.on_remove_selected_var)
self.vbox.pack_start(cvar_frame, False, False)
def calculate_gear_value(self):
"""
Calculates the g_gear value
"""
retval = 63
if self.checkbox_grenades.get_active():
retval -= 1
if self.checkbox_snipers.get_active():
retval -= 2
if self.checkbox_spas.get_active():
retval -= 4
if self.checkbox_pistols.get_active():
retval -= 8
if self.checkbox_automatics.get_active():
retval -= 16
if self.checkbox_negev.get_active():
retval -= 32
return retval
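    # Illustrative note (not part of the original code): the value starts at 63
    # (all six bits set) and every active checkbox clears its bit, matching
    # Urban Terror's g_gear convention in which a cleared bit enables a weapon
    # group (assumption about the game's semantics). For example, with only the
    # grenade and sniper boxes checked the result is 63 - 1 - 2 = 60.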
def set_default_values(self, reset):
"""
Set default values to all input elements of the filter.
        Distinguishes between application defaults and the values that are
        stored in a file to remember user choices.
@param reset - boolean: if True use application defaults, otherwise load
values from file.
"""
self.gearliststore.clear()
self.cvarliststore.clear()
fm = FileManager()
stored_filter = fm.get_remembered_filter_parameters()
#gearcheckbox is not stored, only the listview
#initialize with all checked
self.checkbox_grenades.set_active(True)
self.checkbox_snipers.set_active(True)
self.checkbox_spas.set_active(True)
self.checkbox_pistols.set_active(True)
self.checkbox_automatics.set_active(True)
self.checkbox_negev.set_active(True)
        if reset or stored_filter is None: #reset to application defaults
self.gametypesfilter.checkbox_show_gametype_all.set_active(True)
# emits the 'toggled' signal
self.gametypesfilter.checkbox_show_gametype_all.toggled()
self.checkbox_hide_non_responsive.set_active(True)
self.checkbox_hide_passworded.set_active(True)
#defaults for min and maxplayer spinbuttons
self.minplayerentry.set_value(0)
self.maxplayerentry.set_value(99)
self.checkbox_showfull.set_active(False)
self.checkbox_showempty.set_active(False)
self.mapnameentry.set_text('')
self.servernameentry.set_text('')
else: #reset to stored values
#gametypes
value = fm.value_as_boolean(stored_filter[filterkey.GT_ALL])
            self.gametypesfilter.checkbox_show_gametype_all.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_BOMB])
self.gametypesfilter.checkbox_show_gametype_bomb.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_TS])
self.gametypesfilter.checkbox_show_gametype_survivor.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_CTF])
self.gametypesfilter.checkbox_show_gametype_ctf.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_TDM])
self.gametypesfilter.checkbox_show_gametype_tdm.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_CAH])
self.gametypesfilter.checkbox_show_gametype_cah.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_FTL])
self.gametypesfilter.checkbox_show_gametype_ftl.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.GT_FFA])
self.gametypesfilter.checkbox_show_gametype_ffa.set_active(value)
#other filters:
#defaults for min and maxplayer spinbuttons
value = int(stored_filter[filterkey.FLT_MIN_PLAYERS])
self.minplayerentry.set_value(value)
value = int(stored_filter[filterkey.FLT_MAX_PLAYERS])
self.maxplayerentry.set_value(value)
if filterkey.FLT_MAP_NAME in stored_filter:
self.mapnameentry.set_text(stored_filter[filterkey.\
FLT_MAP_NAME])
if filterkey.FLT_SERVER_NAME in stored_filter:
self.servernameentry.set_text(stored_filter[filterkey.\
FLT_SERVER_NAME])
value = fm.value_as_boolean(stored_filter[filterkey.\
FLT_HIDE_NON_RESP])
self.checkbox_hide_non_responsive.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.\
FLT_HIDE_PASSWORDED])
self.checkbox_hide_passworded.set_active(value)
#query params
value = fm.value_as_boolean(stored_filter[filterkey.QRY_SHOW_FULL])
self.checkbox_showfull.set_active(value)
value = fm.value_as_boolean(stored_filter[filterkey.QRY_SHOW_EMPTY])
self.checkbox_showempty.set_active(value)
#the gearvalue list
if filterkey.FLT_GEAR in stored_filter:
value = stored_filter[filterkey.FLT_GEAR]
if cfgvalues.DISABLED == value:
self.radio_gear_disable.set_active(True)
self.radio_gear_exclude.set_active(False)
self.radio_gear_include.set_active(False)
elif cfgvalues.INCLUDE == value:
self.radio_gear_disable.set_active(False)
self.radio_gear_exclude.set_active(False)
self.radio_gear_include.set_active(True)
elif cfgvalues.EXCLUDE == value:
self.radio_gear_disable.set_active(False)
self.radio_gear_exclude.set_active(True)
self.radio_gear_include.set_active(False)
if filterkey.FLT_GEAR_LIST in stored_filter:
for value in stored_filter[filterkey.FLT_GEAR_LIST]:
self.gearliststore.append([value])
if filterkey.FLT_VAR_LIST in stored_filter:
for value in stored_filter[filterkey.FLT_VAR_LIST]:
self.cvarliststore.append([value[0], value[1], \
value[2], value])
def save_filter(self):
"""
writes the current filter/query params to the filter dict
"""
fm = FileManager()
filter = fm.get_remembered_filter_parameters()
if not filter:
# TODO: clean up this dirty hack ;)
fm.filter = {}
filter = fm.filter
#process gametypes
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_all.get_active())
filter[filterkey.GT_ALL] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_bomb.get_active())
filter[filterkey.GT_BOMB] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_survivor.get_active())
filter[filterkey.GT_TS] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_ctf.get_active())
filter[filterkey.GT_CTF] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_tdm.get_active())
filter[filterkey.GT_TDM] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_cah.get_active())
filter[filterkey.GT_CAH] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_ftl.get_active())
filter[filterkey.GT_FTL] = value
value = fm.value_from_boolean(self.gametypesfilter.\
checkbox_show_gametype_ffa.get_active())
filter[filterkey.GT_FFA] = value
#other filters
filter[filterkey.FLT_MIN_PLAYERS] = self.\
minplayerentry.get_value_as_int()
filter[filterkey.FLT_MAX_PLAYERS] = self.\
maxplayerentry.get_value_as_int()
value = fm.value_from_boolean(self.\
checkbox_hide_non_responsive.get_active())
filter[filterkey.FLT_HIDE_NON_RESP] = value
value = fm.value_from_boolean(self.\
checkbox_hide_passworded.get_active())
filter[filterkey.FLT_HIDE_PASSWORDED] = value
#mapname and servername filter
filter[filterkey.FLT_MAP_NAME] = self.mapnameentry.get_text()
filter[filterkey.FLT_SERVER_NAME] = self.servernameentry.get_text()
#query params
value = fm.value_from_boolean(self.checkbox_showfull.get_active())
filter[filterkey.QRY_SHOW_FULL] = value
value = fm.value_from_boolean(self.checkbox_showempty.get_active())
filter[filterkey.QRY_SHOW_EMPTY] = value
if self.radio_gear_disable.get_active():
filter[filterkey.FLT_GEAR] = cfgvalues.DISABLED
elif self.radio_gear_include.get_active():
filter[filterkey.FLT_GEAR] = cfgvalues.INCLUDE
elif self.radio_gear_exclude.get_active():
filter[filterkey.FLT_GEAR] = cfgvalues.EXCLUDE
        #iterate over gearliststore to create a list of gearvalues
iter = self.gearliststore.iter_children(None)
gearvalues = [] #empty list
while iter:
value = self.gearliststore.get_value(iter, 0)
gearvalues.append(value)
iter = self.gearliststore.iter_next(iter)
filter[filterkey.FLT_GEAR_LIST] = gearvalues
#iterate over varliststore to create the list of filter vars
iter = self.cvarliststore.iter_children(None)
varlist = []
while iter:
varfilter = self.cvarliststore.get_value(iter, 3)
varlist.append(varfilter)
iter = self.cvarliststore.iter_next(iter)
filter[filterkey.FLT_VAR_LIST] = varlist
#write to file
t = Thread(target=fm.save_filter_to_remember)
t.setDaemon(True)
t.start()
def on_apply_clicked(self, widget):
"""
Callback of the Apply button
"""
self.save_filter()
self.destroy()
def on_cancel_clicked(self, widget):
"""
Callback of the Cancel button
"""
#do nothing just close the dialog
self.destroy()
def on_reset_clicked(self, widget):
"""
Callback of the reset button
Reset the filter to the last applied values
"""
self.set_default_values(False)
def on_add_gear_value_clicked(self, widget):
"""
Callback of the add button in the gear selection filter area
Adds the current gear value to the gear value list
"""
gearvalue = self.gearvalue.get_text()
self.gearliststore.append([gearvalue])
def on_clear_gear_list_clicked(self, widget):
"""
Callback of the clear gear list button
clears the treeview
"""
self.gearliststore.clear()
def on_clear_var_list_clicked(self, button):
"""
Callback of the clear varlist button
clears the treeview/liststore
"""
self.cvarliststore.clear()
def on_default_clicked(self, widget):
"""
Callback of the defaults button
Reset the filter to the default values (not the stored/last applied
values)
"""
self.set_default_values(True)
def on_gear_checkbox_changed(self, checkbox):
"""
Callback for the toggled signal of the gear (weapons) checkboxes
triggers the calculation of the g_gear value and sets it to the
text entry
"""
g_gear_value = self.calculate_gear_value()
self.gearvalue.set_text(str(g_gear_value))
def on_remove_selected_gear_value(self, button):
"""
Callback of the remove selected button of the gear value treeview list
"""
selection = self.gearlistview.get_selection()
result = selection.get_selected()
if result:
iter = result[1]
self.gearliststore.remove(iter)
def on_remove_selected_var(self, button):
"""
        Callback of the remove selected button of the custom filtering area
"""
selection = self.varfilterview.get_selection()
result = selection.get_selected()
if result:
iter = result[1]
self.cvarliststore.remove(iter)
def on_add_var_filter_clicked(self, button):
"""
Callback of the add button in the custom variable filtering area
"""
varname = self.variable_entry.get_text()
varvalue = self.value_entry.get_text()
#both values not None and larger than 0
        if varname is not None and len(varname) != 0 \
                and varvalue is not None and len(varvalue) != 0:
var = [None]*3
var[0] = varname
var[1] = varvalue
if self.radio_cvar_include.get_active():
var[2] = cfgvalues.INCLUDE
elif self.radio_cvar_exclude.get_active():
var[2] = cfgvalues.EXCLUDE
self.cvarliststore.append([var[0], var[1], var[2], var]) | gpl-3.0 | -1,009,639,285,081,446,900 | 38.952722 | 85 | 0.578426 | false |
Saturn/soccer-cli | soccer/writers.py | 1 | 14627 | import click
import csv
import datetime
import json
import io
from abc import ABCMeta, abstractmethod
from itertools import groupby
from collections import namedtuple
from soccer import leagueids, leagueproperties
LEAGUE_PROPERTIES = leagueproperties.LEAGUE_PROPERTIES
LEAGUE_IDS = leagueids.LEAGUE_IDS
def get_writer(output_format='stdout', output_file=None):
return globals()[output_format.capitalize()](output_file)
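# Example usage (illustrative only, not in the original file):
#     get_writer()                  # -> Stdout(None), via 'stdout'.capitalize()
#     get_writer('csv', 'out.csv')  # -> Csv('out.csv'), resolved through globals()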
class BaseWriter(object):
__metaclass__ = ABCMeta
def __init__(self, output_file):
self.output_filename = output_file
@abstractmethod
def live_scores(self, live_scores):
pass
@abstractmethod
def team_scores(self, team_scores, time):
pass
@abstractmethod
def team_players(self, team):
pass
@abstractmethod
def standings(self, league_table, league):
pass
@abstractmethod
def league_scores(self, total_data, time):
pass
class Stdout(BaseWriter):
def __init__(self, output_file):
self.Result = namedtuple("Result", "homeTeam, goalsHomeTeam, awayTeam, goalsAwayTeam")
enums = dict(
WIN="red",
LOSE="blue",
TIE="yellow",
MISC="green",
TIME="yellow",
CL_POSITION="green",
EL_POSITION="yellow",
RL_POSITION="red",
POSITION="blue"
)
self.colors = type('Enum', (), enums)
def live_scores(self, live_scores):
"""Prints the live scores in a pretty format"""
scores = sorted(live_scores, key=lambda x: x["league"])
for league, games in groupby(scores, key=lambda x: x["league"]):
self.league_header(league)
for game in games:
self.scores(self.parse_result(game), add_new_line=False)
click.secho(' %s' % Stdout.utc_to_local(game["time"],
use_12_hour_format=False),
fg=self.colors.TIME)
click.echo()
def team_scores(self, team_scores, time, show_datetime, use_12_hour_format):
"""Prints the teams scores in a pretty format"""
for score in team_scores["matches"]:
if score["status"] == "FINISHED":
click.secho("%s\t" % score["utcDate"].split('T')[0],
fg=self.colors.TIME, nl=False)
self.scores(self.parse_result(score))
elif show_datetime:
self.scores(self.parse_result(score), add_new_line=False)
click.secho(' %s' % Stdout.utc_to_local(score["utcDate"],
use_12_hour_format,
show_datetime),
fg=self.colors.TIME)
def team_players(self, team):
"""Prints the team players in a pretty format"""
players = sorted(team, key=lambda d: d['shirtNumber'])
click.secho("%-4s %-25s %-20s %-20s %-15s" %
("N.", "NAME", "POSITION", "NATIONALITY", "BIRTHDAY"),
bold=True,
fg=self.colors.MISC)
fmt = (u"{shirtNumber:<4} {name:<28} {position:<23} {nationality:<23}"
u" {dateOfBirth:<18}")
for player in players:
click.secho(fmt.format(**player), bold=True)
def standings(self, league_table, league):
""" Prints the league standings in a pretty way """
click.secho("%-6s %-30s %-10s %-10s %-10s" %
("POS", "CLUB", "PLAYED", "GOAL DIFF", "POINTS"))
for team in league_table["standings"][0]["table"]:
if team["goalDifference"] >= 0:
team["goalDifference"] = ' ' + str(team["goalDifference"])
# Define the upper and lower bounds for Champions League,
# Europa League and Relegation places.
# This is so we can highlight them appropriately.
cl_upper, cl_lower = LEAGUE_PROPERTIES[league]['cl']
el_upper, el_lower = LEAGUE_PROPERTIES[league]['el']
rl_upper, rl_lower = LEAGUE_PROPERTIES[league]['rl']
team['teamName'] = team['team']['name']
team_str = (u"{position:<7} {teamName:<33} {playedGames:<12}"
u" {goalDifference:<14} {points}").format(**team)
if cl_upper <= team["position"] <= cl_lower:
click.secho(team_str, bold=True, fg=self.colors.CL_POSITION)
elif el_upper <= team["position"] <= el_lower:
click.secho(team_str, fg=self.colors.EL_POSITION)
elif rl_upper <= team["position"] <= rl_lower:
click.secho(team_str, fg=self.colors.RL_POSITION)
else:
click.secho(team_str, fg=self.colors.POSITION)
def league_scores(self, total_data, time, show_datetime,
use_12_hour_format):
"""Prints the data in a pretty format"""
for match in total_data['matches']:
self.scores(self.parse_result(match), add_new_line=not show_datetime)
if show_datetime:
click.secho(' %s' % Stdout.utc_to_local(match["utcDate"],
use_12_hour_format,
show_datetime),
fg=self.colors.TIME)
click.echo()
def league_header(self, league):
"""Prints the league header"""
league_name = " {0} ".format(league)
click.secho("{:=^62}".format(league_name), fg=self.colors.MISC)
click.echo()
def scores(self, result, add_new_line=True):
"""Prints out the scores in a pretty format"""
if result.goalsHomeTeam > result.goalsAwayTeam:
homeColor, awayColor = (self.colors.WIN, self.colors.LOSE)
elif result.goalsHomeTeam < result.goalsAwayTeam:
homeColor, awayColor = (self.colors.LOSE, self.colors.WIN)
else:
homeColor = awayColor = self.colors.TIE
click.secho('%-25s %2s' % (result.homeTeam, result.goalsHomeTeam),
fg=homeColor, nl=False)
click.secho(" vs ", nl=False)
click.secho('%2s %s' % (result.goalsAwayTeam,
result.awayTeam.rjust(25)), fg=awayColor,
nl=add_new_line)
def parse_result(self, data):
"""Parses the results and returns a Result namedtuple"""
def valid_score(score):
return "" if score is None else score
return self.Result(
data["homeTeam"]["name"],
valid_score(data["score"]["fullTime"]["homeTeam"]),
data["awayTeam"]["name"],
valid_score(data["score"]["fullTime"]["awayTeam"]))
@staticmethod
def utc_to_local(time_str, use_12_hour_format, show_datetime=False):
"""Converts the API UTC time string to the local user time."""
if not (time_str.endswith(" UTC") or time_str.endswith("Z")):
return time_str
today_utc = datetime.datetime.utcnow()
utc_local_diff = today_utc - datetime.datetime.now()
if time_str.endswith(" UTC"):
time_str, _ = time_str.split(" UTC")
utc_time = datetime.datetime.strptime(time_str, '%I:%M %p')
utc_datetime = datetime.datetime(today_utc.year,
today_utc.month,
today_utc.day,
utc_time.hour,
utc_time.minute)
else:
utc_datetime = datetime.datetime.strptime(time_str,
'%Y-%m-%dT%H:%M:%SZ')
local_time = utc_datetime - utc_local_diff
if use_12_hour_format:
date_format = '%I:%M %p' if not show_datetime else '%a %d, %I:%M %p'
else:
date_format = '%H:%M' if not show_datetime else '%a %d, %H:%M'
return datetime.datetime.strftime(local_time, date_format)
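# Note on Stdout.utc_to_local above (illustrative, not in the original file):
# strings ending in ' UTC' are parsed as a bare clock time and anchored to
# today's date, ISO 'YYYY-MM-DDTHH:MM:SSZ' strings keep their own date, and
# both are shifted by the utcnow()/now() difference to approximate local time.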
class Csv(BaseWriter):
def generate_output(self, result):
if not self.output_filename:
for row in result:
click.echo(u','.join(unicode(item) for item in row))
else:
with open(self.output_filename, 'w') as csv_file:
writer = csv.writer(csv_file)
for row in result:
row = [unicode(s).encode('utf-8') for s in row]
writer.writerow(row)
def live_scores(self, live_scores):
"""Store output of live scores to a CSV file"""
headers = ['League', 'Home Team Name', 'Home Team Goals',
'Away Team Goals', 'Away Team Name']
result = [headers]
result.extend([game['league'], game['homeTeamName'],
game['goalsHomeTeam'], game['goalsAwayTeam'],
game['awayTeamName']] for game in live_scores['games'])
self.generate_output(result)
def team_scores(self, team_scores, time):
"""Store output of team scores to a CSV file"""
headers = ['Date', 'Home Team Name', 'Home Team Goals',
'Away Team Goals', 'Away Team Name']
result = [headers]
result.extend([score["utcDate"].split('T')[0],
score['homeTeam']['name'],
score['score']['fullTime']['homeTeam'],
score['score']['fullTime']['awayTeam'],
score['awayTeam']['name']]
for score in team_scores['matches']
if score['status'] == 'FINISHED')
self.generate_output(result)
def team_players(self, team):
"""Store output of team players to a CSV file"""
headers = ['Jersey Number', 'Name', 'Position', 'Nationality',
'Date of Birth']
result = [headers]
result.extend([player['shirtNumber'],
player['name'],
player['position'],
player['nationality'],
player['dateOfBirth']]
for player in team)
self.generate_output(result)
def standings(self, league_table, league):
"""Store output of league standings to a CSV file"""
headers = ['Position', 'Team Name', 'Games Played', 'Goal For',
'Goals Against', 'Goal Difference', 'Points']
result = [headers]
result.extend([team['position'],
team['team']['name'],
team['playedGames'],
team['goalsFor'],
team['goalsAgainst'],
team['goalDifference'],
team['points']]
for team in league_table['standings'][0]['table'])
self.generate_output(result)
def league_scores(self, total_data, time, show_upcoming, use_12_hour_format):
"""Store output of fixtures based on league and time to a CSV file"""
headers = ['League', 'Home Team Name', 'Home Team Goals',
'Away Team Goals', 'Away Team Name']
result = [headers]
league = total_data['competition']['name']
result.extend([league,
score['homeTeam']['name'],
score['score']['fullTime']['homeTeam'],
score['score']['fullTime']['awayTeam'],
score['awayTeam']['name']]
for score in total_data['matches'])
self.generate_output(result)
class Json(BaseWriter):
def generate_output(self, result):
if not self.output_filename:
click.echo(json.dumps(result,
indent=4,
separators=(',', ': '),
ensure_ascii=False))
else:
with io.open(self.output_filename, 'w', encoding='utf-8') as f:
data = json.dumps(result, f, indent=4,
separators=(',', ': '), ensure_ascii=False)
f.write(data)
def live_scores(self, live_scores):
"""Store output of live scores to a JSON file"""
self.generate_output(live_scores['games'])
def team_scores(self, team_scores, time):
"""Store output of team scores to a JSON file"""
data = []
for score in team_scores['matches']:
if score['status'] == 'FINISHED':
item = {'date': score["utcDate"].split('T')[0],
'homeTeamName': score['homeTeam']['name'],
'goalsHomeTeam': score['score']['fullTime']['homeTeam'],
'goalsAwayTeam': score['score']['fullTime']['awayTeam'],
'awayTeamName': score['awayTeam']['name']}
data.append(item)
self.generate_output({'team_scores': data})
def standings(self, league_table, league):
"""Store output of league standings to a JSON file"""
data = []
for team in league_table['standings'][0]['table']:
item = {'position': team['position'],
'teamName': team['team'],
'playedGames': team['playedGames'],
'goalsFor': team['goalsFor'],
'goalsAgainst': team['goalsAgainst'],
'goalDifference': team['goalDifference'],
'points': team['points']}
data.append(item)
self.generate_output({'standings': data})
def team_players(self, team):
"""Store output of team players to a JSON file"""
keys = 'shirtNumber name position nationality dateOfBirth'.split()
data = [{key: player[key] for key in keys} for player in team]
self.generate_output({'players': data})
def league_scores(self, total_data, time):
"""Store output of fixtures based on league and time to a JSON file"""
data = []
for league, score in self.supported_leagues(total_data):
item = {'league': league, 'homeTeamName': score['homeTeamName'],
'goalsHomeTeam': score['result']['goalsHomeTeam'],
'goalsAwayTeam': score['result']['goalsAwayTeam'],
'awayTeamName': score['awayTeamName']}
data.append(item)
self.generate_output({'league_scores': data, 'time': time})
| mit | 6,004,862,366,422,027,000 | 41.031609 | 94 | 0.521912 | false |
pidydx/grr | grr/lib/flows/general/transfer_test.py | 1 | 15431 | #!/usr/bin/env python
"""Test the file transfer mechanism."""
import hashlib
import os
import platform
import unittest
from grr.client import vfs
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import constants
from grr.lib import file_store
from grr.lib import flags
from grr.lib import flow
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.aff4_objects import aff4_grr
from grr.lib.flows.general import transfer
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import paths as rdf_paths
# pylint:mode=test
class ClientMock(object):
BUFFER_SIZE = 1024 * 1024
def __init__(self, mbr_data=None, client_id=None):
self.mbr = mbr_data
self.client_id = client_id
def ReadBuffer(self, args):
return_data = self.mbr[args.offset:args.offset + args.length]
return [
rdf_client.BufferReference(
data=return_data, offset=args.offset, length=len(return_data))
]
def UploadFile(self, args):
"""Just copy the file into the filestore."""
file_fd = vfs.VFSOpen(args.pathspec)
fs = file_store.FileUploadFileStore()
fd = fs.CreateFileStoreFile()
while True:
data = file_fd.read(self.BUFFER_SIZE)
if not data:
break
fd.write(data)
file_id = fd.Finalize()
return [rdf_client.UploadedFile(stat_entry=file_fd.Stat(), file_id=file_id)]
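# Note (illustrative, not part of the original test): ClientMock.UploadFile
# streams the file into FileUploadFileStore in BUFFER_SIZE chunks and returns
# an UploadedFile rdfvalue, which is what testUploadFiles below relies on when
# comparing the resulting AFF4 object against the on-disk test_img.dd.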
class TestTransfer(test_lib.FlowTestsBaseclass):
"""Test the transfer mechanism."""
maxDiff = 65 * 1024
mbr = ("123456789" * 1000)[:4096]
def setUp(self):
super(TestTransfer, self).setUp()
# Set suitable defaults for testing
self.old_window_size = transfer.GetFile.WINDOW_SIZE
self.old_chunk_size = transfer.GetFile.CHUNK_SIZE
transfer.GetFile.WINDOW_SIZE = 10
transfer.GetFile.CHUNK_SIZE = 600 * 1024
def tearDown(self):
super(TestTransfer, self).tearDown()
transfer.GetFile.WINDOW_SIZE = self.old_window_size
transfer.GetFile.CHUNK_SIZE = self.old_chunk_size
def testUploadFiles(self):
"""Test the upload file flows."""
with test_lib.ConfigOverrider({
"FileUploadFileStore.root_dir": self.temp_dir
}):
test_data_path = os.path.join(self.base_path, "test_img.dd")
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=test_data_path)
session_id = None
for session_id in test_lib.TestFlowHelper(
"MultiUploadFile",
ClientMock(client_id=self.client_id),
token=self.token,
pathspecs=[pathspec],
client_id=self.client_id):
pass
results = flow.GRRFlow.ResultCollectionForFID(
session_id, token=self.token)
self.assertEqual(len(results), 1)
for stat_entry in results:
# Make sure the AFF4 file is the same as the original test file we tried
# to upload.
fd1 = aff4.FACTORY.Open(
stat_entry.AFF4Path(self.client_id), token=self.token)
fd2 = open(test_data_path, "rb")
fd2.seek(0, 2)
self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
self.CompareFDs(fd1, fd2)
def testGetMBR(self):
"""Test that the GetMBR flow works."""
for _ in test_lib.TestFlowHelper(
"GetMBR",
ClientMock(self.mbr),
token=self.token,
client_id=self.client_id):
pass
fd = aff4.FACTORY.Open(self.client_id.Add("mbr"), token=self.token)
self.assertEqual(fd.Read(4096), self.mbr)
def _RunAndCheck(self, chunk_size, download_length):
with utils.Stubber(constants, "CLIENT_MAX_BUFFER_SIZE", chunk_size):
for _ in test_lib.TestFlowHelper(
"GetMBR",
ClientMock(self.mbr),
token=self.token,
client_id=self.client_id,
length=download_length):
pass
fd = aff4.FACTORY.Open(self.client_id.Add("mbr"), token=self.token)
self.assertEqual(fd.Read(download_length), self.mbr[:download_length])
aff4.FACTORY.Delete(fd.urn, token=self.token)
def testGetMBRChunked(self):
chunk_size = 100
download_length = 15 * chunk_size
self._RunAndCheck(chunk_size, download_length)
# Not a multiple of the chunk size.
download_length = 15 * chunk_size + chunk_size / 2
self._RunAndCheck(chunk_size, download_length)
def testGetFile(self):
"""Test that the GetFile flow works."""
client_mock = action_mocks.GetFileClientMock()
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "test_img.dd"))
for _ in test_lib.TestFlowHelper(
"GetFile",
client_mock,
token=self.token,
client_id=self.client_id,
pathspec=pathspec):
pass
# Fix path for Windows testing.
pathspec.path = pathspec.path.replace("\\", "/")
# Test the AFF4 file that was created.
urn = pathspec.AFF4Path(self.client_id)
fd1 = aff4.FACTORY.Open(urn, token=self.token)
fd2 = open(pathspec.path, "rb")
fd2.seek(0, 2)
self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
self.CompareFDs(fd1, fd2)
def testGetFilePathCorrection(self):
"""Tests that the pathspec returned is used for the aff4path."""
client_mock = action_mocks.GetFileClientMock()
# Deliberately using the wrong casing.
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "TEST_IMG.dd"))
for s in test_lib.TestFlowHelper(
"GetFile",
client_mock,
token=self.token,
client_id=self.client_id,
pathspec=pathspec):
session_id = s
results = flow.GRRFlow.ResultCollectionForFID(session_id, token=self.token)
self.assertEqual(len(results), 1)
res_pathspec = results[0].pathspec
# Fix path for Windows testing.
pathspec.path = pathspec.path.replace("\\", "/")
# Test the AFF4 file that was created.
urn = res_pathspec.AFF4Path(self.client_id)
fd1 = aff4.FACTORY.Open(urn, token=self.token)
fd2 = open(res_pathspec.path, "rb")
fd2.seek(0, 2)
self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
self.CompareFDs(fd1, fd2)
@unittest.skipUnless(platform.system() == "Linux",
"/proc only exists on Linux")
def testMultiGetFileOfSpecialFiles(self):
"""Test that special /proc/ files are handled correctly.
/proc/ files have the property that they are non seekable from their end
(i.e. seeking them relative to the end is not supported). They also return
an st_size of 0. For example:
$ stat /proc/self/maps
File: '/proc/self/maps'
Size: 0 Blocks: 0 IO Block: 1024 regular empty file
$ head /proc/self/maps
00400000-00409000 r-xp 00000000 fc:01 9180740 /usr/bin/head
00608000-00609000 r--p 00008000 fc:01 9180740 /usr/bin/head
...
When we try to use the MultiGetFile flow, it deduplicates the files and
since it thinks the file has a zero size, the flow will not download the
file, and instead copy the zero size file into it.
"""
client_mock = action_mocks.MultiGetFileClientMock()
    # Create a zero-sized file.
zero_sized_filename = os.path.join(self.temp_dir, "zero_size")
with open(zero_sized_filename, "wb") as fd:
pass
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=zero_sized_filename)
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
file_size="1MiB",
client_id=self.client_id,
pathspecs=[pathspec]):
pass
# Now if we try to fetch a real /proc/ filename this will fail because the
# filestore already contains the zero length file
# aff4:/files/nsrl/da39a3ee5e6b4b0d3255bfef95601890afd80709.
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path="/proc/self/environ")
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
file_size=1024 * 1024,
client_id=self.client_id,
pathspecs=[pathspec]):
pass
data = open(pathspec.last.path, "rb").read()
# Test the AFF4 file that was created - it should be empty since by default
# we judge the file size based on its stat.st_size.
urn = pathspec.AFF4Path(self.client_id)
fd = aff4.FACTORY.Open(urn, token=self.token)
self.assertEqual(fd.size, len(data))
self.assertMultiLineEqual(fd.read(len(data)), data)
def CompareFDs(self, fd1, fd2):
# Seek the files to the end to make sure they are the same size.
fd2.seek(0, 2)
fd1.seek(0, 2)
self.assertEqual(fd2.tell(), fd1.tell())
ranges = [
# Start of file
(0, 100),
# Straddle the first chunk
(16 * 1024 - 100, 300),
# Read past end of file
(fd2.tell() - 100, 300),
# Zero length reads
(100, 0),
]
for offset, length in ranges:
fd1.Seek(offset)
data1 = fd1.Read(length)
fd2.seek(offset)
data2 = fd2.read(length)
self.assertEqual(data1, data2)
def testMultiGetFile(self):
"""Test MultiGetFile."""
client_mock = action_mocks.MultiGetFileClientMock()
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "test_img.dd"))
args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
with test_lib.Instrument(transfer.MultiGetFile,
"StoreStat") as storestat_instrument:
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
client_id=self.client_id,
args=args):
pass
# We should only have called StoreStat once because the two paths
# requested were identical.
self.assertEqual(len(storestat_instrument.args), 1)
# Fix path for Windows testing.
pathspec.path = pathspec.path.replace("\\", "/")
# Test the AFF4 file that was created.
urn = pathspec.AFF4Path(self.client_id)
fd1 = aff4.FACTORY.Open(urn, token=self.token)
fd2 = open(pathspec.path, "rb")
fd2.seek(0, 2)
self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
self.CompareFDs(fd1, fd2)
def testMultiGetFileMultiFiles(self):
"""Test MultiGetFile downloading many files at once."""
client_mock = action_mocks.MultiGetFileClientMock()
pathspecs = []
# Make 30 files to download.
for i in xrange(30):
path = os.path.join(self.temp_dir, "test_%s.txt" % i)
with open(path, "wb") as fd:
fd.write("Hello")
pathspecs.append(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=path))
args = transfer.MultiGetFileArgs(
pathspecs=pathspecs, maximum_pending_files=10)
for session_id in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
client_id=self.client_id,
args=args):
# Check up on the internal flow state.
flow_obj = aff4.FACTORY.Open(session_id, mode="r", token=self.token)
flow_state = flow_obj.state
# All the pathspecs should be in this list.
self.assertEqual(len(flow_state.indexed_pathspecs), 30)
# At any one time, there should not be more than 10 files or hashes
# pending.
self.assertLessEqual(len(flow_state.pending_files), 10)
self.assertLessEqual(len(flow_state.pending_hashes), 10)
# When we finish there should be no pathspecs stored in the flow state.
for flow_pathspec in flow_state.indexed_pathspecs:
self.assertIsNone(flow_pathspec)
for flow_request_data in flow_state.request_data_list:
self.assertIsNone(flow_request_data)
# Now open each file and make sure the data is there.
for pathspec in pathspecs:
urn = pathspec.AFF4Path(self.client_id)
fd = aff4.FACTORY.Open(urn, token=self.token)
self.assertEqual("Hello", fd.Read(100000))
def testMultiGetFileDeduplication(self):
client_mock = action_mocks.MultiGetFileClientMock()
pathspecs = []
# Make 10 files to download.
for i in xrange(10):
path = os.path.join(self.temp_dir, "test_%s.txt" % i)
with open(path, "wb") as fd:
fd.write("Hello")
pathspecs.append(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=path))
# All those files are the same so the individual chunks should
# only be downloaded once. By forcing maximum_pending_files=1,
# there should only be a single TransferBuffer call.
args = transfer.MultiGetFileArgs(
pathspecs=pathspecs, maximum_pending_files=1)
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
client_id=self.client_id,
args=args):
pass
self.assertEqual(client_mock.action_counts["TransferBuffer"], 1)
def testMultiGetFileSetsFileHashAttributeWhenMultipleChunksDownloaded(self):
client_mock = action_mocks.MultiGetFileClientMock()
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "test_img.dd"))
args = transfer.MultiGetFileArgs(pathspecs=[pathspec])
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
client_id=self.client_id,
args=args):
pass
# Fix path for Windows testing.
pathspec.path = pathspec.path.replace("\\", "/")
# Test the AFF4 file that was created.
urn = pathspec.AFF4Path(self.client_id)
fd = aff4.FACTORY.Open(urn, token=self.token)
fd_hash = fd.Get(fd.Schema.HASH)
self.assertTrue(fd_hash)
h = hashlib.sha256()
with open(os.path.join(self.base_path, "test_img.dd"), "rb") as model_fd:
h.update(model_fd.read())
self.assertEqual(fd_hash.sha256, h.digest())
def testMultiGetFileSizeLimit(self):
client_mock = action_mocks.MultiGetFileClientMock()
image_path = os.path.join(self.base_path, "test_img.dd")
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=image_path)
# Read a bit more than one chunk (600 * 1024).
expected_size = 750 * 1024
args = transfer.MultiGetFileArgs(
pathspecs=[pathspec], file_size=expected_size)
for _ in test_lib.TestFlowHelper(
"MultiGetFile",
client_mock,
token=self.token,
client_id=self.client_id,
args=args):
pass
urn = pathspec.AFF4Path(self.client_id)
blobimage = aff4.FACTORY.Open(urn, token=self.token)
# Make sure a VFSBlobImage got written.
self.assertTrue(isinstance(blobimage, aff4_grr.VFSBlobImage))
self.assertEqual(len(blobimage), expected_size)
data = blobimage.read(100 * expected_size)
self.assertEqual(len(data), expected_size)
expected_data = open(image_path, "rb").read(expected_size)
self.assertEqual(data, expected_data)
hash_obj = blobimage.Get(blobimage.Schema.HASH)
d = hashlib.sha1()
d.update(expected_data)
expected_hash = d.hexdigest()
self.assertEqual(hash_obj.sha1, expected_hash)
def main(argv):
# Run the full test suite
test_lib.GrrTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| apache-2.0 | -5,449,065,455,727,448,000 | 31.014523 | 80 | 0.654656 | false |
yudaykiran/openebs | e2e/ansible/plugins/callback/openebs.py | 1 | 2743 | # Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.callback.default import (
CallbackModule as CallbackModule_default
)
from ansible import constants as C
"""Implementation of Custom Class that inherits the 'default' stdout_callback
plugin and overrides the v2_runner_retry api for displaying the 'FAILED -
RETRYING' only during verbose mode."""
class CallbackModule(CallbackModule_default):
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'openebs'
CALLBACK_NEEDS_WHITELIST = False
def v2_runner_retry(self, result):
task_name = result.task_name or result._task
final_result = result._result['retries'] - result._result['attempts']
msg = "FAILED - RETRYING: %s (%d retries left)." % (task_name,
final_result)
display_verbosity = self._display.verbosity
required_result = '_ansible_verbose_always'
if (display_verbosity > 2 or required_result in result._result):
if required_result not in result._result:
msg += "Result was: %s" % self._dump_results(result._result)
self._display.v('%s' % (msg))
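    # Illustrative example (not part of the original plugin): at verbosity 0 the
    # "FAILED - RETRYING" message is suppressed; with -vvv (or when the result
    # carries _ansible_verbose_always) it is emitted via self._display.v(), and
    # in the -vvv case the dumped task result is appended to the message.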
def v2_runner_on_skipped(self, result):
my_result = result._result
required_result = '_ansible_verbose_always'
if C.DISPLAY_SKIPPED_HOSTS:
if (self._display.verbosity > 0 or required_result in my_result):
if required_result not in my_result:
dumped_results = self._dump_results(my_result)
msg = "skipping: [%s] => %s" % (result._host.get_name(),
dumped_results)
self._display.display(msg, color=C.COLOR_SKIP)
else:
self._display.display("skipping task..", color=C.COLOR_SKIP)
def v2_runner_item_on_skipped(self, result):
my_result = result._result
required_result = '_ansible_verbose_always'
if C.DISPLAY_SKIPPED_HOSTS:
if (self._display.verbosity > 0 or required_result in my_result):
if required_result not in my_result:
required_item = self._get_item(my_result)
dumped_result = self._dump_results(my_result)
result_host = result._host.get_name()
msg = "skipping: [%s] => (item=%s) => %s" % (result_host,
required_item,
dumped_result)
self._display.display(msg, color=C.COLOR_SKIP)
| apache-2.0 | 2,254,038,138,875,758,600 | 42.539683 | 79 | 0.56471 | false |
TomAugspurger/pandas | pandas/core/computation/ops.py | 1 | 15978 | """
Operator classes for eval.
"""
from datetime import datetime
from distutils.version import LooseVersion
from functools import partial
import operator
from typing import Callable, Iterable, Optional, Union
import numpy as np
from pandas._libs.tslibs import Timestamp
from pandas.core.dtypes.common import is_list_like, is_scalar
import pandas.core.common as com
from pandas.core.computation.common import _ensure_decoded, result_type_many
from pandas.core.computation.scope import _DEFAULT_GLOBALS
from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded
_reductions = ("sum", "prod")
_unary_math_ops = (
"sin",
"cos",
"exp",
"log",
"expm1",
"log1p",
"sqrt",
"sinh",
"cosh",
"tanh",
"arcsin",
"arccos",
"arctan",
"arccosh",
"arcsinh",
"arctanh",
"abs",
"log10",
"floor",
"ceil",
)
_binary_math_ops = ("arctan2",)
_mathops = _unary_math_ops + _binary_math_ops
_LOCAL_TAG = "__pd_eval_local_"
class UndefinedVariableError(NameError):
"""
NameError subclass for local variables.
"""
def __init__(self, name: str, is_local: Optional[bool] = None):
base_msg = f"{repr(name)} is not defined"
if is_local:
msg = f"local variable {base_msg}"
else:
msg = f"name {base_msg}"
super().__init__(msg)
class Term:
def __new__(cls, name, env, side=None, encoding=None):
klass = Constant if not isinstance(name, str) else cls
supr_new = super(Term, klass).__new__
return supr_new(klass)
is_local: bool
def __init__(self, name, env, side=None, encoding=None):
# name is a str for Term, but may be something else for subclasses
self._name = name
self.env = env
self.side = side
tname = str(name)
self.is_local = tname.startswith(_LOCAL_TAG) or tname in _DEFAULT_GLOBALS
self._value = self._resolve_name()
self.encoding = encoding
@property
def local_name(self) -> str:
return self.name.replace(_LOCAL_TAG, "")
def __repr__(self) -> str:
return pprint_thing(self.name)
def __call__(self, *args, **kwargs):
return self.value
def evaluate(self, *args, **kwargs):
return self
def _resolve_name(self):
res = self.env.resolve(self.local_name, is_local=self.is_local)
self.update(res)
if hasattr(res, "ndim") and res.ndim > 2:
raise NotImplementedError(
"N-dimensional objects, where N > 2, are not supported with eval"
)
return res
def update(self, value):
"""
search order for local (i.e., @variable) variables:
scope, key_variable
[('locals', 'local_name'),
('globals', 'local_name'),
('locals', 'key'),
('globals', 'key')]
"""
key = self.name
# if it's a variable name (otherwise a constant)
if isinstance(key, str):
self.env.swapkey(self.local_name, key, new_value=value)
self.value = value
@property
def is_scalar(self) -> bool:
return is_scalar(self._value)
@property
def type(self):
try:
# potentially very slow for large, mixed dtype frames
return self._value.values.dtype
except AttributeError:
try:
# ndarray
return self._value.dtype
except AttributeError:
# scalar
return type(self._value)
return_type = type
@property
def raw(self) -> str:
return f"{type(self).__name__}(name={repr(self.name)}, type={self.type})"
@property
def is_datetime(self) -> bool:
try:
t = self.type.type
except AttributeError:
t = self.type
return issubclass(t, (datetime, np.datetime64))
@property
def value(self):
return self._value
@value.setter
def value(self, new_value):
self._value = new_value
@property
def name(self):
return self._name
@property
def ndim(self) -> int:
return self._value.ndim
class Constant(Term):
def __init__(self, value, env, side=None, encoding=None):
super().__init__(value, env, side=side, encoding=encoding)
def _resolve_name(self):
return self._name
@property
def name(self):
return self.value
def __repr__(self) -> str:
# in python 2 str() of float
# can truncate shorter than repr()
return repr(self.name)
_bool_op_map = {"not": "~", "and": "&", "or": "|"}
class Op:
"""
Hold an operator of arbitrary arity.
"""
op: str
def __init__(self, op: str, operands: Iterable[Union[Term, "Op"]], encoding=None):
self.op = _bool_op_map.get(op, op)
self.operands = operands
self.encoding = encoding
def __iter__(self):
return iter(self.operands)
def __repr__(self) -> str:
"""
Print a generic n-ary operator and its operands using infix notation.
"""
# recurse over the operands
parened = (f"({pprint_thing(opr)})" for opr in self.operands)
return pprint_thing(f" {self.op} ".join(parened))
@property
def return_type(self):
# clobber types to bool if the op is a boolean operator
if self.op in (_cmp_ops_syms + _bool_ops_syms):
return np.bool_
return result_type_many(*(term.type for term in com.flatten(self)))
@property
def has_invalid_return_type(self) -> bool:
types = self.operand_types
obj_dtype_set = frozenset([np.dtype("object")])
return self.return_type == object and types - obj_dtype_set
@property
def operand_types(self):
return frozenset(term.type for term in com.flatten(self))
@property
def is_scalar(self) -> bool:
return all(operand.is_scalar for operand in self.operands)
@property
def is_datetime(self) -> bool:
try:
t = self.return_type.type
except AttributeError:
t = self.return_type
return issubclass(t, (datetime, np.datetime64))
def _in(x, y):
"""
Compute the vectorized membership of ``x in y`` if possible, otherwise
use Python.
"""
try:
return x.isin(y)
except AttributeError:
if is_list_like(x):
try:
return y.isin(x)
except AttributeError:
pass
return x in y
def _not_in(x, y):
"""
Compute the vectorized membership of ``x not in y`` if possible,
otherwise use Python.
"""
try:
return ~x.isin(y)
except AttributeError:
if is_list_like(x):
try:
return ~y.isin(x)
except AttributeError:
pass
return x not in y
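# Illustrative examples (not part of the original module):
#     _in(pd.Series([1, 2, 3]), [2, 3])  # -> boolean Series via Series.isin
#     _in(2, [1, 2, 3])                  # -> True, falls back to Python's ``in``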
_cmp_ops_syms = (">", "<", ">=", "<=", "==", "!=", "in", "not in")
_cmp_ops_funcs = (
operator.gt,
operator.lt,
operator.ge,
operator.le,
operator.eq,
operator.ne,
_in,
_not_in,
)
_cmp_ops_dict = dict(zip(_cmp_ops_syms, _cmp_ops_funcs))
_bool_ops_syms = ("&", "|", "and", "or")
_bool_ops_funcs = (operator.and_, operator.or_, operator.and_, operator.or_)
_bool_ops_dict = dict(zip(_bool_ops_syms, _bool_ops_funcs))
_arith_ops_syms = ("+", "-", "*", "/", "**", "//", "%")
_arith_ops_funcs = (
operator.add,
operator.sub,
operator.mul,
operator.truediv,
operator.pow,
operator.floordiv,
operator.mod,
)
_arith_ops_dict = dict(zip(_arith_ops_syms, _arith_ops_funcs))
_special_case_arith_ops_syms = ("**", "//", "%")
_special_case_arith_ops_funcs = (operator.pow, operator.floordiv, operator.mod)
_special_case_arith_ops_dict = dict(
zip(_special_case_arith_ops_syms, _special_case_arith_ops_funcs)
)
_binary_ops_dict = {}
for d in (_cmp_ops_dict, _bool_ops_dict, _arith_ops_dict):
_binary_ops_dict.update(d)
def _cast_inplace(terms, acceptable_dtypes, dtype):
"""
Cast an expression inplace.
Parameters
----------
terms : Op
The expression that should cast.
acceptable_dtypes : list of acceptable numpy.dtype
Will not cast if term's dtype in this list.
dtype : str or numpy.dtype
The dtype to cast to.
"""
dt = np.dtype(dtype)
for term in terms:
if term.type in acceptable_dtypes:
continue
try:
new_value = term.value.astype(dt)
except AttributeError:
new_value = dt.type(term.value)
term.update(new_value)
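# Illustrative example (not in the original module): Div below calls
# _cast_inplace(com.flatten(self), [np.float32, np.float_], np.float_) so that
# integer operands of '/' are upcast to float64 while float32 terms are left
# untouched.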
def is_term(obj) -> bool:
return isinstance(obj, Term)
class BinOp(Op):
"""
Hold a binary operator and its operands.
Parameters
----------
op : str
lhs : Term or Op
rhs : Term or Op
"""
def __init__(self, op: str, lhs, rhs):
super().__init__(op, (lhs, rhs))
self.lhs = lhs
self.rhs = rhs
self._disallow_scalar_only_bool_ops()
self.convert_values()
try:
self.func = _binary_ops_dict[op]
except KeyError as err:
# has to be made a list for python3
keys = list(_binary_ops_dict.keys())
raise ValueError(
f"Invalid binary operator {repr(op)}, valid operators are {keys}"
) from err
def __call__(self, env):
"""
Recursively evaluate an expression in Python space.
Parameters
----------
env : Scope
Returns
-------
object
The result of an evaluated expression.
"""
# recurse over the left/right nodes
left = self.lhs(env)
right = self.rhs(env)
return self.func(left, right)
def evaluate(self, env, engine: str, parser, term_type, eval_in_python):
"""
Evaluate a binary operation *before* being passed to the engine.
Parameters
----------
env : Scope
engine : str
parser : str
term_type : type
eval_in_python : list
Returns
-------
term_type
The "pre-evaluated" expression as an instance of ``term_type``
"""
if engine == "python":
res = self(env)
else:
# recurse over the left/right nodes
left = self.lhs.evaluate(
env,
engine=engine,
parser=parser,
term_type=term_type,
eval_in_python=eval_in_python,
)
right = self.rhs.evaluate(
env,
engine=engine,
parser=parser,
term_type=term_type,
eval_in_python=eval_in_python,
)
# base cases
if self.op in eval_in_python:
res = self.func(left.value, right.value)
else:
from pandas.core.computation.eval import eval
res = eval(self, local_dict=env, engine=engine, parser=parser)
name = env.add_tmp(res)
return term_type(name, env=env)
def convert_values(self):
"""
Convert datetimes to a comparable value in an expression.
"""
def stringify(value):
encoder: Callable
if self.encoding is not None:
encoder = partial(pprint_thing_encoded, encoding=self.encoding)
else:
encoder = pprint_thing
return encoder(value)
lhs, rhs = self.lhs, self.rhs
if is_term(lhs) and lhs.is_datetime and is_term(rhs) and rhs.is_scalar:
v = rhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert("UTC")
self.rhs.update(v)
if is_term(rhs) and rhs.is_datetime and is_term(lhs) and lhs.is_scalar:
v = lhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert("UTC")
self.lhs.update(v)
def _disallow_scalar_only_bool_ops(self):
if (
(self.lhs.is_scalar or self.rhs.is_scalar)
and self.op in _bool_ops_dict
and (
not (
issubclass(self.rhs.return_type, (bool, np.bool_))
and issubclass(self.lhs.return_type, (bool, np.bool_))
)
)
):
raise NotImplementedError("cannot evaluate scalar only bool ops")
def isnumeric(dtype) -> bool:
return issubclass(np.dtype(dtype).type, np.number)
class Div(BinOp):
"""
Div operator to special case casting.
Parameters
----------
lhs, rhs : Term or Op
The Terms or Ops in the ``/`` expression.
"""
def __init__(self, lhs, rhs):
super().__init__("/", lhs, rhs)
if not isnumeric(lhs.return_type) or not isnumeric(rhs.return_type):
raise TypeError(
f"unsupported operand type(s) for {self.op}: "
f"'{lhs.return_type}' and '{rhs.return_type}'"
)
# do not upcast float32s to float64 un-necessarily
acceptable_dtypes = [np.float32, np.float_]
_cast_inplace(com.flatten(self), acceptable_dtypes, np.float_)
_unary_ops_syms = ("+", "-", "~", "not")
_unary_ops_funcs = (operator.pos, operator.neg, operator.invert, operator.invert)
_unary_ops_dict = dict(zip(_unary_ops_syms, _unary_ops_funcs))
class UnaryOp(Op):
"""
Hold a unary operator and its operands.
Parameters
----------
op : str
The token used to represent the operator.
operand : Term or Op
The Term or Op operand to the operator.
Raises
------
ValueError
* If no function associated with the passed operator token is found.
"""
def __init__(self, op: str, operand):
super().__init__(op, (operand,))
self.operand = operand
try:
self.func = _unary_ops_dict[op]
except KeyError as err:
raise ValueError(
f"Invalid unary operator {repr(op)}, "
f"valid operators are {_unary_ops_syms}"
) from err
def __call__(self, env):
operand = self.operand(env)
return self.func(operand)
def __repr__(self) -> str:
return pprint_thing(f"{self.op}({self.operand})")
@property
def return_type(self) -> np.dtype:
operand = self.operand
if operand.return_type == np.dtype("bool"):
return np.dtype("bool")
if isinstance(operand, Op) and (
operand.op in _cmp_ops_dict or operand.op in _bool_ops_dict
):
return np.dtype("bool")
return np.dtype("int")
class MathCall(Op):
def __init__(self, func, args):
super().__init__(func.name, args)
self.func = func
def __call__(self, env):
operands = [op(env) for op in self.operands]
with np.errstate(all="ignore"):
return self.func.func(*operands)
def __repr__(self) -> str:
operands = map(str, self.operands)
return pprint_thing(f"{self.op}({','.join(operands)})")
class FuncNode:
def __init__(self, name: str):
from pandas.core.computation.check import _NUMEXPR_INSTALLED, _NUMEXPR_VERSION
if name not in _mathops or (
_NUMEXPR_INSTALLED
and _NUMEXPR_VERSION < LooseVersion("2.6.9")
and name in ("floor", "ceil")
):
raise ValueError(f'"{name}" is not a supported function')
self.name = name
self.func = getattr(np, name)
def __call__(self, *args):
return MathCall(self, args)
| bsd-3-clause | -7,085,088,927,918,930,000 | 25.279605 | 86 | 0.550194 | false |
AVSystem/Anjay | tests/integration/framework/asserts.py | 1 | 10170 | # -*- coding: utf-8 -*-
#
# Copyright 2017-2021 AVSystem <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from typing import Optional
from .lwm2m.messages import *
from .test_utils import DEMO_ENDPOINT_NAME
from framework.lwm2m.coap.transport import Transport
class Lwm2mAsserts:
def assertLwm2mPathValid(self, path):
"""
Convenience assert that checks if a byte-string PATH is in the form
/0/1/2. The PATH may contain 1-3 16bit integer segments.
"""
self.assertEqual('/', path[0],
('LwM2M path %r does not start with /' % (path,)))
segments = path[1:].split('/')
if len(segments) > 3:
self.fail('LwM2M path too long (expected at most 3 segments): %r' % (path,))
for segment in segments:
try:
self.assertTrue(0 <= int(segment) <= 2 ** 16 - 1,
('LwM2M path segment not in range [0, 65535] '
'in path %r' % (path,)))
except ValueError:
                self.fail('segment %r is not an integer in link: %r' % (segment, path))
def assertLinkListValid(self, link_list):
"""
Convenience assert that checks if a byte-string LINK_LIST is in a CoRE
Link format https://tools.ietf.org/html/rfc6690 and all links are
valid LwM2M paths.
"""
if link_list == '':
self.fail('empty link list')
for obj in link_list.split(','):
path, *query = obj.split(';')
self.assertTrue((len(path) >= len('</0>')
and path[0] == '<'
and path[-1] == '>'),
'invalid link: %r in %r' % (obj, link_list))
self.assertLwm2mPathValid(path[1:-1])
# TODO: check query strings
def assertMsgEqual(self, expected, actual, msg=None):
"""
Convenience assert that checks if ACTUAL Lwm2mMsg object matches
EXPECTED one.
ACTUAL and EXPECTED may have their MSG_ID, TOKEN, OPTIONS or CONTENT
fields set to lwm2m.messages.ANY, in which case the value will not
be checked.
"""
msg_prefix = msg + ': ' if msg else ''
try:
if actual.version is not None:
self.assertEqual(expected.version, actual.version,
msg_prefix + 'unexpected CoAP version')
if actual.type is not None:
self.assertEqual(expected.type, actual.type,
msg_prefix + 'unexpected CoAP type')
self.assertEqual(expected.code, actual.code,
msg_prefix + 'unexpected CoAP code')
if expected.msg_id is not ANY and actual.msg_id is not ANY and actual.msg_id is not None:
self.assertEqual(expected.msg_id, actual.msg_id,
msg_prefix + 'unexpected CoAP message ID')
if expected.token is not ANY and actual.token is not ANY:
self.assertEqual(expected.token, actual.token,
msg_prefix + 'unexpected CoAP token')
if expected.options is not ANY and actual.options is not ANY:
self.assertEqual(expected.options, actual.options,
msg_prefix + 'unexpected CoAP option list')
if expected.content is not ANY and actual.content is not ANY:
self.assertEqual(expected.content, actual.content,
msg_prefix + 'unexpected CoAP content')
except AssertionError as e:
e.args = (e.args[0] + ('\n\n*** Expected ***\n%s\n*** Actual ***\n%s\n'
% (str(expected), str(actual))),) + e.args[1:]
raise
DEFAULT_REGISTER_ENDPOINT = '/rd/demo'
@staticmethod
def _expected_register_message(version, endpoint, lifetime, binding, lwm2m11_queue_mode):
# Note: the specific order of Uri-Query options does not matter, but
# our packet equality comparator does not distinguish betwen "ordered"
# and "unordered" options, so we expect a specific order of these
# query-strings. dict() does not guarantee the order of items until
# 3.7, so because we want to still work on 3.5, an explicitly ordered
# list is used instead.
query = [
'lwm2m=%s' % (version,),
'ep=%s' % (endpoint,),
'lt=%s' % (lifetime if lifetime is not None else 86400,)
]
if binding is not None:
query.append('b=%s' % (binding,))
if lwm2m11_queue_mode:
query.append('Q')
return Lwm2mRegister('/rd?' + '&'.join(query))
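    # Illustrative example (not in the original source): for version='1.0',
    # endpoint='demo', lifetime=None, binding='U', lwm2m11_queue_mode=False the
    # expected request path is '/rd?lwm2m=1.0&ep=demo&lt=86400&b=U'.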
def assertDemoRegisters(self,
server=None,
version='1.0',
location=DEFAULT_REGISTER_ENDPOINT,
endpoint=DEMO_ENDPOINT_NAME,
lifetime=None,
timeout_s=2,
respond=True,
binding=None,
lwm2m11_queue_mode=False,
reject=False):
# passing a float instead of an integer results in a disaster
# (serializes as e.g. lt=4.0 instead of lt=4), which makes the
# assertion fail
if lifetime is not None:
self.assertIsInstance(lifetime, int, msg="lifetime MUST be an integer")
serv = server or self.serv
pkt = serv.recv(timeout_s=timeout_s)
self.assertMsgEqual(self._expected_register_message(version, endpoint, lifetime, binding, lwm2m11_queue_mode), pkt)
self.assertIsNotNone(pkt.content)
self.assertGreater(len(pkt.content), 0)
if respond:
if reject:
serv.send(Lwm2mErrorResponse(code=coap.Code.RES_UNAUTHORIZED, msg_id=pkt.msg_id, token=pkt.token))
else:
serv.send(Lwm2mCreated(location=location, msg_id=pkt.msg_id, token=pkt.token))
return pkt
def assertDemoUpdatesRegistration(self,
server=None,
location=DEFAULT_REGISTER_ENDPOINT,
lifetime: Optional[int] = None,
binding: Optional[str] = None,
sms_number: Optional[str] = None,
content: bytes = b'',
timeout_s: float = 1,
respond: bool = True):
serv = server or self.serv
query_args = (([('lt', lifetime)] if lifetime is not None else [])
+ ([('sms', sms_number)] if sms_number is not None else [])
+ ([('b', binding)] if binding is not None else []))
query_string = '&'.join('%s=%s' % tpl for tpl in query_args)
path = location
if query_string:
path += '?' + query_string
pkt = serv.recv(timeout_s=timeout_s)
self.assertMsgEqual(Lwm2mUpdate(path, content=content), pkt)
if respond:
serv.send(Lwm2mChanged.matching(pkt)())
return pkt
def assertDemoDeregisters(self, server=None, path=DEFAULT_REGISTER_ENDPOINT, timeout_s=2, reset=True):
serv = server or self.serv
pkt = serv.recv(timeout_s=timeout_s)
self.assertMsgEqual(Lwm2mDeregister(path), pkt)
serv.send(Lwm2mDeleted(msg_id=pkt.msg_id, token=pkt.token))
if reset:
serv.reset()
def assertDemoRequestsBootstrap(self, uri_path='', uri_query=None, respond_with_error_code=None,
endpoint=DEMO_ENDPOINT_NAME, timeout_s=-1, preferred_content_format=None):
pkt = self.bootstrap_server.recv(timeout_s=timeout_s)
self.assertMsgEqual(Lwm2mRequestBootstrap(endpoint_name=endpoint,
preferred_content_format=preferred_content_format,
uri_path=uri_path,
uri_query=uri_query), pkt)
if respond_with_error_code is None:
self.bootstrap_server.send(Lwm2mChanged.matching(pkt)())
else:
self.bootstrap_server.send(Lwm2mErrorResponse.matching(
pkt)(code=respond_with_error_code))
def assertDtlsReconnect(self, server=None, timeout_s=1):
serv = server or self.serv
with self.assertRaises(RuntimeError) as raised:
serv.recv(timeout_s=timeout_s)
self.assertIn('0x6780', raised.exception.args[0]) # -0x6780 == MBEDTLS_ERR_SSL_CLIENT_RECONNECT
def assertPktIsDtlsClientHello(self, pkt, seq_number=ANY):
if seq_number is not ANY and seq_number >= 2 ** 48:
raise RuntimeError(
"Sorry, encoding of sequence number greater than 2**48 - 1 is not supported")
allowed_headers = set()
for version in (b'\xfe\xfd', b'\xfe\xff'): # DTLS v1.0 or DTLS v1.2
header = b'\x16' # Content Type: Handshake
header += version
header += b'\x00\x00' # Epoch: 0
if seq_number is not ANY:
# Sequence number is 48bit in length.
header += seq_number.to_bytes(48 // 8, byteorder='big')
allowed_headers.add(header)
self.assertIn(pkt[:len(next(iter(allowed_headers)))], allowed_headers)
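        # For reference (derived from the construction above): with seq_number=0
        # and DTLS v1.2 the accepted record header is the 11-byte prefix
        # 16 fe fd 00 00 00 00 00 00 00 00 (content type, version, epoch,
        # 48-bit sequence number).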
| apache-2.0 | -5,839,195,946,829,177,000 | 44.2 | 123 | 0.553392 | false |
timpel/stanford-algs | quicksort/quicksort.py | 1 | 1040 | from random import randint
import sys
def sort(arr):
length = len(arr)
pivot_index = randint(0, length-1)
pivot = arr[pivot_index]
swap(arr, 0, pivot_index)
i = j = 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, 0, i-1)
first_part = arr[:i-1]
second_part = arr[i:]
if i > 2:
first_part = sort(first_part)
if length - i > 1:
second_part = sort(second_part)
return first_part + [arr[i-1]] + second_part
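# Illustrative behaviour of sort() above: a random pivot is swapped to the front,
# elements smaller than it are gathered into the leading region, the pivot is then
# placed at its final position and both halves are sorted recursively, so e.g.
# sort([3, 1, 2]) == [1, 2, 3].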
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def check(arr, length):
if length != len(arr):
print 'Array size changed!'
return False
for i in range(length-1):
if arr[i] > arr[i+1]:
print 'Sort Failed!'
return False
return True
def main(arr_len):
unsorted = [randint(0, arr_len) for n in range(arr_len)]
length = len(unsorted)
check(sort(unsorted), length)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
    except (IndexError, ValueError):
        print 'Format: python quicksort.py <array-length>'
        # exit here, otherwise main() would be called with an undefined arr_len
        sys.exit(1)
    main(arr_len)
| mit | -7,494,212,191,945,609,000 | 15.25 | 57 | 0.618269 | false |
egenerat/bank-statement-analyser | analyzer/utils.py | 1 | 2843 | import datetime
import os
import sys
from constants import CATEGORIES, DIRECT_DEBIT_PAYMENT, ROOT_DIR
def sum_total_expenses(data_dict):
expenses_sum = 0
transaction_nb = 0
for i in data_dict:
        if DIRECT_DEBIT_PAYMENT.lower() not in i['description'].lower() and i['amount'] < 0 and i['amount'] != -720:
expenses_sum += i['amount']
transaction_nb += 1
return {
'expenses_sum': expenses_sum,
'transaction_nb': transaction_nb
}
def display_highest_amounts(expenses):
sorted_result = sorted(expenses, key=lambda x: x['amount'], reverse=True)
for i in sorted_result:
print('{date} {description} {amount}'.format(date=i['date'], description=i['description'], amount=i['amount']))
def display_sorted_categories(expenses):
result_to_display = order_by_category(expenses, CATEGORIES)
sorted_result = sorted(result_to_display.items(), key=lambda x: x[1], reverse=True)
for i in sorted_result:
category_amount = i[1]['amount']
if category_amount != 0:
print('{cat}: {amount}'.format(cat=i[0], amount=category_amount))
# if result_to_display['unCategorized']['amount'] != 0:
# print('unCategorized:')
# print(result_to_display['unCategorized'])
# for i in result_to_display['unCategorized']['obj']:
# print(i)
def get_all_data_files():
walk_dir = ROOT_DIR
result = [os.path.join(root, f) for root, subdirs, files in os.walk(walk_dir) for f in files]
return result
def sort_expenses_by_month(expenses_list):
result = {}
for i in expenses_list:
expense_month = str(i['date'].month)
expense_year = str(i['date'].year)
period = expense_year + '-' + expense_month
if period not in result:
result[period] = []
result[period].append(i)
return result
def get_filename():
return sys.argv[1:]
def format_amount(amount):
print("{:10.2f}".format(amount))
def format_column(text):
return "{:10.2f}".format(text)
def date_from_string(str_date, pattern):
return datetime.datetime.strptime(str_date, pattern).date()
def order_by_category(expenses, categories):
result = {}
# initiate result
for i in categories:
result[i] = {
'amount': 0,
'obj': []
}
for i in expenses:
is_categorized = False
for j in categories:
for k in categories[j]:
if k.lower() in i['description'].lower():
result[j]['amount'] += i['amount']
result[j]['obj'].append(i)
is_categorized = True
if not is_categorized:
result['unCategorized']['amount'] += i['amount']
result['unCategorized']['obj'].append(i)
return result
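# Assumed input shapes for order_by_category (sketch inferred from the code above):
# ``expenses`` is a list of dicts carrying at least 'description' and 'amount' keys,
# while ``categories`` maps a category name to a list of keyword strings and must
# contain an 'unCategorized' entry that serves as the fallback bucket.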
| mit | -471,902,158,696,964,860 | 29.244681 | 120 | 0.594442 | false |
nayas360/pyterm | bin/set.py | 1 | 1471 | # set command to set global variables
from lib.utils import *
def _help():
usage = '''
Usage: set [options] (var) [value]
[options]:
-h Print this help.
-del (var) Delete variable
(var) if defined.
where (var) is a valid
global variable
if [value] is not given,
current value is returned
'''
print(usage)
def main(argv):
if '-h' in argv:
_help()
return
    # The shell doesn't send the
    # command name in the arg list,
    # so the next line is no longer needed:
    # argv.pop(0)  # remove arg
# to show all vars
if len(argv) < 1:
for i in prop.vars():
print(i, ' = ', prop.get(i))
return
if '-del' in argv:
try:
var = argv[1]
# detect system vars
if var == 'save_state' or var == 'c_char':
                err(4, add='Can\'t delete system variable "' + var + '"')
return
prop.delete(var)
return
except IndexError:
err(4, add='variable name was missing')
return
var = argv[0]
if len(argv) < 2:
val = prop.get(var)
if val == NULL:
err(4, var)
return
print(val)
return
# remove name of var
argv.pop(0)
# make the rest the val
val = make_s(argv)
try:
prop.set(var, val)
except ValueError:
err(4, add="can't create this variable")
| mit | 2,858,846,573,396,601,000 | 20.632353 | 71 | 0.507138 | false |
WladimirSidorenko/DiscourseSenser | dsenser/xgboost/xgboostbase.py | 1 | 2813 | #!/usr/bin/env python
# -*- coding: utf-8; mode: python; -*-
"""Module providing abstract interface class for XGBoost sense calssification.
Attributes:
XGBoostBaseSenser (class):
abstract class defining interface for explicit and implicit classifier
"""
##################################################################
# Imports
from __future__ import absolute_import, print_function
from sklearn.feature_extraction import DictVectorizer
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import f1_score, make_scorer
from sklearn.pipeline import Pipeline
from xgboost import XGBClassifier
##################################################################
# Constants
MAX_DEPTH = 9  # maximum depth of tree
NTREES = 600 # number of tree estimators
ALPHA = 0.05 # learning rate
BASE_PARAM_GRID = {"clf__max_depth": [3 * i for i in xrange(1, 3)],
"clf__n_estimators": [100 * i for i in xrange(1, 2)]}
BASE_N_JOBS = 1 # xgboost does not support grid parallelization
# as it relies on openmp
##################################################################
# Class
class XGBoostBaseSenser(object):
"""Base sense classifier using XGBoost.
"""
def __init__(self, a_clf=None, a_grid_search=False):
"""Class constructor.
Args:
a_clf (classifier or None):
classifier to use or None for default
a_grid_search (bool): use grid search for estimating
hyper-parameters
"""
classifier = a_clf
self._gs = a_grid_search
if a_clf is None:
classifier = XGBClassifier(max_depth=MAX_DEPTH,
n_estimators=NTREES,
learning_rate=ALPHA,
objective="multi:softprob")
self._clf = classifier
# latest version of XGBoost cannot deal with non-sparse feature vectors
self._model = Pipeline([("vect", DictVectorizer()),
("clf", classifier)])
def _predict(self, a_feats, a_ret, a_i):
"""Method for predicting sense of single relation.
Args:
a_feats (dict):
features of the input instance
a_ret (np.array):
output prediction vector
a_i (int):
row index in the output vector
Returns:
void:
Note:
updates ``a_ret`` in place
"""
ret = self._model.predict_proba(a_feats)[0]
if self._clf is None:
a_ret[a_i] += ret
else:
for i, j in enumerate(ret):
a_ret[a_i][self._clf._le.inverse_transform(i)] += j
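        # Note on the two branches above: when no wrapped classifier is available the
        # raw probability vector is accumulated as-is; otherwise each probability is
        # added at the position of the decoded (original) class label.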
| mit | 6,529,310,863,707,153,000 | 31.709302 | 79 | 0.520441 | false |
neilLasrado/erpnext | erpnext/projects/doctype/timesheet/timesheet.py | 1 | 14333 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import json
from datetime import timedelta
from erpnext.controllers.queries import get_match_cond
from frappe.utils import flt, time_diff_in_hours, get_datetime, getdate, cint, date_diff, add_to_date
from frappe.model.document import Document
from erpnext.manufacturing.doctype.workstation.workstation import (check_if_within_operating_hours,
WorkstationHolidayError)
from erpnext.manufacturing.doctype.manufacturing_settings.manufacturing_settings import get_mins_between_operations
class OverlapError(frappe.ValidationError): pass
class OverWorkLoggedError(frappe.ValidationError): pass
class Timesheet(Document):
def validate(self):
self.set_employee_name()
self.set_status()
self.validate_dates()
self.validate_time_logs()
self.calculate_std_hours()
self.update_cost()
self.calculate_total_amounts()
self.calculate_percentage_billed()
self.set_dates()
def set_employee_name(self):
if self.employee and not self.employee_name:
self.employee_name = frappe.db.get_value('Employee', self.employee, 'employee_name')
def calculate_total_amounts(self):
self.total_hours = 0.0
self.total_billable_hours = 0.0
self.total_billed_hours = 0.0
self.total_billable_amount = 0.0
self.total_costing_amount = 0.0
self.total_billed_amount = 0.0
for d in self.get("time_logs"):
self.update_billing_hours(d)
self.update_time_rates(d)
self.total_hours += flt(d.hours)
self.total_costing_amount += flt(d.costing_amount)
if d.billable:
self.total_billable_hours += flt(d.billing_hours)
self.total_billable_amount += flt(d.billing_amount)
self.total_billed_amount += flt(d.billing_amount) if d.sales_invoice else 0.0
self.total_billed_hours += flt(d.billing_hours) if d.sales_invoice else 0.0
def calculate_percentage_billed(self):
self.per_billed = 0
if self.total_billed_amount > 0 and self.total_billable_amount > 0:
self.per_billed = (self.total_billed_amount * 100) / self.total_billable_amount
def update_billing_hours(self, args):
if args.billable:
if flt(args.billing_hours) == 0.0:
args.billing_hours = args.hours
else:
args.billing_hours = 0
def set_status(self):
self.status = {
"0": "Draft",
"1": "Submitted",
"2": "Cancelled"
}[str(self.docstatus or 0)]
if self.per_billed == 100:
self.status = "Billed"
if self.salary_slip:
self.status = "Payslip"
if self.sales_invoice and self.salary_slip:
self.status = "Completed"
def set_dates(self):
if self.docstatus < 2 and self.time_logs:
start_date = min([getdate(d.from_time) for d in self.time_logs])
end_date = max([getdate(d.to_time) for d in self.time_logs])
if start_date and end_date:
self.start_date = getdate(start_date)
self.end_date = getdate(end_date)
def calculate_std_hours(self):
std_working_hours = frappe.get_value("Company", self.company, 'standard_working_hours')
for time in self.time_logs:
if time.from_time and time.to_time:
if flt(std_working_hours) and date_diff(time.to_time, time.from_time):
time.hours = flt(std_working_hours) * date_diff(time.to_time, time.from_time)
else:
if not time.hours:
time.hours = time_diff_in_hours(time.to_time, time.from_time)
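		# Illustrative numbers (assuming Company.standard_working_hours = 8): a log
		# spanning two calendar days gives hours = 8 * 2 = 16, while a same-day log
		# keeps its entered hours or falls back to the raw to_time - from_time difference.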
def before_cancel(self):
self.set_status()
def on_cancel(self):
self.update_task_and_project()
def on_submit(self):
self.validate_mandatory_fields()
self.update_task_and_project()
def validate_mandatory_fields(self):
for data in self.time_logs:
if not data.from_time and not data.to_time:
frappe.throw(_("Row {0}: From Time and To Time is mandatory.").format(data.idx))
if not data.activity_type and self.employee:
frappe.throw(_("Row {0}: Activity Type is mandatory.").format(data.idx))
if flt(data.hours) == 0.0:
frappe.throw(_("Row {0}: Hours value must be greater than zero.").format(data.idx))
def update_task_and_project(self):
tasks, projects = [], []
for data in self.time_logs:
if data.task and data.task not in tasks:
task = frappe.get_doc("Task", data.task)
task.update_time_and_costing()
task.save()
tasks.append(data.task)
elif data.project and data.project not in projects:
frappe.get_doc("Project", data.project).update_project()
projects.append(data.project)
def validate_dates(self):
for data in self.time_logs:
if data.from_time and data.to_time and time_diff_in_hours(data.to_time, data.from_time) < 0:
frappe.throw(_("To date cannot be before from date"))
def validate_time_logs(self):
for data in self.get('time_logs'):
self.validate_overlap(data)
self.validate_task_project()
def validate_overlap(self, data):
settings = frappe.get_single('Projects Settings')
self.validate_overlap_for("user", data, self.user, settings.ignore_user_time_overlap)
self.validate_overlap_for("employee", data, self.employee, settings.ignore_employee_time_overlap)
def validate_task_project(self):
for log in self.time_logs:
log.project = log.project or frappe.db.get_value("Task", log.task, "project")
def validate_overlap_for(self, fieldname, args, value, ignore_validation=False):
if not value or ignore_validation:
return
existing = self.get_overlap_for(fieldname, args, value)
if existing:
frappe.throw(_("Row {0}: From Time and To Time of {1} is overlapping with {2}")
.format(args.idx, self.name, existing.name), OverlapError)
def get_overlap_for(self, fieldname, args, value):
cond = "ts.`{0}`".format(fieldname)
if fieldname == 'workstation':
cond = "tsd.`{0}`".format(fieldname)
existing = frappe.db.sql("""select ts.name as name, tsd.from_time as from_time, tsd.to_time as to_time from
`tabTimesheet Detail` tsd, `tabTimesheet` ts where {0}=%(val)s and tsd.parent = ts.name and
(
(%(from_time)s > tsd.from_time and %(from_time)s < tsd.to_time) or
(%(to_time)s > tsd.from_time and %(to_time)s < tsd.to_time) or
(%(from_time)s <= tsd.from_time and %(to_time)s >= tsd.to_time))
and tsd.name!=%(name)s
and ts.name!=%(parent)s
and ts.docstatus < 2""".format(cond),
{
"val": value,
"from_time": args.from_time,
"to_time": args.to_time,
"name": args.name or "No Name",
"parent": args.parent or "No Name"
}, as_dict=True)
# check internal overlap
for time_log in self.time_logs:
if not (time_log.from_time and time_log.to_time
and args.from_time and args.to_time): continue
if (fieldname != 'workstation' or args.get(fieldname) == time_log.get(fieldname)) and \
args.idx != time_log.idx and ((args.from_time > time_log.from_time and args.from_time < time_log.to_time) or
(args.to_time > time_log.from_time and args.to_time < time_log.to_time) or
(args.from_time <= time_log.from_time and args.to_time >= time_log.to_time)):
return self
return existing[0] if existing else None
def update_cost(self):
for data in self.time_logs:
if data.activity_type or data.billable:
rate = get_activity_cost(self.employee, data.activity_type)
hours = data.billing_hours or 0
costing_hours = data.billing_hours or data.hours or 0
if rate:
data.billing_rate = flt(rate.get('billing_rate')) if flt(data.billing_rate) == 0 else data.billing_rate
data.costing_rate = flt(rate.get('costing_rate')) if flt(data.costing_rate) == 0 else data.costing_rate
data.billing_amount = data.billing_rate * hours
data.costing_amount = data.costing_rate * costing_hours
def update_time_rates(self, ts_detail):
if not ts_detail.billable:
ts_detail.billing_rate = 0.0
@frappe.whitelist()
def get_projectwise_timesheet_data(project, parent=None):
cond = ''
if parent:
cond = "and parent = %(parent)s"
return frappe.db.sql("""select name, parent, billing_hours, billing_amount as billing_amt
from `tabTimesheet Detail` where parenttype = 'Timesheet' and docstatus=1 and project = %(project)s {0} and billable = 1
and sales_invoice is null""".format(cond), {'project': project, 'parent': parent}, as_dict=1)
@frappe.whitelist()
def get_timesheet(doctype, txt, searchfield, start, page_len, filters):
if not filters: filters = {}
condition = ""
if filters.get("project"):
condition = "and tsd.project = %(project)s"
return frappe.db.sql("""select distinct tsd.parent from `tabTimesheet Detail` tsd,
`tabTimesheet` ts where
ts.status in ('Submitted', 'Payslip') and tsd.parent = ts.name and
tsd.docstatus = 1 and ts.total_billable_amount > 0
and tsd.parent LIKE %(txt)s {condition}
order by tsd.parent limit %(start)s, %(page_len)s"""
.format(condition=condition), {
'txt': '%' + txt + '%',
"start": start, "page_len": page_len, 'project': filters.get("project")
})
@frappe.whitelist()
def get_timesheet_data(name, project):
data = None
if project and project!='':
data = get_projectwise_timesheet_data(project, name)
else:
data = frappe.get_all('Timesheet',
fields = ["(total_billable_amount - total_billed_amount) as billing_amt", "total_billable_hours as billing_hours"], filters = {'name': name})
return {
'billing_hours': data[0].billing_hours if data else None,
'billing_amount': data[0].billing_amt if data else None,
'timesheet_detail': data[0].name if data and project and project!= '' else None
}
@frappe.whitelist()
def make_sales_invoice(source_name, item_code=None, customer=None, project=None):
target = frappe.new_doc("Sales Invoice")
timesheet = frappe.get_doc('Timesheet', source_name)
if not timesheet.total_billable_hours:
frappe.throw(_("Invoice can't be made for zero billing hour"))
if timesheet.total_billable_hours == timesheet.total_billed_hours:
frappe.throw(_("Invoice already created for all billing hours"))
hours = flt(timesheet.total_billable_hours) - flt(timesheet.total_billed_hours)
billing_amount = flt(timesheet.total_billable_amount) - flt(timesheet.total_billed_amount)
billing_rate = billing_amount / hours
target.company = timesheet.company
if customer:
target.customer = customer
if project:
target.project = project
if item_code:
target.append('items', {
'item_code': item_code,
'qty': hours,
'rate': billing_rate
})
target.append('timesheets', {
'time_sheet': timesheet.name,
'billing_hours': hours,
'billing_amount': billing_amount,
'item_code': item_code
})
target.run_method("calculate_billing_amount_for_timesheet")
target.run_method("set_missing_values")
target.run_method("calculate_taxes_and_totals")
return target
@frappe.whitelist()
def make_salary_slip(source_name, target_doc=None):
target = frappe.new_doc("Salary Slip")
set_missing_values(source_name, target)
target.run_method("get_emp_and_leave_details")
return target
def set_missing_values(time_sheet, target):
doc = frappe.get_doc('Timesheet', time_sheet)
target.employee = doc.employee
target.employee_name = doc.employee_name
target.salary_slip_based_on_timesheet = 1
target.start_date = doc.start_date
target.end_date = doc.end_date
target.posting_date = doc.modified
target.total_working_hours = doc.total_hours
target.append('timesheets', {
'time_sheet': doc.name,
'working_hours': doc.total_hours
})
@frappe.whitelist()
def get_activity_cost(employee=None, activity_type=None):
rate = frappe.db.get_values("Activity Cost", {"employee": employee,
"activity_type": activity_type}, ["costing_rate", "billing_rate"], as_dict=True)
if not rate:
rate = frappe.db.get_values("Activity Type", {"activity_type": activity_type},
["costing_rate", "billing_rate"], as_dict=True)
return rate[0] if rate else {}
@frappe.whitelist()
def get_events(start, end, filters=None):
"""Returns events for Gantt / Calendar view rendering.
:param start: Start date-time.
:param end: End date-time.
:param filters: Filters (JSON).
"""
filters = json.loads(filters)
from frappe.desk.calendar import get_event_conditions
conditions = get_event_conditions("Timesheet", filters)
return frappe.db.sql("""select `tabTimesheet Detail`.name as name,
`tabTimesheet Detail`.docstatus as status, `tabTimesheet Detail`.parent as parent,
from_time as start_date, hours, activity_type,
`tabTimesheet Detail`.project, to_time as end_date,
CONCAT(`tabTimesheet Detail`.parent, ' (', ROUND(hours,2),' hrs)') as title
from `tabTimesheet Detail`, `tabTimesheet`
where `tabTimesheet Detail`.parent = `tabTimesheet`.name
and `tabTimesheet`.docstatus < 2
and (from_time <= %(end)s and to_time >= %(start)s) {conditions} {match_cond}
""".format(conditions=conditions, match_cond = get_match_cond('Timesheet')),
{
"start": start,
"end": end
}, as_dict=True, update={"allDay": 0})
def get_timesheets_list(doctype, txt, filters, limit_start, limit_page_length=20, order_by="modified"):
user = frappe.session.user
# find customer name from contact.
customer = ''
timesheets = []
contact = frappe.db.exists('Contact', {'user': user})
if contact:
# find customer
contact = frappe.get_doc('Contact', contact)
customer = contact.get_link_for('Customer')
if customer:
sales_invoices = [d.name for d in frappe.get_all('Sales Invoice', filters={'customer': customer})] or [None]
projects = [d.name for d in frappe.get_all('Project', filters={'customer': customer})]
# Return timesheet related data to web portal.
timesheets = frappe.db.sql('''
SELECT
ts.name, tsd.activity_type, ts.status, ts.total_billable_hours,
COALESCE(ts.sales_invoice, tsd.sales_invoice) AS sales_invoice, tsd.project
FROM `tabTimesheet` ts, `tabTimesheet Detail` tsd
WHERE tsd.parent = ts.name AND
(
ts.sales_invoice IN %(sales_invoices)s OR
tsd.sales_invoice IN %(sales_invoices)s OR
tsd.project IN %(projects)s
)
ORDER BY `end_date` ASC
LIMIT {0}, {1}
'''.format(limit_start, limit_page_length), dict(sales_invoices=sales_invoices, projects=projects), as_dict=True) #nosec
return timesheets
def get_list_context(context=None):
return {
"show_sidebar": True,
"show_search": True,
'no_breadcrumbs': True,
"title": _("Timesheets"),
"get_list": get_timesheets_list,
"row_template": "templates/includes/timesheet/timesheet_row.html"
}
| gpl-3.0 | -2,669,668,574,416,058,400 | 34.654229 | 144 | 0.701388 | false |
lmjohns3/downhill | test/base_test.py | 1 | 3100 | import downhill
import numpy as np
import util
class TestBuild:
def test_sgd(self):
assert isinstance(util.build_rosen('sgd')[0], downhill.SGD)
assert isinstance(util.build_factor('sgd')[0], downhill.SGD)
def test_nag(self):
assert isinstance(util.build_rosen('nag')[0], downhill.NAG)
def test_rprop(self):
assert isinstance(util.build_rosen('RProp')[0], downhill.RProp)
def test_rmsprop(self):
assert isinstance(util.build_rosen('RmsProp')[0], downhill.RMSProp)
def test_adadelta(self):
assert isinstance(util.build_rosen('ADADELTA')[0], downhill.ADADELTA)
def test_esgd(self):
assert isinstance(util.build_rosen('EsGd')[0], downhill.ESGD)
def test_adam(self):
assert isinstance(util.build_rosen('Adam')[0], downhill.Adam)
class Straight(downhill.Optimizer):
def _get_updates_for(self, param, grad):
yield (param, param + 1.1)
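        # A deliberately trivial update rule: every parameter is simply increased by
        # 1.1 at each step, which gives the tests below a predictable optimizer to monitor.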
class TestOptimizer:
def test_rosen(self):
opt, train = util.build_rosen('straight')
assert isinstance(opt, Straight)
# run the optimizer for three iterations. check that the x and y values
# (being monitored) increase at each iteration.
for i, (tm, vm) in enumerate(opt.iterate(train, max_updates=3)):
assert tm['x'] >= vm['x']
assert tm['y'] >= vm['y']
assert i < 3
def test_rosen_unnamed(self):
opt, train = util.build_rosen('straight', name=False, monitor_gradients=True)
assert isinstance(opt, Straight)
# run the optimizer for three iterations. check that the x and y values
# (being monitored) increase at each iteration.
for i, (tm, vm) in enumerate(opt.iterate(train, max_updates=3)):
assert tm['x'] >= vm['x']
assert tm['y'] >= vm['y']
# check there's a manually-named parameter in here.
assert 1 == sum(1 for k in tm if 'unnamed' in k), tm
assert i < 3
def test_factor(self):
opt, train = util.build_factor('straight')
assert isinstance(opt, Straight)
# run the optimizer for two iterations. check that the u and v values
# (being monitored) are reasonable at the start.
for i, (tm, vm) in enumerate(opt.iterate(train)):
assert abs(vm['u<1'] - 0.001) < 1e-5
assert vm['u<-1'] == 0
assert vm['v<1'] == 1
assert vm['v<-1'] == 0
if i == 2:
break
def test_gradient_clip(self):
opt, data = util.build_rosen('straight')
for _ in opt.iterate(data, max_gradient_elem=3):
assert opt.max_gradient_elem == 3
break
def test_set_params(self):
opt, _ = util.build_rosen('straight')
opt.set_params([[1, 2]])
assert np.allclose(opt._params[0].get_value(), [1, 2])
def test_set_best_params(self):
opt, _ = util.build_rosen('straight')
opt._best_params = [[1, 2]]
opt.set_params('best')
assert np.allclose(opt._params[0].get_value(), [1, 2])
| mit | -4,127,588,063,821,869,600 | 33.444444 | 85 | 0.590323 | false |
kevthehermit/viper | viper/modules/jar.py | 2 | 2038 | # -*- coding: utf-8 -*-
# This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
import hashlib
import zipfile
from viper.common.abstracts import Module
from viper.core.session import __sessions__
class Jar(Module):
cmd = 'jar'
description = 'Parse Java JAR archives'
authors = ['Kevin Breen']
def __init__(self):
super(Jar, self).__init__()
self.parser.add_argument('-d ', '--dump', metavar='dump_path', help='Extract all items from jar')
def run(self):
def read_manifest(manifest):
rows = []
lines = manifest.split(b'\r\n')
for line in lines:
if len(line) > 1:
                    item, value = line.split(b':', 1)  # split on the first ':' only, values may contain colons
rows.append([item.decode(), value.decode()])
self.log('info', "Manifest File:")
self.log('table', dict(header=['Item', 'Value'], rows=rows))
super(Jar, self).run()
if self.args is None:
return
arg_dump = self.args.dump
if not __sessions__.is_set():
self.log('error', "No open session")
return
if not zipfile.is_zipfile(__sessions__.current.file.path):
self.log('error', "Doesn't Appear to be a valid jar archive")
return
with zipfile.ZipFile(__sessions__.current.file.path, 'r') as archive:
jar_tree = []
for name in archive.namelist():
item_data = archive.read(name)
if name == 'META-INF/MANIFEST.MF':
read_manifest(item_data)
item_md5 = hashlib.md5(item_data).hexdigest()
jar_tree.append([name, item_md5])
self.log('info', "Jar Tree:")
self.log('table', dict(header=['Java File', 'MD5'], rows=jar_tree))
if arg_dump:
archive.extractall(arg_dump)
self.log('info', "Archive content extracted to {0}".format(arg_dump))
| bsd-3-clause | 3,725,401,441,174,335,500 | 30.353846 | 105 | 0.541217 | false |
davidwaroquiers/pymatgen | pymatgen/analysis/chemenv/coordination_environments/coordination_geometry_finder.py | 1 | 95203 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module contains the main object used to identify the coordination environments in a given structure.
If you use this module, please cite the following:
David Waroquiers, Xavier Gonze, Gian-Marco Rignanese, Cathrin Welker-Nieuwoudt, Frank Rosowski,
Michael Goebel, Stephan Schenk, Peter Degelmann, Rute Andre, Robert Glaum, and Geoffroy Hautier,
"Statistical analysis of coordination environments in oxides",
Chem. Mater., 2017, 29 (19), pp 8346–8360,
DOI: 10.1021/acs.chemmater.7b02766
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__credits__ = "Geoffroy Hautier"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "[email protected]"
__date__ = "Feb 20, 2016"
import itertools
import logging
import time
from collections import OrderedDict
from random import shuffle
import numpy as np
from numpy.linalg import norm, svd
from pymatgen.analysis.bond_valence import BVAnalyzer
from pymatgen.analysis.chemenv.coordination_environments.chemenv_strategies import (
MultiWeightsChemenvStrategy,
)
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import (
EXPLICIT_PERMUTATIONS,
SEPARATION_PLANE,
AllCoordinationGeometries,
)
from pymatgen.analysis.chemenv.coordination_environments.structure_environments import (
ChemicalEnvironments,
LightStructureEnvironments,
StructureEnvironments,
)
from pymatgen.analysis.chemenv.coordination_environments.voronoi import (
DetailedVoronoiContainer,
)
from pymatgen.analysis.chemenv.utils.coordination_geometry_utils import (
Plane,
collinear,
separation_in_list,
sort_separation,
sort_separation_tuple,
)
from pymatgen.analysis.chemenv.utils.defs_utils import chemenv_citations
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import Species
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
debug = False
DIST_TOLERANCES = [0.02, 0.05, 0.1, 0.2, 0.3]
class AbstractGeometry:
"""
Class used to describe a geometry (perfect or distorted)
"""
def __init__(
self,
central_site=None,
bare_coords=None,
centering_type="standard",
include_central_site_in_centroid=False,
optimization=None,
):
"""
Constructor for the abstract geometry
:param central_site: Coordinates of the central site
:param bare_coords: Coordinates of the neighbors of the central site
:param centering_type: How to center the abstract geometry
:param include_central_site_in_centroid: When the centering is on the centroid, the central site is included
if this parameter is set to True.
:raise: ValueError if the parameters are not consistent
"""
bcoords = np.array(bare_coords)
self.bare_centre = np.array(central_site)
self.bare_points_without_centre = bcoords
self.bare_points_with_centre = np.array(central_site)
self.bare_points_with_centre = np.concatenate(([self.bare_points_with_centre], bcoords))
self.centroid_without_centre = np.mean(self.bare_points_without_centre, axis=0)
self.centroid_with_centre = np.mean(self.bare_points_with_centre, axis=0)
self._points_wcs_csc = self.bare_points_with_centre - self.bare_centre
self._points_wocs_csc = self.bare_points_without_centre - self.bare_centre
self._points_wcs_ctwcc = self.bare_points_with_centre - self.centroid_with_centre
self._points_wocs_ctwcc = self.bare_points_without_centre - self.centroid_with_centre
self._points_wcs_ctwocc = self.bare_points_with_centre - self.centroid_without_centre
self._points_wocs_ctwocc = self.bare_points_without_centre - self.centroid_without_centre
self.centering_type = centering_type
self.include_central_site_in_centroid = include_central_site_in_centroid
self.bare_central_site = np.array(central_site)
if centering_type == "standard":
if len(bare_coords) < 5:
if include_central_site_in_centroid:
raise ValueError(
"The center is the central site, no calculation of the centroid, "
"variable include_central_site_in_centroid should be set to False"
)
if central_site is None:
raise ValueError("Centering_type is central_site, the central site should be given")
self.centre = np.array(central_site)
else:
total = np.sum(bcoords, axis=0)
if include_central_site_in_centroid:
if central_site is None:
raise ValueError("The centroid includes the central site but no central site is given")
total += self.bare_centre
self.centre = total / (np.float_(len(bare_coords)) + 1.0)
else:
self.centre = total / np.float_(len(bare_coords))
elif centering_type == "central_site":
if include_central_site_in_centroid:
raise ValueError(
"The center is the central site, no calculation of the centroid, "
"variable include_central_site_in_centroid should be set to False"
)
if central_site is None:
raise ValueError("Centering_type is central_site, the central site should be given")
self.centre = np.array(central_site)
elif centering_type == "centroid":
total = np.sum(bcoords, axis=0)
if include_central_site_in_centroid:
if central_site is None:
raise ValueError("The centroid includes the central site but no central site is given")
total += self.bare_centre
self.centre = total / (np.float_(len(bare_coords)) + 1.0)
else:
self.centre = total / np.float_(len(bare_coords))
self._bare_coords = self.bare_points_without_centre
self._coords = self._bare_coords - self.centre
self.central_site = self.bare_central_site - self.centre
self.coords = self._coords
self.bare_coords = self._bare_coords
def __str__(self):
"""
String representation of the AbstractGeometry
:return: String representation of the AbstractGeometry
"""
outs = ["\nAbstract Geometry with {n} points :".format(n=len(self.coords))]
for pp in self.coords:
outs.append(" {pp}".format(pp=pp))
if self.centering_type == "standard":
if self.include_central_site_in_centroid:
outs.append(
"Points are referenced to the central site for coordination numbers < 5"
" and to the centroid (calculated with the central site) for coordination"
" numbers >= 5 : {c}\n".format(c=self.centre)
)
else:
outs.append(
"Points are referenced to the central site for coordination numbers < 5"
" and to the centroid (calculated without the central site) for coordination"
" numbers >= 5 : {c}\n".format(c=self.centre)
)
elif self.centering_type == "central_site":
outs.append("Points are referenced to the central site : {c}\n".format(c=self.centre))
elif self.centering_type == "centroid":
if self.include_central_site_in_centroid:
outs.append(
"Points are referenced to the centroid "
"(calculated with the central site) :\n {c}\n".format(c=self.centre)
)
else:
outs.append(
"Points are referenced to the centroid"
" (calculated without the central site) :\n {c}\n".format(c=self.centre)
)
return "\n".join(outs)
@classmethod
def from_cg(cls, cg, centering_type="standard", include_central_site_in_centroid=False):
"""
:param cg:
:param centering_type:
:param include_central_site_in_centroid:
:return:
"""
central_site = cg.get_central_site()
bare_coords = [np.array(pt, np.float_) for pt in cg.points]
return cls(
central_site=central_site,
bare_coords=bare_coords,
centering_type=centering_type,
include_central_site_in_centroid=include_central_site_in_centroid,
)
def points_wcs_csc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wcs_csc
return np.concatenate((self._points_wcs_csc[0:1], self._points_wocs_csc.take(permutation, axis=0)))
def points_wocs_csc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wocs_csc
return self._points_wocs_csc.take(permutation, axis=0)
def points_wcs_ctwcc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wcs_ctwcc
return np.concatenate(
(
self._points_wcs_ctwcc[0:1],
self._points_wocs_ctwcc.take(permutation, axis=0),
)
)
def points_wocs_ctwcc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wocs_ctwcc
return self._points_wocs_ctwcc.take(permutation, axis=0)
def points_wcs_ctwocc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wcs_ctwocc
return np.concatenate(
(
self._points_wcs_ctwocc[0:1],
self._points_wocs_ctwocc.take(permutation, axis=0),
)
)
def points_wocs_ctwocc(self, permutation=None):
"""
:param permutation:
:return:
"""
if permutation is None:
return self._points_wocs_ctwocc
return self._points_wocs_ctwocc.take(permutation, axis=0)
@property
def cn(self):
"""
:return: Coordination number
"""
return len(self.coords)
@property
def coordination_number(self):
"""
:return: Coordination number
"""
return len(self.coords)
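# Minimal usage sketch for AbstractGeometry (illustrative values): a hypothetical
# square-planar-like site centred at the origin could be described as
#   neighbors = [[1, 0, 0], [-1, 0, 0], [0, 1, 0], [0, -1, 0]]
#   abstract_geom = AbstractGeometry(central_site=[0, 0, 0], bare_coords=neighbors,
#                                    centering_type="central_site")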
def symmetry_measure(points_distorted, points_perfect):
"""
Computes the continuous symmetry measure of the (distorted) set of points "points_distorted" with respect to the
(perfect) set of points "points_perfect".
:param points_distorted: List of points describing a given (distorted) polyhedron for which the symmetry measure
has to be computed with respect to the model polyhedron described by the list of points
"points_perfect".
:param points_perfect: List of "perfect" points describing a given model polyhedron.
:return: The continuous symmetry measure of the distorted polyhedron with respect to the perfect polyhedron
"""
# When there is only one point, the symmetry measure is 0.0 by definition
if len(points_distorted) == 1:
return {
"symmetry_measure": 0.0,
"scaling_factor": None,
"rotation_matrix": None,
}
# Find the rotation matrix that aligns the distorted points to the perfect points in a least-square sense.
rot = find_rotation(points_distorted=points_distorted, points_perfect=points_perfect)
# Find the scaling factor between the distorted points and the perfect points in a least-square sense.
scaling_factor, rotated_coords, points_perfect = find_scaling_factor(
points_distorted=points_distorted, points_perfect=points_perfect, rot=rot
)
# Compute the continuous symmetry measure [see Eq. 1 in Pinsky et al., Inorganic Chemistry 37, 5575 (1998)]
rotated_coords = scaling_factor * rotated_coords
diff = points_perfect - rotated_coords
num = np.tensordot(diff, diff)
denom = np.tensordot(points_perfect, points_perfect)
return {
"symmetry_measure": num / denom * 100.0,
"scaling_factor": scaling_factor,
"rotation_matrix": rot,
}
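# Summary of the quantity computed above: with R the least-squares rotation and s the
# least-squares scaling factor, the returned value is
#   CSM = 100 * ||P_perfect - s * (R . P_distorted)||^2 / ||P_perfect||^2
# (Frobenius norms over all points), so a perfect match gives 0.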
def find_rotation(points_distorted, points_perfect):
"""
This finds the rotation matrix that aligns the (distorted) set of points "points_distorted" with respect to the
(perfect) set of points "points_perfect" in a least-square sense.
:param points_distorted: List of points describing a given (distorted) polyhedron for which the rotation that
aligns these points in a least-square sense to the set of perfect points "points_perfect"
:param points_perfect: List of "perfect" points describing a given model polyhedron.
:return: The rotation matrix
"""
H = np.matmul(points_distorted.T, points_perfect)
[U, S, Vt] = svd(H)
rot = np.matmul(Vt.T, U.T)
return rot
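# This is the usual SVD-based least-squares (Procrustes/Kabsch-type) alignment:
# H = P_distorted^T . P_perfect is decomposed as U S V^T and the rotation is taken
# as R = V U^T. Note that no determinant check is applied here, so an improper
# rotation (reflection) is not explicitly excluded.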
def find_scaling_factor(points_distorted, points_perfect, rot):
"""
This finds the scaling factor between the (distorted) set of points "points_distorted" and the
(perfect) set of points "points_perfect" in a least-square sense.
:param points_distorted: List of points describing a given (distorted) polyhedron for which the scaling factor has
to be obtained.
:param points_perfect: List of "perfect" points describing a given model polyhedron.
:param rot: The rotation matrix
:return: The scaling factor between the two structures and the rotated set of (distorted) points.
"""
rotated_coords = np.matmul(rot, points_distorted.T).T
num = np.tensordot(rotated_coords, points_perfect)
denom = np.tensordot(rotated_coords, rotated_coords)
return num / denom, rotated_coords, points_perfect
class LocalGeometryFinder:
"""
Main class used to find the local environments in a structure
"""
DEFAULT_BVA_DISTANCE_SCALE_FACTOR = 1.0
BVA_DISTANCE_SCALE_FACTORS = {
"experimental": 1.0,
"GGA_relaxed": 1.015,
"LDA_relaxed": 0.995,
}
DEFAULT_SPG_ANALYZER_OPTIONS = {"symprec": 1e-3, "angle_tolerance": 5}
STRUCTURE_REFINEMENT_NONE = "none"
STRUCTURE_REFINEMENT_REFINED = "refined"
STRUCTURE_REFINEMENT_SYMMETRIZED = "symmetrized"
DEFAULT_STRATEGY = MultiWeightsChemenvStrategy.stats_article_weights_parameters()
PRESETS = {
"DEFAULT": {
"maximum_distance_factor": 2.0,
"minimum_angle_factor": 0.05,
"voronoi_normalized_distance_tolerance": 0.05,
"voronoi_normalized_angle_tolerance": 0.03,
"optimization": 2,
}
}
def __init__(
self,
permutations_safe_override=False,
plane_ordering_override=True,
debug_level=None,
plane_safe_permutations=False,
only_symbols=None,
):
"""
Constructor for the LocalGeometryFinder, initializes the list of coordination geometries
:param permutations_safe_override: If set to True, all permutations are tested (very time-consuming for large
coordination numbers!)
:param plane_ordering_override: If set to False, the ordering of the points in the plane is disabled
"""
self.allcg = AllCoordinationGeometries(
permutations_safe_override=permutations_safe_override,
only_symbols=only_symbols,
)
self.permutations_safe_override = permutations_safe_override
self.plane_ordering_override = plane_ordering_override
self.plane_safe_permutations = plane_safe_permutations
self.setup_parameters(
centering_type="centroid",
include_central_site_in_centroid=True,
bva_distance_scale_factor=None,
structure_refinement=self.STRUCTURE_REFINEMENT_NONE,
)
print(chemenv_citations())
def setup_parameters(
self,
centering_type="standard",
include_central_site_in_centroid=False,
bva_distance_scale_factor=None,
structure_refinement=STRUCTURE_REFINEMENT_REFINED,
spg_analyzer_options=None,
):
"""
Setup of the parameters for the coordination geometry finder. A reference point for the geometries has to be
chosen. This can be the centroid of the structure (including or excluding the atom for which the coordination
geometry is looked for) or the atom itself. In the 'standard' centering_type, the reference point is the central
atom for coordination numbers 1, 2, 3 and 4 and the centroid for coordination numbers > 4.
:param centering_type: Type of the reference point (centering) 'standard', 'centroid' or 'central_site'
:param include_central_site_in_centroid: In case centering_type is 'centroid', the central site is included if
this value is set to True.
:param bva_distance_scale_factor: Scaling factor for the bond valence analyzer (this might be different whether
the structure is an experimental one, an LDA or a GGA relaxed one, or any other relaxation scheme (where
under- or over-estimation of bond lengths is known).
:param structure_refinement: Refinement of the structure. Can be "none", "refined" or "symmetrized".
:param spg_analyzer_options: Options for the SpaceGroupAnalyzer (dictionary specifying "symprec"
and "angle_tolerance". See pymatgen's SpaceGroupAnalyzer for more information.
"""
self.centering_type = centering_type
self.include_central_site_in_centroid = include_central_site_in_centroid
if bva_distance_scale_factor is not None:
self.bva_distance_scale_factor = bva_distance_scale_factor
else:
self.bva_distance_scale_factor = self.DEFAULT_BVA_DISTANCE_SCALE_FACTOR
self.structure_refinement = structure_refinement
if spg_analyzer_options is None:
self.spg_analyzer_options = self.DEFAULT_SPG_ANALYZER_OPTIONS
else:
self.spg_analyzer_options = spg_analyzer_options
def setup_parameter(self, parameter, value):
"""
Setup of one specific parameter to the given value. The other parameters are unchanged. See setup_parameters
method for the list of possible parameters
:param parameter: Parameter to setup/update
:param value: Value of the parameter
"""
self.__dict__[parameter] = value
def setup_structure(self, structure):
"""
Sets up the structure for which the coordination geometries have to be identified. The structure is analyzed
with the space group analyzer and a refined structure is used
:param structure: A pymatgen Structure
"""
self.initial_structure = structure.copy()
if self.structure_refinement == self.STRUCTURE_REFINEMENT_NONE:
self.structure = structure.copy()
self.spg_analyzer = None
self.symmetrized_structure = None
else:
self.spg_analyzer = SpacegroupAnalyzer(
self.initial_structure,
symprec=self.spg_analyzer_options["symprec"],
angle_tolerance=self.spg_analyzer_options["angle_tolerance"],
)
if self.structure_refinement == self.STRUCTURE_REFINEMENT_REFINED:
self.structure = self.spg_analyzer.get_refined_structure()
self.symmetrized_structure = None
elif self.structure_refinement == self.STRUCTURE_REFINEMENT_SYMMETRIZED:
self.structure = self.spg_analyzer.get_refined_structure()
self.spg_analyzer_refined = SpacegroupAnalyzer(
self.structure,
symprec=self.spg_analyzer_options["symprec"],
angle_tolerance=self.spg_analyzer_options["angle_tolerance"],
)
self.symmetrized_structure = self.spg_analyzer_refined.get_symmetrized_structure()
def get_structure(self):
"""
Returns the pymatgen Structure that has been setup for the identification of geometries (the initial one
might have been refined/symmetrized using the SpaceGroupAnalyzer).
:return: The pymatgen Structure that has been setup for the identification of geometries (the initial one
might have been refined/symmetrized using the SpaceGroupAnalyzer).
"""
return self.structure
def set_structure(self, lattice, species, coords, coords_are_cartesian):
"""
Sets up the pymatgen structure for which the coordination geometries have to be identified starting from the
lattice, the species and the coordinates
:param lattice: The lattice of the structure
:param species: The species on the sites
:param coords: The coordinates of the sites
:param coords_are_cartesian: If set to True, the coordinates are given in cartesian coordinates
"""
self.setup_structure(Structure(lattice, species, coords, coords_are_cartesian))
def compute_coordination_environments(
self,
structure,
indices=None,
only_cations=True,
strategy=DEFAULT_STRATEGY,
valences="bond-valence-analysis",
initial_structure_environments=None,
):
"""
:param structure:
:param indices:
:param only_cations:
:param strategy:
:param valences:
:param initial_structure_environments:
:return:
"""
self.setup_structure(structure=structure)
if valences == "bond-valence-analysis":
bva = BVAnalyzer()
try:
vals = bva.get_valences(structure=structure)
except ValueError:
vals = "undefined"
else:
if valences == "undefined":
vals = valences
else:
if len(valences) != len(structure):
raise ValueError("Valences do not match the number of sites in the structure")
vals = valences
# TODO: add something to compute only the neighbors sets needed for the strategy.
se = self.compute_structure_environments(
only_cations=only_cations,
only_indices=indices,
valences=vals,
initial_structure_environments=initial_structure_environments,
)
lse = LightStructureEnvironments.from_structure_environments(strategy=strategy, structure_environments=se)
return lse.coordination_environments
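    # Usage sketch (illustrative; ``my_structure`` stands for any pymatgen Structure):
    #   lgf = LocalGeometryFinder()
    #   environments = lgf.compute_coordination_environments(my_structure)
    # With the defaults above, this relies on bond-valence-derived oxidation states and
    # on the multi-weights strategy of the statistics article cited in the module docstring.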
def compute_structure_environments(
self,
excluded_atoms=None,
only_atoms=None,
only_cations=True,
only_indices=None,
maximum_distance_factor=PRESETS["DEFAULT"]["maximum_distance_factor"],
minimum_angle_factor=PRESETS["DEFAULT"]["minimum_angle_factor"],
max_cn=None,
min_cn=None,
only_symbols=None,
valences="undefined",
additional_conditions=None,
info=None,
timelimit=None,
initial_structure_environments=None,
get_from_hints=False,
voronoi_normalized_distance_tolerance=PRESETS["DEFAULT"]["voronoi_normalized_distance_tolerance"],
voronoi_normalized_angle_tolerance=PRESETS["DEFAULT"]["voronoi_normalized_angle_tolerance"],
recompute=None,
optimization=PRESETS["DEFAULT"]["optimization"],
):
"""
Computes and returns the StructureEnvironments object containing all the information about the coordination
environments in the structure
:param excluded_atoms: Atoms for which the coordination geometries does not have to be identified
:param only_atoms: If not set to None, atoms for which the coordination geometries have to be identified
:param only_cations: If set to True, will only compute environments for cations
:param only_indices: If not set to None, will only compute environments the atoms of the given indices
:param maximum_distance_factor: If not set to None, neighbors beyond
maximum_distance_factor*closest_neighbor_distance are not considered
:param minimum_angle_factor: If not set to None, neighbors for which the angle is lower than
minimum_angle_factor*largest_angle_neighbor are not considered
:param max_cn: maximum coordination number to be considered
:param min_cn: minimum coordination number to be considered
:param only_symbols: if not set to None, consider only coordination environments with the given symbols
:param valences: valences of the atoms
:param additional_conditions: additional conditions to be considered in the bonds (example : only bonds
between cation and anion
:param info: additional info about the calculation
:param timelimit: time limit (in secs) after which the calculation of the StructureEnvironments object stops
:param initial_structure_environments: initial StructureEnvironments object (most probably incomplete)
:param get_from_hints: whether to add neighbors sets from "hints" (e.g. capped environment => test the
neighbors without the cap)
:param voronoi_normalized_distance_tolerance: tolerance for the normalized distance used to distinguish
neighbors sets
:param voronoi_normalized_angle_tolerance: tolerance for the normalized angle used to distinguish
neighbors sets
:param recompute: whether to recompute the sites already computed (when initial_structure_environments
is not None)
:param optimization: optimization algorithm
:return: The StructureEnvironments object containing all the information about the coordination
environments in the structure
"""
time_init = time.process_time()
if info is None:
info = {}
info.update(
{
"local_geometry_finder": {
"parameters": {
"centering_type": self.centering_type,
"include_central_site_in_centroid": self.include_central_site_in_centroid,
"structure_refinement": self.structure_refinement,
"spg_analyzer_options": self.spg_analyzer_options,
}
}
}
)
if only_symbols is not None:
self.allcg = AllCoordinationGeometries(
permutations_safe_override=self.permutations_safe_override,
only_symbols=only_symbols,
)
if valences == "undefined":
firstsite = self.structure[0]
try:
sp = firstsite.specie
if isinstance(sp, Species):
self.valences = [int(site.specie.oxi_state) for site in self.structure]
else:
self.valences = valences
except AttributeError:
self.valences = valences
else:
self.valences = valences
# Get a list of indices of unequivalent sites from the initial structure
self.equivalent_sites = [[site] for site in self.structure]
self.struct_sites_to_irreducible_site_list_map = list(range(len(self.structure)))
self.sites_map = list(range(len(self.structure)))
indices = list(range(len(self.structure)))
# Get list of unequivalent sites with valence >= 0
if only_cations and self.valences != "undefined":
sites_indices = [isite for isite in indices if self.valences[isite] >= 0]
else:
sites_indices = list(indices)
# Include atoms that are in the list of "only_atoms" if it is provided
if only_atoms is not None:
sites_indices = [
isite
for isite in sites_indices
if any([at in [sp.symbol for sp in self.structure[isite].species] for at in only_atoms])
]
# Exclude atoms that are in the list of excluded atoms
if excluded_atoms:
sites_indices = [
isite
for isite in sites_indices
if not any([at in [sp.symbol for sp in self.structure[isite].species] for at in excluded_atoms])
]
if only_indices is not None:
sites_indices = [isite for isite in indices if isite in only_indices]
# Get the VoronoiContainer for the sites defined by their indices (sites_indices)
logging.debug("Getting DetailedVoronoiContainer")
if voronoi_normalized_distance_tolerance is None:
normalized_distance_tolerance = DetailedVoronoiContainer.default_normalized_distance_tolerance
else:
normalized_distance_tolerance = voronoi_normalized_distance_tolerance
if voronoi_normalized_angle_tolerance is None:
normalized_angle_tolerance = DetailedVoronoiContainer.default_normalized_angle_tolerance
else:
normalized_angle_tolerance = voronoi_normalized_angle_tolerance
self.detailed_voronoi = DetailedVoronoiContainer(
self.structure,
isites=sites_indices,
valences=self.valences,
maximum_distance_factor=maximum_distance_factor,
minimum_angle_factor=minimum_angle_factor,
additional_conditions=additional_conditions,
normalized_distance_tolerance=normalized_distance_tolerance,
normalized_angle_tolerance=normalized_angle_tolerance,
)
logging.debug("DetailedVoronoiContainer has been set up")
# Initialize the StructureEnvironments object (either from initial_structure_environments or from scratch)
if initial_structure_environments is not None:
se = initial_structure_environments
if se.structure != self.structure:
raise ValueError("Structure is not the same in initial_structure_environments")
if se.voronoi != self.detailed_voronoi:
if self.detailed_voronoi.is_close_to(se.voronoi):
self.detailed_voronoi = se.voronoi
else:
raise ValueError("Detailed Voronoi is not the same in initial_structure_environments")
se.info = info
else:
se = StructureEnvironments(
voronoi=self.detailed_voronoi,
valences=self.valences,
sites_map=self.sites_map,
equivalent_sites=self.equivalent_sites,
ce_list=[None] * len(self.structure),
structure=self.structure,
info=info,
)
# Set up the coordination numbers that have to be computed based on min_cn, max_cn and possibly the settings
# for an update (argument "recompute") of an existing StructureEnvironments
if min_cn is None:
min_cn = 1
if max_cn is None:
max_cn = 20
all_cns = range(min_cn, max_cn + 1)
do_recompute = False
if recompute is not None:
if "cns" in recompute:
cns_to_recompute = recompute["cns"]
all_cns = list(set(all_cns).intersection(cns_to_recompute))
do_recompute = True
# Variables used for checking timelimit
max_time_one_site = 0.0
breakit = False
if optimization > 0:
self.detailed_voronoi.local_planes = [None] * len(self.structure)
self.detailed_voronoi.separations = [None] * len(self.structure)
# Loop on all the sites
for isite in range(len(self.structure)):
if isite not in sites_indices:
logging.debug(
" ... in site #{:d}/{:d} ({}) : "
"skipped".format(isite, len(self.structure), self.structure[isite].species_string)
)
continue
if breakit:
logging.debug(
" ... in site #{:d}/{:d} ({}) : "
"skipped (timelimit)".format(isite, len(self.structure), self.structure[isite].species_string)
)
continue
logging.debug(
" ... in site #{:d}/{:d} ({})".format(isite, len(self.structure), self.structure[isite].species_string)
)
t1 = time.process_time()
if optimization > 0:
self.detailed_voronoi.local_planes[isite] = OrderedDict()
self.detailed_voronoi.separations[isite] = {}
se.init_neighbors_sets(
isite=isite,
additional_conditions=additional_conditions,
valences=valences,
)
to_add_from_hints = []
nb_sets_info = {}
for cn, nb_sets in se.neighbors_sets[isite].items():
if cn not in all_cns:
continue
for inb_set, nb_set in enumerate(nb_sets):
logging.debug(" ... getting environments for nb_set ({:d}, {:d})".format(cn, inb_set))
tnbset1 = time.process_time()
ce = self.update_nb_set_environments(
se=se,
isite=isite,
cn=cn,
inb_set=inb_set,
nb_set=nb_set,
recompute=do_recompute,
optimization=optimization,
)
tnbset2 = time.process_time()
if cn not in nb_sets_info:
nb_sets_info[cn] = {}
nb_sets_info[cn][inb_set] = {"time": tnbset2 - tnbset1}
if get_from_hints:
for cg_symbol, cg_dict in ce:
cg = self.allcg[cg_symbol]
# Get possibly missing neighbors sets
if cg.neighbors_sets_hints is None:
continue
logging.debug(' ... getting hints from cg with mp_symbol "{}" ...'.format(cg_symbol))
hints_info = {
"csm": cg_dict["symmetry_measure"],
"nb_set": nb_set,
"permutation": cg_dict["permutation"],
}
for nb_sets_hints in cg.neighbors_sets_hints:
suggested_nb_set_voronoi_indices = nb_sets_hints.hints(hints_info)
for inew, new_nb_set_voronoi_indices in enumerate(suggested_nb_set_voronoi_indices):
logging.debug(" hint # {:d}".format(inew))
new_nb_set = se.NeighborsSet(
structure=se.structure,
isite=isite,
detailed_voronoi=se.voronoi,
site_voronoi_indices=new_nb_set_voronoi_indices,
sources={
"origin": "nb_set_hints",
"hints_type": nb_sets_hints.hints_type,
"suggestion_index": inew,
"cn_map_source": [cn, inb_set],
"cg_source_symbol": cg_symbol,
},
)
cn_new_nb_set = len(new_nb_set)
if max_cn is not None and cn_new_nb_set > max_cn:
continue
if min_cn is not None and cn_new_nb_set < min_cn:
continue
if new_nb_set in [ta["new_nb_set"] for ta in to_add_from_hints]:
has_nb_set = True
elif cn_new_nb_set not in se.neighbors_sets[isite]:
has_nb_set = False
else:
has_nb_set = new_nb_set in se.neighbors_sets[isite][cn_new_nb_set]
if not has_nb_set:
to_add_from_hints.append(
{
"isite": isite,
"new_nb_set": new_nb_set,
"cn_new_nb_set": cn_new_nb_set,
}
)
logging.debug(" => to be computed")
else:
logging.debug(" => already present")
logging.debug(" ... getting environments for nb_sets added from hints")
for missing_nb_set_to_add in to_add_from_hints:
se.add_neighbors_set(isite=isite, nb_set=missing_nb_set_to_add["new_nb_set"])
for missing_nb_set_to_add in to_add_from_hints:
isite_new_nb_set = missing_nb_set_to_add["isite"]
cn_new_nb_set = missing_nb_set_to_add["cn_new_nb_set"]
new_nb_set = missing_nb_set_to_add["new_nb_set"]
inew_nb_set = se.neighbors_sets[isite_new_nb_set][cn_new_nb_set].index(new_nb_set)
logging.debug(
" ... getting environments for nb_set ({:d}, {:d}) - "
"from hints".format(cn_new_nb_set, inew_nb_set)
)
tnbset1 = time.process_time()
self.update_nb_set_environments(
se=se,
isite=isite_new_nb_set,
cn=cn_new_nb_set,
inb_set=inew_nb_set,
nb_set=new_nb_set,
optimization=optimization,
)
tnbset2 = time.process_time()
if cn not in nb_sets_info:
nb_sets_info[cn] = {}
nb_sets_info[cn][inew_nb_set] = {"time": tnbset2 - tnbset1}
t2 = time.process_time()
se.update_site_info(isite=isite, info_dict={"time": t2 - t1, "nb_sets_info": nb_sets_info})
if timelimit is not None:
time_elapsed = t2 - time_init
time_left = timelimit - time_elapsed
if time_left < 2.0 * max_time_one_site:
breakit = True
max_time_one_site = max(max_time_one_site, t2 - t1)
logging.debug(" ... computed in {:.2f} seconds".format(t2 - t1))
time_end = time.process_time()
logging.debug(" ... compute_structure_environments ended in {:.2f} seconds".format(time_end - time_init))
return se
def update_nb_set_environments(self, se, isite, cn, inb_set, nb_set, recompute=False, optimization=None):
"""
:param se:
:param isite:
:param cn:
:param inb_set:
:param nb_set:
:param recompute:
:param optimization:
:return:
"""
ce = se.get_coordination_environments(isite=isite, cn=cn, nb_set=nb_set)
if ce is not None and not recompute:
return ce
ce = ChemicalEnvironments()
if optimization == 2:
neighb_coords = nb_set.neighb_coordsOpt
else:
neighb_coords = nb_set.neighb_coords
self.setup_local_geometry(isite, coords=neighb_coords, optimization=optimization)
if optimization > 0:
logging.debug("Getting StructureEnvironments with optimized algorithm")
nb_set.local_planes = OrderedDict()
nb_set.separations = {}
cncgsm = self.get_coordination_symmetry_measures_optim(nb_set=nb_set, optimization=optimization)
else:
logging.debug("Getting StructureEnvironments with standard algorithm")
cncgsm = self.get_coordination_symmetry_measures()
for cg in cncgsm:
other_csms = {
"csm_wocs_ctwocc": cncgsm[cg]["csm_wocs_ctwocc"],
"csm_wocs_ctwcc": cncgsm[cg]["csm_wocs_ctwcc"],
"csm_wocs_csc": cncgsm[cg]["csm_wocs_csc"],
"csm_wcs_ctwocc": cncgsm[cg]["csm_wcs_ctwocc"],
"csm_wcs_ctwcc": cncgsm[cg]["csm_wcs_ctwcc"],
"csm_wcs_csc": cncgsm[cg]["csm_wcs_csc"],
"rotation_matrix_wocs_ctwocc": cncgsm[cg]["rotation_matrix_wocs_ctwocc"],
"rotation_matrix_wocs_ctwcc": cncgsm[cg]["rotation_matrix_wocs_ctwcc"],
"rotation_matrix_wocs_csc": cncgsm[cg]["rotation_matrix_wocs_csc"],
"rotation_matrix_wcs_ctwocc": cncgsm[cg]["rotation_matrix_wcs_ctwocc"],
"rotation_matrix_wcs_ctwcc": cncgsm[cg]["rotation_matrix_wcs_ctwcc"],
"rotation_matrix_wcs_csc": cncgsm[cg]["rotation_matrix_wcs_csc"],
"scaling_factor_wocs_ctwocc": cncgsm[cg]["scaling_factor_wocs_ctwocc"],
"scaling_factor_wocs_ctwcc": cncgsm[cg]["scaling_factor_wocs_ctwcc"],
"scaling_factor_wocs_csc": cncgsm[cg]["scaling_factor_wocs_csc"],
"scaling_factor_wcs_ctwocc": cncgsm[cg]["scaling_factor_wcs_ctwocc"],
"scaling_factor_wcs_ctwcc": cncgsm[cg]["scaling_factor_wcs_ctwcc"],
"scaling_factor_wcs_csc": cncgsm[cg]["scaling_factor_wcs_csc"],
"translation_vector_wocs_ctwocc": cncgsm[cg]["translation_vector_wocs_ctwocc"],
"translation_vector_wocs_ctwcc": cncgsm[cg]["translation_vector_wocs_ctwcc"],
"translation_vector_wocs_csc": cncgsm[cg]["translation_vector_wocs_csc"],
"translation_vector_wcs_ctwocc": cncgsm[cg]["translation_vector_wcs_ctwocc"],
"translation_vector_wcs_ctwcc": cncgsm[cg]["translation_vector_wcs_ctwcc"],
"translation_vector_wcs_csc": cncgsm[cg]["translation_vector_wcs_csc"],
}
ce.add_coord_geom(
cg,
cncgsm[cg]["csm"],
algo=cncgsm[cg]["algo"],
permutation=cncgsm[cg]["indices"],
local2perfect_map=cncgsm[cg]["local2perfect_map"],
perfect2local_map=cncgsm[cg]["perfect2local_map"],
detailed_voronoi_index={"cn": cn, "index": inb_set},
other_symmetry_measures=other_csms,
rotation_matrix=cncgsm[cg]["rotation_matrix"],
scaling_factor=cncgsm[cg]["scaling_factor"],
)
se.update_coordination_environments(isite=isite, cn=cn, nb_set=nb_set, ce=ce)
return ce
def setup_local_geometry(self, isite, coords, optimization=None):
"""
Sets up the AbstractGeometry for the local geometry of site with index isite.
:param isite: Index of the site for which the local geometry has to be set up
        :param coords: The coordinates of the (local) neighbors
        :param optimization: Optimization level passed on to the AbstractGeometry
        """
self.local_geometry = AbstractGeometry(
central_site=self.structure.cart_coords[isite],
bare_coords=coords,
centering_type=self.centering_type,
include_central_site_in_centroid=self.include_central_site_in_centroid,
optimization=optimization,
)
def setup_test_perfect_environment(
self,
symbol,
randomness=False,
max_random_dist=0.1,
symbol_type="mp_symbol",
indices="RANDOM",
random_translation="NONE",
random_rotation="NONE",
random_scale="NONE",
points=None,
):
"""
:param symbol:
:param randomness:
:param max_random_dist:
:param symbol_type:
:param indices:
:param random_translation:
:param random_rotation:
:param random_scale:
:param points:
:return:
"""
if symbol_type == "IUPAC":
cg = self.allcg.get_geometry_from_IUPAC_symbol(symbol)
elif symbol_type in ("MP", "mp_symbol"):
cg = self.allcg.get_geometry_from_mp_symbol(symbol)
elif symbol_type == "CoordinationGeometry":
cg = symbol
else:
raise ValueError("Wrong mp_symbol to setup coordination geometry")
neighb_coords = []
if points is not None:
mypoints = points
else:
mypoints = cg.points
if randomness:
rv = np.random.random_sample(3)
while norm(rv) > 1.0:
rv = np.random.random_sample(3)
coords = [np.zeros(3, np.float_) + max_random_dist * rv]
for pp in mypoints:
rv = np.random.random_sample(3)
while norm(rv) > 1.0:
rv = np.random.random_sample(3)
neighb_coords.append(np.array(pp) + max_random_dist * rv)
else:
coords = [np.zeros(3, np.float_)]
for pp in mypoints:
neighb_coords.append(np.array(pp))
if indices == "RANDOM":
shuffle(neighb_coords)
elif indices == "ORDERED":
pass
else:
neighb_coords = [neighb_coords[ii] for ii in indices]
# Scaling the test environment
if random_scale == "RANDOM":
scale = 0.1 * np.random.random_sample() + 0.95
elif random_scale == "NONE":
scale = 1.0
else:
scale = random_scale
coords = [scale * cc for cc in coords]
neighb_coords = [scale * cc for cc in neighb_coords]
# Rotating the test environment
if random_rotation == "RANDOM":
uu = np.random.random_sample(3) + 0.1
uu = uu / norm(uu)
theta = np.pi * np.random.random_sample()
cc = np.cos(theta)
ss = np.sin(theta)
ux = uu[0]
uy = uu[1]
uz = uu[2]
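            # Rotation matrix for a rotation by angle theta about the random unit axis
            # (ux, uy, uz), written out component by component (Rodrigues' rotation formula).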
RR = [
[
ux * ux + (1.0 - ux * ux) * cc,
ux * uy * (1.0 - cc) - uz * ss,
ux * uz * (1.0 - cc) + uy * ss,
],
[
ux * uy * (1.0 - cc) + uz * ss,
uy * uy + (1.0 - uy * uy) * cc,
uy * uz * (1.0 - cc) - ux * ss,
],
[
ux * uz * (1.0 - cc) - uy * ss,
uy * uz * (1.0 - cc) + ux * ss,
uz * uz + (1.0 - uz * uz) * cc,
],
]
elif random_rotation == "NONE":
RR = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]
else:
RR = random_rotation
newcoords = []
for cc in coords:
newcc = np.dot(RR, cc).T
newcoords.append(newcc.ravel())
coords = newcoords
newcoords = []
for cc in neighb_coords:
newcc = np.dot(RR, cc.T)
newcoords.append(newcc.ravel())
neighb_coords = newcoords
# Translating the test environment
if random_translation == "RANDOM":
translation = 10.0 * (2.0 * np.random.random_sample(3) - 1.0)
elif random_translation == "NONE":
translation = np.zeros(3, np.float_)
else:
translation = random_translation
coords = [cc + translation for cc in coords]
neighb_coords = [cc + translation for cc in neighb_coords]
coords.extend(neighb_coords)
myspecies = ["O"] * (len(coords))
myspecies[0] = "Cu"
amin = np.min([cc[0] for cc in coords])
amax = np.max([cc[0] for cc in coords])
bmin = np.min([cc[1] for cc in coords])
bmax = np.max([cc[1] for cc in coords])
cmin = np.min([cc[2] for cc in coords])
cmax = np.max([cc[2] for cc in coords])
factor = 5.0
aa = factor * max([amax - amin, bmax - bmin, cmax - cmin])
lattice = Lattice.cubic(a=aa)
structure = Structure(
lattice=lattice,
species=myspecies,
coords=coords,
to_unit_cell=False,
coords_are_cartesian=True,
)
self.setup_structure(structure=structure)
self.setup_local_geometry(isite=0, coords=neighb_coords)
self.perfect_geometry = AbstractGeometry.from_cg(cg=cg)
def setup_random_structure(self, coordination):
"""
Sets up a purely random structure with a given coordination.
:param coordination: coordination number for the random structure
"""
aa = 0.4
bb = -0.2
coords = list()
for ii in range(coordination + 1):
coords.append(
aa
* np.random.random_sample(
3,
)
+ bb
)
self.set_structure(
lattice=np.array([[10, 0, 0], [0, 10, 0], [0, 0, 10]], np.float_),
species=["Si"] * (coordination + 1),
coords=coords,
coords_are_cartesian=False,
)
self.setup_random_indices_local_geometry(coordination)
def setup_random_indices_local_geometry(self, coordination):
"""
Sets up random indices for the local geometry, for testing purposes
:param coordination: coordination of the local geometry
"""
self.icentral_site = 0
self.indices = list(range(1, coordination + 1))
np.random.shuffle(self.indices)
def setup_ordered_indices_local_geometry(self, coordination):
"""
Sets up ordered indices for the local geometry, for testing purposes
:param coordination: coordination of the local geometry
"""
self.icentral_site = 0
self.indices = list(range(1, coordination + 1))
def setup_explicit_indices_local_geometry(self, explicit_indices):
"""
Sets up explicit indices for the local geometry, for testing purposes
:param explicit_indices: explicit indices for the neighbors (set of numbers
from 0 to CN-1 in a given order)
"""
self.icentral_site = 0
self.indices = [ii + 1 for ii in explicit_indices]
def get_coordination_symmetry_measures(self, only_minimum=True, all_csms=True, optimization=None):
"""
Returns the continuous symmetry measures of the current local geometry in a dictionary.
:return: the continuous symmetry measures of the current local geometry in a dictionary.
"""
test_geometries = self.allcg.get_implemented_geometries(len(self.local_geometry.coords))
if len(self.local_geometry.coords) == 1:
if len(test_geometries) == 0:
return {}
result_dict = {
"S:1": {
"csm": 0.0,
"indices": [0],
"algo": "EXPLICIT",
"local2perfect_map": {0: 0},
"perfect2local_map": {0: 0},
"scaling_factor": None,
"rotation_matrix": None,
"translation_vector": None,
}
}
if all_csms:
for csmtype in [
"wocs_ctwocc",
"wocs_ctwcc",
"wocs_csc",
"wcs_ctwocc",
"wcs_ctwcc",
"wcs_csc",
]:
result_dict["S:1"]["csm_{}".format(csmtype)] = 0.0
result_dict["S:1"]["scaling_factor_{}".format(csmtype)] = None
result_dict["S:1"]["rotation_matrix_{}".format(csmtype)] = None
result_dict["S:1"]["translation_vector_{}".format(csmtype)] = None
return result_dict
result_dict = {}
for geometry in test_geometries:
self.perfect_geometry = AbstractGeometry.from_cg(
cg=geometry,
centering_type=self.centering_type,
include_central_site_in_centroid=self.include_central_site_in_centroid,
)
points_perfect = self.perfect_geometry.points_wcs_ctwcc()
cgsm = self.coordination_geometry_symmetry_measures(
geometry, points_perfect=points_perfect, optimization=optimization
)
result, permutations, algos, local2perfect_maps, perfect2local_maps = cgsm
if only_minimum:
if len(result) > 0:
imin = np.argmin([rr["symmetry_measure"] for rr in result])
if geometry.algorithms is not None:
algo = algos[imin]
else:
algo = algos
result_dict[geometry.mp_symbol] = {
"csm": result[imin]["symmetry_measure"],
"indices": permutations[imin],
"algo": algo,
"local2perfect_map": local2perfect_maps[imin],
"perfect2local_map": perfect2local_maps[imin],
"scaling_factor": 1.0 / result[imin]["scaling_factor"],
"rotation_matrix": np.linalg.inv(result[imin]["rotation_matrix"]),
"translation_vector": result[imin]["translation_vector"],
}
if all_csms:
self._update_results_all_csms(result_dict, permutations, imin, geometry)
else:
result_dict[geometry.mp_symbol] = {
"csm": result,
"indices": permutations,
"algo": algos,
"local2perfect_map": local2perfect_maps,
"perfect2local_map": perfect2local_maps,
}
return result_dict
def _update_results_all_csms(self, result_dict, permutations, imin, geometry):
permutation = permutations[imin]
# Without central site, centered on the centroid (centroid does not include the central site)
# result_dict[geometry.mp_symbol]['csm_wocs_ctwocc'] = \
# result[imin]
pdist = self.local_geometry.points_wocs_ctwocc(permutation=permutation)
pperf = self.perfect_geometry.points_wocs_ctwocc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wocs_ctwocc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wocs_ctwocc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wocs_ctwocc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wocs_ctwocc"] = self.local_geometry.centroid_without_centre
# Without central site, centered on the centroid (centroid includes the central site)
pdist = self.local_geometry.points_wocs_ctwcc(permutation=permutation)
pperf = self.perfect_geometry.points_wocs_ctwcc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wocs_ctwcc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wocs_ctwcc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wocs_ctwcc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wocs_ctwcc"] = self.local_geometry.centroid_with_centre
# Without central site, centered on the central site
pdist = self.local_geometry.points_wocs_csc(permutation=permutation)
pperf = self.perfect_geometry.points_wocs_csc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wocs_csc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wocs_csc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wocs_csc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wocs_csc"] = self.local_geometry.bare_centre
# With central site, centered on the centroid (centroid does not include the central site)
pdist = self.local_geometry.points_wcs_ctwocc(permutation=permutation)
pperf = self.perfect_geometry.points_wcs_ctwocc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wcs_ctwocc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wcs_ctwocc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wcs_ctwocc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wcs_ctwocc"] = self.local_geometry.centroid_without_centre
# With central site, centered on the centroid (centroid includes the central site)
pdist = self.local_geometry.points_wcs_ctwcc(permutation=permutation)
pperf = self.perfect_geometry.points_wcs_ctwcc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wcs_ctwcc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wcs_ctwcc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wcs_ctwcc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wcs_ctwcc"] = self.local_geometry.centroid_with_centre
# With central site, centered on the central site
pdist = self.local_geometry.points_wcs_csc(permutation=permutation)
pperf = self.perfect_geometry.points_wcs_csc()
sm_info = symmetry_measure(points_distorted=pdist, points_perfect=pperf)
result_dict[geometry.mp_symbol]["csm_wcs_csc"] = sm_info["symmetry_measure"]
result_dict[geometry.mp_symbol]["rotation_matrix_wcs_csc"] = np.linalg.inv(sm_info["rotation_matrix"])
result_dict[geometry.mp_symbol]["scaling_factor_wcs_csc"] = 1.0 / sm_info["scaling_factor"]
result_dict[geometry.mp_symbol]["translation_vector_wcs_csc"] = self.local_geometry.bare_centre
def get_coordination_symmetry_measures_optim(
self, only_minimum=True, all_csms=True, nb_set=None, optimization=None
):
"""
Returns the continuous symmetry measures of the current local geometry in a dictionary.
:return: the continuous symmetry measures of the current local geometry in a dictionary.
"""
cn = len(self.local_geometry.coords)
test_geometries = self.allcg.get_implemented_geometries(cn)
if all([cg.algorithms[0].algorithm_type == EXPLICIT_PERMUTATIONS for cg in test_geometries]):
return self.get_coordination_symmetry_measures(
only_minimum=only_minimum, all_csms=all_csms, optimization=optimization
)
if not all(
[all([algo.algorithm_type == SEPARATION_PLANE for algo in cg.algorithms]) for cg in test_geometries]
):
raise ValueError("All algorithms should be EXPLICIT_PERMUTATIONS or SEPARATION_PLANE")
result_dict = {}
for geometry in test_geometries:
logging.log(
level=5,
msg="Getting Continuous Symmetry Measure with Separation Plane "
'algorithm for geometry "{}"'.format(geometry.ce_symbol),
)
self.perfect_geometry = AbstractGeometry.from_cg(
cg=geometry,
centering_type=self.centering_type,
include_central_site_in_centroid=self.include_central_site_in_centroid,
)
points_perfect = self.perfect_geometry.points_wcs_ctwcc()
cgsm = self.coordination_geometry_symmetry_measures_sepplane_optim(
geometry,
points_perfect=points_perfect,
nb_set=nb_set,
optimization=optimization,
)
result, permutations, algos, local2perfect_maps, perfect2local_maps = cgsm
if only_minimum:
if len(result) > 0:
imin = np.argmin([rr["symmetry_measure"] for rr in result])
if geometry.algorithms is not None:
algo = algos[imin]
else:
algo = algos
result_dict[geometry.mp_symbol] = {
"csm": result[imin]["symmetry_measure"],
"indices": permutations[imin],
"algo": algo,
"local2perfect_map": local2perfect_maps[imin],
"perfect2local_map": perfect2local_maps[imin],
"scaling_factor": 1.0 / result[imin]["scaling_factor"],
"rotation_matrix": np.linalg.inv(result[imin]["rotation_matrix"]),
"translation_vector": result[imin]["translation_vector"],
}
if all_csms:
self._update_results_all_csms(result_dict, permutations, imin, geometry)
return result_dict
def coordination_geometry_symmetry_measures(
self,
coordination_geometry,
tested_permutations=False,
points_perfect=None,
optimization=None,
):
"""
Returns the symmetry measures of a given coordination_geometry for a set of permutations depending on
the permutation setup. Depending on the parameters of the LocalGeometryFinder and on the coordination
geometry, different methods are called.
:param coordination_geometry: Coordination geometry for which the symmetry measures are looked for
:return: the symmetry measures of a given coordination_geometry for a set of permutations
        :raise: NotImplementedError if the permutation_setup does not exist
"""
if tested_permutations:
tested_permutations = set()
if self.permutations_safe_override:
raise ValueError("No permutations safe override anymore")
csms = []
permutations = []
algos = []
local2perfect_maps = []
perfect2local_maps = []
for algo in coordination_geometry.algorithms:
if algo.algorithm_type == EXPLICIT_PERMUTATIONS:
return self.coordination_geometry_symmetry_measures_standard(
coordination_geometry,
algo,
points_perfect=points_perfect,
optimization=optimization,
)
if algo.algorithm_type == SEPARATION_PLANE:
cgsm = self.coordination_geometry_symmetry_measures_separation_plane(
coordination_geometry,
algo,
tested_permutations=tested_permutations,
points_perfect=points_perfect,
)
csm, perm, algo, local2perfect_map, perfect2local_map = cgsm
csms.extend(csm)
permutations.extend(perm)
algos.extend(algo)
local2perfect_maps.extend(local2perfect_map)
perfect2local_maps.extend(perfect2local_map)
return csms, permutations, algos, local2perfect_maps, perfect2local_maps
def coordination_geometry_symmetry_measures_sepplane_optim(
self, coordination_geometry, points_perfect=None, nb_set=None, optimization=None
):
"""
Returns the symmetry measures of a given coordination_geometry for a set of permutations depending on
the permutation setup. Depending on the parameters of the LocalGeometryFinder and on the coordination
geometry, different methods are called.
:param coordination_geometry: Coordination geometry for which the symmetry measures are looked for
:return: the symmetry measures of a given coordination_geometry for a set of permutations
        :raise: NotImplementedError if the permutation_setup does not exist
"""
csms = []
permutations = []
algos = []
local2perfect_maps = []
perfect2local_maps = []
for algo in coordination_geometry.algorithms:
if algo.algorithm_type == SEPARATION_PLANE:
cgsm = self.coordination_geometry_symmetry_measures_separation_plane_optim(
coordination_geometry,
algo,
points_perfect=points_perfect,
nb_set=nb_set,
optimization=optimization,
)
csm, perm, algo, local2perfect_map, perfect2local_map = cgsm
csms.extend(csm)
permutations.extend(perm)
algos.extend(algo)
local2perfect_maps.extend(local2perfect_map)
perfect2local_maps.extend(perfect2local_map)
return csms, permutations, algos, local2perfect_maps, perfect2local_maps
def coordination_geometry_symmetry_measures_standard(
self, coordination_geometry, algo, points_perfect=None, optimization=None
):
"""
Returns the symmetry measures for a set of permutations (whose setup depends on the coordination geometry)
for the coordination geometry "coordination_geometry". Standard implementation looking for the symmetry
measures of each permutation
:param coordination_geometry: The coordination geometry to be investigated
:return: The symmetry measures for the given coordination geometry for each permutation investigated
"""
# permutations_symmetry_measures = np.zeros(len(algo.permutations),
# np.float_)
if optimization == 2:
permutations_symmetry_measures = [None] * len(algo.permutations)
permutations = list()
algos = list()
local2perfect_maps = list()
perfect2local_maps = list()
for iperm, perm in enumerate(algo.permutations):
local2perfect_map = {}
perfect2local_map = {}
permutations.append(perm)
for iperfect, ii in enumerate(perm):
perfect2local_map[iperfect] = ii
local2perfect_map[ii] = iperfect
local2perfect_maps.append(local2perfect_map)
perfect2local_maps.append(perfect2local_map)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=perm)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures[iperm] = sm_info
algos.append(str(algo))
return (
permutations_symmetry_measures,
permutations,
algos,
local2perfect_maps,
perfect2local_maps,
)
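        # Default (non-optimized) path: the same per-permutation symmetry-measure loop as
        # the optimization == 2 branch above, kept as a separate block.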
permutations_symmetry_measures = [None] * len(algo.permutations)
permutations = list()
algos = list()
local2perfect_maps = list()
perfect2local_maps = list()
for iperm, perm in enumerate(algo.permutations):
local2perfect_map = {}
perfect2local_map = {}
permutations.append(perm)
for iperfect, ii in enumerate(perm):
perfect2local_map[iperfect] = ii
local2perfect_map[ii] = iperfect
local2perfect_maps.append(local2perfect_map)
perfect2local_maps.append(perfect2local_map)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=perm)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures[iperm] = sm_info
algos.append(str(algo))
return (
permutations_symmetry_measures,
permutations,
algos,
local2perfect_maps,
perfect2local_maps,
)
def coordination_geometry_symmetry_measures_separation_plane(
self,
coordination_geometry,
separation_plane_algo,
testing=False,
tested_permutations=False,
points_perfect=None,
):
"""
Returns the symmetry measures of the given coordination geometry "coordination_geometry" using separation
facets to reduce the complexity of the system. Caller to the refined 2POINTS, 3POINTS and other ...
:param coordination_geometry: The coordination geometry to be investigated
:return: The symmetry measures for the given coordination geometry for each plane and permutation investigated
"""
permutations = list()
permutations_symmetry_measures = list()
plane_separations = list()
algos = list()
perfect2local_maps = list()
local2perfect_maps = list()
if testing:
separation_permutations = list()
nplanes = 0
for npoints in range(
separation_plane_algo.minimum_number_of_points,
min(separation_plane_algo.maximum_number_of_points, 4) + 1,
):
for points_combination in itertools.combinations(self.local_geometry.coords, npoints):
if npoints == 2:
if collinear(
points_combination[0],
points_combination[1],
self.local_geometry.central_site,
tolerance=0.25,
):
continue
plane = Plane.from_3points(
points_combination[0],
points_combination[1],
self.local_geometry.central_site,
)
elif npoints == 3:
if collinear(
points_combination[0],
points_combination[1],
points_combination[2],
tolerance=0.25,
):
continue
plane = Plane.from_3points(
points_combination[0],
points_combination[1],
points_combination[2],
)
elif npoints > 3:
plane = Plane.from_npoints(points_combination, best_fit="least_square_distance")
else:
raise ValueError("Wrong number of points to initialize separation plane")
cgsm = self._cg_csm_separation_plane(
coordination_geometry=coordination_geometry,
sepplane=separation_plane_algo,
local_plane=plane,
plane_separations=plane_separations,
dist_tolerances=DIST_TOLERANCES,
testing=testing,
tested_permutations=tested_permutations,
points_perfect=points_perfect,
)
csm, perm, algo = cgsm[0], cgsm[1], cgsm[2]
if csm is not None:
permutations_symmetry_measures.extend(csm)
permutations.extend(perm)
for thisperm in perm:
p2l = {}
l2p = {}
for i_p, pp in enumerate(thisperm):
p2l[i_p] = pp
l2p[pp] = i_p
perfect2local_maps.append(p2l)
local2perfect_maps.append(l2p)
algos.extend(algo)
if testing:
separation_permutations.extend(cgsm[3])
nplanes += 1
if nplanes > 0:
break
if nplanes == 0:
return self.coordination_geometry_symmetry_measures_fallback_random(
coordination_geometry, points_perfect=points_perfect
)
if testing:
return permutations_symmetry_measures, permutations, separation_permutations
return (
permutations_symmetry_measures,
permutations,
algos,
local2perfect_maps,
perfect2local_maps,
)
def coordination_geometry_symmetry_measures_separation_plane_optim(
self,
coordination_geometry,
separation_plane_algo,
points_perfect=None,
nb_set=None,
optimization=None,
):
"""
Returns the symmetry measures of the given coordination geometry "coordination_geometry" using separation
facets to reduce the complexity of the system. Caller to the refined 2POINTS, 3POINTS and other ...
Args:
coordination_geometry: The coordination geometry to be investigated.
separation_plane_algo: Separation Plane algorithm used.
points_perfect: Points corresponding to the perfect geometry.
nb_set: Neighbor set for this set of points. (used to store already computed separation planes)
optimization: Optimization level (1 or 2).
Returns:
tuple: Continuous symmetry measures for the given coordination geometry for each plane and permutation
investigated, corresponding permutations, corresponding algorithms,
corresponding mappings from local to perfect environment and corresponding mappings
from perfect to local environment.
"""
if optimization == 2:
logging.log(level=5, msg="... using optimization = 2")
cgcsmoptim = self._cg_csm_separation_plane_optim2
elif optimization == 1:
            logging.log(level=5, msg="... using optimization = 1")
cgcsmoptim = self._cg_csm_separation_plane_optim1
else:
raise ValueError("Optimization should be 1 or 2")
cn = len(self.local_geometry.coords)
permutations = list()
permutations_symmetry_measures = list()
algos = list()
perfect2local_maps = list()
local2perfect_maps = list()
if separation_plane_algo.separation in nb_set.separations:
for sep_indices, (local_plane, npsep) in nb_set.separations[separation_plane_algo.separation].items():
cgsm = cgcsmoptim(
coordination_geometry=coordination_geometry,
sepplane=separation_plane_algo,
local_plane=local_plane,
points_perfect=points_perfect,
separation_indices=npsep,
)
csm, perm, algo, _ = cgsm[0], cgsm[1], cgsm[2], cgsm[3]
permutations_symmetry_measures.extend(csm)
permutations.extend(perm)
for thisperm in perm:
p2l = {}
l2p = {}
for i_p, pp in enumerate(thisperm):
p2l[i_p] = pp
l2p[pp] = i_p
perfect2local_maps.append(p2l)
local2perfect_maps.append(l2p)
algos.extend(algo)
# Get the local planes and separations up to 3 points
for npoints in range(self.allcg.minpoints[cn], min(self.allcg.maxpoints[cn], 3) + 1):
for ipoints_combination in itertools.combinations(range(self.local_geometry.cn), npoints):
if ipoints_combination in nb_set.local_planes:
continue
# Set up new plane
nb_set.local_planes[ipoints_combination] = None
points_combination = [self.local_geometry.coords[ip] for ip in ipoints_combination]
if npoints == 2:
if collinear(
points_combination[0],
points_combination[1],
self.local_geometry.central_site,
tolerance=0.25,
):
continue
plane = Plane.from_3points(
points_combination[0],
points_combination[1],
self.local_geometry.central_site,
)
elif npoints == 3:
if collinear(
points_combination[0],
points_combination[1],
points_combination[2],
tolerance=0.25,
):
continue
plane = Plane.from_3points(
points_combination[0],
points_combination[1],
points_combination[2],
)
elif npoints > 3:
plane = Plane.from_npoints(points_combination, best_fit="least_square_distance")
else:
raise ValueError("Wrong number of points to initialize separation plane")
# Takes a lot of time and happens rarely ...
# if any([plane.is_same_plane_as(plane2) for comb2, plane2 in nb_set.local_planes.items()
# if plane2 is not None]):
# continue
nb_set.local_planes[ipoints_combination] = plane
# Get the separations for this plane
# TODO: check sensitivity to delta/delta_factor parameter
dig = plane.distances_indices_groups(points=self.local_geometry._coords, delta_factor=0.1, sign=True)
grouped_indices = dig[2]
new_seps = []
for ng in range(1, len(grouped_indices) + 1):
inplane = list(itertools.chain(*grouped_indices[:ng]))
if len(inplane) > self.allcg.maxpoints_inplane[cn]:
break
inplane = [ii[0] for ii in inplane]
outplane = list(itertools.chain(*grouped_indices[ng:]))
s1 = [ii_sign[0] for ii_sign in outplane if ii_sign[1] < 0]
s2 = [ii_sign[0] for ii_sign in outplane if ii_sign[1] > 0]
separation = sort_separation_tuple([s1, inplane, s2])
sep = tuple([len(gg) for gg in separation])
if sep not in self.allcg.separations_cg[cn]:
continue
if sep not in nb_set.separations:
nb_set.separations[sep] = {}
mysep = [np.array(ss, dtype=np.int8) for ss in separation]
nb_set.separations[sep][separation] = (plane, mysep)
if sep == separation_plane_algo.separation:
new_seps.append(mysep)
for separation_indices in new_seps:
cgsm = cgcsmoptim(
coordination_geometry=coordination_geometry,
sepplane=separation_plane_algo,
local_plane=plane,
points_perfect=points_perfect,
separation_indices=separation_indices,
)
csm, perm, algo, _ = cgsm[0], cgsm[1], cgsm[2], cgsm[3]
permutations_symmetry_measures.extend(csm)
permutations.extend(perm)
for thisperm in perm:
p2l = {}
l2p = {}
for i_p, pp in enumerate(thisperm):
p2l[i_p] = pp
l2p[pp] = i_p
perfect2local_maps.append(p2l)
local2perfect_maps.append(l2p)
algos.extend(algo)
if len(permutations_symmetry_measures) == 0:
return self.coordination_geometry_symmetry_measures_fallback_random(
coordination_geometry, points_perfect=points_perfect
)
return (
permutations_symmetry_measures,
permutations,
algos,
local2perfect_maps,
perfect2local_maps,
)
def _cg_csm_separation_plane(
self,
coordination_geometry,
sepplane,
local_plane,
plane_separations,
dist_tolerances=None,
testing=False,
tested_permutations=False,
points_perfect=None,
):
argref_separation = sepplane.argsorted_ref_separation_perm
plane_found = False
permutations = []
permutations_symmetry_measures = []
if testing:
separation_permutations = []
dist_tolerances = dist_tolerances or DIST_TOLERANCES
for dist_tolerance in dist_tolerances:
algo = "NOT_FOUND"
separation = local_plane.indices_separate(self.local_geometry._coords, dist_tolerance)
# Do not consider facets leading to the same separation indices
separation = sort_separation(separation)
if separation_in_list(separation, plane_separations):
continue
# Do not consider a separation which does not follow the reference separation of the perfect
# coordination geometry
if len(separation[1]) != len(sepplane.plane_points):
continue
if len(separation[0]) == len(sepplane.point_groups[0]):
this_separation = separation
plane_separations.append(this_separation)
elif len(separation[0]) == len(sepplane.point_groups[1]):
this_separation = [
list(separation[2]),
list(separation[1]),
list(separation[0]),
]
plane_separations.append(this_separation)
else:
continue
if sepplane.ordered_plane:
inp = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in this_separation[1]]
if sepplane.ordered_point_groups[0]:
pp_s0 = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in this_separation[0]]
ordind_s0 = local_plane.project_and_to2dim_ordered_indices(pp_s0)
sep0 = [this_separation[0][ii] for ii in ordind_s0]
else:
sep0 = list(this_separation[0])
if sepplane.ordered_point_groups[1]:
pp_s2 = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in this_separation[2]]
ordind_s2 = local_plane.project_and_to2dim_ordered_indices(pp_s2)
sep2 = [this_separation[2][ii] for ii in ordind_s2]
else:
sep2 = list(this_separation[2])
separation_perm = list(sep0)
ordind = local_plane.project_and_to2dim_ordered_indices(inp)
separation_perm.extend([this_separation[1][ii] for ii in ordind])
algo = "SEPARATION_PLANE_2POINTS_ORDERED"
separation_perm.extend(sep2)
else:
separation_perm = list(this_separation[0])
separation_perm.extend(this_separation[1])
algo = "SEPARATION_PLANE_2POINTS"
separation_perm.extend(this_separation[2])
if self.plane_safe_permutations:
sep_perms = sepplane.safe_separation_permutations(
ordered_plane=sepplane.ordered_plane,
ordered_point_groups=sepplane.ordered_point_groups,
)
else:
sep_perms = sepplane.permutations
# plane_found = True
for i_sep_perm, sep_perm in enumerate(sep_perms):
perm1 = [separation_perm[ii] for ii in sep_perm]
pp = [perm1[ii] for ii in argref_separation]
# Skip permutations that have already been performed
if isinstance(tested_permutations, set) and coordination_geometry.equivalent_indices is not None:
tuple_ref_perm = coordination_geometry.ref_permutation(pp)
if tuple_ref_perm in tested_permutations:
continue
tested_permutations.add(tuple_ref_perm)
permutations.append(pp)
if testing:
separation_permutations.append(sep_perm)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=pp)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures.append(sm_info)
if plane_found:
break
if len(permutations_symmetry_measures) > 0:
if testing:
return (
permutations_symmetry_measures,
permutations,
algo,
separation_permutations,
)
return (
permutations_symmetry_measures,
permutations,
[sepplane.algorithm_type] * len(permutations),
)
if plane_found:
if testing:
return permutations_symmetry_measures, permutations, [], []
return permutations_symmetry_measures, permutations, []
if testing:
return None, None, None, None
return None, None, None
def _cg_csm_separation_plane_optim1(
self,
coordination_geometry,
sepplane,
local_plane,
points_perfect=None,
separation_indices=None,
):
argref_separation = sepplane.argsorted_ref_separation_perm
permutations = []
permutations_symmetry_measures = []
stop_search = False
# TODO: do not do that several times ... also keep in memory
if sepplane.ordered_plane:
inp = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in separation_indices[1]]
if sepplane.ordered_point_groups[0]:
pp_s0 = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in separation_indices[0]]
ordind_s0 = local_plane.project_and_to2dim_ordered_indices(pp_s0)
sep0 = [separation_indices[0][ii] for ii in ordind_s0]
else:
sep0 = list(separation_indices[0])
if sepplane.ordered_point_groups[1]:
pp_s2 = [pp for ip, pp in enumerate(self.local_geometry._coords) if ip in separation_indices[2]]
ordind_s2 = local_plane.project_and_to2dim_ordered_indices(pp_s2)
sep2 = [separation_indices[2][ii] for ii in ordind_s2]
else:
sep2 = list(separation_indices[2])
separation_perm = list(sep0)
ordind = local_plane.project_and_to2dim_ordered_indices(inp)
separation_perm.extend([separation_indices[1][ii] for ii in ordind])
separation_perm.extend(sep2)
else:
separation_perm = list(separation_indices[0])
separation_perm.extend(separation_indices[1])
separation_perm.extend(separation_indices[2])
if self.plane_safe_permutations:
sep_perms = sepplane.safe_separation_permutations(
ordered_plane=sepplane.ordered_plane,
ordered_point_groups=sepplane.ordered_point_groups,
)
else:
sep_perms = sepplane.permutations
for i_sep_perm, sep_perm in enumerate(sep_perms):
perm1 = [separation_perm[ii] for ii in sep_perm]
pp = [perm1[ii] for ii in argref_separation]
permutations.append(pp)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=pp)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures.append(sm_info)
if len(permutations_symmetry_measures) > 0:
return (
permutations_symmetry_measures,
permutations,
[sepplane.algorithm_type] * len(permutations),
stop_search,
)
return [], [], [], stop_search
def _cg_csm_separation_plane_optim2(
self,
coordination_geometry,
sepplane,
local_plane,
points_perfect=None,
separation_indices=None,
):
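        # Variant of _cg_csm_separation_plane_optim1 that works on numpy index arrays
        # (np.take / np.concatenate) instead of Python lists.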
argref_separation = sepplane.argsorted_ref_separation_perm
permutations = []
permutations_symmetry_measures = []
stop_search = False
# TODO: do not do that several times ... also keep in memory
if sepplane.ordered_plane:
inp = self.local_geometry.coords.take(separation_indices[1], axis=0)
if sepplane.ordered_point_groups[0]:
pp_s0 = self.local_geometry.coords.take(separation_indices[0], axis=0)
ordind_s0 = local_plane.project_and_to2dim_ordered_indices(pp_s0)
# sep0 = [separation_indices[0][ii] for ii in ordind_s0]
sep0 = separation_indices[0].take(ordind_s0)
else:
# sep0 = list(separation_indices[0])
sep0 = separation_indices[0]
if sepplane.ordered_point_groups[1]:
pp_s2 = self.local_geometry.coords.take(separation_indices[2], axis=0)
ordind_s2 = local_plane.project_and_to2dim_ordered_indices(pp_s2)
# sep2 = [separation_indices[2][ii] for ii in ordind_s2]
sep2 = separation_indices[2].take(ordind_s2)
else:
# sep2 = list(separation_indices[2])
sep2 = separation_indices[2]
# separation_perm = list(sep0)
ordind = local_plane.project_and_to2dim_ordered_indices(inp)
# separation_perm.extend(
# [separation_indices[1][ii] for ii in ordind])
inp1 = separation_indices[1].take(ordind)
# separation_perm.extend(sep2)
separation_perm = np.concatenate((sep0, inp1, sep2))
else:
# separation_perm = list(separation_indices[0])
# separation_perm.extend(separation_indices[1])
# separation_perm.extend(separation_indices[2])
separation_perm = np.concatenate(separation_indices)
if self.plane_safe_permutations:
sep_perms = sepplane.safe_separation_permutations(
ordered_plane=sepplane.ordered_plane,
ordered_point_groups=sepplane.ordered_point_groups,
)
else:
sep_perms = sepplane.permutations
for i_sep_perm, sep_perm in enumerate(sep_perms):
perm1 = separation_perm.take(sep_perm)
pp = perm1.take(argref_separation)
permutations.append(pp)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=pp)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures.append(sm_info)
if len(permutations_symmetry_measures) > 0:
return (
permutations_symmetry_measures,
permutations,
[sepplane.algorithm_type] * len(permutations),
stop_search,
)
return [], [], [], stop_search
def coordination_geometry_symmetry_measures_fallback_random(
self, coordination_geometry, NRANDOM=10, points_perfect=None
):
"""
Returns the symmetry measures for a random set of permutations for the coordination geometry
"coordination_geometry". Fallback implementation for the plane separation algorithms measures
of each permutation
:param coordination_geometry: The coordination geometry to be investigated
:param NRANDOM: Number of random permutations to be tested
:return: The symmetry measures for the given coordination geometry for each permutation investigated
"""
permutations_symmetry_measures = [None] * NRANDOM
permutations = list()
algos = list()
perfect2local_maps = list()
local2perfect_maps = list()
for iperm in range(NRANDOM):
perm = np.random.permutation(coordination_geometry.coordination_number)
permutations.append(perm)
p2l = {}
l2p = {}
for i_p, pp in enumerate(perm):
p2l[i_p] = pp
l2p[pp] = i_p
perfect2local_maps.append(p2l)
local2perfect_maps.append(l2p)
points_distorted = self.local_geometry.points_wcs_ctwcc(permutation=perm)
sm_info = symmetry_measure(points_distorted=points_distorted, points_perfect=points_perfect)
sm_info["translation_vector"] = self.local_geometry.centroid_with_centre
permutations_symmetry_measures[iperm] = sm_info
algos.append("APPROXIMATE_FALLBACK")
return (
permutations_symmetry_measures,
permutations,
algos,
local2perfect_maps,
perfect2local_maps,
)
| mit | 406,041,871,006,039,230 | 45.326521 | 120 | 0.573229 | false |
compas-dev/compas | src/compas/datastructures/network/duality.py | 1 | 5940 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from math import pi
from compas.utilities import pairwise
from compas.geometry import angle_vectors
from compas.geometry import is_ccw_xy
__all__ = [
'network_find_cycles',
]
PI2 = 2.0 * pi
def network_find_cycles(network, breakpoints=None):
"""Find the faces of a network.
Parameters
----------
network : compas.datastructures.Network
The network object.
breakpoints : list, optional
The vertices at which to break the found faces.
Default is ``None``.
Notes
-----
``breakpoints`` are primarily used to break up the outside face in between
specific vertices. For example, in structural applications involving dual
diagrams, any vertices where external forces are applied (loads or reactions)
should be input as breakpoints.
Warnings
--------
    This algorithm is essentially a wall follower (a type of maze-solving algorithm).
    It relies on the geometry of the network being represented as a planar,
straight-line embedding. It determines an ordering of the neighboring vertices
around each vertex, and then follows the *walls* of the network, always
taking turns in the same direction.
Examples
--------
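    A minimal sketch (it assumes ``Network.from_nodes_and_edges`` is available in
    this version of compas): build a small square network in the xy-plane and
    collect its cycles.
    >>> from compas.datastructures import Network
    >>> nodes = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]]
    >>> edges = [(0, 1), (1, 2), (2, 3), (3, 0)]
    >>> net = Network.from_nodes_and_edges(nodes, edges)
    >>> cycles = network_find_cycles(net)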
"""
if not breakpoints:
breakpoints = []
for u, v in network.edges():
network.adjacency[u][v] = None
network.adjacency[v][u] = None
network_sort_neighbors(network)
leaves = list(network.leaves())
if leaves:
u = sorted([(key, network.node_coordinates(key, 'xy')) for key in leaves], key=lambda x: (x[1][1], x[1][0]))[0][0]
else:
u = sorted(network.nodes(True), key=lambda x: (x[1]['y'], x[1]['x']))[0][0]
cycles = {}
found = {}
ckey = 0
v = network_node_find_first_neighbor(network, u)
cycle = network_find_edge_cycle(network, u, v)
frozen = frozenset(cycle)
found[frozen] = ckey
cycles[ckey] = cycle
for a, b in pairwise(cycle + cycle[:1]):
network.adjacency[a][b] = ckey
ckey += 1
for u, v in network.edges():
if network.adjacency[u][v] is None:
cycle = network_find_edge_cycle(network, u, v)
frozen = frozenset(cycle)
if frozen not in found:
found[frozen] = ckey
cycles[ckey] = cycle
ckey += 1
for a, b in pairwise(cycle + cycle[:1]):
network.adjacency[a][b] = found[frozen]
if network.adjacency[v][u] is None:
cycle = network_find_edge_cycle(network, v, u)
frozen = frozenset(cycle)
if frozen not in found:
found[frozen] = ckey
cycles[ckey] = cycle
ckey += 1
for a, b in pairwise(cycle + cycle[:1]):
network.adjacency[a][b] = found[frozen]
cycles = _break_cycles(cycles, breakpoints)
return cycles
def network_node_find_first_neighbor(network, key):
nbrs = network.neighbors(key)
if len(nbrs) == 1:
return nbrs[0]
ab = [-1.0, -1.0, 0.0]
a = network.node_coordinates(key, 'xyz')
b = [a[0] + ab[0], a[1] + ab[1], 0]
angles = []
for nbr in nbrs:
c = network.node_coordinates(nbr, 'xyz')
ac = [c[0] - a[0], c[1] - a[1], 0]
alpha = angle_vectors(ab, ac)
if is_ccw_xy(a, b, c, True):
alpha = PI2 - alpha
angles.append(alpha)
return nbrs[angles.index(min(angles))]
def network_sort_neighbors(network, ccw=True):
sorted_neighbors = {}
xyz = {key: network.node_coordinates(key) for key in network.nodes()}
for key in network.nodes():
nbrs = network.neighbors(key)
sorted_neighbors[key] = node_sort_neighbors(key, nbrs, xyz, ccw=ccw)
for key, nbrs in sorted_neighbors.items():
network.node_attribute(key, 'neighbors', nbrs[::-1])
return sorted_neighbors
def node_sort_neighbors(key, nbrs, xyz, ccw=True):
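    # Order the neighbors 'nbrs' angularly around the node 'key' in the xy-plane
    # (an insertion sort driven by is_ccw_xy); the order is reversed if ccw is False.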
if len(nbrs) == 1:
return nbrs
ordered = nbrs[0:1]
a = xyz[key]
for i, nbr in enumerate(nbrs[1:]):
c = xyz[nbr]
pos = 0
b = xyz[ordered[pos]]
while not is_ccw_xy(a, b, c):
pos += 1
if pos > i:
break
b = xyz[ordered[pos]]
if pos == 0:
pos = -1
b = xyz[ordered[pos]]
while is_ccw_xy(a, b, c):
pos -= 1
if pos < -len(ordered):
break
b = xyz[ordered[pos]]
pos += 1
ordered.insert(pos, nbr)
if not ccw:
return ordered[::-1]
return ordered
def network_find_edge_cycle(network, u, v):
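    # Follow the cycle that starts with the directed edge (u, v): at each node, turn to
    # the neighbor just before the one we came from in the angularly sorted adjacency,
    # until the walk returns to the starting node.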
cycle = [u]
while True:
cycle.append(v)
nbrs = network.node_attribute(v, 'neighbors')
nbr = nbrs[nbrs.index(u) - 1]
u, v = v, nbr
if v == cycle[0]:
break
return cycle
def _break_cycles(cycles, breakpoints):
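    # Split each cycle at the given breakpoint vertices; if the cycle's first vertex is
    # not a breakpoint, the first and last fragments are merged back into one face.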
breakpoints = set(breakpoints)
broken = []
for fkey in cycles:
vertices = cycles[fkey]
faces = []
faces.append([vertices[0]])
for i in range(1, len(vertices) - 1):
key = vertices[i]
faces[-1].append(key)
if key in breakpoints:
faces.append([key])
faces[-1].append(vertices[-1])
faces[-1].append(vertices[0])
if len(faces) == 1:
broken.append(faces[0])
continue
if faces[0][0] not in breakpoints and faces[-1][-1] not in breakpoints:
if faces[0][0] == faces[-1][-1]:
faces[:] = [faces[-1] + faces[0][1:]] + faces[1:-1]
if len(faces) == 1:
broken.append(faces[0])
continue
for vertices in faces:
broken.append(vertices)
return broken
| mit | 5,137,100,386,426,476,000 | 27.834951 | 122 | 0.557407 | false |
rafamanzo/colab | colab/plugins/trac/migrations/0001_initial.py | 1 | 4776 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connections
def create_views(apps, schema_editor):
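    """Create SQL views on the Trac database exposing revisions, attachments,
    wiki pages, tickets and per-author collaboration counts."""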
connection = connections['trac']
cursor = connection.cursor()
# revision_view
cursor.execute('''
CREATE OR REPLACE VIEW revision_view AS SELECT
revision.rev,
revision.author,
revision.message,
repository.value AS repository_name,
TIMESTAMP WITH TIME ZONE 'epoch' + (revision.time/1000000) * INTERVAL '1s' AS created,
CONCAT(revision.repos, '-', revision.rev) AS key
FROM revision
INNER JOIN repository ON(
repository.id = revision.repos
AND repository.name = 'name'
AND repository.value != ''
);
''')
# attachment_view
cursor.execute('''
CREATE OR REPLACE VIEW attachment_view AS SELECT
CONCAT(attachment.type, '/' , attachment.id, '/', attachment.filename) AS url,
attachment.type AS used_by,
attachment.filename AS filename,
attachment.id as attach_id,
(SELECT LOWER(SUBSTRING(attachment.filename FROM '\.(\w+)$'))) AS mimetype,
attachment.author AS author,
attachment.description AS description,
attachment.size AS size,
TIMESTAMP WITH TIME ZONE 'epoch' + (attachment.time/1000000)* INTERVAL '1s' AS created
FROM attachment;
''')
# wiki_view
cursor.execute('''
CREATE OR REPLACE VIEW wiki_view AS SELECT
wiki.name AS name,
(SELECT wiki2.text FROM wiki AS wiki2 WHERE wiki2.name = wiki.name
AND wiki2.version = MAX(wiki.version)) AS wiki_text,
(SELECT wiki3.author FROM wiki AS wiki3 WHERE wiki3.name = wiki.name
AND wiki3.version = 1) AS author,
string_agg(DISTINCT wiki.author, ', ') AS collaborators,
TIMESTAMP WITH TIME ZONE 'epoch' + (MIN(wiki.time)/1000000) * INTERVAL '1s' AS created,
TIMESTAMP WITH TIME ZONE 'epoch' + (MAX(wiki.time)/1000000) * INTERVAL '1s' AS modified,
(SELECT wiki4.author FROM wiki AS wiki4 WHERE wiki4.name = wiki.name
AND wiki4.version = MAX(wiki.version)) AS modified_by
FROM wiki
GROUP BY wiki.name;
''')
# ticket_view
cursor.execute('''
CREATE OR REPLACE VIEW ticket_view AS SELECT
ticket.id AS id,
ticket.summary as summary,
ticket.description as description,
ticket.milestone as milestone,
ticket.priority as priority,
ticket.component as component,
ticket.version as version,
ticket.severity as severity,
ticket.reporter as reporter,
ticket.reporter as author,
ticket.status as status,
ticket.keywords as keywords,
(SELECT
string_agg(DISTINCT ticket_change.author, ', ')
FROM ticket_change WHERE ticket_change.ticket = ticket.id
GROUP BY ticket_change.ticket) as collaborators,
TIMESTAMP WITH TIME ZONE 'epoch' + (time/1000000)* INTERVAL '1s' AS created,
TIMESTAMP WITH TIME ZONE 'epoch' + (changetime/1000000) * INTERVAL '1s' AS modified,
(SELECT
ticket_change.author
FROM ticket_change
WHERE ticket_change.ticket = ticket.id
AND ticket_change.time = ticket.changetime
LIMIT 1
) AS modified_by
FROM ticket;
''')
# ticket_collab_count_view
cursor.execute('''
CREATE OR REPLACE VIEW ticket_collab_count_view AS
SELECT
COALESCE (t1.author, t2.author) as author,
(COALESCE(t1.count, 0) + COALESCE(t2.count, 0)) as count
FROM
(SELECT author, count(*) as count
FROM ticket_change
GROUP BY author
ORDER BY author
) AS t1
FULL OUTER JOIN
(SELECT reporter as author, count(*) as count
FROM ticket
GROUP BY reporter
ORDER BY reporter
) AS t2
ON t1.author = t2.author;
''')
# wiki_collab_count_view
cursor.execute('''
CREATE OR REPLACE VIEW wiki_collab_count_view AS
SELECT author, count(*) from wiki GROUP BY author;
''')
def drop_views(apps, schema_editor):
connection = connections['trac']
cursor = connection.cursor()
cursor.execute('''
DROP VIEW IF EXISTS revision_view;
DROP VIEW IF EXISTS ticket_view;
DROP VIEW IF EXISTS wiki_view;
DROP VIEW IF EXISTS ticket_collab_count_view;
DROP VIEW IF EXISTS wiki_collab_count_view;
DROP VIEW IF EXISTS attachment_view;
''')
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.RunPython(code=create_views, reverse_code=drop_views)
]
| gpl-2.0 | -4,734,781,128,982,950,000 | 32.87234 | 96 | 0.621022 | false |
rembo10/headphones | lib/pygazelle/api.py | 1 | 16383 | #!/usr/bin/env python
#
# PyGazelle - https://github.com/cohena/pygazelle
# A Python implementation of the What.cd Gazelle JSON API
#
# Loosely based on the API implementation from 'whatbetter', by Zachary Denton
# See https://github.com/zacharydenton/whatbetter
from HTMLParser import HTMLParser
import sys
import json
import time
import requests as requests
import headphones
from .user import User
from .artist import Artist
from .tag import Tag
from .request import Request
from .torrent_group import TorrentGroup
from .torrent import Torrent
from .category import Category
from .inbox import Mailbox
# 'text_type' is used in GazelleAPI.request() below; define it here so the
# unicode/str check works under both Python 2 and Python 3.
try:
    text_type = unicode  # Python 2
except NameError:
    text_type = str  # Python 3
class LoginException(Exception):
pass
class RequestException(Exception):
pass
class GazelleAPI(object):
last_request = time.time() # share amongst all api objects
default_headers = {
'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'User-Agent': 'Headphones/%s' % headphones.CURRENT_VERSION,
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9'\
',*/*;q=0.8',
'Accept-Encoding': 'gzip,deflate,sdch',
'Accept-Language': 'en-US,en;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'}
def __init__(self, username=None, password=None, url=None):
self.session = requests.session()
self.session.headers = self.default_headers
self.username = username
self.password = password
self.authkey = None
self.passkey = None
self.userid = None
self.logged_in_user = None
self.default_timeout = 30
self.cached_users = {}
self.cached_artists = {}
self.cached_tags = {}
self.cached_torrent_groups = {}
self.cached_torrents = {}
self.cached_requests = {}
self.cached_categories = {}
self.site = url + "/"
self.past_request_timestamps = []
def wait_for_rate_limit(self):
# maximum is 5 requests within 10 secs
time_frame = 10
max_reqs = 5
slice_point = 0
while len(self.past_request_timestamps) >= max_reqs:
for i, timestamp in enumerate(self.past_request_timestamps):
if timestamp < time.time() - time_frame:
slice_point = i + 1
else:
break
if slice_point:
self.past_request_timestamps = self.past_request_timestamps[slice_point:]
else:
time.sleep(0.1)
def logged_in(self):
return self.logged_in_user is not None and self.logged_in_user.id == self.userid
def _login(self):
"""
Private method.
Logs in user and gets authkey from server.
"""
if self.logged_in():
return
self.wait_for_rate_limit()
loginpage = self.site + 'login.php'
data = {'username': self.username,
'password': self.password,
'keeplogged': '1'}
r = self.session.post(loginpage, data=data, timeout=self.default_timeout, headers=self.default_headers)
self.past_request_timestamps.append(time.time())
if r.status_code != 200:
raise LoginException("Login returned status code %s" % r.status_code)
try:
accountinfo = self.request('index', autologin=False)
except RequestException as e:
raise LoginException("Login probably incorrect")
if not accountinfo or 'id' not in accountinfo:
raise LoginException("Login probably incorrect")
self.userid = accountinfo['id']
self.authkey = accountinfo['authkey']
self.passkey = accountinfo['passkey']
self.logged_in_user = User(self.userid, self)
self.logged_in_user.set_index_data(accountinfo)
def request(self, action, autologin=True, **kwargs):
"""
Makes an AJAX request at a given action.
Pass an action and relevant arguments for that action.
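        Example (a sketch; 'index' is the action used internally for login, other
        actions take their own keyword arguments):
            api = GazelleAPI(username='user', password='pass', url='https://tracker.example')
            account_info = api.request('index')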
"""
def make_request(action, **kwargs):
ajaxpage = 'ajax.php'
content = self.unparsed_request(ajaxpage, action, **kwargs)
try:
if not isinstance(content, text_type):
content = content.decode('utf-8')
parsed = json.loads(content)
if parsed['status'] != 'success':
raise RequestException
return parsed['response']
except ValueError:
raise RequestException
try:
return make_request(action, **kwargs)
except Exception as e:
if autologin and not self.logged_in():
self._login()
return make_request(action, **kwargs)
else:
raise e
def unparsed_request(self, sitepage, action, **kwargs):
"""
Makes a generic HTTP request at a given page with a given action.
Also pass relevant arguments for that action.
"""
self.wait_for_rate_limit()
url = "%s%s" % (self.site, sitepage)
params = {'action': action}
if self.authkey:
params['auth'] = self.authkey
params.update(kwargs)
r = self.session.get(url, params=params, allow_redirects=False, timeout=self.default_timeout)
if r.status_code == 302 and r.raw.headers['location'] == 'login.php':
self.logged_in_user = None
raise LoginException("User login expired")
self.past_request_timestamps.append(time.time())
return r.content
def get_user(self, id):
"""
Returns a User for the passed ID, associated with this API object. If the ID references the currently logged in
user, the user returned will be pre-populated with the information from an 'index' API call. Otherwise, you'll
need to call User.update_user_data(). This is done on demand to reduce unnecessary API calls.
"""
id = int(id)
if id == self.userid:
return self.logged_in_user
elif id in self.cached_users.keys():
return self.cached_users[id]
else:
return User(id, self)
def search_users(self, search_query):
"""
Returns a list of users returned for the search query. You can search by name, part of name, and ID number. If
one of the returned users is the currently logged-in user, that user object will be pre-populated with the
information from an 'index' API call. Otherwise only the limited info returned by the search will be pre-pop'd.
You can query more information with User.update_user_data(). This is done on demand to reduce unnecessary API calls.
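        Example (a sketch):
            matches = api.search_users('alice')  # a list of User objects, possibly empty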
"""
response = self.request(action='usersearch', search=search_query)
results = response['results']
found_users = []
for result in results:
user = self.get_user(result['userId'])
user.set_search_result_data(result)
found_users.append(user)
return found_users
def get_inbox(self, page='1', sort='unread'):
"""
Returns the inbox Mailbox for the logged in user
"""
return Mailbox(self, 'inbox', page, sort)
def get_sentbox(self, page='1', sort='unread'):
"""
Returns the sentbox Mailbox for the logged in user
"""
return Mailbox(self, 'sentbox', page, sort)
def get_artist(self, id=None, name=None):
"""
Returns an Artist for the passed ID, associated with this API object. You'll need to call Artist.update_data()
if the artist hasn't already been cached. This is done on demand to reduce unnecessary API calls.
"""
if id:
id = int(id)
if id in self.cached_artists.keys():
artist = self.cached_artists[id]
else:
artist = Artist(id, self)
if name:
artist.name = HTMLParser().unescape(name)
elif name:
artist = Artist(-1, self)
artist.name = HTMLParser().unescape(name)
else:
raise Exception("You must specify either an ID or a Name to get an artist.")
return artist
def get_tag(self, name):
"""
Returns a Tag for the passed name, associated with this API object. If you know the count value for this tag,
pass it to update the object. There is no way to query the count directly from the API, but it can be retrieved
        from other calls such as 'artist'.
"""
if name in self.cached_tags.keys():
return self.cached_tags[name]
else:
return Tag(name, self)
def get_request(self, id):
"""
Returns a Request for the passed ID, associated with this API object. You'll need to call Request.update_data()
if the request hasn't already been cached. This is done on demand to reduce unnecessary API calls.
"""
id = int(id)
if id in self.cached_requests.keys():
return self.cached_requests[id]
else:
return Request(id, self)
def get_torrent_group(self, id):
"""
Returns a TorrentGroup for the passed ID, associated with this API object.
"""
id = int(id)
if id in self.cached_torrent_groups.keys():
return self.cached_torrent_groups[id]
else:
return TorrentGroup(id, self)
def get_torrent(self, id):
"""
Returns a Torrent for the passed ID, associated with this API object.
"""
id = int(id)
if id in self.cached_torrents.keys():
return self.cached_torrents[id]
else:
return Torrent(id, self)
def get_torrent_from_info_hash(self, info_hash):
"""
Returns a Torrent for the passed info hash (if one exists), associated with this API object.
"""
try:
response = self.request(action='torrent', hash=info_hash.upper())
except RequestException:
return None
id = int(response['torrent']['id'])
if id in self.cached_torrents.keys():
torrent = self.cached_torrents[id]
else:
torrent = Torrent(id, self)
torrent.set_torrent_complete_data(response)
return torrent
def get_category(self, id, name=None):
"""
Returns a Category for the passed ID, associated with this API object.
"""
id = int(id)
if id in self.cached_categories.keys():
cat = self.cached_categories[id]
else:
cat = Category(id, self)
if name:
cat.name = name
return cat
def get_top_10(self, type="torrents", limit=25):
"""
Lists the top <limit> items of <type>. Type can be "torrents", "tags", or "users". Limit MUST be
10, 25, or 100...it can't just be an arbitrary number (unfortunately). Results are organized into a list of hashes.
Each hash contains the results for a specific time frame, like 'day', or 'week'. In the hash, the 'results' key
contains a list of objects appropriate to the passed <type>.
"""
response = self.request(action='top10', type=type, limit=limit)
top_items = []
if not response:
raise RequestException
for category in response:
results = []
if type == "torrents":
for item in category['results']:
torrent = self.get_torrent(item['torrentId'])
torrent.set_torrent_top_10_data(item)
results.append(torrent)
elif type == "tags":
for item in category['results']:
tag = self.get_tag(item['name'])
results.append(tag)
elif type == "users":
for item in category['results']:
user = self.get_user(item['id'])
results.append(user)
else:
raise Exception("%s is an invalid type argument for GazelleAPI.get_top_ten()" % type)
top_items.append({
"caption": category['caption'],
"tag": category['tag'],
"limit": category['limit'],
"results": results
})
return top_items
def search_torrents(self, **kwargs):
"""
Searches based on the args you pass and returns torrent groups filled with torrents.
Pass strings unless otherwise specified.
Valid search args:
searchstr (any arbitrary string to search for)
page (page to display -- default: 1)
artistname (self explanatory)
groupname (torrent group name, equivalent to album)
recordlabel (self explanatory)
cataloguenumber (self explanatory)
year (self explanatory)
remastertitle (self explanatory)
remasteryear (self explanatory)
remasterrecordlabel (self explanatory)
remastercataloguenumber (self explanatory)
filelist (can search for filenames found in torrent...unsure of formatting for multiple files)
encoding (use constants in pygazelle.Encoding module)
format (use constants in pygazelle.Format module)
media (use constants in pygazelle.Media module)
releasetype (use constants in pygazelle.ReleaseType module)
haslog (int 1 or 0 to represent boolean, 100 for 100% only, -1 for < 100% / unscored)
hascue (int 1 or 0 to represent boolean)
scene (int 1 or 0 to represent boolean)
vanityhouse (int 1 or 0 to represent boolean)
freetorrent (int 1 or 0 to represent boolean)
taglist (comma separated tag names)
tags_type (0 for 'any' matching, 1 for 'all' matching)
order_by (use constants in pygazelle.order module that start with by_ in their name)
order_way (use way_ascending or way_descending constants in pygazelle.order)
filter_cat (for each category you want to search, the param name must be filter_cat[catnum] and the value 1)
ex. filter_cat[1]=1 turns on Music.
filter_cat[1]=1, filter_cat[2]=1 turns on music and applications. (two separate params and vals!)
Category object ids return the correct int value for these. (verify?)
Returns a dict containing keys 'curr_page', 'pages', and 'results'. Results contains a matching list of Torrents
(they have a reference to their parent TorrentGroup).
"""
response = self.request(action='browse', **kwargs)
results = response['results']
if len(results):
curr_page = response['currentPage']
pages = response['pages']
else:
curr_page = 1
pages = 1
matching_torrents = []
for torrent_group_dict in results:
torrent_group = self.get_torrent_group(torrent_group_dict['groupId'])
torrent_group.set_torrent_search_data(torrent_group_dict)
for torrent_dict in torrent_group_dict['torrents']:
torrent_dict['groupId'] = torrent_group.id
torrent = self.get_torrent(torrent_dict['torrentId'])
torrent.set_torrent_search_data(torrent_dict)
matching_torrents.append(torrent)
return {'curr_page': curr_page, 'pages': pages, 'results': matching_torrents}
def generate_torrent_link(self, id, use_token=False):
url = "%storrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s&usetoken=%d" %\
(self.site, id, self.logged_in_user.authkey, self.logged_in_user.passkey, use_token)
return url
def save_torrent_file(self, id, dest, use_token=False):
file_data = self.unparsed_request("torrents.php", 'download',
id=id, authkey=self.logged_in_user.authkey, torrent_pass=self.logged_in_user.passkey,
usetoken=int(use_token))
        with open(dest, 'wb') as dest_file:
dest_file.write(file_data)
if sys.version_info[0] == 3:
text_type = str
else:
text_type = unicode
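
# Illustrative usage sketch (added for clarity, not part of the original
# module). It assumes an already-constructed, logged-in GazelleAPI instance;
# the search string below is a placeholder.
def _example_api_usage(api):
    """Run a torrent search and a top-10 query and print brief summaries."""
    search = api.search_torrents(searchstr='example query', hascue=1)
    print("page %s of %s, %d matching torrents" % (
        search['curr_page'], search['pages'], len(search['results'])))
    for timeframe in api.get_top_10(type="torrents", limit=10):
        print("%s: %d results" % (timeframe['caption'], len(timeframe['results'])))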
| gpl-3.0 | 1,708,872,479,942,694,700 | 38.100239 | 124 | 0.591101 | false |
google/checkers_classic | examples/quickstart/example11_test.py | 1 | 2502 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example Checkers tests with setup and teardown hooks."""
from checkers.python import checkers
from checkers.python.integrations.hamcrest import AssertThat
from checkers.python.integrations.hamcrest import EqualTo
from checkers.python.integrations.hamcrest import Is
from checkers.python.integrations.pyunit import pyunit
@checkers.Test
def TestOnePlusOneEqualsTwo(name):
print '1 + 1 = 2'
print 'printing name: %s' % name
AssertThat(1 + 1, Is(EqualTo(2)))
@checkers.Test
def TestZeroPlusOneEqualsOne(context):
print '1 + 1 = 2'
print 'printing name: %s' % context.test_run.components.Lookup('name')
AssertThat(1 + 1, Is(EqualTo(2)))
@checkers.Test
def TestTwoPlusTwoEqualsFour(_, calculator):
AssertThat(2 + 2, Is(EqualTo(calculator.Add(2, 2))))
def CreateTestRun():
"""Test run that will execute the defined test."""
test_run = checkers.TestRun()
# For per-test setup/teardown (once per test case).
test_run.RegisterTestCaseSetUpFunction(TestCaseSetUp)
test_run.RegisterTestCaseTearDownFunction(TestCaseTearDown)
# For per-test run setup/teardown (once per test run).
test_run.RegisterSetUpFunction(TestRunSetUp)
test_run.RegisterTearDownFunction(TestRunTearDown)
# Actual test cases.
test_run.LoadTestCase(TestOnePlusOneEqualsTwo)
test_run.LoadTestCase(TestZeroPlusOneEqualsOne)
return test_run
def TestCaseSetUp(test_run):
print 'running test case setup for test_run %s' % test_run.name
test_run.RegisterComponent('name', 'phooey')
def TestCaseTearDown(test_run):
print 'running test case teardown for test_run %s' % test_run.name
test_run.UnregisterComponent('name')
def TestRunSetUp(test_run):
print 'running setup method for test_run %s' % test_run.name
def TestRunTearDown(test_run):
print 'running teardown method for test_run %s' % test_run.name
if __name__ == '__main__':
pyunit.main(CreateTestRun())
| apache-2.0 | 2,140,861,434,375,869,200 | 31.493506 | 78 | 0.7498 | false |
reidlindsay/wins | sandbox/experiments/aloha/infocom/parse-per.py | 1 | 6546 | #! /usr/bin/env python
"""
Parse PER vs. SINR data from trace files.
Revision Info
=============
* $LastChangedBy: mandke $
* $LastChangedDate: 2011-10-19 17:04:02 -0500 (Wed, 19 Oct 2011) $
* $LastChangedRevision: 5220 $
:author: Ketan Mandke <[email protected]>
:copyright:
Copyright 2009-2011 The University of Texas at Austin
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__docformat__ = "restructuredtext en"
from wins import *
from wins.ieee80211 import *
from optparse import OptionParser
import sys
from copy import copy
from numpy import array
def read_trace(options, tracefile):
# load trace from file
tr = Trace()
tr.read(tracefile)
# return trace
return tr
DETECTFAIL1 = "not detected in LISTEN"
HEADERFAIL1 = "header parameters failed"
HEADERFAIL2 = "header decoding failed"
IGNOREFAIL1 = "ignore rxdata in DECODE"
IGNOREFAIL2 = "ignore detect in DECODE"
def parse_per_info(options, trace, fmt='bo', usemodel=False):
# initialize parameters
param, data = {}, []
mcs, rmsdelay = None, []
ncollision = options.ncollision
# parse trace
for e in trace.events:
obj, evt = e['obj'], e['event']
# check for MCS parameter
if ('phy-rate' in e):
rate = int(e['phy-rate'])
hparamfail = ('drop' in e) and (e['drop']==HEADERFAIL1)
if not hparamfail:
if mcs is None: mcs = rate
else: assert (mcs == rate)
# check for 802.11n RCV & DRP events
if (obj=="80211N"):
rcv, drp = (evt=="RCV"), (evt=="DRP")
x, y = None, None
if drp:
drop = e['drop']
notdetected = (drop==DETECTFAIL1)
hparamfail = (drop==HEADERFAIL1)
headerfail = (drop==HEADERFAIL2)
ignorefail = (drop==IGNOREFAIL1) or (drop==IGNOREFAIL2)
assert (notdetected or hparamfail or headerfail or ignorefail), "%s"%(e)
#sinr = float(e['dot11n-sinr'].lower().replace("db","") )
#x, y = sinr, 1.0 # log header drop as a packet error also
elif rcv:
sinr = float(e['dot11n-sinr'].lower().replace("db","") )
err = e['crc']
haserror = (err=="FAIL")
noerror = (err=="OK")
assert (haserror or noerror)
if usemodel:
per = float(e['dot11n-model-per'])
else:
if haserror: per = 1.0
else: per = 0.0
# check if ncollision matches
keepdata = True
if (ncollision is not None):
keepdata = False
if 'cif-collision' in e:
coll = eval(e['cif-collision'])
assert isinstance(coll, list)
keepdata = (len(coll) == ncollision)
if keepdata:
x, y = sinr, per
# log data point
if (x is not None) and (y is not None):
dp = {'x':x, 'y':y, 'ndata': 1}
data.append(dp)
# check for RMS delay
if (rcv or drp):
tau = float(e['dot11n-rmsdelay'])
rmsdelay.append(tau)
# check parameters
assert (rmsdelay)
assert (mcs is not None)
avgdelay = array(rmsdelay).mean()
pertype = "actual"
if usemodel: pertype = "model"
# return param and data
param['mcs'] = mcs
param['rmsdelay'] = avgdelay
param['format'] = fmt
label = "${\\rm PER}_{%s}$ ${\\rm (MCS = %d}$, "%(pertype,mcs)
if ncollision is not None: label +="$N_{coll} = %d$, "%(ncollision)
label += "$\\sigma_{rms} = %.3g ns)$"%(avgdelay*1e9)
param['label'] = label
return param, data
def parse_per():
usage = "%prog [OPTIONS] TRACEFILE1 [TRACEFILE2 ...]\n" + \
" Writes parsed data to standard output."
parser = OptionParser(usage=usage)
parser.add_option("-c", "--ncollision", dest="ncollision", type="int", \
default=None, help="Filter results using number of collisions. [default=%default]")
(options, args) = parser.parse_args()
if len(args)<1:
print "Insufficient number of arguments."
parser.print_help()
raise SystemExit
tracefile = args[0:]
numtraces = len(tracefile)
# set parameters
default_parameters = {'xlabel': "SINR (dB)", \
'ylabel': "PER", \
'title': "PER vs. SINR", \
'label': None, \
'source': None, \
'format': None}
lgd, formats = [], [('ro','r:'), ('bo', 'b:'), ('go', 'g:')]
for k in range(numtraces):
tfile = tracefile[k]
# treat as normal wins trace file
trace = read_trace(options, tfile)
fmt = formats[k%len(formats)]
if not trace: continue
sys.stderr.write("Parsing trace from %s ...\n"%(tfile))
# parse actual PER from trace
param, data = parse_per_info(options, trace)
if data:
parameters = copy(default_parameters)
parameters.update(param)
parameters['source'] = tfile
parameters['format'] = fmt[0]
assert (param['label'] is not None)
parsed_data = {'parameters': parameters, 'data': data}
sys.stdout.write("%s\n"%(parsed_data) )
# parse model PER from trace
param, data = parse_per_info(options, trace, usemodel=True)
if data:
parameters = copy(default_parameters)
parameters.update(param)
parameters['source'] = tfile
parameters['format'] = fmt[1]
assert (param['label'] is not None)
parsed_data = {'parameters': parameters, 'data': data}
sys.stdout.write("%s\n"%(parsed_data) )
if __name__ == '__main__':
parse_per()
| apache-2.0 | 5,629,459,248,063,967,000 | 35.366667 | 95 | 0.543996 | false |
douglassquirrel/alexandra | libraries/pubsub.py | 1 | 7745 | from docstore import connect as docstore_connect
from pika import BlockingConnection, ConnectionParameters
from re import match
from time import time as now
from urllib2 import build_opener, HTTPHandler, Request, urlopen
EXCHANGE = 'alexandra'
HTTP_PATIENCE_SEC = 1
class AMQPConnection:
def __init__(self, url, context, marshal, unmarshal):
self._context = context
self.marshal, self.unmarshal = marshal, unmarshal
host = match(r"amqp://([\w\d\.]+)", url).group(1)
connection = BlockingConnection(ConnectionParameters(host))
self._channel = connection.channel()
self._channel.exchange_declare(exchange=EXCHANGE, type='topic')
self._init_docstore()
def _init_docstore(self):
location_queue = self._subscribe_raw('docstore', 'location')
self._publish_raw('docstore', 'locate', 'locate')
docstore_url = self._get_message_block_raw(location_queue, timeout=1)
self._docstore = docstore_connect(docstore_url)
def publish(self, topic, message):
self._publish_raw(self._context, topic, self.marshal(message))
def _publish_raw(self, context, topic, message):
self._channel.basic_publish(exchange=EXCHANGE,
routing_key=context + '.' + topic,
body=message)
def subscribe(self, topic):
return self._subscribe_raw(self._context, topic)
def _subscribe_raw(self, context, topic):
result = self._channel.queue_declare()
queue = result.method.queue
self._channel.queue_bind(exchange=EXCHANGE,
queue=queue,
routing_key=context + '.' + topic)
return queue
def unsubscribe(self, queue):
self._channel.queue_delete(callback=None, queue=queue)
def consume_queue(self, queue, f):
def callback(ch, method, properties, body):
f(self.unmarshal(body))
self._channel.basic_consume(callback, queue=queue, no_ack=True)
self._channel.start_consuming()
def consume_topic(self, topic, f):
queue = self.subscribe(topic)
self.consume_queue(queue, f)
def consume_all(self, f):
queue = self.subscribe('#')
def callback(ch, method, properties, body):
amqp_topic = method.routing_key
context, topic = amqp_topic.split('.', 1)
f(context, topic, self.unmarshal(body))
self._channel.basic_consume(callback, queue=queue, no_ack=True)
self._channel.start_consuming()
def get_message(self, queue):
raw_message = self._get_message_raw(queue)
if raw_message is None:
return None
else:
return self.unmarshal(raw_message)
def _get_message_raw(self, queue):
return self._channel.basic_get(queue=queue, no_ack=True)[2]
def get_all_messages(self, queue):
messages = []
while True:
message = self.get_message(queue)
if message is None:
return messages
else:
messages.append(message)
def get_message_block(self, queue, timeout=None):
return self._get_message_block(queue, self.get_message, timeout)
def _get_message_block_raw(self, queue, timeout=None):
return self._get_message_block(queue, self._get_message_raw, timeout)
def _get_message_block(self, queue, fetcher, timeout):
alarm = Alarm(timeout)
while True:
message = fetcher(queue)
if message is not None:
return message
if alarm.is_ringing():
return None
def get_current_message(self, topic):
raw_message = self._docstore.get('/%s/%s' % (self._context, topic))
if raw_message is None:
return None
else:
return self.unmarshal(raw_message)
def make_topic_monitor(self, topic):
return TopicMonitor(self, topic)
class HTTPConnection:
def __init__(self, url, context, marshal, unmarshal):
self._root_url = '%s/contexts/%s' % (url, context)
self.marshal, self.unmarshal = marshal, unmarshal
def publish(self, topic, message):
url = '%s/%s' % (self._root_url, topic)
self._visit_url(url=url, data=self.marshal(message), method='POST')
def subscribe(self, topic):
return self._visit_url('%s/%s' % (self._root_url, topic))
def unsubscribe(self, queue):
url = '%s/queues/%s' % (self._root_url, queue)
self._visit_url(url=url, method='DELETE')
def consume_queue(self, queue, f):
url = '%s/queues/%s' % (self._root_url, queue)
headers = [('Patience', HTTP_PATIENCE_SEC)]
while True:
message = self._visit_url(url=url, headers=headers)
if len(message) > 0:
f(self.unmarshal(message))
def consume_topic(self, topic, f):
queue = self.subscribe(topic)
self.consume_queue(queue, f)
def consume_all(self, f):
pass #not implemented
def get_message(self, queue):
url = '%s/queues/%s' % (self._root_url, queue)
message = self._visit_url(url)
if len(message) == 0:
return None
else:
return self.unmarshal(message)
def get_all_messages(self, queue):
url = '%s/queues/%s' % (self._root_url, queue)
headers = [('Range', 'all')]
result = self._visit_url(url=url, headers=headers)
if len(result) == 0:
return []
else:
return map(self.unmarshal, result.split('\n'))
def get_message_block(self, queue, timeout=None):
url = '%s/queues/%s' % (self._root_url, queue)
headers = [('Patience', HTTP_PATIENCE_SEC)]
alarm = Alarm(timeout)
while True:
message = self._visit_url(url=url, headers=headers)
if len(message) > 0:
return self.unmarshal(message)
if alarm.is_ringing():
return None
def get_current_message(self, topic):
url = '%s/%s' % (self._root_url, topic)
headers = [('Range', 'current')]
message = self._visit_url(url=url, headers=headers)
if len(message) == 0:
return None
else:
return self.unmarshal(message)
def make_topic_monitor(self, topic):
return TopicMonitor(self, topic)
def _visit_url(self, url, data=None, method='GET', headers=[]):
opener = build_opener(HTTPHandler)
request = Request(url)
request.get_method = lambda: method
for header in headers:
request.add_header(*header)
return opener.open(request, data).read()
connection_classes = {'amqp': AMQPConnection, 'http': HTTPConnection}
def identity(x):
return x
def connect(url, context, marshal=identity, unmarshal=identity):
protocol = match(r"(\w+)://", url).group(1)
return connection_classes[protocol](url, context, marshal, unmarshal)
def firehose(url):
return connect(url, '#')
class Alarm:
def __init__(self, duration):
if duration is not None:
self.alarm_time = now() + duration
else:
self.alarm_time = None
def is_ringing(self):
return self.alarm_time is not None and now() > self.alarm_time
class TopicMonitor:
def __init__(self, connection, topic):
self._connection = connection
self._queue = connection.subscribe(topic)
self._latest = None
def latest(self):
messages = self._connection.get_all_messages(self._queue)
if len(messages) > 0:
self._latest = messages[-1]
return self._latest
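
# Illustrative usage sketch (added for clarity, not part of the original
# module). The broker URL, context, and topic below are placeholders; an AMQP
# broker (and the docstore service the AMQP backend locates) is assumed to be
# running. A URL starting with http:// would select the HTTP backend instead.
def _example_round_trip():
    conn = connect('amqp://localhost', 'example', marshal=str, unmarshal=str)
    queue = conn.subscribe('greetings')
    conn.publish('greetings', 'hello world')
    print(conn.get_message_block(queue, timeout=5))
    conn.unsubscribe(queue)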
| mit | -9,086,060,357,095,325,000 | 33.575893 | 77 | 0.59277 | false |
zarr-developers/numcodecs | numcodecs/fixedscaleoffset.py | 1 | 4198 | import numpy as np
from .abc import Codec
from .compat import ensure_ndarray, ndarray_copy
class FixedScaleOffset(Codec):
"""Simplified version of the scale-offset filter available in HDF5.
Applies the transformation `(x - offset) * scale` to all chunks. Results
are rounded to the nearest integer but are not packed according to the
minimum number of bits.
Parameters
----------
offset : float
Value to subtract from data.
scale : int
Value to multiply by data.
dtype : dtype
Data type to use for decoded data.
astype : dtype, optional
Data type to use for encoded data.
Notes
-----
If `astype` is an integer data type, please ensure that it is
sufficiently large to store encoded values. No checks are made and data
may become corrupted due to integer overflow if `astype` is too small.
Examples
--------
>>> import numcodecs
>>> import numpy as np
>>> x = np.linspace(1000, 1001, 10, dtype='f8')
>>> x
array([1000. , 1000.11111111, 1000.22222222, 1000.33333333,
1000.44444444, 1000.55555556, 1000.66666667, 1000.77777778,
1000.88888889, 1001. ])
>>> codec = numcodecs.FixedScaleOffset(offset=1000, scale=10, dtype='f8', astype='u1')
>>> y1 = codec.encode(x)
>>> y1
array([ 0, 1, 2, 3, 4, 6, 7, 8, 9, 10], dtype=uint8)
>>> z1 = codec.decode(y1)
>>> z1
array([1000. , 1000.1, 1000.2, 1000.3, 1000.4, 1000.6, 1000.7,
1000.8, 1000.9, 1001. ])
>>> codec = numcodecs.FixedScaleOffset(offset=1000, scale=10**2, dtype='f8', astype='u1')
>>> y2 = codec.encode(x)
>>> y2
array([ 0, 11, 22, 33, 44, 56, 67, 78, 89, 100], dtype=uint8)
>>> z2 = codec.decode(y2)
>>> z2
array([1000. , 1000.11, 1000.22, 1000.33, 1000.44, 1000.56,
1000.67, 1000.78, 1000.89, 1001. ])
>>> codec = numcodecs.FixedScaleOffset(offset=1000, scale=10**3, dtype='f8', astype='u2')
>>> y3 = codec.encode(x)
>>> y3
array([ 0, 111, 222, 333, 444, 556, 667, 778, 889, 1000], dtype=uint16)
>>> z3 = codec.decode(y3)
>>> z3
array([1000. , 1000.111, 1000.222, 1000.333, 1000.444, 1000.556,
1000.667, 1000.778, 1000.889, 1001. ])
See Also
--------
numcodecs.quantize.Quantize
"""
codec_id = 'fixedscaleoffset'
def __init__(self, offset, scale, dtype, astype=None):
self.offset = offset
self.scale = scale
self.dtype = np.dtype(dtype)
if astype is None:
self.astype = self.dtype
else:
self.astype = np.dtype(astype)
if self.dtype == object or self.astype == object:
raise ValueError('object arrays are not supported')
def encode(self, buf):
# normalise input
arr = ensure_ndarray(buf).view(self.dtype)
# flatten to simplify implementation
arr = arr.reshape(-1, order='A')
# compute scale offset
enc = (arr - self.offset) * self.scale
# round to nearest integer
enc = np.around(enc)
# convert dtype
enc = enc.astype(self.astype, copy=False)
return enc
def decode(self, buf, out=None):
# interpret buffer as numpy array
enc = ensure_ndarray(buf).view(self.astype)
# flatten to simplify implementation
enc = enc.reshape(-1, order='A')
# decode scale offset
dec = (enc / self.scale) + self.offset
# convert dtype
dec = dec.astype(self.dtype, copy=False)
# handle output
return ndarray_copy(dec, out)
def get_config(self):
# override to handle encoding dtypes
return dict(
id=self.codec_id,
scale=self.scale,
offset=self.offset,
dtype=self.dtype.str,
astype=self.astype.str
)
def __repr__(self):
r = '%s(scale=%s, offset=%s, dtype=%r' % \
(type(self).__name__, self.scale, self.offset, self.dtype.str)
if self.astype != self.dtype:
r += ', astype=%r' % self.astype.str
r += ')'
return r
| mit | 3,727,921,587,109,626,400 | 29.867647 | 93 | 0.57051 | false |
treverhines/PyGeoNS | pygeons/plot/quiver.py | 1 | 5221 | '''
This module provides a quiver function which allows for error ellipses.
'''
import numpy as np
from matplotlib.quiver import Quiver as _Quiver
from matplotlib.collections import EllipseCollection
from matplotlib.backends import pylab_setup
from matplotlib.pyplot import sci
from matplotlib.pyplot import gca
from scipy.spatial import cKDTree
import warnings
_backend_mod, new_figure_manager, draw_if_interactive, _show = pylab_setup()
def _estimate_scale(x,y,u,v):
pos = np.array([x,y]).T
  # if there are no data then return a scale of 1
if pos.shape[0] == 0:
return 1.0
T = cKDTree(pos)
average_dist = np.mean(T.query(pos,2)[0][:,1])
average_length = np.mean(np.sqrt(u**2 + v**2))
return average_length/average_dist
# Convert (sigma_x, sigma_y, rho) uncertainty triplets into error ellipse
# parameters: semi-major axis, semi-minor axis, and orientation in degrees,
# found from the eigendecomposition of each 2x2 covariance matrix. Non-finite
# or masked entries produce a degenerate (0, 0, 0) ellipse.
def compute_abphi(sigma_x,sigma_y,rho):
n = len(sigma_x)
a = []
b = []
phi = []
for i in range(n):
if ((not np.isfinite(sigma_x[i])) |
(not np.isfinite(sigma_y[i])) |
(not np.isfinite(rho[i]))):
# this block should run if the uncertainties or correlations are
# not finite or are masked
a += [0.0]
b += [0.0]
phi += [0.0]
continue
sigma_xy = rho[i]*sigma_x[i]*sigma_y[i]
cov_mat = np.array([[sigma_x[i]**2,sigma_xy],
[sigma_xy,sigma_y[i]**2]])
val,vec = np.linalg.eig(cov_mat)
maxidx = np.argmax(val)
minidx = np.argmin(val)
a += [np.sqrt(val[maxidx])]
b += [np.sqrt(val[minidx])]
phi += [np.arctan2(vec[:,maxidx][1],vec[:,maxidx][0])]
a = np.array(a)
b = np.array(b)
phi = np.array(phi)*180/np.pi
return a,b,phi
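
# Quick illustrative check of compute_abphi (added for clarity, not part of
# the original module): an uncorrelated error with sigma_x=2, sigma_y=1 and
# rho=0 should give semi-axes a=2, b=1 and an orientation of 0 degrees.
def _demo_compute_abphi():
    a, b, phi = compute_abphi(np.array([2.0]), np.array([1.0]), np.array([0.0]))
    print('a=%s b=%s phi=%s' % (a, b, phi))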
def quiver(*args, **kw):
ax = gca()
# allow callers to override the hold state by passing hold=True|False
washold = ax.ishold()
hold = kw.pop('hold', None)
if hold is not None:
ax.hold(hold)
try:
if not ax._hold:
ax.cla()
q = Quiver(ax, *args, **kw)
ax.add_collection(q, autolim=True)
ax.autoscale_view()
draw_if_interactive()
finally:
ax.hold(washold)
sci(q)
return q
class Quiver(_Quiver):
def __init__(self,ax,*args,**kwargs):
if 'sigma' in kwargs:
scale_units = kwargs.get('scale_units','xy')
kwargs['scale_units'] = scale_units
if kwargs['scale_units'] != 'xy':
raise ValueError('scale units must be "xy" when sigma is given')
angles = kwargs.get('angles','xy')
kwargs['angles'] = angles
if kwargs['angles'] != 'xy':
raise ValueError('angles must be "xy" when sigma is given')
sigma = kwargs.pop('sigma',None)
ellipse_kwargs = kwargs.pop('ellipse_kwargs',{})
if 'offsets' in ellipse_kwargs:
raise ValueError('cannot specify ellipse offsets')
if 'units' in ellipse_kwargs:
raise ValueError('cannot specify ellipse units')
self.ellipse_kwargs = {'edgecolors':'k',
'facecolors':'none',
'linewidths':1.0}
self.ellipse_kwargs.update(ellipse_kwargs)
self.ellipsoids = None
_Quiver.__init__(self,ax,*args,**kwargs)
if sigma is not None:
if self.scale is None:
self.scale = _estimate_scale(self.X,self.Y,self.U,self.V)
su,sv,rho = sigma[0],sigma[1],sigma[2]
self._update_ellipsoids(su,sv,rho)
def _update_ellipsoids(self,su,sv,rho):
self.scale_units = 'xy'
self.angles = 'xy'
tips_x = self.X + self.U/self.scale
tips_y = self.Y + self.V/self.scale
tips = np.array([tips_x,tips_y]).transpose()
a,b,angle = compute_abphi(su,sv,rho)
width = 2.0*a/self.scale
height = 2.0*b/self.scale
if self.ellipsoids is not None:
self.ellipsoids.remove()
# do not draw ellipses which are too small
too_small = 0.001
length = np.sqrt((self.U/self.scale)**2 + (self.V/self.scale)**2)
with warnings.catch_warnings():
# do not print out zero division warning
warnings.simplefilter("ignore")
is_not_too_small = ((np.nan_to_num(width/length) > too_small) |
(np.nan_to_num(height/length) > too_small))
width = width[is_not_too_small]
height = height[is_not_too_small]
angle = angle[is_not_too_small]
tips = tips[is_not_too_small]
# dont add ellipses if there are no ellipses to add
if any(is_not_too_small):
self.ellipsoids = EllipseCollection(width,height,angle,
units=self.scale_units,
offsets = tips,
transOffset=self.ax.transData,
**self.ellipse_kwargs)
self.ax.add_collection(self.ellipsoids)
else:
self.ellipsoids = None
def set_UVC(self,u,v,C=None,sigma=None):
if C is None:
_Quiver.set_UVC(self,u,v)
else:
_Quiver.set_UVC(self,u,v,C)
if sigma is not None:
su,sv,rho = sigma[0],sigma[1],sigma[2]
self._update_ellipsoids(su,sv,rho)
def remove(self):
# remove the quiver and ellipsoid collection
_Quiver.remove(self)
if self.ellipsoids is not None:
self.ellipsoids.remove()
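
# Illustrative usage sketch (added for clarity, not part of the original
# module). Draws two vectors with their error ellipses; all numbers below are
# placeholders.
def _example_quiver_with_ellipses():
    import matplotlib.pyplot as plt
    x, y = np.array([0.0, 1.0]), np.array([0.0, 1.0])
    u, v = np.array([1.0, -0.5]), np.array([0.5, 1.0])
    sigma = (np.array([0.1, 0.2]),   # uncertainty of u
             np.array([0.2, 0.1]),   # uncertainty of v
             np.array([0.0, 0.3]))   # correlation between u and v errors
    quiver(x, y, u, v, sigma=sigma)
    plt.show()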
| mit | 676,115,310,877,625,300 | 28.834286 | 88 | 0.583605 | false |
Friday811/tf-tacyt | tft.py | 1 | 15038 | # TensorFlow-Tacyt
#
# This script connects to 11Path's Tacyt database
# and learns to identify malicious applications.
# Connection to Tacyt through the tacyt python API.
# Machine learning through TFLearn and TensorFlow.
#
# Copyright (C) 2017 Rafael Ortiz <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from __future__ import print_function
from __future__ import division
import os
from tacyt import TacytApp as ta
import json
import tflearn
import numpy as np
import random
import hashlib
import pickle
from tftutils import TFTUtils
class TFTacyt(object):
"""
TensorFlow-Tacyt Class
See example.py for usage information.
"""
def __init__(self, api, categories, verbosity=0):
# Instantiate
self.api = api
self.categories = categories
self.verbosity = verbosity
self.DATA = []
self.LABELS = -1
self.MODEL = None
self.Util = TFTUtils(self.verbosity)
self.vPrint(('Categories: ' + str(self.categories)), self.Util.DEBUG)
# Use as shorthand for printing informational/debug
# stuff, will only print when VERBOSE = True
def vPrint(self, message, verbosity=TFTUtils.DEBUG):
self.Util.vPrint(message, verbosity)
# Get the categories to learn from the given file and return it.
# This allows you to easily select which criteria will be used
# to learn and search.
#
# File should be formatted with one category per line. Lines
# commented with # will be ignored
@staticmethod
def getCategoriesFromFile(fileName):
categories = []
with open(fileName) as f:
lines = f.readlines()
for line in lines:
if line[0] != '#':
categories.append(line.rstrip(os.linesep))
return categories
# Given a results json from tacyt and a list of categories to
# learn from, return a list of dictionaries for each app with
# any key not in the list of categories removed.
# If categories are not specified, return all.
# If a category is not found, it will be instantiated with the notFound var.
# If notFound is None, no replacement will be made
@staticmethod
def getFormattedApplicationsFromResults(results, categories=[], notFound=None):
apps = []
categoriesLen = len(categories)
for app in results['result']['applications']:
if categoriesLen:
for key in app.keys():
# Remove any keys not in categories
if key not in categories:
app.pop(key, None)
apps.append(app)
else:
apps.append(app)
if notFound is not None:
for app in apps:
appKeys = app.keys()
for cat in categories:
if cat not in appKeys:
app[cat] = notFound
return apps
# Takes the strings in app descriptions and hashes them to unique
    # integer values. Should be normalized after. Usefulness will
# depend on the model you use.
@staticmethod
def hashAppStrings(apps):
for app in apps:
for key in app.keys():
if not (type(app[key]) == int or type(app[key]) == float):
app[key] = int(
hashlib.md5(app[key].encode('utf-8')).hexdigest(),
16)
return apps
# Given a list of dictionaries corresponding to apps,
# remove all elements from those dictionaries that are not ints
# or set them to a specific int
@staticmethod
def getIntFilteredAppDict(apps, setTo=None):
if setTo is None:
for app in apps:
for key in app.keys():
if not (type(app[key]) == int or type(app[key]) == float):
app.pop(key, None)
else:
for app in apps:
for key in app.keys():
if not (type(app[key]) == int or type(app[key]) == float):
app[key] = setTo
return apps
# Create a training data set from a list of app dicts
# Returns data, a list of lists sorted the same for each app
# and the labels for the categories [malicious, benign]
@staticmethod
def createTrainingSet(apps, malicious=False):
data = []
if malicious:
labels = np.repeat(np.array([[1., 0.]]), [len(apps)], axis=0)
else:
labels = np.repeat(np.array([[0., 1.]]), [len(apps)], axis=0)
for app in apps:
appList = []
for key in sorted(app):
appList.append(app[key])
data.append(appList)
return data, labels
# Set all values for app features to be the same for the entire
# list of dicts given. Used for debugging that the damn thing works.
@staticmethod
def setAllValues(apps, value=True):
for app in apps:
for key in app:
app[key] = value
return apps
# Normalize the relative values for each app to each other
# only works if all values are int or float
@staticmethod
def normalizeByApp(apps, nValue=1.0):
for app in apps:
            # max() over a dict iterates keys, so take the max over the values
            maxValue = max(app.values())
if maxValue == 0:
maxValue = 1
for key in app:
app[key] = (app[key] / float(maxValue)) * nValue
return apps
# Normalize the relative values for each app to every other app
# for that category. Only works if all values are int or float.
@staticmethod
def normalizeByCategory(apps, nValue=1.0):
maxValue = 0
for key in apps[0].keys():
# Find max
for app in apps:
if app[key] > maxValue:
maxValue = app[key]
# Normalize
if maxValue == 0:
maxValue = 1
for app in apps:
app[key] = (app[key] / float(maxValue)) * nValue
# Reset max value
maxValue = 0
return apps
# Same as the normalizeByCategory function, except for operating
# on the data list of lists, instead of the apps list of dicts
def normalizeDataByCategory(self, data, nValue=100.0):
maxValue = 0
for i in range(len(data[0])):
for app in data:
if app[i] > maxValue:
maxValue = app[i]
self.vPrint("Staged max: " + str(maxValue), self.Util.DEBUG)
# Normalize
self.vPrint("Max value: " + str(maxValue), self.Util.DEBUG)
if maxValue == 0:
maxValue = 1
for app in data:
app[i] = (app[i] / float(maxValue)) * nValue
self.vPrint("New normal: " + str(app[i]), self.Util.DEBUG)
maxValue = 0
return data
# Search for 1000 entries for the given string and format it with
# the given categories argument
def maxSearch(self, searchString=''):
api = self.api
categories = self.categories
results = []
for i in range(10):
self.vPrint("Searching for " + searchString + " page " + str(i+1), self.Util.DEBUG)
search = api.search_apps(searchString, maxResults=100, numberPage=i+1)
search = self.getFormattedApplicationsFromResults(
search.get_data(),
categories=categories,
notFound=-1)
results.extend(search)
return results
# Randomize data and labels, very important for training if you
# build your data sets per category.
@staticmethod
def randomizeData(data, labels):
a = []
b = []
combined = list(zip(data, labels))
random.shuffle(combined)
a[:], b[:] = zip(*combined)
b = np.array(b)
return a, b
# Creates a data, labels pair from the given API and list of search terms
# The categories should be passed as well.
def createDLPairFromList(self, searchTerms, malicious=False):
api = self.api
categories = self.categories
data = []
labels = -1
for term in searchTerms:
search = self.maxSearch(searchString=term)
search = TFTacyt.getIntFilteredAppDict(search, setTo=-1)
sData, sLabel = TFTacyt.createTrainingSet(search, malicious=malicious)
data.extend(sData)
if type(labels) is int:
labels = sLabel
else:
labels = np.append(labels, sLabel, axis=0)
return data, labels
#########################################################################
# Helper methods: These methods call other methods from the class to #
# make it easier to load data, search, save, etc. #
# #
# You should be able to, with these methods: #
# Create a dataset from a list of search words: #
# addDatasetFromTerms(searchTerms) #
# Save and load the dataset: saveDataset(), loadDataset() #
# Preprocess it for learning: preprocess() #
# Remove a set of validation data: createTestingSet() #
# Create, save, and load a model. #
# Validate the model from testing set. #
# #
# It should be possible to recreate their functionality with the #
# functions they wrap, if you want to modify parts in the middle. #
#########################################################################
# Wrapper function for createDLPairFromList that stores data and label as
# variables local to the TFT instance
def addDatasetFromTerms(self, searchTerms, malicious=False):
data, labels = self.createDLPairFromList(searchTerms, malicious=malicious)
self.DATA.extend(data)
if type(self.LABELS) is int:
self.LABELS = labels
else:
self.LABELS = np.append(self.LABELS, labels, axis=0)
return self.DATA, self.LABELS
# Save the data, labels to file
def saveDataset(self, filename="pickles/dataset.pickle"):
combined = list(zip(self.DATA, self.LABELS))
pickle.dump(combined, open(filename, "wb"))
# Load the data, labels from file
def loadDataset(self, filename="pickles/dataset.pickle"):
combined = pickle.load(open(filename, "rb"))
a = []
b = []
a[:], b[:] = zip(*combined)
b = np.array(b)
self.DATA = a
self.LABELS = b
return a, b
# Preprocesses data by randomizing the order and normalizing by category
def preprocess(self):
self.DATA, self.LABELS = self.randomizeData(self.DATA, self.LABELS)
self.DATA = self.normalizeDataByCategory(self.DATA)
# Creates a test set of data, removed from training set
# for validation of the model.
def createTestingSet(self, size=-1):
if size == -1:
size = len(self.DATA) // 10
testSet = []
testSetLabels = []
for i in range(size):
j = random.randrange(0, len(self.DATA), 1)
testSet.append(self.DATA.pop(j))
testSetLabels.append(self.LABELS[j])
self.LABELS = np.delete(self.LABELS, j, axis=0)
return testSet, testSetLabels
# Data must exist before the model is created
def createModel(self):
net = tflearn.input_data(shape=[None, len(self.DATA[0])])
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 2, activation='softmax')
adam = tflearn.optimizers.Adam(learning_rate=0.0001)
net = tflearn.regression(net, optimizer=adam)
model = tflearn.DNN(net, tensorboard_verbose=self.verbosity)
self.MODEL = model
def trainModel(self):
self.MODEL.fit(self.DATA,
self.LABELS,
n_epoch=1000,
batch_size=32,
show_metric=True
)
def saveModel(self, filename='models/model.tflearn'):
if self.MODEL is None:
self.createModel()
self.MODEL.save(filename)
def loadModel(self, filename='models/model.tflearn'):
if self.MODEL is None:
self.createModel()
self.MODEL.load(filename)
def validateModel(self, testSet, testSetLabels):
pred = self.MODEL.predict(testSet)
        fP = 0  # false positives: safe apps classified as malicious
        cM = 0  # correctly classified malicious apps
        cS = 0  # correctly classified safe apps
        iS = 0  # malicious apps incorrectly classified as safe
        i = 0
for el in pred:
if (pred[i][0] > pred[i][1]) and (testSetLabels[i][0] > testSetLabels[i][1]):
self.vPrint("Test set #" + str(i+1) +
" correctly identified malicious.",
self.Util.DEBUG)
cM = cM + 1
elif (pred[i][0] > pred[i][1]) and (testSetLabels[i][0] < testSetLabels[i][1]):
self.vPrint("Test set #" + str(i+1) +
" false positively identified malicious.",
self.Util.DEBUG)
fP = fP + 1
elif (pred[i][0] < pred[i][1]) and (testSetLabels[i][0] < testSetLabels[i][1]):
self.vPrint("Test set #" + str(i+1) + " correctly identified safe.",
self.Util.DEBUG)
cS = cS + 1
elif (pred[i][0] < pred[i][1]) and (testSetLabels[i][0] > testSetLabels[i][1]):
self.vPrint("Test set #" + str(i+1) + " incorrectly marked safe.",
self.Util.DEBUG)
iS = iS + 1
i = i + 1
print("Correctly identified malicious: " + str(cM) + "/" + str(cM + iS))
print("False positives: " + str(fP) + "/" + str(fP+cS))
if __name__ == '__main__':
print("ERROR: This module should be imported, not run.\
\n See example.py for usage.")
| lgpl-2.1 | 706,738,460,525,060,500 | 38.469816 | 95 | 0.560114 | false |
tensorflow/models | official/vision/beta/modeling/segmentation_model_test.py | 1 | 2801 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for segmentation network."""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from official.vision.beta.modeling import backbones
from official.vision.beta.modeling import segmentation_model
from official.vision.beta.modeling.decoders import fpn
from official.vision.beta.modeling.heads import segmentation_heads
class SegmentationNetworkTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.parameters(
(128, 2),
(128, 3),
(128, 4),
(256, 2),
(256, 3),
(256, 4),
)
def test_segmentation_network_creation(
self, input_size, level):
"""Test for creation of a segmentation network."""
num_classes = 10
inputs = np.random.rand(2, input_size, input_size, 3)
tf.keras.backend.set_image_data_format('channels_last')
backbone = backbones.ResNet(model_id=50)
decoder = fpn.FPN(
input_specs=backbone.output_specs, min_level=2, max_level=7)
head = segmentation_heads.SegmentationHead(num_classes, level=level)
model = segmentation_model.SegmentationModel(
backbone=backbone,
decoder=decoder,
head=head
)
logits = model(inputs)
self.assertAllEqual(
[2, input_size // (2**level), input_size // (2**level), num_classes],
logits.numpy().shape)
def test_serialize_deserialize(self):
"""Validate the network can be serialized and deserialized."""
num_classes = 3
backbone = backbones.ResNet(model_id=50)
decoder = fpn.FPN(
input_specs=backbone.output_specs, min_level=3, max_level=7)
head = segmentation_heads.SegmentationHead(num_classes, level=3)
model = segmentation_model.SegmentationModel(
backbone=backbone,
decoder=decoder,
head=head
)
config = model.get_config()
new_model = segmentation_model.SegmentationModel.from_config(config)
# Validate that the config can be forced to JSON.
_ = new_model.to_json()
# If the serialization was successful, the new config should match the old.
self.assertAllEqual(model.get_config(), new_model.get_config())
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -7,493,939,535,731,933,000 | 31.952941 | 79 | 0.69975 | false |
nesdis/djongo | tests/django_tests/tests/v22/tests/model_inheritance/tests.py | 2 | 19969 | import unittest
from operator import attrgetter
from django.core.exceptions import FieldError, ValidationError
from django.db import connection, models
from django.test import SimpleTestCase, TestCase
from django.test.utils import CaptureQueriesContext, isolate_apps
from django.utils.version import PY36
from .models import (
Base, Chef, CommonInfo, GrandChild, GrandParent, ItalianRestaurant,
MixinModel, ParkingLot, Place, Post, Restaurant, Student, SubBase,
Supplier, Title, Worker,
)
class ModelInheritanceTests(TestCase):
def test_abstract(self):
# The Student and Worker models both have 'name' and 'age' fields on
# them and inherit the __str__() method, just as with normal Python
# subclassing. This is useful if you want to factor out common
# information for programming purposes, but still completely
# independent separate models at the database level.
w1 = Worker.objects.create(name="Fred", age=35, job="Quarry worker")
Worker.objects.create(name="Barney", age=34, job="Quarry worker")
s = Student.objects.create(name="Pebbles", age=5, school_class="1B")
self.assertEqual(str(w1), "Worker Fred")
self.assertEqual(str(s), "Student Pebbles")
# The children inherit the Meta class of their parents (if they don't
# specify their own).
self.assertSequenceEqual(
Worker.objects.values("name"), [
{"name": "Barney"},
{"name": "Fred"},
],
)
# Since Student does not subclass CommonInfo's Meta, it has the effect
# of completely overriding it. So ordering by name doesn't take place
# for Students.
self.assertEqual(Student._meta.ordering, [])
# However, the CommonInfo class cannot be used as a normal model (it
# doesn't exist as a model).
with self.assertRaisesMessage(AttributeError, "'CommonInfo' has no attribute 'objects'"):
CommonInfo.objects.all()
def test_reverse_relation_for_different_hierarchy_tree(self):
# Even though p.supplier for a Place 'p' (a parent of a Supplier), a
# Restaurant object cannot access that reverse relation, since it's not
# part of the Place-Supplier Hierarchy.
self.assertQuerysetEqual(Place.objects.filter(supplier__name="foo"), [])
msg = (
"Cannot resolve keyword 'supplier' into field. Choices are: "
"address, chef, chef_id, id, italianrestaurant, lot, name, "
"place_ptr, place_ptr_id, provider, rating, serves_hot_dogs, serves_pizza"
)
with self.assertRaisesMessage(FieldError, msg):
Restaurant.objects.filter(supplier__name="foo")
def test_model_with_distinct_accessors(self):
# The Post model has distinct accessors for the Comment and Link models.
post = Post.objects.create(title="Lorem Ipsum")
post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True)
post.attached_link_set.create(
content="The Web framework for perfections with deadlines.",
url="http://www.djangoproject.com/"
)
# The Post model doesn't have an attribute called
# 'attached_%(class)s_set'.
msg = "'Post' object has no attribute 'attached_%(class)s_set'"
with self.assertRaisesMessage(AttributeError, msg):
getattr(post, "attached_%(class)s_set")
def test_model_with_distinct_related_query_name(self):
self.assertQuerysetEqual(Post.objects.filter(attached_model_inheritance_comments__is_spam=True), [])
# The Post model doesn't have a related query accessor based on
# related_name (attached_comment_set).
msg = "Cannot resolve keyword 'attached_comment_set' into field."
with self.assertRaisesMessage(FieldError, msg):
Post.objects.filter(attached_comment_set__is_spam=True)
def test_meta_fields_and_ordering(self):
# Make sure Restaurant and ItalianRestaurant have the right fields in
# the right order.
self.assertEqual(
[f.name for f in Restaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef"]
)
self.assertEqual(
[f.name for f in ItalianRestaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef", "restaurant_ptr", "serves_gnocchi"],
)
self.assertEqual(Restaurant._meta.ordering, ["-rating"])
def test_custompk_m2m(self):
b = Base.objects.create()
b.titles.add(Title.objects.create(title="foof"))
s = SubBase.objects.create(sub_id=b.id)
b = Base.objects.get(pk=s.id)
self.assertNotEqual(b.pk, s.pk)
# Low-level test for related_val
self.assertEqual(s.titles.related_val, (s.id,))
# Higher level test for correct query values (title foof not
# accidentally found).
self.assertQuerysetEqual(s.titles.all(), [])
def test_update_parent_filtering(self):
"""
Updating a field of a model subclass doesn't issue an UPDATE
query constrained by an inner query (#10399).
"""
supplier = Supplier.objects.create(
name='Central market',
address='610 some street',
)
# Capture the expected query in a database agnostic way
with CaptureQueriesContext(connection) as captured_queries:
Place.objects.filter(pk=supplier.pk).update(name=supplier.name)
expected_sql = captured_queries[0]['sql']
# Capture the queries executed when a subclassed model instance is saved.
with CaptureQueriesContext(connection) as captured_queries:
supplier.save(update_fields=('name',))
for query in captured_queries:
sql = query['sql']
if 'UPDATE' in sql:
self.assertEqual(expected_sql, sql)
def test_create_child_no_update(self):
"""Creating a child with non-abstract parents only issues INSERTs."""
def a():
GrandChild.objects.create(
email='[email protected]',
first_name='grand',
last_name='parent',
)
def b():
GrandChild().save()
for i, test in enumerate([a, b]):
with self.subTest(i=i), self.assertNumQueries(4), CaptureQueriesContext(connection) as queries:
test()
for query in queries:
sql = query['sql']
self.assertIn('INSERT INTO', sql, sql)
def test_eq(self):
# Equality doesn't transfer in multitable inheritance.
self.assertNotEqual(Place(id=1), Restaurant(id=1))
self.assertNotEqual(Restaurant(id=1), Place(id=1))
def test_mixin_init(self):
m = MixinModel()
self.assertEqual(m.other_attr, 1)
@isolate_apps('model_inheritance')
def test_abstract_parent_link(self):
class A(models.Model):
pass
class B(A):
a = models.OneToOneField('A', parent_link=True, on_delete=models.CASCADE)
class Meta:
abstract = True
class C(B):
pass
self.assertIs(C._meta.parents[A], C._meta.get_field('a'))
@unittest.skipUnless(PY36, 'init_subclass is new in Python 3.6')
@isolate_apps('model_inheritance')
def test_init_subclass(self):
saved_kwargs = {}
class A(models.Model):
def __init_subclass__(cls, **kwargs):
super().__init_subclass__()
saved_kwargs.update(kwargs)
kwargs = {'x': 1, 'y': 2, 'z': 3}
class B(A, **kwargs):
pass
self.assertEqual(saved_kwargs, kwargs)
@unittest.skipUnless(PY36, '__set_name__ is new in Python 3.6')
@isolate_apps('model_inheritance')
def test_set_name(self):
class ClassAttr:
called = None
def __set_name__(self_, owner, name):
self.assertIsNone(self_.called)
self_.called = (owner, name)
class A(models.Model):
attr = ClassAttr()
self.assertEqual(A.attr.called, (A, 'attr'))
class ModelInheritanceDataTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.restaurant = Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2,
)
chef = Chef.objects.create(name="Albert")
cls.italian_restaurant = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=chef,
)
def test_filter_inherited_model(self):
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_update_inherited_model(self):
self.italian_restaurant.address = "1234 W. Elm"
self.italian_restaurant.save()
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_parent_fields_available_for_filtering_in_child_model(self):
# Parent fields can be used directly in filters on the child model.
self.assertQuerysetEqual(
Restaurant.objects.filter(name="Demon Dogs"), [
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
def test_filter_on_parent_returns_object_of_parent_type(self):
# Filters against the parent model return objects of the parent's type.
p = Place.objects.get(name="Demon Dogs")
self.assertIs(type(p), Place)
def test_parent_child_one_to_one_link(self):
# Since the parent and child are linked by an automatically created
# OneToOneField, you can get from the parent to the child by using the
# child's name.
self.assertEqual(
Place.objects.get(name="Demon Dogs").restaurant,
Restaurant.objects.get(name="Demon Dogs")
)
self.assertEqual(
Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
self.assertEqual(
Restaurant.objects.get(name="Ristorante Miron").italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
def test_parent_child_one_to_one_link_on_nonrelated_objects(self):
# This won't work because the Demon Dogs restaurant is not an Italian
# restaurant.
with self.assertRaises(ItalianRestaurant.DoesNotExist):
Place.objects.get(name="Demon Dogs").restaurant.italianrestaurant
def test_inherited_does_not_exist_exception(self):
# An ItalianRestaurant which does not exist is also a Place which does
# not exist.
with self.assertRaises(Place.DoesNotExist):
ItalianRestaurant.objects.get(name="The Noodle Void")
def test_inherited_multiple_objects_returned_exception(self):
# MultipleObjectsReturned is also inherited.
with self.assertRaises(Place.MultipleObjectsReturned):
Restaurant.objects.get()
def test_related_objects_for_inherited_models(self):
# Related objects work just as they normally do.
s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St")
        s1.customers.set([self.restaurant, self.italian_restaurant])
s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St")
s2.customers.set([self.italian_restaurant])
# This won't work because the Place we select is not a Restaurant (it's
# a Supplier).
p = Place.objects.get(name="Joe's Chickens")
with self.assertRaises(Restaurant.DoesNotExist):
p.restaurant
self.assertEqual(p.supplier, s1)
self.assertQuerysetEqual(
self.italian_restaurant.provider.order_by("-name"), [
"Luigi's Pasta",
"Joe's Chickens"
],
attrgetter("name")
)
self.assertQuerysetEqual(
Restaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
],
attrgetter("name"),
)
ParkingLot.objects.create(
name="Main St", address="111 Main St", main_site=s1
)
ParkingLot.objects.create(
name="Well Lit", address="124 Sesame St", main_site=self.italian_restaurant
)
self.assertEqual(
Restaurant.objects.get(lot__name="Well Lit").name,
"Ristorante Miron"
)
def test_update_works_on_parent_and_child_models_at_once(self):
# The update() command can update fields in parent and child classes at
# once (although it executed multiple SQL queries to do so).
rows = Restaurant.objects.filter(
serves_hot_dogs=True, name__contains="D"
).update(
name="Demon Puppies", serves_hot_dogs=False
)
self.assertEqual(rows, 1)
r1 = Restaurant.objects.get(pk=self.restaurant.pk)
self.assertFalse(r1.serves_hot_dogs)
self.assertEqual(r1.name, "Demon Puppies")
def test_values_works_on_parent_model_fields(self):
# The values() command also works on fields from parent models.
self.assertSequenceEqual(
ItalianRestaurant.objects.values("name", "rating"), [
{"rating": 4, "name": "Ristorante Miron"},
],
)
def test_select_related_works_on_parent_model_fields(self):
# select_related works with fields from the parent object as if they
# were a normal part of the model.
self.assertNumQueries(
2, lambda: ItalianRestaurant.objects.all()[0].chef
)
self.assertNumQueries(
1, lambda: ItalianRestaurant.objects.select_related("chef")[0].chef
)
def test_select_related_defer(self):
"""
#23370 - Should be able to defer child fields when using
select_related() from parent to child.
"""
qs = (Restaurant.objects.select_related("italianrestaurant")
.defer("italianrestaurant__serves_gnocchi").order_by("rating"))
# The field was actually deferred
with self.assertNumQueries(2):
objs = list(qs.all())
self.assertTrue(objs[1].italianrestaurant.serves_gnocchi)
# Model fields where assigned correct values
self.assertEqual(qs[0].name, 'Demon Dogs')
self.assertEqual(qs[0].rating, 2)
self.assertEqual(qs[1].italianrestaurant.name, 'Ristorante Miron')
self.assertEqual(qs[1].italianrestaurant.rating, 4)
def test_parent_cache_reuse(self):
place = Place.objects.create()
GrandChild.objects.create(place=place)
grand_parent = GrandParent.objects.latest('pk')
with self.assertNumQueries(1):
self.assertEqual(grand_parent.place, place)
parent = grand_parent.parent
with self.assertNumQueries(0):
self.assertEqual(parent.place, place)
child = parent.child
with self.assertNumQueries(0):
self.assertEqual(child.place, place)
grandchild = child.grandchild
with self.assertNumQueries(0):
self.assertEqual(grandchild.place, place)
def test_update_query_counts(self):
"""
Update queries do not generate unnecessary queries (#18304).
"""
with self.assertNumQueries(3):
self.italian_restaurant.save()
def test_filter_inherited_on_null(self):
# Refs #12567
Supplier.objects.create(
name="Central market",
address="610 some street",
)
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=False), [
"Central market",
],
attrgetter("name")
)
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=True).order_by("name"), [
"Demon Dogs",
"Ristorante Miron",
],
attrgetter("name")
)
def test_exclude_inherited_on_null(self):
# Refs #12567
Supplier.objects.create(
name="Central market",
address="610 some street",
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=False).order_by("name"), [
"Demon Dogs",
"Ristorante Miron",
],
attrgetter("name")
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=True), [
"Central market",
],
attrgetter("name")
)
@isolate_apps('model_inheritance', 'model_inheritance.tests')
class InheritanceSameModelNameTests(SimpleTestCase):
def test_abstract_fk_related_name(self):
related_name = '%(app_label)s_%(class)s_references'
class Referenced(models.Model):
class Meta:
app_label = 'model_inheritance'
class AbstractReferent(models.Model):
reference = models.ForeignKey(Referenced, models.CASCADE, related_name=related_name)
class Meta:
app_label = 'model_inheritance'
abstract = True
class Referent(AbstractReferent):
class Meta:
app_label = 'model_inheritance'
LocalReferent = Referent
class Referent(AbstractReferent):
class Meta:
app_label = 'tests'
ForeignReferent = Referent
self.assertFalse(hasattr(Referenced, related_name))
self.assertIs(Referenced.model_inheritance_referent_references.field.model, LocalReferent)
self.assertIs(Referenced.tests_referent_references.field.model, ForeignReferent)
class InheritanceUniqueTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.grand_parent = GrandParent.objects.create(
email='[email protected]',
first_name='grand',
last_name='parent',
)
def test_unique(self):
grand_child = GrandChild(
email=self.grand_parent.email,
first_name='grand',
last_name='child',
)
msg = 'Grand parent with this Email already exists.'
with self.assertRaisesMessage(ValidationError, msg):
grand_child.validate_unique()
def test_unique_together(self):
grand_child = GrandChild(
email='[email protected]',
first_name=self.grand_parent.first_name,
last_name=self.grand_parent.last_name,
)
msg = 'Grand parent with this First name and Last name already exists.'
with self.assertRaisesMessage(ValidationError, msg):
grand_child.validate_unique()
| agpl-3.0 | -4,056,343,769,675,050,000 | 36.820076 | 108 | 0.604437 | false |
yinzishao/programming | offer_11.py | 1 | 1126 | # -*- coding:utf-8 -*-
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
"""
链表中倒数第k个结点
题目描述
输入一个链表,输出该链表中倒数第k个结点。
特殊情况 k=0 k 超过长度 head 为空
思路:
如果我们在遍历时维持两个指针,第一个指针从链表的头指针开始遍历,在第k-1步之前,第二个指针保持不动;
在第k-1步开始,第二个指针也开始从链表的头指针开始遍历。
由于两个指针的距离保持在k-1,当第一个(走在前面的)指针到达链表的尾结点时,第二个指针(走在后面的)指针正好是倒数第k个结点。
"""
class Solution:
def FindKthToTail(self, head, k):
# write code here
        pre, aft = head, head
        if head is None:
            return head
        if k == 0:
            return None
        for i in range(k - 1):
            if aft.next is None:
                return None
            aft = aft.next
        while aft.next is not None:
            aft = aft.next
            pre = pre.next
return pre | gpl-3.0 | -1,569,520,840,452,344,800 | 20.914286 | 64 | 0.55483 | false |
chhsiao90/cheat-ext | cheat_ext/utils.py | 1 | 1503 | import re
import os
_GITHUB_URL = "https://github.com"
STATE_UNLINK = "unlink"
STATE_CONFLICT = "conflict"
STATE_LINKED = "linked"
def get_github_url(repo):
return _GITHUB_URL + "/" + repo + ".git"
def get_cheat_path():
return os.path.join(
os.path.expanduser("~"),
".cheat")
def get_ext_path():
return os.path.join(
get_cheat_path(), ".ext")
def get_sheet_path(repo):
return os.path.join(
get_ext_path(),
repo.replace("/", "_"))
def get_available_sheets_at(sheet_dir):
def is_available_sheet(sheet):
return (
not os.path.isdir(os.path.join(sheet_dir, sheet)) and
re.match(r"^[a-zA-Z-_]+$", sheet))
sheets = list(filter(is_available_sheet, os.listdir(sheet_dir)))
sheets.sort()
return sheets
def get_sheets_with_state(cheat_dir, sheet_dir, sheets):
def append_state(sheet):
cheat_path = os.path.join(cheat_dir, sheet)
if not os.path.exists(cheat_path):
return (sheet, STATE_UNLINK)
elif (os.path.islink(cheat_path) and
os.readlink(cheat_path) == os.path.join(sheet_dir, sheet)):
return (sheet, STATE_LINKED)
else:
return (sheet, STATE_CONFLICT)
return list(map(append_state, sheets))
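# Usage sketch (editor's addition; the repository name and the resulting sheet
# names below are purely illustrative):
#   sheet_dir = get_sheet_path("someuser/some-sheets")
#   sheets = get_available_sheets_at(sheet_dir)
#   get_sheets_with_state(get_cheat_path(), sheet_dir, sheets)
#   # -> [("git", "unlink"), ("tar", "linked"), ...]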
def filter_by_state(match, state_sheets):
def filter_by_state_function(state_sheet):
_, state = state_sheet
return state == match
return filter(filter_by_state_function, state_sheets)
| mit | -2,809,685,477,133,944,300 | 24.05 | 73 | 0.60479 | false |
RTHMaK/RPGOne | deep_qa-master/deep_qa/layers/attention/attention.py | 1 | 3467 | from copy import deepcopy
from typing import Any, Dict
from keras import backend as K
from overrides import overrides
from ..masked_layer import MaskedLayer
from ...common.params import get_choice_with_default
from ...tensors.masked_operations import masked_softmax
from ...tensors.similarity_functions import similarity_functions
class Attention(MaskedLayer):
"""
This Layer takes two inputs: a vector and a matrix. We compute the
similarity between the vector and each row in the matrix, and then perform
a softmax over rows using those computed similarities. We handle masking
properly for masked rows in the matrix, though we ignore any masking on
the vector.
By default similarity is computed with a dot product, but you can
alternatively use a parameterized similarity function if you wish.
Inputs:
- vector: shape ``(batch_size, embedding_dim)``, mask is ignored if provided
- matrix: shape ``(batch_size, num_rows, embedding_dim)``, with mask ``(batch_size, num_rows)``
Output:
- attention: shape ``(batch_size, num_rows)``, no mask (masked input rows have value 0 in the
output)
Parameters
----------
similarity_function_params: Dict[str, Any], optional (default={})
These parameters get passed to a similarity function (see
:mod:`deep_qa.tensors.similarity_functions` for more info on what's acceptable). The
default similarity function with no parameters is a simple dot product.
"""
def __init__(self, similarity_function: Dict[str, Any]=None, **kwargs):
super(Attention, self).__init__(**kwargs)
self.similarity_function_params = deepcopy(similarity_function)
if similarity_function is None:
similarity_function = {}
sim_function_choice = get_choice_with_default(similarity_function,
'type',
list(similarity_functions.keys()))
similarity_function['name'] = self.name + '_similarity_function'
self.similarity_function = similarity_functions[sim_function_choice](**similarity_function)
@overrides
def build(self, input_shape):
tensor_1_dim = input_shape[0][-1]
tensor_2_dim = input_shape[1][-1]
self.trainable_weights = self.similarity_function.initialize_weights(tensor_1_dim, tensor_2_dim)
super(Attention, self).build(input_shape)
@overrides
def compute_mask(self, inputs, mask=None):
# pylint: disable=unused-argument
# We do not need a mask beyond this layer.
return None
@overrides
def compute_output_shape(self, input_shapes):
return (input_shapes[1][0], input_shapes[1][1])
@overrides
def call(self, inputs, mask=None):
vector, matrix = inputs
if mask is None:
matrix_mask = None
else:
matrix_mask = mask[1]
num_rows = K.int_shape(matrix)[1]
tiled_vector = K.repeat_elements(K.expand_dims(vector, axis=1), num_rows, axis=1)
similarities = self.similarity_function.compute_similarity(tiled_vector, matrix)
return masked_softmax(similarities, matrix_mask)
@overrides
def get_config(self):
base_config = super(Attention, self).get_config()
config = {'similarity_function': self.similarity_function_params}
config.update(base_config)
return config
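# --- Editor's addition: a minimal usage sketch, not part of the original deep_qa
# source. It assumes the deep_qa package and a compatible Keras version are
# installed; the tensor names and dimensions below are illustrative only.
if __name__ == '__main__':
    from keras.layers import Input
    from keras.models import Model
    embedding_dim, num_rows = 8, 5
    query_vector = Input(shape=(embedding_dim,))             # (batch_size, embedding_dim)
    context_matrix = Input(shape=(num_rows, embedding_dim))  # (batch_size, num_rows, embedding_dim)
    # With no similarity_function given, the default dot-product similarity is used.
    attention_over_rows = Attention()([query_vector, context_matrix])  # (batch_size, num_rows)
    model = Model(inputs=[query_vector, context_matrix], outputs=attention_over_rows)
    model.summary()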
| apache-2.0 | 5,103,416,499,689,313,000 | 39.313953 | 104 | 0.657341 | false |
canardleteer/pydisque | pydisque/client.py | 1 | 16532 | """Pydisque makes Disque easy to access in python."""
import redis
from redis.exceptions import ConnectionError
from functools import wraps
try:
# Python 3
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
import logging
logger = logging.getLogger(__name__)
class Job(object):
"""Represents a Disque Job."""
def __init__(self, id, queue_name, payload):
"""Initialize a job."""
self.id = id
self.queue_name = queue_name
self.payload = payload
def __repr__(self):
"""Make a Job easy to read."""
return '<Job id:%s queue_name:%s>' % (self.id, self.queue_name)
class Node(object):
"""Represents a Disque Node via host and port."""
def __init__(self, node_id, host, port, connection):
"""
        Initialize the Disque Node.
:param node_id:
:param host:
:param port:
:param connection: redis.Redis connection
:returns:
"""
self.node_id = node_id
self.host = host
self.port = port
self.connection = connection
def __repr__(self):
"""Make Node easy to read."""
return '<Node %s:%s>' % (self.host, self.port)
class retry(object):
"""retry utility object."""
def __init__(self, retry_count=2):
"""Initialize retry utility object."""
self.retry_count = retry_count
def __call__(self, fn):
"""Function wrapper."""
@wraps(fn)
def wrapped_f(*args, **kwargs):
c = 0
while c <= self.retry_count:
try:
return fn(*args, **kwargs)
except:
logging.critical("retrying because of this exception - %s",
c)
logging.exception("exception to retry ")
if c == self.retry_count:
raise
c += 1
return wrapped_f
class Client(object):
"""
Client is the Disque Client.
    You can pass in a list of nodes; the client will try to connect to the
    first one, and if that fails it will try the second, and so forth.
:Example:
>>> client = Client(['localhost:7711', 'localhost:7712'])
>>> client.connect()
"""
def __init__(self, nodes=None):
"""Initalize a client to the specified nodes."""
if nodes is None:
nodes = ['localhost:7711']
self.nodes = {}
for n in nodes:
self.nodes[n] = None
self.connected_node = None
def connect(self):
"""
Connect to one of the Disque nodes.
        You can get the current connection via the connected_node property.
:returns: nothing
"""
self.connected_node = None
for i, node in self.nodes.items():
host, port = i.split(':')
port = int(port)
redis_client = redis.Redis(host, port)
try:
ret = redis_client.execute_command('HELLO')
format_version, node_id = ret[0], ret[1]
others = ret[2:]
self.nodes[i] = Node(node_id, host, port, redis_client)
self.connected_node = self.nodes[i]
except redis.exceptions.ConnectionError:
pass
if not self.connected_node:
            raise Exception('could not connect to any nodes')
logger.info("connected to node %s" % self.connected_node)
def get_connection(self):
"""
        Return the connection of the currently connected node.
:rtype: redis.Redis
"""
return self.connected_node.connection
@retry()
def execute_command(self, *args, **kwargs):
"""Execute a command on the connected server."""
try:
return self.get_connection().execute_command(*args, **kwargs)
except ConnectionError as e:
logger.warn('trying to reconnect')
self.connect()
logger.warn('connected')
raise
def _grouper(self, iterable, n, fillvalue=None):
"""Collect data into fixed-length chunks or blocks."""
args = [iter(iterable)] * n
return zip_longest(fillvalue=fillvalue, *args)
def info(self):
"""
Return server information.
INFO
:returns: server info
"""
return self.execute_command("INFO")
def add_job(self, queue_name, job, timeout=200, replicate=None, delay=None,
retry=None, ttl=None, maxlen=None, async=None):
"""
Add a job to a queue.
ADDJOB queue_name job <ms-timeout> [REPLICATE <count>] [DELAY <sec>]
[RETRY <sec>] [TTL <sec>] [MAXLEN <count>] [ASYNC]
:param queue_name: is the name of the queue, any string, basically.
:param job: is a string representing the job.
:param timeout: is the command timeout in milliseconds.
:param replicate: count is the number of nodes the job should be
replicated to.
:param delay: sec is the number of seconds that should elapse
before the job is queued by any server.
:param retry: sec period after which, if no ACK is received, the
job is put again into the queue for delivery. If RETRY is 0,
            the job has at-most-once delivery semantics.
:param ttl: sec is the max job life in seconds. After this time,
the job is deleted even if it was not successfully delivered.
:param maxlen: count specifies that if there are already count
messages queued for the specified queue name, the message is
refused and an error reported to the client.
:param async: asks the server to let the command return ASAP and
replicate the job to other nodes in the background. The job
gets queued ASAP, while normally the job is put into the queue
only when the client gets a positive reply.
:returns: job_id
"""
command = ['ADDJOB', queue_name, job, timeout]
if replicate:
command += ['REPLICATE', replicate]
if delay:
command += ['DELAY', delay]
if retry:
command += ['RETRY', retry]
if ttl:
command += ['TTL', ttl]
if maxlen:
command += ['MAXLEN', maxlen]
if async:
command += ['ASYNC']
# TODO(canardleteer): we need to handle "-PAUSE" messages more
# appropriately, for now it's up to the person using the library
# to handle a generic ResponseError on their own.
logger.debug("sending job - %s", command)
job_id = self.execute_command(*command)
logger.debug("sent job - %s", command)
logger.debug("job_id: %s " % job_id)
return job_id
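    # Usage sketch (editor's addition; the queue name, payload and option
    # values below are illustrative, not part of the pydisque source):
    #   job_id = client.add_job('orders', json.dumps({'sku': 42}), timeout=100,
    #                           replicate=2, retry=30, ttl=3600)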
def get_job(self, queues, timeout=None, count=None, nohang=False, withcounters=False):
"""
Return some number of jobs from specified queues.
GETJOB [NOHANG] [TIMEOUT <ms-timeout>] [COUNT <count>] [WITHCOUNTERS] FROM
queue1 queue2 ... queueN
:param queues: name of queues
:returns: list of tuple(job_id, queue_name, job), tuple(job_id, queue_name, job, nacks, additional_deliveries) or empty list
:rtype: list
"""
assert queues
command = ['GETJOB']
if nohang:
command += ['NOHANG']
if timeout:
command += ['TIMEOUT', timeout]
if count:
command += ['COUNT', count]
if withcounters:
command += ['WITHCOUNTERS']
command += ['FROM'] + queues
results = self.execute_command(*command)
if not results:
return []
if withcounters:
return [(job_id, queue_name, job, nacks, additional_deliveries) for
job_id, queue_name, job, _, nacks, _, additional_deliveries in results]
else:
return [(job_id, queue_name, job) for
job_id, queue_name, job in results]
def ack_job(self, *job_ids):
"""
Acknowledge the execution of one or more jobs via job IDs.
ACKJOB jobid1 jobid2 ... jobidN
:param job_ids: list of job_ids
"""
self.execute_command('ACKJOB', *job_ids)
def nack_job(self, *job_ids):
"""
Acknowledge the failure of one or more jobs via job IDs.
NACK jobid1 jobid2 ... jobidN
:param job_ids: list of job_ids
"""
self.execute_command('NACK', *job_ids)
def fast_ack(self, *job_ids):
"""
Perform a best effort cluster wide deletion of the specified job IDs.
FASTACK jobid1 jobid2 ... jobidN
:param job_ids:
"""
self.execute_command('FASTACK', *job_ids)
def working(self, job_id):
"""
Signal Disque to postpone the next time it will deliver the job again.
WORKING <jobid>
:param job_id: name of the job still being worked on
        :returns: the number of seconds you (likely) postponed the
            message visibility for other workers
"""
return self.execute_command('WORKING', job_id)
def qlen(self, queue_name):
"""
Return the length of the named queue.
QLEN <qname>
:param queue_name: name of the queue
:returns: length of the queue
"""
return self.execute_command('QLEN', queue_name)
# TODO (canardleteer): a QueueStatus object may be the best way to do this
# TODO (canardleteer): return_dict should probably be True by default, but
# i don't want to break anyones code
def qstat(self, queue_name, return_dict=False):
"""
        Return the status of the queue.
        Early support for / testing of the QSTAT command in Disque.
        QSTAT <qname>
        Returns produced ... consumed ... idle ... sources [...] ctime ...
"""
rtn = self.execute_command('QSTAT', queue_name)
if return_dict:
grouped = self._grouper(rtn, 2)
rtn = dict((a, b) for a, b in grouped)
return rtn
def qpeek(self, queue_name, count):
"""
Return, without consuming from queue, count jobs.
If count is positive the specified number of jobs are
returned from the oldest to the newest (in the same
best-effort FIFO order as GETJOB). If count is negative
        the command changes behavior and shows the count newest jobs,
        from the newest to the oldest.
QPEEK <qname> <count>
:param queue_name: name of the queue
:param count:
"""
return self.execute_command("QPEEK", queue_name, count)
def enqueue(self, *job_ids):
"""
Queue jobs if not already queued.
:param job_ids:
"""
return self.execute_command("ENQUEUE", *job_ids)
def dequeue(self, *job_ids):
"""
Remove the job from the queue.
:param job_ids: list of job_ids
"""
return self.execute_command("DEQUEUE", *job_ids)
def del_job(self, *job_ids):
"""
Completely delete a job from a node.
Note that this is similar to FASTACK, but limited to a
single node since no DELJOB cluster bus message is sent
to other nodes.
:param job_ids:
"""
return self.execute_command("DELJOB", *job_ids)
# TODO (canardleteer): a JobStatus object may be the best for this,
# but I think SHOW is going to change to SHOWJOB
def show(self, job_id, return_dict=False):
"""
Describe the job.
:param job_id:
"""
rtn = self.execute_command('SHOW', job_id)
if return_dict:
grouped = self._grouper(rtn, 2)
rtn = dict((a, b) for a, b in grouped)
return rtn
def pause(self, queue_name, kw_in=None, kw_out=None, kw_all=None,
kw_none=None, kw_state=None, kw_bcast=None):
"""
Pause a queue.
Unfortunately, the PAUSE keywords are mostly reserved words in Python,
so I've been a little creative in the function variable names. Open
to suggestions to change it (canardleteer)
:param queue_name: The job queue we are modifying.
:param kw_in: pause the queue in input.
:param kw_out: pause the queue in output.
:param kw_all: pause the queue in input and output (same as specifying
both the in and out options).
:param kw_none: clear the paused state in input and output.
:param kw_state: just report the current queue state.
:param kw_bcast: send a PAUSE command to all the reachable nodes of
the cluster to set the same queue in the other nodes
to the same state.
"""
command = ["PAUSE", queue_name]
if kw_in:
command += ["in"]
if kw_out:
command += ["out"]
if kw_all:
command += ["all"]
if kw_none:
command += ["none"]
if kw_state:
command += ["state"]
if kw_bcast:
command += ["bcast"]
return self.execute_command(*command)
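    # Usage sketch (editor's addition; the queue name is illustrative):
    #   client.pause('orders', kw_all=True, kw_bcast=True)    # pause in and out, cluster wide
    #   client.pause('orders', kw_state=True)                 # only report the current state
    #   client.pause('orders', kw_none=True, kw_bcast=True)   # clear the paused state everywhere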
def qscan(self, cursor=0, count=None, busyloop=None, minlen=None,
maxlen=None, importrate=None):
"""
Iterate all the existing queues in the local node.
:param count: An hint about how much work to do per iteration.
:param busyloop: Block and return all the elements in a busy loop.
:param minlen: Don't return elements with less than count jobs queued.
:param maxlen: Don't return elements with more than count jobs queued.
:param importrate: Only return elements with an job import rate
(from other nodes) >= rate.
"""
command = ["QSCAN", cursor]
if count:
command += ["COUNT", count]
if busyloop:
command += ["BUSYLOOP"]
if minlen:
command += ["MINLEN", minlen]
if maxlen:
command += ["MAXLEN", maxlen]
if importrate:
command += ["IMPORTRATE", importrate]
return self.execute_command(*command)
def jscan(self, cursor=0, count=None, busyloop=None, queue=None,
state=None, reply=None):
"""Iterate all the existing jobs in the local node.
:param count: An hint about how much work to do per iteration.
:param busyloop: Block and return all the elements in a busy loop.
:param queue: Return only jobs in the specified queue.
:param state: Must be a list - Return jobs in the specified state.
Can be used multiple times for a logic OR.
:param reply: None or string {"all", "id"} - Job reply type. Type can
be all or id. Default is to report just the job ID. If all is
specified the full job state is returned like for the SHOW command.
"""
command = ["JSCAN", cursor]
if count:
command += ["COUNT", count]
if busyloop:
command += ["BUSYLOOP"]
if queue:
command += ["QUEUE", queue]
if type(state) is list:
for s in state:
command += ["STATE", s]
if reply:
command += ["REPLY", reply]
return self.execute_command(*command)
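    # Usage sketch (editor's addition; the queue name and states are illustrative):
    #   client.jscan(queue='orders', state=['queued', 'active'], reply='id')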
def hello(self):
"""
        Return the hello format version, this node's ID, and, for each known node, its ID, IP address, port and priority (lower is better, meaning the node is more available).
        Clients should use this as a handshake command when connecting to a Disque node.
HELLO
:returns: [<hello format version>, <this node ID>, [<all the nodes IDs, IP addresses, ports, and priority>, ...]
"""
return self.execute_command("HELLO")
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
c = Client(['localhost:7712', 'localhost:7711'])
c.connect()
import json
job = json.dumps(["hello", "1234"])
logger.info(c.add_job("test", job))
jobs = c.get_job(['test'], timeout=5)
for queue_name, job_id, payload in jobs:
logger.info(job_id)
c.ack_job(job_id)
# while True:
# jobs = c.get_job(['test'], timeout=5)
| mit | -4,344,732,607,685,367,300 | 30.792308 | 150 | 0.567566 | false |
Trust-Code/trust-addons | crm_multi_call/models/multi_call.py | 1 | 2414 | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2015 Trustcode - www.trustcode.com.br #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from openerp import api, models, fields
class wizard(models.TransientModel):
_name = 'multi.call'
res_user_id = fields.Many2many('res.users', string="Atendentes")
@api.multi
def create_calls(self):
customers = self._context.get('active_ids')
customers_ids = self.env['res.partner'].browse(customers)
cpu = len(customers_ids) / len(self.res_user_id)
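        # 'cpu' is the number of customers per user: each selected attendant
        # gets roughly this many phone calls assigned in the loop below.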
indice_usuario = 0
somador = 0
for c in customers_ids:
crm_phonecall = self.env['crm.phonecall']
crm_phonecall.create({
'name': c.category_id.name,
'partner_phone': '%s-%s-%s-%s' % (c.phone, c.mobile,
c.x_phone1, c.fax),
'partner_id': c.id,
'user_id': self.res_user_id[indice_usuario].id
})
somador += 1
if somador >= cpu and indice_usuario < len(self.res_user_id) - 1:
indice_usuario += 1
somador = 0
| agpl-3.0 | -1,911,900,117,082,598,700 | 48.265306 | 79 | 0.444905 | false |
davy39/eric | Plugins/WizardPlugins/QRegExpWizard/Ui_QRegExpWizardRepeatDialog.py | 1 | 7059 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './Plugins/WizardPlugins/QRegExpWizard/QRegExpWizardRepeatDialog.ui'
#
# Created: Tue Nov 18 17:53:58 2014
# by: PyQt5 UI code generator 5.3.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_QRegExpWizardRepeatDialog(object):
def setupUi(self, QRegExpWizardRepeatDialog):
QRegExpWizardRepeatDialog.setObjectName("QRegExpWizardRepeatDialog")
QRegExpWizardRepeatDialog.resize(331, 197)
QRegExpWizardRepeatDialog.setSizeGripEnabled(True)
self.vboxlayout = QtWidgets.QVBoxLayout(QRegExpWizardRepeatDialog)
self.vboxlayout.setObjectName("vboxlayout")
self.groupBox = QtWidgets.QGroupBox(QRegExpWizardRepeatDialog)
self.groupBox.setTitle("")
self.groupBox.setFlat(True)
self.groupBox.setObjectName("groupBox")
self.gridlayout = QtWidgets.QGridLayout(self.groupBox)
self.gridlayout.setContentsMargins(0, 0, 0, 0)
self.gridlayout.setObjectName("gridlayout")
self.textLabel1_6 = QtWidgets.QLabel(self.groupBox)
self.textLabel1_6.setObjectName("textLabel1_6")
self.gridlayout.addWidget(self.textLabel1_6, 2, 2, 1, 1)
self.textLabel1_7 = QtWidgets.QLabel(self.groupBox)
self.textLabel1_7.setObjectName("textLabel1_7")
self.gridlayout.addWidget(self.textLabel1_7, 3, 2, 1, 1)
self.textLabel1_5 = QtWidgets.QLabel(self.groupBox)
self.textLabel1_5.setObjectName("textLabel1_5")
self.gridlayout.addWidget(self.textLabel1_5, 1, 2, 1, 1)
self.lowerSpin = QtWidgets.QSpinBox(self.groupBox)
self.lowerSpin.setEnabled(False)
self.lowerSpin.setAlignment(QtCore.Qt.AlignRight)
self.lowerSpin.setProperty("value", 1)
self.lowerSpin.setObjectName("lowerSpin")
self.gridlayout.addWidget(self.lowerSpin, 4, 1, 1, 1)
self.upperSpin = QtWidgets.QSpinBox(self.groupBox)
self.upperSpin.setEnabled(False)
self.upperSpin.setAlignment(QtCore.Qt.AlignRight)
self.upperSpin.setProperty("value", 1)
self.upperSpin.setObjectName("upperSpin")
self.gridlayout.addWidget(self.upperSpin, 4, 3, 1, 1)
self.textLabel6 = QtWidgets.QLabel(self.groupBox)
self.textLabel6.setObjectName("textLabel6")
self.gridlayout.addWidget(self.textLabel6, 4, 2, 1, 1)
self.betweenButton = QtWidgets.QRadioButton(self.groupBox)
self.betweenButton.setObjectName("betweenButton")
self.gridlayout.addWidget(self.betweenButton, 4, 0, 1, 1)
self.exactSpin = QtWidgets.QSpinBox(self.groupBox)
self.exactSpin.setEnabled(False)
self.exactSpin.setAlignment(QtCore.Qt.AlignRight)
self.exactSpin.setProperty("value", 1)
self.exactSpin.setObjectName("exactSpin")
self.gridlayout.addWidget(self.exactSpin, 3, 1, 1, 1)
self.exactButton = QtWidgets.QRadioButton(self.groupBox)
self.exactButton.setObjectName("exactButton")
self.gridlayout.addWidget(self.exactButton, 3, 0, 1, 1)
self.maxSpin = QtWidgets.QSpinBox(self.groupBox)
self.maxSpin.setEnabled(False)
self.maxSpin.setAlignment(QtCore.Qt.AlignRight)
self.maxSpin.setProperty("value", 1)
self.maxSpin.setObjectName("maxSpin")
self.gridlayout.addWidget(self.maxSpin, 2, 1, 1, 1)
self.maxButton = QtWidgets.QRadioButton(self.groupBox)
self.maxButton.setObjectName("maxButton")
self.gridlayout.addWidget(self.maxButton, 2, 0, 1, 1)
self.minButton = QtWidgets.QRadioButton(self.groupBox)
self.minButton.setObjectName("minButton")
self.gridlayout.addWidget(self.minButton, 1, 0, 1, 1)
self.minSpin = QtWidgets.QSpinBox(self.groupBox)
self.minSpin.setEnabled(False)
self.minSpin.setAlignment(QtCore.Qt.AlignRight)
self.minSpin.setProperty("value", 1)
self.minSpin.setObjectName("minSpin")
self.gridlayout.addWidget(self.minSpin, 1, 1, 1, 1)
self.unlimitedButton = QtWidgets.QRadioButton(self.groupBox)
self.unlimitedButton.setObjectName("unlimitedButton")
self.gridlayout.addWidget(self.unlimitedButton, 0, 0, 1, 4)
self.vboxlayout.addWidget(self.groupBox)
self.buttonBox = QtWidgets.QDialogButtonBox(QRegExpWizardRepeatDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.vboxlayout.addWidget(self.buttonBox)
self.retranslateUi(QRegExpWizardRepeatDialog)
self.minButton.toggled['bool'].connect(self.minSpin.setEnabled)
self.maxButton.toggled['bool'].connect(self.maxSpin.setEnabled)
self.exactButton.toggled['bool'].connect(self.exactSpin.setEnabled)
self.betweenButton.toggled['bool'].connect(self.lowerSpin.setEnabled)
self.betweenButton.toggled['bool'].connect(self.upperSpin.setEnabled)
self.buttonBox.accepted.connect(QRegExpWizardRepeatDialog.accept)
self.buttonBox.rejected.connect(QRegExpWizardRepeatDialog.reject)
QtCore.QMetaObject.connectSlotsByName(QRegExpWizardRepeatDialog)
QRegExpWizardRepeatDialog.setTabOrder(self.unlimitedButton, self.minButton)
QRegExpWizardRepeatDialog.setTabOrder(self.minButton, self.minSpin)
QRegExpWizardRepeatDialog.setTabOrder(self.minSpin, self.maxButton)
QRegExpWizardRepeatDialog.setTabOrder(self.maxButton, self.maxSpin)
QRegExpWizardRepeatDialog.setTabOrder(self.maxSpin, self.exactButton)
QRegExpWizardRepeatDialog.setTabOrder(self.exactButton, self.exactSpin)
QRegExpWizardRepeatDialog.setTabOrder(self.exactSpin, self.betweenButton)
QRegExpWizardRepeatDialog.setTabOrder(self.betweenButton, self.lowerSpin)
QRegExpWizardRepeatDialog.setTabOrder(self.lowerSpin, self.upperSpin)
def retranslateUi(self, QRegExpWizardRepeatDialog):
_translate = QtCore.QCoreApplication.translate
QRegExpWizardRepeatDialog.setWindowTitle(_translate("QRegExpWizardRepeatDialog", "Number of repetitions"))
self.textLabel1_6.setText(_translate("QRegExpWizardRepeatDialog", "times"))
self.textLabel1_7.setText(_translate("QRegExpWizardRepeatDialog", "times"))
self.textLabel1_5.setText(_translate("QRegExpWizardRepeatDialog", "times"))
self.textLabel6.setText(_translate("QRegExpWizardRepeatDialog", "and"))
self.betweenButton.setText(_translate("QRegExpWizardRepeatDialog", "Between"))
self.exactButton.setText(_translate("QRegExpWizardRepeatDialog", "Exactly"))
self.maxButton.setText(_translate("QRegExpWizardRepeatDialog", "Maximum"))
self.minButton.setText(_translate("QRegExpWizardRepeatDialog", "Minimum"))
self.unlimitedButton.setText(_translate("QRegExpWizardRepeatDialog", "Unlimited (incl. zero times)"))
| gpl-3.0 | 4,168,190,662,937,062,400 | 57.338843 | 121 | 0.726732 | false |
EluOne/Nett | nett.py | 1 | 42271 | #!/usr/bin/python
'Nova Echo Trade Tool'
# Copyright (C) 2014 Tim Cumming
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Tim Cumming aka Elusive One
# Created: 01/04/14
import os
import pickle
import time
import datetime
import wx
import sqlite3 as lite
import config
from common.api import onError, reprocess, fetchItems
from common.classes import Item, Material, MaterialRow
from ObjectListView import ObjectListView, ColumnDefn, GroupListView
# This will be the lists for the ui choices on the market.
quickbarList = []
materialsList = []
itemList = []
marketGroups = {}
marketRelations = {}
numIDs = 0
materialDict = {}
# Lets try to load up our previous quickbarList from the cache file.
if (os.path.isfile('nett.cache')):
cacheFile = open('nett.cache', 'r')
quickbarList = pickle.load(cacheFile)
cacheFile.close()
class MainWindow(wx.Frame):
def __init__(self, *args, **kwds):
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.numWidgets = 0
# List and Dictionary initialisation.
if itemList == []: # Build a list of all items from the static data dump.
try:
con = lite.connect('static.db') # A cut down version of the CCP dump converted to sqlite. (~8mb)
con.text_factory = str
with con:
cur = con.cursor()
# With this query we are looking to populate the itemID's with their respective names and parent market groups.
# Eve items currently go up to ID 33612, then Dust items start from 350916
statement = "SELECT typeID, typeName, marketGroupID FROM invtypes WHERE marketGroupID >= 0 ORDER BY typeName;"
cur.execute(statement)
rows = cur.fetchall()
for row in rows:
# The data above taken from the db then all zeros for the buy/sell values (x16), query time and widget key.
itemList.append(Item(int(row[0]), str(row[1]), int(row[2]), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
# This query will hold all of the market group ID to name relations in a dictionary for ease.
groupStatement = "SELECT marketGroupID, marketGroupName FROM invMarketGroups WHERE marketGroupID >= 0 ORDER BY marketGroupID;"
cur.execute(groupStatement)
groupRows = cur.fetchall()
for row in groupRows:
marketGroups.update({int(row[0]): str(row[1])})
# This statement is for the branches of the market treeCtrl using all the market groups and their relationship to each other.
relationStatement = "SELECT marketGroupID, parentGroupID FROM invMarketGroups ORDER BY parentGroupID;"
cur.execute(relationStatement)
relationRows = cur.fetchall()
for row in relationRows:
if row[1]:
marketRelations.update({int(row[0]): int(row[1])})
else:
marketRelations.update({int(row[0]): 'Market'})
except lite.Error as err:
error = ('SQL Lite Error: ' + repr(err.args[0]) + repr(err.args[1:])) # Error String
onError(error)
finally:
if con:
con.close()
self.leftNotebook = wx.Notebook(self, wx.ID_ANY, style=0)
self.marketNotebookPane = wx.Panel(self.leftNotebook, wx.ID_ANY)
self.searchTextCtrl = wx.TextCtrl(self.marketNotebookPane, wx.ID_ANY, "")
self.searchButton = wx.Button(self.marketNotebookPane, wx.ID_FIND, (""))
self.marketTree = wx.TreeCtrl(self.marketNotebookPane, wx.ID_ANY, style=wx.TR_HAS_BUTTONS | wx.TR_DEFAULT_STYLE | wx.SUNKEN_BORDER)
self.addButton = wx.Button(self.marketNotebookPane, wx.ID_ANY, ("Add to Quickbar"))
self.fetchButton = wx.Button(self.marketNotebookPane, wx.ID_ANY, ("Fetch Data"))
self.quickbarNotebookPane = wx.Panel(self.leftNotebook, wx.ID_ANY)
self.quickbarListCtrl = ObjectListView(self.quickbarNotebookPane, wx.ID_ANY, style=wx.LC_REPORT | wx.SUNKEN_BORDER)
self.removeButton = wx.Button(self.quickbarNotebookPane, wx.ID_ANY, ("Remove From Quickbar"))
self.fetchButtonTwo = wx.Button(self.quickbarNotebookPane, wx.ID_ANY, ("Fetch Data"))
self.materiallsNotebookPane = wx.Panel(self.leftNotebook, wx.ID_ANY)
self.materialsListCtrl = GroupListView(self.materiallsNotebookPane, wx.ID_ANY, style=wx.LC_REPORT | wx.SUNKEN_BORDER)
self.rightPanel = wx.ScrolledWindow(self, wx.ID_ANY, style=wx.TAB_TRAVERSAL)
self.statusbar = self.CreateStatusBar() # A Status bar in the bottom of the window
# Menu Bar
self.frame_menubar = wx.MenuBar()
self.fileMenu = wx.Menu()
self.menuAbout = wx.MenuItem(self.fileMenu, wx.ID_ABOUT, "&About", "", wx.ITEM_NORMAL)
self.fileMenu.AppendItem(self.menuAbout)
self.menuExport = wx.MenuItem(self.fileMenu, wx.ID_SAVE, "&Export", " Export Price Data", wx.ITEM_NORMAL)
self.fileMenu.AppendItem(self.menuExport)
self.menuExit = wx.MenuItem(self.fileMenu, wx.ID_EXIT, "E&xit", "", wx.ITEM_NORMAL)
self.fileMenu.AppendItem(self.menuExit)
self.frame_menubar.Append(self.fileMenu, "File")
self.SetMenuBar(self.frame_menubar)
# Menu Bar end
# Menu events.
self.Bind(wx.EVT_MENU, self.OnExport, self.menuExport)
self.Bind(wx.EVT_MENU, self.OnExit, self.menuExit)
self.Bind(wx.EVT_MENU, self.OnAbout, self.menuAbout)
# Button Events
self.Bind(wx.EVT_BUTTON, self.onProcess, self.fetchButton)
self.Bind(wx.EVT_BUTTON, self.onProcess, self.fetchButtonTwo)
self.Bind(wx.EVT_BUTTON, self.onAdd, self.addButton)
self.Bind(wx.EVT_BUTTON, self.onRemove, self.removeButton)
self.Bind(wx.EVT_BUTTON, self.searchTree, self.searchButton)
# register the self.onExpand function to be called
wx.EVT_TREE_ITEM_EXPANDING(self.marketTree, self.marketTree.GetId(), self.onExpand)
self.__set_properties()
self.__do_layout()
def __set_properties(self):
self.SetTitle(("Nett"))
self.SetSize((1024, 600))
self.rightPanel.SetScrollRate(10, 10)
self.SetBackgroundColour(wx.NullColour) # Use system default colour
self.statusbar.SetStatusText('Welcome to Nett')
self.quickbarListCtrl.SetEmptyListMsg('Add some items\nto start')
self.quickbarListCtrl.SetColumns([
ColumnDefn('Name', 'left', 320, 'itemName'),
])
self.materialsListCtrl.SetColumns([
ColumnDefn('Name', 'left', 100, 'materialName'),
ColumnDefn('Buy', 'right', 90, 'materialBuy'),
ColumnDefn('Sell', 'right', 90, 'materialSell'),
ColumnDefn('System', 'right', -1, 'systemName'),
])
self.materialsListCtrl.SetSortColumn(self.materialsListCtrl.columns[4])
def __do_layout(self):
mainSizer = wx.BoxSizer(wx.HORIZONTAL)
self.itemsSizer = wx.BoxSizer(wx.VERTICAL)
materialSizer = wx.BoxSizer(wx.VERTICAL)
quickbarSizer = wx.BoxSizer(wx.VERTICAL)
mainMarketSizer = wx.BoxSizer(wx.VERTICAL)
searchSizer = wx.BoxSizer(wx.HORIZONTAL)
searchSizer.Add(self.searchTextCtrl, 1, wx.EXPAND, 0)
searchSizer.Add(self.searchButton, 0, wx.ADJUST_MINSIZE, 0)
marketButtonSizer = wx.BoxSizer(wx.HORIZONTAL)
marketButtonSizer.Add(self.addButton, 1, wx.ADJUST_MINSIZE, 0)
marketButtonSizer.Add(self.fetchButton, 1, wx.ADJUST_MINSIZE, 0)
mainMarketSizer.Add(searchSizer, 0, wx.EXPAND, 0)
mainMarketSizer.Add(self.marketTree, 2, wx.EXPAND, 0)
mainMarketSizer.Add(marketButtonSizer, 0, wx.EXPAND, 0)
self.marketNotebookPane.SetSizer(mainMarketSizer)
quickbarButtonSizer = wx.BoxSizer(wx.HORIZONTAL)
quickbarButtonSizer.Add(self.removeButton, 1, wx.ADJUST_MINSIZE, 0)
quickbarButtonSizer.Add(self.fetchButtonTwo, 1, wx.ADJUST_MINSIZE, 0)
quickbarSizer.Add(self.quickbarListCtrl, 1, wx.EXPAND, 0)
quickbarSizer.Add(quickbarButtonSizer, 0, wx.EXPAND, 0)
self.quickbarNotebookPane.SetSizer(quickbarSizer)
materialSizer.Add(self.materialsListCtrl, 1, wx.EXPAND, 0)
self.materiallsNotebookPane.SetSizer(materialSizer)
self.leftNotebook.AddPage(self.marketNotebookPane, ("Market"))
self.leftNotebook.AddPage(self.quickbarNotebookPane, ("Quickbar"))
self.leftNotebook.AddPage(self.materiallsNotebookPane, ("Minerals"))
mainSizer.Add(self.leftNotebook, 1, wx.EXPAND, 0)
self.rightPanel.SetSizer(self.itemsSizer)
mainSizer.Add(self.rightPanel, 2, wx.EXPAND, 0)
self.SetSizer(mainSizer)
self.Layout()
# initialize the marketTree
self.buildTree('Market')
# If we've loaded up a cache file send the data to the UI.
if quickbarList != []:
self.quickbarListCtrl.SetObjects(quickbarList)
def searchTree(self, event):
searchText = self.searchTextCtrl.GetValue()
# Reset the itemList and marketRelations
del itemList[:]
marketRelations.clear()
itemMarketGroups = []
# List and Dictionary initialisation.
if itemList == []: # Build a list of all items from the static data dump.
try:
con = lite.connect('static.db') # A cut down version of the CCP dump converted to sqlite. (~8mb)
con.text_factory = str
with con:
cur = con.cursor()
# With this query we are looking to populate the itemID's with their respective names and parent market groups.
# Eve items currently go up to ID 33612, then Dust items start from 350916
statement = "SELECT typeID, typeName, marketGroupID FROM invtypes WHERE marketGroupID >= 0 AND typeName LIKE '%" + searchText + "%' ORDER BY typeName;"
cur.execute(statement)
rows = cur.fetchall()
for row in rows:
# The data above taken from the db then all zeros for the buy/sell values (x16), query time and widget key.
itemList.append(Item(int(row[0]), str(row[1]), int(row[2]), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
itemMarketGroups.append(int(row[2]))
# Iterate over the relations to build all the relavent branches.
while itemMarketGroups != []:
# This statement is for the branches of the market treeCtrl using all the market groups and their relationship to each other.
itemMarketList = ("', '".join(map(str, itemMarketGroups[:])))
relationStatement = ("SELECT marketGroupID, parentGroupID FROM invMarketGroups WHERE marketGroupID IN ('%s') ORDER BY parentGroupID;" % itemMarketList)
cur.execute(relationStatement)
relationRows = cur.fetchall()
itemMarketGroups = []
for row in relationRows:
if row[1]:
marketRelations.update({int(row[0]): int(row[1])})
itemMarketGroups.append(int(row[1]))
else:
marketRelations.update({int(row[0]): 'Market'})
except lite.Error as err:
error = ('SQL Lite Error: ' + repr(err.args[0]) + repr(err.args[1:])) # Error String
onError(error)
finally:
if con:
con.close()
# Reinitialize the marketTree
self.marketTree.DeleteAllItems()
self.buildTree('Market')
def onExpand(self, event):
'''onExpand is called when the user expands a node on the tree
object. It checks whether the node has been previously expanded. If
not, the extendTree function is called to build out the node, which
is then marked as expanded.'''
# get the wxID of the entry to expand and check it's validity
itemID = event.GetItem()
if not itemID.IsOk():
itemID = self.marketTree.GetSelection()
# only build that marketTree if not previously expanded
old_pydata = self.marketTree.GetPyData(itemID)
if old_pydata[1] is False:
# clean the subtree and rebuild it
self.marketTree.DeleteChildren(itemID)
self.extendTree(itemID)
self.marketTree.SetPyData(itemID, (old_pydata[0], True, old_pydata[2]))
def buildTree(self, rootID):
'''Add a new root element and then its children'''
self.rootID = self.marketTree.AddRoot(rootID)
self.marketTree.SetPyData(self.rootID, (rootID, 1))
self.extendTree(self.rootID)
self.marketTree.Expand(self.rootID)
def extendTree(self, parentID):
'''extendTree is a semi-lazy Tree builder. It takes
the ID of a tree entry and fills in the tree with its child
sub market groups and their children - updating 2 layers of the
tree. This function is called by buildTree and onExpand methods'''
parentGroup = self.marketTree.GetPyData(parentID)[0]
subGroups = []
numIDs = list(range(len(itemList)))
for key in marketRelations:
if marketRelations[key] == parentGroup:
subGroups.append(int(key))
subGroups.sort()
if subGroups == []:
# We've reached the end of the branch and must add the leaves.
newsubGroups = []
for x in numIDs: # Iterate over all of the id lists generated above.
if itemList[x].marketGroupID == parentGroup:
newsubGroups.append(int(x))
newsubGroups.sort()
for child in newsubGroups:
childGroup = child
childID = self.marketTree.AppendItem(parentID, str(itemList[child].itemName))
self.marketTree.SetPyData(childID, (itemList[child].itemID, False, True))
else:
for child in subGroups:
childGroup = child
# add the child to the parent
childID = self.marketTree.AppendItem(parentID, str(marketGroups[child]))
# associate the child ID with its marketTree entry
self.marketTree.SetPyData(childID, (childGroup, False, False))
# Now the child entry will show up, but it current has no
# known children of its own and will not have a '+' showing
# that it can be expanded to step further down the marketTree.
# Solution is to go ahead and register the child's children,
# meaning the grandchildren of the original parent
newParentID = childID
newParentGroup = childGroup
newsubGroups = []
for key in marketRelations:
if marketRelations[key] == newParentGroup:
newsubGroups.append(int(key))
newsubGroups.sort()
if newsubGroups != []:
for grandchild in newsubGroups:
grandchildGroup = grandchild
if marketRelations[grandchildGroup]:
grandchildID = self.marketTree.AppendItem(newParentID, str(marketGroups[grandchild]))
self.marketTree.SetPyData(grandchildID, (grandchildGroup, False, False))
else:
for x in numIDs: # Iterate over all of the id lists generated above.
if itemList[x].marketGroupID == newParentGroup:
newsubGroups.append(int(x))
newsubGroups.sort()
for grandchild in newsubGroups:
grandchildGroup = grandchild
grandchildID = self.marketTree.AppendItem(newParentID, str(itemList[grandchild].itemName))
self.marketTree.SetPyData(grandchildID, (grandchildGroup, False, False))
def onAddWidget(self, moduleID, moduleName, widgetKey):
        '''onAddWidget will add widgets into the right scrolling
        panel as required to show prices for the selected items'''
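        # Widget IDs are built as <prefix><widgetKey>: prefix 100 marks the static box,
        # 101-108 the item buy/sell fields and 201-208 the reprocessed-value fields,
        # so onRemoveWidget can later find and destroy them by rebuilding the same IDs.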
# Lets try add to the right panel.
self.moduleSizer_1_staticbox = wx.StaticBox(self.rightPanel, int('100%s' % widgetKey), (str(moduleName)), name="module_%s" % moduleID)
self.moduleSizer_1_staticbox.Lower()
moduleSizer_1 = wx.StaticBoxSizer(self.moduleSizer_1_staticbox, wx.VERTICAL)
reproGrid_1 = wx.GridSizer(3, 5, 0, 0)
itemGrid_1 = wx.GridSizer(3, 5, 0, 0)
itemLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Item Value"), name="itemValue_%s" % moduleID)
itemMarketLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Market"), name="itemMarket_%s" % moduleID)
itemAmarrLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Amarr"), name="itemAmarr_%s" % moduleID)
itemDodiLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Dodixie"), name="itemDodixie_%s" % moduleID)
itemHekLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Hek"), name="itemHek_%s" % moduleID)
itemJitaLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Jita"), name="itemJita_%s" % moduleID)
itemSellLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Sell"), name="itemSell_%s" % moduleID)
# itemAmarrSell_1 = wx.TextCtrl(self.rightPanel, wx.ID_ANY, "", size=(130, 21), style=wx.TE_RIGHT, name="amarrItemSell_%s" % moduleID)
itemAmarrSell_1 = wx.TextCtrl(self.rightPanel, int('101%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="amarrItemSell_%s" % moduleID)
itemDodiSell_1 = wx.TextCtrl(self.rightPanel, int('102%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="dodixieItemSell_%s" % moduleID)
itemHekSell_1 = wx.TextCtrl(self.rightPanel, int('103%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="hekItemSell_%s" % moduleID)
itemJitaSell_1 = wx.TextCtrl(self.rightPanel, int('104%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="jitaItemSell_%s" % moduleID)
itemBuyLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Buy"), name="itemBuy_%s" % moduleID)
itemAmarrBuy_1 = wx.TextCtrl(self.rightPanel, int('105%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="amarrItemBuy_%s" % moduleID)
itemDodiBuy_1 = wx.TextCtrl(self.rightPanel, int('106%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="dodixieItemBuy_%s" % moduleID)
itemHekBuy_1 = wx.TextCtrl(self.rightPanel, int('107%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="hekItemBuy_%s" % moduleID)
itemJitaBuy_1 = wx.TextCtrl(self.rightPanel, int('108%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="jitaItemBuy_%s" % moduleID)
static_line_1 = wx.StaticLine(self.rightPanel, wx.ID_ANY, name="line_%s" % moduleID)
reproLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Reprocessed Value"), name="reproValue_%s" % moduleID)
reproMarketLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Market"), name="reproMarket_%s" % moduleID)
reproAmarrLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Amarr"), name="reproAmarr_%s" % moduleID)
reproDodiLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Dodixie"), name="reproDodixie_%s" % moduleID)
reproHekLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Hek"), name="reproHek_%s" % moduleID)
reproJitaLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Jita"), name="reproJita_%s" % moduleID)
reproSellLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Sell"), name="reproSell_%s" % moduleID)
reproAmarrSell_1 = wx.TextCtrl(self.rightPanel, int('201%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproAmarrSell_%s" % moduleID)
reproDodiSell_1 = wx.TextCtrl(self.rightPanel, int('202%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproDodixieSell_%s" % moduleID)
reproHekSell_1 = wx.TextCtrl(self.rightPanel, int('203%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproHekSell_%s" % moduleID)
reproJitaSell_1 = wx.TextCtrl(self.rightPanel, int('204%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproJitaSell_%s" % moduleID)
reproBuyLabel_1 = wx.StaticText(self.rightPanel, wx.ID_ANY, ("Buy"), name="reproBuy_%s" % moduleID)
reproAmarrBuy_1 = wx.TextCtrl(self.rightPanel, int('205%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproAmarrBuy_%s" % moduleID)
reproDodiBuy_1 = wx.TextCtrl(self.rightPanel, int('206%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproDodixieBuy_%s" % moduleID)
reproHekBuy_1 = wx.TextCtrl(self.rightPanel, int('207%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproHekBuy_%s" % moduleID)
reproJitaBuy_1 = wx.TextCtrl(self.rightPanel, int('208%s' % widgetKey), "", size=(130, 21), style=wx.TE_RIGHT, name="reproJitaBuy_%s" % moduleID)
moduleSizer_1.Add(itemLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemMarketLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemAmarrLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemDodiLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemHekLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemJitaLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemSellLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemAmarrSell_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemDodiSell_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemHekSell_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemJitaSell_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemBuyLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemAmarrBuy_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemDodiBuy_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemHekBuy_1, 0, wx.ADJUST_MINSIZE, 0)
itemGrid_1.Add(itemJitaBuy_1, 0, wx.ADJUST_MINSIZE, 0)
moduleSizer_1.Add(itemGrid_1, 1, wx.EXPAND, 0)
moduleSizer_1.Add(static_line_1, 0, wx.EXPAND, 0)
moduleSizer_1.Add(reproLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproMarketLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproAmarrLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproDodiLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproHekLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproJitaLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproSellLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproAmarrSell_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproDodiSell_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproHekSell_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproJitaSell_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproBuyLabel_1, 0, wx.ALIGN_CENTER_HORIZONTAL | wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproAmarrBuy_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproDodiBuy_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproHekBuy_1, 0, wx.ADJUST_MINSIZE, 0)
reproGrid_1.Add(reproJitaBuy_1, 0, wx.ADJUST_MINSIZE, 0)
moduleSizer_1.Add(reproGrid_1, 1, wx.EXPAND, 0)
self.itemsSizer.Add(moduleSizer_1, 1, wx.EXPAND | wx.SHAPED, 0)
self.rightPanel.SetSizer(self.itemsSizer)
self.Layout()
def onRemoveWidget(self, widgetKey):
"""Remove all children components for a given module and destroy them"""
child = wx.FindWindowById(int('100%s' % widgetKey))
if child:
parent = child.GetContainingSizer()
widgetIds = ['101', '102', '103', '104', '105', '106', '107', '108',
'201', '202', '203', '204', '205', '206', '207', '208']
for wid in widgetIds:
widget = wx.FindWindowById(int('%s%s' % (wid, widgetKey)))
if widget:
widget.Destroy()
if parent:
self.itemsSizer.Hide(parent)
self.itemsSizer.Remove(parent)
self.Layout()
def updateCache(self):
# Update the quickbarList to the cache file.
if quickbarList != []:
cacheFile = open('nett.cache', 'w')
pickle.dump(quickbarList, cacheFile)
cacheFile.close()
else:
# Delete the cache file when the quickbarList is empty.
if (os.path.isfile('nett.cache')):
os.remove('nett.cache')
def onAdd(self, event):
# Get current selection data from tree ctrl
currentSelection = self.marketTree.GetSelection()
pydata = self.marketTree.GetPyData(currentSelection)
# Check its an item not a market group
if pydata[2] is True:
selectedID = pydata[0]
for item in itemList:
# Find the selected ID in the complete item list
if item.itemID == selectedID:
# Check for duplicates in the quickbar list
if item not in quickbarList:
quickbarList.append(item)
self.quickbarListCtrl.SetObjects(quickbarList)
self.updateCache()
def onRemove(self, event):
# Use the selection from the quickbarListCtrl to remove items.
numItemRows = list(range(len(quickbarList)))
# Get current selection from quickbarList ctrl
for x in self.quickbarListCtrl.GetSelectedObjects():
for y in numItemRows:
if (x.itemID == quickbarList[y].itemID):
quickbarList[y] = 'deleted'
self.onRemoveWidget(x.widgetKey)
for z in quickbarList[:]:
if z == 'deleted':
quickbarList.remove(z)
# Recreate the iteration list so the loop can continue if removing multiple items.
numItemRows = list(range(len(quickbarList)))
self.quickbarListCtrl.SetObjects(quickbarList)
self.updateCache()
def updateDisplay(self, idList):
"""Send Values to the GUI elements. as we have added to the wx widgets
on the fly the easiest way to identify the widgets is by their unique
names assigned on creation."""
for item in idList:
if wx.FindWindowByName("module_%s" % int(item.itemID)):
continue
else:
self.numWidgets += 1
item.widgetKey = self.numWidgets
self.onAddWidget(int(item.itemID), item.itemName, item.widgetKey)
# Iterate over all of the widgets and their respective variables to fill in values.
# '{:,.2f}'.format(value) Uses the Format Specification Mini-Language to produce more human friendly output.
# Item Values
widgetNames = ['amarrItemBuy', 'dodixieItemBuy', 'hekItemBuy', 'jitaItemBuy',
'amarrItemSell', 'dodixieItemSell', 'hekItemSell', 'jitaItemSell']
for name in widgetNames:
widget = wx.FindWindowByName("%s_%s" % (name, int(item.itemID)))
widget.SetValue('{:,.2f}'.format(vars(item)[name]))
# Reprocess Values
widgetNames = ['reproAmarrBuy', 'reproDodixieBuy', 'reproHekBuy', 'reproJitaBuy',
'reproAmarrSell', 'reproDodixieSell', 'reproHekSell', 'reproJitaSell']
for name in widgetNames:
widget = wx.FindWindowByName("%s_%s" % (name, int(item.itemID)))
widget.SetValue('{:,.2f}'.format(vars(item)[name]))
def onProcess(self, event):
"""Generate a list of item and material ids to send to the Eve-Central servers
then use the returned data to generate our prices"""
currentTime = datetime.datetime.utcnow().replace(microsecond=0)
if quickbarList != []:
timingMsg = 'Using Local Cache'
# Build a list of item ids to send to Eve-Central.
idList = []
for item in quickbarList:
if item.lastQuery == 0:
idList.append(item.itemID)
elif (currentTime - item.lastQuery).seconds > config.queryLimit:
idList.append(item.itemID)
# We'll tag on the mineral query with the item ids to save traffic.
if materialsList != []:
for mat in materialsList:
if mat.lastQuery == 0:
idList.append(mat.materialID)
elif (currentTime - mat.lastQuery).seconds > config.queryLimit:
idList.append(mat.materialID)
else:
for mineral in config.mineralIDs:
idList.append(mineral)
# print(idList)
# idList = [4473, 16437...]
# This is for time stamping our out bound queries so we don't request data we already have that is recent.
queryTime = datetime.datetime.utcnow().replace(microsecond=0)
# Start the clock for the fetch from Eve-Central.
t = time.clock()
self.statusbar.SetStatusText('Nett - Fetching Data from Eve-Central.com...')
dodixieBuy, dodixieSell, jitaBuy, jitaSell, hekBuy, hekSell, amarrBuy, amarrSell = fetchItems(idList)
fetchTime = ((time.clock() - t) * 1000) # Timing messages for info and debug.
# Check that our mineral prices are updated if returned for the query.
for mineral in config.mineralIDs:
# Check if it was in the idList for the Eve-Central query.
if mineral in idList:
# Check if we already have some data for this id
if mineral in materialDict:
# Buy values updates via materialDict to materialsList
materialsList[materialDict[mineral]].amarrBuy = amarrBuy[mineral]
materialsList[materialDict[mineral]].dodixieBuy = dodixieBuy[mineral]
materialsList[materialDict[mineral]].hekBuy = hekBuy[mineral]
materialsList[materialDict[mineral]].jitaBuy = jitaBuy[mineral]
# Sell values updates via materialDict to materialsList
materialsList[materialDict[mineral]].amarrSell = amarrSell[mineral]
materialsList[materialDict[mineral]].dodixieSell = dodixieSell[mineral]
materialsList[materialDict[mineral]].hekSell = hekSell[mineral]
materialsList[materialDict[mineral]].jitaSell = jitaSell[mineral]
else:
materialsList.append(Material(int(mineral), config.mineralIDs[mineral],
amarrBuy[mineral], dodixieBuy[mineral], hekBuy[mineral], jitaBuy[mineral],
amarrSell[mineral], dodixieSell[mineral], hekSell[mineral], jitaSell[mineral],
queryTime))
# Once we have fetched material data its now stored in objects in materialsList
# So we need to make a quick dictionary like a primary key to match list positions to mineral ids.
numMats = list(range(len(materialsList)))
if numMats != []:
for x in numMats:
# materialDict = {materialId: materialsList[index], 34: 0, 35: 1, ...}
materialDict[materialsList[x].materialID] = x
# print(materialDict)
# TODO: Move this loop somewhere more logical.
materialRows = []
for mineral in materialsList:
materialRows.append(MaterialRow(mineral.materialName, 'Amarr', mineral.amarrBuy, mineral.amarrSell))
materialRows.append(MaterialRow(mineral.materialName, 'Dodixie', mineral.dodixieBuy, mineral.dodixieSell))
materialRows.append(MaterialRow(mineral.materialName, 'Hek', mineral.hekBuy, mineral.hekSell))
materialRows.append(MaterialRow(mineral.materialName, 'Jita', mineral.jitaBuy, mineral.jitaSell))
self.materialsListCtrl.SetObjects(materialRows)
self.statusbar.SetStatusText('Nett - Calculating Reprocessed Values...')
# Restart the clock for processing data.
t = time.clock()
for item in quickbarList:
if item.itemID in idList:
output = reprocess(item.itemID)
# print(output)
                    reproAmarrBuy = 0  # Fulfilling Buy orders
                    reproAmarrSell = 0  # Placing Sell orders
                    reproDodixieBuy = 0  # Fulfilling Buy orders
                    reproDodixieSell = 0  # Placing Sell orders
                    reproHekBuy = 0  # Fulfilling Buy orders
                    reproHekSell = 0  # Placing Sell orders
                    reproJitaBuy = 0  # Fulfilling Buy orders
                    reproJitaSell = 0  # Placing Sell orders
# Generate reprocessed values from raw material prices. (Currently not stored)
for key in output:
if key in config.mineralIDs:
# We are now using the materialDict so we can use previously fetched data in the materialsList.
reproAmarrBuy = reproAmarrBuy + (int(output[key]) * materialsList[materialDict[key]].amarrBuy)
reproAmarrSell = reproAmarrSell + (int(output[key]) * materialsList[materialDict[key]].amarrSell)
reproDodixieBuy = reproDodixieBuy + (int(output[key]) * materialsList[materialDict[key]].dodixieBuy)
reproDodixieSell = reproDodixieSell + (int(output[key]) * materialsList[materialDict[key]].dodixieSell)
reproHekBuy = reproHekBuy + (int(output[key]) * materialsList[materialDict[key]].hekBuy)
reproHekSell = reproHekSell + (int(output[key]) * materialsList[materialDict[key]].hekSell)
reproJitaBuy = reproJitaBuy + (int(output[key]) * materialsList[materialDict[key]].jitaBuy)
reproJitaSell = reproJitaSell + (int(output[key]) * materialsList[materialDict[key]].jitaSell)
# Send Values to the quickbarList objects.
item.amarrItemBuy = amarrBuy[item.itemID]
item.dodixieItemBuy = dodixieBuy[item.itemID]
item.hekItemBuy = hekBuy[item.itemID]
item.jitaItemBuy = jitaBuy[item.itemID]
item.amarrItemSell = amarrSell[item.itemID]
item.dodixieItemSell = dodixieSell[item.itemID]
item.hekItemSell = hekSell[item.itemID]
item.jitaItemSell = jitaSell[item.itemID]
item.reproAmarrBuy = reproAmarrBuy
item.reproDodixieBuy = reproDodixieBuy
item.reproHekBuy = reproHekBuy
item.reproJitaBuy = reproJitaBuy
item.reproAmarrSell = reproAmarrSell
item.reproDodixieSell = reproDodixieSell
item.reproHekSell = reproHekSell
item.reproJitaSell = reproJitaSell
item.lastQuery = queryTime
processTime = ((time.clock() - t) * 1000)
timingMsg = 'Fetch: %0.2f ms / Process: %0.2f ms' % (fetchTime, processTime)
self.updateDisplay(quickbarList)
self.statusbar.SetStatusText('Nett - Idle - %s' % timingMsg)
# Save the updated quickbarList to the cache file.
self.updateCache()
def OnExport(self, event):
# Export the contents of the Quickbar as csv.
if quickbarList != []:
self.dirname = ''
wildcard = "Comma Separated (*.csv)|*.csv|All files (*.*)|*.*"
dlg = wx.FileDialog(self, 'Export Price Data to File', self.dirname, 'export.csv', wildcard, wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
f = file(path, 'w')
""" Item(itemID, itemName, marketGroupID,
amarrItemBuy, dodixieItemBuy, hekItemBuy, jitaItemBuy,
amarrItemSell, dodixieItemSell, hekItemSell, jitaItemSell,
reproAmarrBuy, reproDodixieBuy, reproHekBuy, reproJitaBuy,
reproAmarrSell, reproDodixieSell, reproHekSell, reproJitaSell)"""
columns = ('Item Name', 'Amarr Market Buy Orders', 'Amarr Market Sell Orders', 'Amarr Material Buy Orders', 'Amarr Material Sell Orders',
'Dodixie Market Buy Orders', 'Dodixie Market Sell Orders', 'Dodixie Material Buy Orders', 'Dodixie Material Sell Orders',
'Hek Market Buy Orders', 'Hek Market Sell Orders', 'Hek Material Buy Orders', 'Hek Material Sell Orders',
'Jita Market Buy Orders', 'Jita Market Sell Orders', 'Jita Material Buy Orders', 'Jita Material Sell Orders')
dataExport = ('%s%s' % (','.join(columns), '\n'))
for row in quickbarList:
dataExport = ('%s%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n' % (dataExport, row.itemName,
row.amarrItemBuy, row.amarrItemSell, row.reproAmarrBuy, row.reproAmarrSell,
row.dodixieItemBuy, row.dodixieItemSell, row.reproDodixieBuy, row.reproDodixieSell,
row.hekItemBuy, row.hekItemSell, row.reproHekBuy, row.reproHekSell,
row.jitaItemBuy, row.jitaItemSell, row.reproJitaBuy, row.reproJitaSell))
f.write(dataExport)
f.close()
dlg.Destroy()
else:
onError('The Quickbar list is empty. There is no data to export yet.')
def OnAbout(self, e):
description = """A tool designed for our corporate industrialists to
compare items at the main market hubs.
If you like my work please consider an ISK donation to Elusive One.
This application uses data provided by Eve-Central.com
All EVE-Online related materials are property of CCP hf."""
licence = """NETT is released under GNU GPLv3:
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>."""
info = wx.AboutDialogInfo()
# info.SetIcon(wx.Icon('', wx.BITMAP_TYPE_PNG))
info.SetName('Nova Echo Trade Tool')
info.SetVersion(config.version)
info.SetDescription(description)
# info.SetCopyright('(C) 2013 Tim Cumming')
info.SetWebSite('https://github.com/EluOne/Nett')
info.SetLicence(licence)
info.AddDeveloper('Tim Cumming aka Elusive One')
# info.AddDocWriter('')
# info.AddArtist('')
# info.AddTranslator('')
wx.AboutBox(info)
def OnExit(self, e):
dlg = wx.MessageDialog(self, 'Are you sure to quit Nett?', 'Please Confirm', wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
self.Close(True)
# end of class MainWindow
class MyApp(wx.App):
def OnInit(self):
frame = MainWindow(None, -1, '')
self.SetTopWindow(frame)
frame.Center()
frame.Show()
return 1
# end of class MyApp
if __name__ == '__main__':
app = MyApp(0)
app.MainLoop()
| gpl-3.0 | 5,782,214,964,515,885,000 | 51.315594 | 177 | 0.606752 | false |
mleger45/turnex | msn/tests/test_consumer.py | 1 | 1441 | # -*- coding: utf-8 -*-
from channels import Group
from channels.test import ChannelTestCase, WSClient, apply_routes
#TODO: use apply_routes here, these tests are wrong.
from msn import consumer
class MSNConsumerTest(ChannelTestCase):
def test_ws_connect(self):
client = WSClient()
default = 'turnex'
# Inject a message onto the channel to use in a consumer
#Channel("input").send({"value": 33})
# Run the consumer with the new Message object
#message = self.get_next_message("input", require=True)
#consumer.ws_connect(message)
# Verify there's a reply and that it's accurate
#result = self.get_next_message(message.reply_channel.name,
# require=True)
#self.assertIsNotNone(result)
client.send_and_consume('websocket.connect', path='/')
self.assertIsNone(client.receive())
Group(default).send({'text': 'ok'}, immediately=True)
self.assertEqual(client.receive(json=False), 'ok')
client.send_and_consume('websocket.receive',
text={'message': 'hey'},
path='/')
self.assertEqual(client.receive(), {'event': 'error', 'body': 'Stop Hacking.'})
client.send_and_consume('websocket.disconnect',
text={'message': 'hey'},
path='/')
| mit | 8,777,828,271,155,742,000 | 36.921053 | 87 | 0.575295 | false |
nens/raster-tools | raster_tools/txt2tif.py | 1 | 2465 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Create tif rasters from xyz text files by rasterizing the points onto a regular grid.
"""
import argparse
import math
import os
from osgeo import gdal
from osgeo import osr
import numpy as np
from raster_tools import datasets
WIDTH = 0.5
HEIGHT = 0.5
NO_DATA_VALUE = np.finfo('f4').min.item()
DRIVER = gdal.GetDriverByName('gtiff')
OPTIONS = ['compress=deflate', 'tiled=yes']
PROJECTION = osr.GetUserInputAsWKT('epsg:28992')
def rasterize(points):
""" Create array. """
xmin, ymin = points[:, :2].min(0)
xmax, ymax = points[:, :2].max(0)
p = math.floor(xmin / WIDTH) * WIDTH
q = math.floor(ymax / HEIGHT) * HEIGHT
geo_transform = p, WIDTH, 0, q, 0, -HEIGHT
indices = np.empty((len(points), 3), 'u4')
indices[:, 2] = (points[:, 0] - p) / WIDTH
indices[:, 1] = (q - points[:, 1]) / HEIGHT
order = indices.view('u4,u4,u4').argsort(order=['f1', 'f2'], axis=0)[:, 0]
indices = indices[order]
indices[0, 0] = 0
py, px = indices[0, 1:]
for i in range(1, len(indices)):
same1 = indices[i, 1] == indices[i - 1, 1]
same2 = indices[i, 2] == indices[i - 1, 2]
if same1 and same2:
indices[i, 0] = indices[i - 1, 0] + 1
else:
indices[i, 0] = 0
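    # After this loop, column 0 of `indices` is a per-cell duplicate counter
    # (0 for the first point that lands in a pixel, 1 for the second, ...), so
    # the array below gets one layer per duplicate; txt2tif then collapses the
    # layers with min/max.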
array = np.full(indices.max(0) + 1, NO_DATA_VALUE)
array[tuple(indices.transpose())] = points[:, 2][order]
array = np.ma.masked_values(array, NO_DATA_VALUE)
return {'array': array,
'projection': PROJECTION,
'no_data_value': NO_DATA_VALUE,
'geo_transform': geo_transform}
def txt2tif(source_path):
root, ext = os.path.splitext(source_path)
points = np.loadtxt(source_path)
kwargs = rasterize(points)
array = kwargs.pop('array')
for statistic in 'min', 'max':
func = getattr(np.ma, statistic)
kwargs['array'] = func(array, 0).filled(NO_DATA_VALUE)[np.newaxis]
target_path = root + '_' + statistic + '.tif'
with datasets.Dataset(**kwargs) as dataset:
DRIVER.CreateCopy(target_path, dataset, options=OPTIONS)
def get_parser():
""" Return argument parser. """
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('source_path', metavar='FILE')
return parser
def main():
""" Call txt2tif with args from parser. """
return txt2tif(**vars(get_parser().parse_args()))
if __name__ == '__main__':
exit(main())
| gpl-3.0 | -6,831,315,423,260,359,000 | 27.011364 | 78 | 0.599189 | false |
EDUlib/edx-platform | common/test/acceptance/tests/lms/test_learner_profile.py | 1 | 6478 | """
End-to-end tests for Student's Profile Page.
"""
from datetime import datetime
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.common.logout import LogoutPage
from common.test.acceptance.pages.lms.learner_profile import LearnerProfilePage
from common.test.acceptance.tests.helpers import AcceptanceTest, EventsTestMixin
class LearnerProfileTestMixin(EventsTestMixin):
"""
Mixin with helper methods for testing learner profile pages.
"""
PRIVACY_PUBLIC = 'all_users'
PRIVACY_PRIVATE = 'private'
PUBLIC_PROFILE_FIELDS = ['username', 'country', 'language_proficiencies', 'bio']
PRIVATE_PROFILE_FIELDS = ['username']
PUBLIC_PROFILE_EDITABLE_FIELDS = ['country', 'language_proficiencies', 'bio']
USER_SETTINGS_CHANGED_EVENT_NAME = "edx.user.settings.changed"
def log_in_as_unique_user(self):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def set_public_profile_fields_data(self, profile_page):
"""
Fill in the public profile fields of a user.
"""
        # These value_for_dropdown_field method calls used to include
        # focus_out=True, but a change in selenium now focuses out of the
        # drop down after selection without any further action needed.
profile_page.value_for_dropdown_field('language_proficiencies', 'English')
profile_page.value_for_dropdown_field('country', 'United Arab Emirates')
profile_page.set_value_for_textarea_field('bio', 'Nothing Special')
# Waits here for text to appear/save on bio field
profile_page.wait_for_ajax()
def visit_profile_page(self, username, privacy=None):
"""
Visit a user's profile page and if a privacy is specified and
is different from the displayed value, then set the privacy to that value.
"""
profile_page = LearnerProfilePage(self.browser, username)
# Change the privacy if requested by loading the page and
# changing the drop down
if privacy is not None:
profile_page.visit()
# Change the privacy setting if it is not the desired one already
profile_page.privacy = privacy
# Verify the current setting is as expected
if privacy == self.PRIVACY_PUBLIC:
assert profile_page.privacy == 'all_users'
else:
assert profile_page.privacy == 'private'
if privacy == self.PRIVACY_PUBLIC:
self.set_public_profile_fields_data(profile_page)
# Reset event tracking so that the tests only see events from
# loading the profile page.
self.start_time = datetime.now()
# Load the page
profile_page.visit()
return profile_page
def initialize_different_user(self, privacy=None, birth_year=None):
"""
Initialize the profile page for a different test user
"""
username, user_id = self.log_in_as_unique_user()
# Set the privacy for the new user
if privacy is None:
privacy = self.PRIVACY_PUBLIC
self.visit_profile_page(username, privacy=privacy)
# Set the user's year of birth
if birth_year:
self.set_birth_year(birth_year)
# Log the user out
LogoutPage(self.browser).visit()
return username, user_id
class LearnerProfileA11yTest(LearnerProfileTestMixin, AcceptanceTest):
"""
Class to test learner profile accessibility.
"""
a11y = True
def test_editable_learner_profile_a11y(self):
"""
Test the accessibility of the editable version of the profile page
(user viewing her own public profile).
"""
username, _ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'aria-valid-attr', # TODO: LEARNER-6611 & LEARNER-6865
'region', # TODO: AC-932
]
})
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('language_proficiencies')
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('bio')
profile_page.a11y_audit.check_for_accessibility_errors()
def test_read_only_learner_profile_a11y(self):
"""
Test the accessibility of the read-only version of a public profile page
(user viewing someone else's profile page).
"""
# initialize_different_user should cause country, language, and bio to be filled out (since
# privacy is public). It doesn't appear that this is happening, although the method
# works in regular bokchoy tests. Perhaps a problem with phantomjs? So this test is currently
# only looking at a read-only profile page with a username.
different_username, _ = self.initialize_different_user(privacy=self.PRIVACY_PUBLIC)
self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'aria-valid-attr', # TODO: LEARNER-6611 & LEARNER-6865
'region', # TODO: AC-932
]
})
profile_page.a11y_audit.check_for_accessibility_errors()
def test_badges_accessibility(self):
"""
Test the accessibility of the badge listings and sharing modal.
"""
username = 'testcert'
AutoAuthPage(self.browser, username=username).visit()
profile_page = self.visit_profile_page(username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'aria-valid-attr', # TODO: LEARNER-6611 & LEARNER-6865
'region', # TODO: AC-932
'color-contrast' # AC-938
]
})
profile_page.display_accomplishments()
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.badges[0].display_modal()
profile_page.a11y_audit.check_for_accessibility_errors()
| agpl-3.0 | 189,949,369,826,001,500 | 36.662791 | 101 | 0.636616 | false |
BorgERP/borg-erp-6of3 | verticals/garage61/acy_work_order/workorder.py | 1 | 5022 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2010 Acysos S.L. (http://acysos.com) All Rights Reserved.
# Ignacio Ibeas <[email protected]>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
import tools
import os
import time
from datetime import datetime, date
# Word order
class workorder(osv.osv):
_description = 'Work Order'
_name = 'workorder'
_columns = {
'name': fields.char('Work Order Reference', size=64, readonly=False, required=True, select=True),
'partner_id': fields.many2one('res.partner', 'Customer', readonly=False, states={'draft': [('readonly', False)]}, required=True, change_default=True, select=True),
'partner_workorder_id': fields.many2one('res.partner.address', 'Address', readonly=False, required=True, states={'draft': [('readonly', False)]}, help="The name and address of the contact that requested the workorder."),
'sale_order_ids': fields.one2many('sale.order', 'workorder_id', 'Sale orders'),
'project_ids': fields.one2many('project.project', 'workorder_id', 'Projects'),
'date_created': fields.date('Created Date'),
'date_appointment': fields.date('Appointment Date'),
'date_work': fields.date('Work Date'),
'date_delivery': fields.date('Delivery Date'),
'number_sale_orders': fields.integer('Number Sale Orders'),
'user_id': fields.many2one('res.users', 'Salesman', readonly=False, select=True),
}
_defaults = {
'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'workorder'),
'number_sale_orders': lambda *a: 0,
'date_created': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'user_id': lambda obj, cr, uid, context: uid,
}
_sql_constraints = [
('name_uniq', 'unique (name)', 'The Code of the Workorder must be unique !')
]
def onchange_partner_id(self, cr, uid, ids, part):
if not part:
return {'value': {'partner_workorder_id': False}}
addr = self.pool.get('res.partner').address_get(cr, uid, [part], ['delivery', 'invoice', 'contact'])
val = {
'partner_workorder_id': addr['contact'],
}
return {'value': val}
def copy(self, cr, uid, id, default=None, context={}):
if not default:
default = {}
default.update({
'name': self.pool.get('ir.sequence').get(cr, uid, 'workorder'),
})
return super(workorder, self).copy(cr, uid, id, default, context)
def save_workorder(self, cr, uid, ids,name,partner_id,partner_workorder_id,date_appointment,date_work,date_delivery,sale_order_ids,project_ids, context={}):
wo_exist_id = self.pool.get('workorder').search(cr, uid, [('name','=',name)], context=context)
if not wo_exist_id:
wo_id = self.pool.get('workorder').create(cr, uid, {'name':name,'partner_id':partner_id,'partner_workorder_id':partner_workorder_id,'date_appointment':date_appointment,'date_work':date_work,'date_delivery':date_delivery,'sale_order_ids':sale_order_ids,'project_ids':project_ids},{'workorder':True})
self.write(cr, uid, ids, {'name':name,'partner_id':partner_id,'partner_workorder_id':partner_workorder_id,'date_appointment':date_appointment,'date_work':date_work,'date_delivery':date_delivery,'sale_order_ids':sale_order_ids,'project_ids':project_ids})
return {'value': {'id': wo_id}}
def create(self, cr, uid, vals, context=None):
        if context and context.get('workorder', False):
return super(workorder,self).create(cr, uid, vals, context)
else:
sql = "SELECT workorder.id FROM workorder WHERE name = '%s'" % (vals.get('name'))
cr.execute(sql)
ids = cr.fetchone()[0]
super(workorder,self).write(cr, uid, ids, vals, context={})
return ids
workorder()
# Project
class project_project(osv.osv):
_inherit = 'project.project'
_columns = {
'workorder_id': fields.many2one('workorder', 'Work Order', readonly=True, required=False, select=True),
}
project_project() | agpl-3.0 | 4,570,658,077,266,191,400 | 45.943925 | 310 | 0.612505 | false |
RetailMeNotSandbox/dart | src/python/dart/test/full/test_workflow_chaining.py | 1 | 3785 | import unittest
from dart.client.python.dart_client import Dart
from dart.engine.no_op.metadata import NoOpActionTypes
from dart.model.action import ActionData, Action, ActionState
from dart.model.datastore import Datastore, DatastoreData, DatastoreState
from dart.model.trigger import Trigger, TriggerState
from dart.model.trigger import TriggerData
from dart.model.workflow import WorkflowData, WorkflowState, WorkflowInstanceState
from dart.model.workflow import Workflow
class TestWorkflowChaining(unittest.TestCase):
def setUp(self):
dart = Dart(host='localhost', port=5000)
""" :type dart: dart.client.python.dart_client.Dart """
self.dart = dart
dst_args = {'action_sleep_time_in_seconds': 0}
dst0 = Datastore(data=DatastoreData('test-datastore0', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
self.datastore0 = self.dart.save_datastore(dst0)
dst1 = Datastore(data=DatastoreData('test-datastore1', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
self.datastore1 = self.dart.save_datastore(dst1)
wf0 = Workflow(data=WorkflowData('test-workflow0', self.datastore0.id, state=WorkflowState.ACTIVE))
self.workflow0 = self.dart.save_workflow(wf0, self.datastore0.id)
wf1 = Workflow(data=WorkflowData('test-workflow1', self.datastore1.id, state=WorkflowState.ACTIVE))
self.workflow1 = self.dart.save_workflow(wf1, self.datastore1.id)
a00 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
a01 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
self.action00, self.action01 = self.dart.save_actions([a00, a01], workflow_id=self.workflow0.id)
a10 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
a11 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
self.action10, self.action11 = self.dart.save_actions([a10, a11], workflow_id=self.workflow1.id)
tr_args = {'completed_workflow_id': self.workflow0.id}
tr = Trigger(data=TriggerData('test-trigger', 'workflow_completion', [self.workflow1.id], tr_args, TriggerState.ACTIVE))
self.trigger = self.dart.save_trigger(tr)
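        # With this trigger in place, completing workflow0 should automatically
        # fire workflow1; that is the chaining exercised by test_workflow_chaining.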
def tearDown(self):
for a in self.dart.get_actions(workflow_id=self.workflow0.id):
self.dart.delete_action(a.id)
for a in self.dart.get_actions(workflow_id=self.workflow1.id):
self.dart.delete_action(a.id)
for wfi in self.dart.get_workflow_instances(self.workflow0.id):
self.dart.delete_datastore(wfi.data.datastore_id)
for wfi in self.dart.get_workflow_instances(self.workflow1.id):
self.dart.delete_datastore(wfi.data.datastore_id)
self.dart.delete_trigger(self.trigger.id)
self.dart.delete_workflow_instances(self.workflow0.id)
self.dart.delete_workflow_instances(self.workflow1.id)
self.dart.delete_workflow(self.workflow0.id)
self.dart.delete_workflow(self.workflow1.id)
self.dart.delete_datastore(self.datastore0.id)
self.dart.delete_datastore(self.datastore1.id)
def test_workflow_chaining(self):
self.dart.manually_trigger_workflow(self.workflow0.id)
wf_instances = self.dart.await_workflow_completion(self.workflow1.id)
for wfi in wf_instances:
self.assertEqual(wfi.data.state, WorkflowInstanceState.COMPLETED)
if __name__ == '__main__':
unittest.main()
| mit | 6,003,539,099,658,426,000 | 55.492537 | 151 | 0.724439 | false |
galad-loth/DescHash | DeepHash/TestDH.py | 1 | 1206 | import numpy as npy
import mxnet as mx
import logging
from symbols.symbol_dh import DHMidLayer,DHLossLayer
from common.data import SiftSmallIter
batchsize=50
opProj1=DHMidLayer(96,0.0001,0.0001)
opProj2=DHMidLayer(64,0.0001,0.0001)
opOut=DHLossLayer(0.001)
data = mx.symbol.Variable('data')
lm1=opProj1(data=data, name='lm1')
lm2=opProj2(data=lm1, name="lm2")
netDH=opOut(data=lm2)
ex = netDH.simple_bind(ctx=mx.cpu(), data=(batchsize, 128))
listArgs = dict(zip(netDH.list_arguments(), ex.arg_arrays))
for arg in listArgs:
data = listArgs[arg]
if 'weight' in arg:
data[:] = mx.random.uniform(-0.1, 0.1, data.shape)
if 'bias' in arg:
data[:] = 0
dataPath="E:\\DevProj\\Datasets\\SIFT1M\\siftsmall"
trainIter, valIter=SiftSmallIter(dataPath,21000,4000,batchsize)
learning_rate=0.01
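# Plain SGD training loop: one forward/backward pass per batch, then each
# argument is updated in place with its gradient averaged over the batch.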
for ii in range(200):
print "Deep Hash Training at iteration "+str(ii)
trainbatch=trainIter.next()
listArgs['data'][:] = trainbatch.data[0]
ex.forward(is_train=True)
ex.backward()
for arg, grad in zip(ex.arg_arrays, ex.grad_arrays):
arg[:] -= learning_rate * (grad / batchsize)
xx=ex.outputs[0].asnumpy()
| apache-2.0 | 6,880,003,447,890,738,000 | 26.046512 | 63 | 0.667496 | false |
Apstra/aeon-ztps | tests/test_cumulus_bootstrap.py | 1 | 22788 | import pytest
import os
import mock
import json
import semver
from copy import deepcopy
from paramiko import AuthenticationException
from paramiko.ssh_exception import NoValidConnectionsError
from pexpect.pxssh import ExceptionPxssh
from aeon_ztp.bin import cumulus_bootstrap
from aeon.cumulus.device import Device
args = {
'target': '1.1.1.1',
'server': '2.2.2.2',
'topdir': '/tmp/dir',
'logfile': '/tmp/logfile',
'reload_delay': '60',
'init_delay': '90',
'user': 'admin',
'env_user': 'ENV_USER',
'env_passwd': 'ENV_PASS'
}
facts = {
'os_name': 'cumulus',
'vendor': 'cumulus',
'hw_part_number': '1234',
'hostname': 'cumulus',
'fqdn': 'cumulus.localhost',
'virtual': False,
'service_tag': '1234',
'os_version': '3.1.1',
'hw_version': '1234',
'mac_address': '0123456789012',
'serial_number': '09786554',
'hw_model': 'c1000'
}
# Cumulus 2.x device
facts_v2 = dict(facts)
facts_v2['os_version'] = '2.5.7'
# Cumulus 3.1.2 device
facts_v312 = dict(facts)
facts_v312['os_version'] = '3.1.2'
# Cumulus VX
facts_cvx = dict(facts)
facts_cvx['virtual'] = True
_OS_NAME = 'cumulus'
@pytest.fixture()
def cli_args():
parse = cumulus_bootstrap.cli_parse(['--target', args['target'],
'--server', args['server'],
'--topdir', args['topdir'],
'--logfile', args['logfile'],
'--reload-delay', args['reload_delay'],
'--init-delay', args['init_delay'],
'--user', args['user'],
'--env-user', args['env_user'],
'--env-passwd', args['env_passwd']])
return parse
# Prevent all requests calls
@pytest.fixture(autouse=True)
def no_requests(monkeypatch):
mock_response = mock.MagicMock()
monkeypatch.setattr('requests.sessions.Session.request', mock_response)
@mock.patch('aeon.cumulus.device.Connector')
# Parametrize device to test Cumulus v2, v3.1.1, v3.1.2, and VX
@pytest.fixture(params=[facts_v312, facts, facts_v2, facts_cvx], ids=['Cumulusv312',
'Cumulusv311',
'Cumulusv257',
'CumulusVX'])
def device(mock_con, request):
dev = Device(args['target'], no_probe=True, no_gather_facts=True)
dev.facts = request.param
return dev
@pytest.fixture()
def cb_obj(cli_args):
os.environ['ENV_PASS'] = 'admin'
cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args)
return cb
def test_cli_parse(cli_args):
for arg in vars(cli_args):
assert str(getattr(cli_args, arg)) == args[arg]
def test_cumulus_bootstrap(cli_args, cb_obj):
assert args['server'] == cb_obj.server
assert cli_args == cb_obj.cli_args
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.requests')
def test_post_device_facts(mock_requests, device, cb_obj):
cb_obj.dev = device
cb_obj.post_device_facts()
mock_requests.put.assert_called_with(json={
'os_version': device.facts['os_version'],
'os_name': device.facts['os_name'],
'ip_addr': device.target,
'hw_model': device.facts['hw_model'],
'serial_number': device.facts['serial_number'],
'facts': json.dumps(device.facts),
'image_name': None,
'finally_script': None
},
url='http://{}/api/devices/facts'.format(args['server']))
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.requests')
def test_post_device_status(mock_requests, device, cb_obj):
kw = {
'message': 'Test message',
'state': 'DONE'
}
cb_obj.dev = device
cb_obj.post_device_status(**kw)
mock_requests.put.assert_called_with(json={
'message': kw['message'],
'os_name': device.facts['os_name'],
'ip_addr': device.target,
'state': kw['state']
},
url='http://{}/api/devices/status'.format(args['server']))
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.post_device_status')
def test_exit_results(mock_post, cb_obj, device):
kw = {
'results': {'ok': True},
'exit_error': None,
}
with pytest.raises(SystemExit) as e:
cb_obj.exit_results(**kw)
mock_post.assert_called_with(
state='DONE',
message='bootstrap completed OK'
)
assert e.value.code == 0
# Test bad exit
kw = {
'results': {'ok': False,
'message': 'Error Message'},
'exit_error': 1,
}
with pytest.raises(SystemExit) as e:
cb_obj.exit_results(**kw)
mock_post.assert_called_with(
state='FAILED',
message=kw['results']['message']
)
assert e.value.code == 1
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
def test_wait_for_device_missing_username(mock_exit, cli_args, device):
new_args = deepcopy(cli_args)
new_args.user = None
new_args.env_user = None
with pytest.raises(SystemExit):
cumulus_bootstrap.CumulusBootstrap(args['server'], new_args)
mock_exit.assert_called_with(
results={'ok': False,
'error_type': 'login',
'message': 'login user-name missing'}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
def test_wait_for_device_missing_passwd(mock_exit, cli_args, device):
new_args = deepcopy(cli_args)
new_args.env_passwd = None
with pytest.raises(SystemExit):
cumulus_bootstrap.CumulusBootstrap(args['server'], new_args)
mock_exit.assert_called_with(
results={'ok': False,
'error_type': 'login',
'message': 'login user-password missing'}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.Device', side_effect=AuthenticationException)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.post_device_status')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.requests.put')
def test_wait_for_device_auth_exception(mock_requests, mock_post_dev, mock_dev, mock_exit, cb_obj):
with pytest.raises(SystemExit):
cb_obj.wait_for_device(1, 2)
mock_exit.assert_called_with(
results={'ok': False,
'error_type': 'login',
'message': 'Unauthorized - check user/password'}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.Device')
def test_wait_for_device_no_valid_connections(mock_dev, mock_exit, mock_time, cb_obj):
mock_dev.side_effect = NoValidConnectionsError({'error': 'test error value'})
with pytest.raises(SystemExit):
cb_obj.wait_for_device(2, 1)
mock_exit.assert_called_with(
results={'ok': False,
'error_type': 'login',
'message': 'Failed to connect to target %s within reload countdown' % cb_obj.cli_args.target}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.post_device_facts')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.Device')
def test_wait_for_device(mock_dev, mock_post_facts, cb_obj):
poll_delay = 2
cb_obj.wait_for_device(1, poll_delay)
mock_dev.assert_called_with(
cb_obj.cli_args.target,
passwd=os.environ['ENV_PASS'],
timeout=poll_delay,
user=cb_obj.cli_args.user or os.getenv(cb_obj.cli_args.env_user)
)
mock_post_facts.assert_called()
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.pxssh')
def test_wait_for_onie_rescue(mock_pxssh, cb_obj):
countdown = 5
poll_delay = 1
user = 'root'
pxssh_calls = [mock.call.pxssh(options={'UserKnownHostsFile': '/dev/null', 'StrictHostKeyChecking': 'no'}),
mock.call.pxssh().login(cb_obj.cli_args.target, user, auto_prompt_reset=False),
mock.call.pxssh().sendline('\n'),
mock.call.pxssh().prompt()]
wait = cb_obj.wait_for_onie_rescue(countdown, poll_delay, user=user)
assert wait
mock_pxssh.assert_has_calls(pxssh_calls)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.post_device_status')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.pxssh.pxssh')
def test_wait_for_onie_rescue_pxsshexception(mock_pxssh, mock_post_dev, mock_exit_results, mock_time):
mock_pxssh.return_value.login.side_effect = ExceptionPxssh('Could not establish connection to host')
countdown = 1
poll_delay = 1
user = 'root'
mock_post_dev_calls = [mock.call(message='Cumulus installation in progress. Waiting for boot to ONIE rescue mode. '
'Timeout remaining: 1 seconds',
state='AWAIT-ONLINE'),
mock.call(message='Cumulus installation in progress. Waiting for boot to ONIE rescue mode. '
'Timeout remaining: 0 seconds',
state='AWAIT-ONLINE')
]
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args())
with pytest.raises(SystemExit):
local_cb.wait_for_onie_rescue(countdown, poll_delay, user=user)
mock_post_dev.assert_has_calls(mock_post_dev_calls)
mock_exit_results.assert_called_with(results={'message': 'Device 1.1.1.1 not reachable in ONIE rescue mode within reload countdown.',
'error_type': 'login',
'ok': False})
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.post_device_status')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.pxssh.pxssh')
def test_wait_for_onie_rescue_exception(mock_pxssh, mock_post_dev, mock_exit_results):
error = 'Super weird error youve never seen before'
mock_pxssh.return_value.login.side_effect = ExceptionPxssh(error)
countdown = 1
poll_delay = 1
user = 'root'
target = cli_args().target
mock_post_dev_calls = [mock.call(message='Cumulus installation in progress. Waiting for boot to ONIE rescue mode. '
'Timeout remaining: 1 seconds',
state='AWAIT-ONLINE')
]
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args())
with pytest.raises(SystemExit):
local_cb.wait_for_onie_rescue(countdown, poll_delay, user=user)
mock_post_dev.assert_has_calls(mock_post_dev_calls)
mock_exit_results.assert_called_with(results={'message': 'Error accessing {target} in ONIE rescue'
' mode: {error}.'.format(target=target, error=error),
'error_type': 'login',
'ok': False}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.json.loads')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.subprocess.Popen')
def test_check_os_install_json_exception(mock_subprocess, mock_json, mock_exit, cb_obj, device):
cb_obj.dev = device
test_stdout = 'test stdout'
exception_msg = 'test exception message'
errmsg = 'Unable to load os-select output as JSON: {}\n {}'.format(test_stdout, exception_msg)
mock_json.side_effect = Exception(exception_msg)
mock_subprocess.return_value.communicate.return_value = (test_stdout, 'test stderr')
with pytest.raises(SystemExit):
cb_obj.check_os_install_and_finally()
mock_exit.assert_called_with(
exit_error=errmsg,
results={
'ok': False,
'error_type': 'install',
'message': errmsg
}
)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.subprocess.Popen')
def test_get_required_os(mock_subprocess, device):
expected_os_sel_output = '{"output": "os-select test output"}'
mock_subprocess.return_value.communicate.return_value = (expected_os_sel_output, 'stderr')
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args())
local_cb.dev = device
conf_fpath = '{topdir}/etc/profiles/cumulus/os-selector.cfg'.format(topdir=cli_args().topdir)
cmd = "{topdir}/bin/aztp_os_selector.py -j '{dev_json}' -c {config}".format(topdir=cli_args().topdir,
dev_json=json.dumps(device.facts),
config=conf_fpath)
os_sel_output = local_cb.check_os_install_and_finally()
assert os_sel_output == json.loads(expected_os_sel_output)
mock_subprocess.assert_called_with(cmd, shell=True, stdout=-1)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.pxssh.pxssh')
def test_onie_install_pxssh_exception(mock_pxssh, mock_exit_results, cb_obj, device):
cb_obj.dev = device
exc = ExceptionPxssh('Could not establish connection to host')
mock_pxssh.return_value.login.side_effect = exc
with pytest.raises(SystemExit):
cb_obj.do_onie_install()
mock_exit_results.assert_called_with(results={'ok': False,
'error_type': 'install',
'message': exc})
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.pxssh.pxssh')
def test_onie_install_pxssh(mock_pxssh, mock_time, cb_obj, device):
cb_obj.dev = device
user = 'test'
image_name = 'test_image'
cb_obj.image_name = image_name
pxssh_calls = [mock.call().pxssh(options={'UserKnownHostsFile': '/dev/null', 'StrictHostKeyChecking': 'no'}),
mock.call().login(cb_obj.cli_args.target, user, auto_prompt_reset=False),
mock.call().sendline('\n'),
mock.call().prompt(),
mock.call().sendline('onie-nos-install http://{server}/images/{os_name}/{image_name}'.format(
server=cb_obj.cli_args.server, os_name=_OS_NAME, image_name=image_name)),
mock.call().expect('installer', timeout=15),
mock.call().expect('Please reboot to start installing OS.', timeout=240),
mock.call().prompt(),
mock.call().sendline('reboot'),
mock.call().close()]
success = cb_obj.do_onie_install(user=user)
assert success
assert mock_pxssh.mock_calls == pxssh_calls
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.os.path.exists', return_value=False)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
def test_install_os_image_missing(mock_exit_results, mock_os, cb_obj, device):
image_name = 'test_image'
cb_obj.image_name = image_name
image_fpath = os.path.join(cb_obj.cli_args.topdir, 'vendor_images', _OS_NAME, image_name)
errmsg = 'Image file does not exist: {}'.format(image_fpath)
with pytest.raises(SystemExit):
cb_obj.install_os()
mock_exit_results.assert_called_with(results={'ok': False,
'error_type': 'install',
'message': errmsg}
)
@mock.patch('aeon.cumulus.device.Connector')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.os.path.exists', return_value=True)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
def test_install_os_image_not_all_good(mock_exit_results, mock_os, mock_con, device, cli_args):
image_name = 'test_image'
errmsg = 'error running command'
device.api.execute.return_value = (False, errmsg)
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args)
local_cb.dev = device
local_cb.image_name = image_name
sem_ver = semver.parse_version_info(device.facts['os_version'])
if sem_ver >= (3, 0, 0):
# Cumulus 3.x install command
cmd = 'sudo onie-select -rf'
else:
# Cumulus 2.x install command
cmd = 'sudo /usr/cumulus/bin/cl-img-install -sf http://{server}/images/{os_name}/{image_name}'.format(
server=local_cb.cli_args.server, os_name=_OS_NAME, image_name=image_name)
with pytest.raises(SystemExit):
local_cb.install_os()
mock_exit_results.assert_called_with(results={'ok': False,
'error_type': 'install',
'message': 'Unable to run command: {cmd}. '
'Error message: {errmsg}'.format(cmd=cmd, errmsg=errmsg)})
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.wait_for_onie_rescue')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.onie_install')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.wait_for_device')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon.cumulus.device.Connector')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.os.path.exists', return_value=True)
def test_install_os_image(mock_os, mock_con, mock_time, mock_wait_device,
mock_onie_install, mock_wait_for_onie, device, cli_args):
image_name = 'test_image'
results = 'test result message'
device.api.execute.return_value = (True, results)
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args)
local_cb.dev = device
local_cb.image_name = image_name
sem_ver = semver.parse_version_info(device.facts['os_version'])
if sem_ver >= (3, 0, 0):
# Cumulus 3.x install command
cmd = 'sudo onie-select -rf'
method_calls = [mock.call.execute([cmd]), mock.call.execute(['sudo reboot'])]
else:
# Cumulus 2.x install command
cmd = 'sudo /usr/cumulus/bin/cl-img-install -sf http://{server}/images/{os_name}/{image_name}'.format(
server=local_cb.cli_args.server, os_name=_OS_NAME, image_name=image_name)
method_calls = [mock.call.execute([cmd])]
local_cb.install_os()
assert device.api.method_calls == method_calls
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.install_os')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.check_os_install_and_finally')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.wait_for_device')
def test_ensure_os_version(mock_wait_for_device, mock_get_os, mock_install_os, mock_time, device, cli_args):
results = 'test result message'
device.api.execute.return_value = (True, results)
ver_required = '3.1.2'
device_semver = semver.parse_version_info(device.facts['os_version'])
image_name = 'image_file_name'
def mock_get_os_function():
diff = semver.compare(device.facts['os_version'], ver_required)
# Check if upgrade is required
if diff < 0:
# upgrade required
local_cb.image_name = image_name
else:
# upgrade not required
local_cb.image_name = None
mock_get_os.side_effect = mock_get_os_function
local_cb = cumulus_bootstrap.CumulusBootstrap(args['server'], cli_args)
local_cb.dev = device
local_cb.ensure_os_version()
# If upgrade was required, check that the correct calls were made
if local_cb.image_name:
assert mock_install_os.called_with(mock.call(device), image_name=image_name)
if device_semver < (3, 0, 0):
device.api.execute.assert_called_with(['sudo reboot'])
mock_wait_for_device.assert_called_with(countdown=local_cb.cli_args.reload_delay, poll_delay=10)
else:
# Ensure device was not rebooted if v3 or greater, and wait_for_device was called
assert not device.api.execute.called
else:
assert not device.api.execute.called
assert not mock_install_os.called
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.os.path.isdir', return_value=False)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.cli_parse')
def test_main_invalid_topdir(mock_cli_parse, mock_time, mock_isdir, mock_exit, cli_args):
mock_cli_parse.return_value = cli_args
exc = '{} is not a directory'.format(cli_args.topdir)
with pytest.raises(SystemExit):
cumulus_bootstrap.main()
mock_exit.assert_called_with({'ok': False,
'error_type': 'args',
'message': exc})
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.check_os_install_and_finally')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.ensure_os_version')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.wait_for_device')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.CumulusBootstrap.exit_results', side_effect=SystemExit)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.os.path.isdir', return_value=True)
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.time')
@mock.patch('aeon_ztp.bin.cumulus_bootstrap.cli_parse')
def test_main(mock_cli_parse, mock_time, mock_isdir, mock_exit, mock_wait, mock_ensure_os, mock_check_os_and_finally, mock_cb, cli_args, device, cb_obj):
mock_cli_parse.return_value = cli_args
mock_wait.return_value = device
cb_obj.dev = device
mock_cb.return_value = cb_obj
with pytest.raises(SystemExit):
cumulus_bootstrap.main()
mock_exit.assert_called_with({'ok': True})
mock_wait.assert_called_with(countdown=cli_args.reload_delay, poll_delay=10, msg='Waiting for device access')
if device.facts['virtual']:
assert not mock_ensure_os.called
else:
        mock_ensure_os.assert_called()
| apache-2.0 | 37,383,628,305,071,304 | 42.655172 | 153 | 0.622038 | false |
quodlibet/quodlibet | tests/test_library_playlist.py | 1 | 7427 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import shutil
from pathlib import Path
from quodlibet import app
from quodlibet.formats import AudioFile
from quodlibet.library import SongFileLibrary
from quodlibet.util import connect_obj
from quodlibet.util.collection import Playlist, FileBackedPlaylist
from tests import TestCase, _TEMP_DIR
from tests.test_library_libraries import FakeSong
from quodlibet.library.playlist import _DEFAULT_PLAYLIST_DIR
def AFrange(*args):
songs = [
AudioFile({"~filename": f"/tmp/{i}.mp3",
"artist": "Foo",
"title": f"track-{i}"})
for i in range(*args)]
# Need a mountpoint, or everything goes wrong...
for song in songs:
song.sanitize()
return songs
PL_NAME = "The Only"
class TPlaylistLibrary(TestCase):
Fake = FakeSong
Frange = staticmethod(AFrange)
UnderlyingLibrary = SongFileLibrary
def setUp(self):
self.underlying = self.UnderlyingLibrary()
# Need this for ~playlists
app.library = self.underlying
self.added = []
self.changed = []
self.removed = []
self._sigs = [
connect_obj(self.underlying, 'changed', list.extend, self.changed),
connect_obj(self.underlying, 'removed', list.extend, self.removed),
]
for song in self.underlying:
song.sanitize()
# Populate for every test
self.underlying.add(self.Frange(12))
pl_dir = Path(_TEMP_DIR) / _DEFAULT_PLAYLIST_DIR
self.create_playlist_file(pl_dir)
self.add_ignored_file(pl_dir)
# Creates the library
self.library = self.underlying.playlists
def create_playlist_file(self, pl_dir) -> None:
# Won't exist, we haven't started the library yet
temp_path = Path(_TEMP_DIR).absolute()
parents = {path.absolute() for path in pl_dir.parents}
assert temp_path in parents or os.environ.get('CI', False), "Dangerous test!"
shutil.rmtree(pl_dir, ignore_errors=True)
os.makedirs(pl_dir)
fn = FileBackedPlaylist.filename_for(PL_NAME)
# No PL library given - rely on import
self.pl = pl = FileBackedPlaylist(str(pl_dir), fn, self.underlying, None)
pl.extend(list(sorted(self.underlying))[-3:])
assert len(pl) == 3, "Should have only the three songs just added"
diff = set(self.underlying) - set(pl)
assert all(song in self.underlying for song in pl), f"Missing from lib: {diff}"
pl.finalize()
pl.write()
@staticmethod
def add_ignored_file(pl_dir):
# See #3639
with open(pl_dir / ".gitignore", "w") as f:
f.write(".backup\n")
def tearDown(self):
for pl in list(self.library.values()):
pl.delete()
for s in self._sigs:
self.underlying.disconnect(s)
self.underlying.destroy()
# Don't destroy self.library, it's a reference which is gone
app.library = None
def test_migrate(self):
pl_path = Path(self.library.pl_dir)
path = pl_path / f"{PL_NAME}.xspf"
assert path.exists(), f"New playlist not found - got {os.listdir(pl_path)}"
def test_old_playlist_removed(self):
pl_path = Path(self.library.pl_dir)
fn = FileBackedPlaylist.filename_for(PL_NAME)
old_path = pl_path / fn
assert not old_path.exists(), "Didn't remove old playlist"
assert len(self.library) == 1
def test_backup(self):
pl_path = Path(self.library.pl_dir)
fn = FileBackedPlaylist.filename_for(PL_NAME)
backup = (pl_path / ".backup" / fn)
assert backup.exists(), "Didn't backup"
with open(backup) as f:
lines = f.readlines()
assert len(lines) == 3
def test_dotfiles_ignored(self):
pl_path = Path(self.library.pl_dir)
ignore_path = pl_path / ".gitignore"
assert ignore_path.exists(), "Shouldn't have removed hidden file"
assert not any("gitignore" in pl.name for pl in self.library)
def test_get(self):
pl = self.library.get(PL_NAME)
assert pl, f"Not found - got {self.library.items()}"
assert pl.name == PL_NAME
assert pl.key == PL_NAME
assert len(pl.songs) == 3
assert not self.underlying.get("Another")
def test_keys(self):
assert list(self.library.keys()) == [PL_NAME]
def test_has_key(self):
assert self.library.has_key(PL_NAME)
def test_misc_collection(self):
self.failUnless(self.library.values())
def test_items(self):
assert self.library.items() == [(PL_NAME, self.pl)]
def test_remove_songs(self):
pl = self.library[PL_NAME]
all_contents = list(self.underlying.values())
assert all(song in self.underlying for song in pl), "Not all songs are in lib"
removed = self.underlying.remove(all_contents)
assert set(removed) == set(all_contents), "Not everything removed from lib"
assert not pl, f"PL should be empty, has: {list(pl)}"
def test_misc(self):
# It shouldn't implement FileLibrary etc
self.failIf(getattr(self.library, "filename", None))
class TPlaylistLibrarySignals(TestCase):
def setUp(self):
self.lib = lib = SongFileLibrary()
self.received = []
def listen(name, items):
self.received.append(name)
self._sigs = [
connect_obj(lib, 'added', listen, 'added'),
connect_obj(lib, 'changed', listen, 'changed'),
connect_obj(lib, 'removed', listen, 'removed'),
]
self.playlists = lib.playlists
self._asigs = [
connect_obj(self.playlists, 'added', listen, 'pl_added'),
connect_obj(self.playlists, 'changed', listen, 'pl_changed'),
connect_obj(self.playlists, 'removed', listen, 'pl_removed'),
]
songs = AFrange(3)
self.lib.add(songs)
def test_add_remove(self):
pl = Playlist("only", self.lib, self.playlists)
assert self.received == ["added", "pl_added"]
self.received.clear()
# Update playlist, should trigger changes in files too
pl.extend(self.lib._contents.values())
# Changing files then does trigger another change,
# annoying but seems impossible to avoid if we want to save metadata, ~playlists
assert self.received == ["pl_changed", "changed", "pl_changed"]
self.received.clear()
# Remove some songs and watch the playlist change
songs = list(self.lib._contents.values())
self.lib.remove(songs[:2])
assert self.received == ["removed", "pl_changed", "changed", "pl_changed"]
self.received.clear()
pl.delete()
assert self.received == ["pl_removed"]
def test_songs_changes_have_no_effect(self):
self.received.clear()
self.lib.changed(list(self.lib)[0:1])
assert self.received == ["changed"]
def tearDown(self):
for s in self._asigs:
self.playlists.disconnect(s)
for s in self._sigs:
self.lib.disconnect(s)
self.lib.destroy()
| gpl-2.0 | -9,192,094,516,210,168,000 | 34.366667 | 88 | 0.614919 | false |
c86j224s/snippet | Python_asyncio_binary_echo/pyclient2/echoclient/cli.py | 1 | 3199 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Dummy echo client based on binary protocol with asyncio
'''
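# Wire format implemented below (all integers little-endian):
#   [length:int32][command:int32][tid:int32][body:bytes]
# where `length` counts everything after itself (command + tid + body).
# Responses are matched back to pending requests by their (command, tid) pair.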
import asyncio
import struct
class conn_mgr:
def __init__(self, addr, port, asyncio_loop):
''' initialize object member variables '''
# network connection information
self.addr = addr
self.port = port
# asyncio streams, tasks
self.loop = asyncio_loop
self.reader = None
self.writer = None
self.read_task = None
# transaction map
self.tid = 1
self.transactions = {}
def transactionid(self):
''' issue new transaction id '''
tid = self.tid
self.tid += 1
return tid
async def open_connection(self):
''' open connection and start packet read loop '''
self.reader, self.writer, = await asyncio.open_connection(self.addr, self.port, loop=self.loop)
self.read_task = self.loop.create_task(self._read_loop())
async def _read_loop(self):
''' packet read loop handling response and notification messages '''
while True:
command, tid, message, = await self._read_message()
if (command, tid) in self.transactions:
self.transactions[(command, tid)].set_result(message)
print('handled response. {}, {}, {}'.format(command, tid, message))
else:
print('unhandled response. {}, {}, {}'.format(command, tid, message))
async def request(self, command, body):
''' request and wait response message '''
tid = self.transactionid()
self.transactions[(command, tid)] = self.loop.create_future()
await self._write_message(command, tid, body)
return await self.transactions[(command, tid)]
def close_connection(self):
''' close streams and stop the packet read loop '''
self.writer.close()
self.reader = None
self.writer = None
self.read_task.cancel()
async def _write_message(self, command, tid, body):
''' write a message to stream '''
        payload = struct.pack('<ii{}s'.format(len(body)), command, tid, body)
self.writer.write(struct.pack('<i{}s'.format(len(payload)), len(payload), payload))
await self.writer.drain()
async def _read_message(self):
''' read a message from stream '''
        # readexactly() guarantees full frames even if the stream returns short reads
        length, = struct.unpack('<i', await self.reader.readexactly(4))
        command, = struct.unpack('<i', await self.reader.readexactly(4))
        payload = await self.reader.readexactly(length-4)
tid, body, = struct.unpack('<i{}s'.format(len(payload)-4), payload)
return command, tid, body
async def tcp_echo_client(loop):
conn = conn_mgr('127.0.0.1', 9999, loop)
await conn.open_connection()
body = await conn.request(1, b'this is first data')
print('Received body = {}'.format(body.decode()))
body = await conn.request(2, b'this is second data')
print('Received body = {}'.format(body.decode()))
conn.close_connection()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client(loop))
loop.stop()
loop.close()
| apache-2.0 | 257,500,368,639,662,980 | 31.642857 | 103 | 0.599562 | false |
DemocracyClub/yournextrepresentative | ynr/apps/people/data_removal_helpers.py | 1 | 4426 | """
A set of helpers that automate personal data removal. Used in the admin
interface, typically after a GDPR request for removal.
"""
import abc
from collections import defaultdict
DELETED_STR = "<DELETED>"
class BaseCheck(metaclass=abc.ABCMeta):
def __init__(self, person):
self.person = person
def collect(self):
return {self.__class__.__name__: self.run_collect()}
@abc.abstractmethod
def run_collect(self):
pass
@abc.abstractmethod
def run_remove(self):
pass
@abc.abstractmethod
def get_item_display_info(self, item):
pass
class PhotoCheck(BaseCheck):
def get_item_display_info(self, item):
return {
"title": "image",
"description": """Source: {source}
User: {user}
""".format(
source=item.source or None, user=item.uploading_user
),
"image": item.image.url,
}
def run_collect(self):
photos_to_remove = []
for photo in self.person.images.all():
photos_to_remove.append(self.get_item_display_info(photo))
return photos_to_remove
def run_remove(self):
self.person.images.all().delete()
class VersionHistoryCheck(BaseCheck):
def get_item_display_info(self, item):
return {
"title": item[0],
"description": "\n\t".join(sorted([x for x in item[1] if x])),
}
def run_collect(self, do_remove=False):
version_data_to_remove = []
never_remove = [
"death_date",
"honorific_prefix",
"id",
"wikipedia_url",
"candidacies",
"name",
"honorific_suffix",
"wikidata_id",
"other_names",
"slug",
]
never_remove_identifiers = ["uk.org.publicwhip"]
to_remove = defaultdict(set)
versions = self.person.versions
for version in versions:
for key, value in version.get("data").items():
if key not in never_remove:
if value or value == DELETED_STR:
if key == "identifiers":
for v in value:
if (
not v.get("scheme")
in never_remove_identifiers
):
if v["identifier"] == DELETED_STR:
continue
to_remove[
"Identifier: " + v.get("scheme")
].add(v["identifier"])
if do_remove:
v["identifier"] = DELETED_STR
else:
if str(value) == DELETED_STR:
continue
to_remove[key].add(str(value))
if do_remove:
version["data"][key] = DELETED_STR
for remove in to_remove.items():
if not remove[1]:
continue
version_data_to_remove.append(self.get_item_display_info(remove))
if do_remove:
self.person.versions = versions
self.person.save()
return sorted(version_data_to_remove, key=lambda item: item["title"])
def run_remove(self):
self.run_collect(do_remove=True)
class DataRemover:
def __init__(self, person):
self.person = person
self.to_remove = {}
self._collected = False
self.checks = [PhotoCheck, VersionHistoryCheck]
def collect(self):
"""
Runs all checks and collects the data that will be removed without
performing any actions.
:return:
"""
for check in self.checks:
self.to_remove.update(check(self.person).collect())
self._collected = True
return self.to_remove
def remove(self):
"""
Removes all data found in the checks.
:return:
"""
if not self._collected:
raise ValueError("Can't remove data without calling collect first")
for check in self.checks:
check(self.person).run_remove()
return self.to_remove
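# Illustrative (hypothetical) usage from an admin action, assuming `person` is
# the Person instance the removal request concerns:
#
#     remover = DataRemover(person)
#     summary = remover.collect()   # preview what each check would remove
#     remover.remove()              # redact photos and version-history data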
| agpl-3.0 | -711,959,650,726,068,900 | 29.736111 | 79 | 0.491414 | false |
jstacoder/flask-basehead | flask_basehead/core.py | 1 | 2152 | '''
new_bc.core.py
core api calls for new_bc api library
'''
import os
import requests
API_URL = 'https://basecamp.com/{}/api/v1/'
MY_BC_NUMBER = '2361076'
def make_api_url(account_num=None,call=None,*args):
if account_num is None:
account_num = MY_BC_NUMBER
if call is None:
call = ''
u = API_URL.format(account_num) + call
u = u + '.json' if not args else u + '/' + '/'.join(map(str,args)) + '.json'
return u
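# For example, with the default account number:
#   make_api_url(call='projects')      -> 'https://basecamp.com/2361076/api/v1/projects.json'
#   make_api_url(None, 'projects', 42) -> 'https://basecamp.com/2361076/api/v1/projects/42.json'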
def get_auth(username=None,passwd=None):
if username and passwd:
return (username,passwd)
elif os.environ.get('BC_AUTH',False):
return os.environ['BC_AUTH'].split(' ')
else:
if os.path.exists('auth.txt'):
return tuple([str(x[:-1]) for x in tuple(open('auth.txt').readlines())])
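# Credential lookup order: explicit username/passwd arguments, then the BC_AUTH
# environment variable ('user pass'), then a local auth.txt file (presumably the
# username on the first line and the password on the second).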
def create_session(auth=None,oauth2=False):
if not oauth2:
req = requests.session()
else:
import os
url = os.environ.get('INIT_REQUEST_URL',None)
import requests_oauthlib
req = requests_oauthlib.OAuth2Session(url)
if auth is None:
req.auth = get_auth()
else:
if len(auth) == 2:
            req.auth = get_auth(*auth)
else:
raise IOError('unsupported authentication')
return req
def send_request(url,json=True,post=False,session=None,**kwargs):
if session is None:
req = create_session()
else:
req = session
if url is None:
if kwargs == {}:
raise IOError('need a url to send request to')
else:
account_num = kwargs.pop('account_num',None)
call = kwargs.pop('call',None)
args = kwargs.values()
if args:
url = make_api_url(account_num=account_num,call=call,*args)
else:
url = make_api_url(account_num=account_num,call=call)
if not post:
if json:
return req.get(url).json()
else:
return req.get(url)
else:
data = kwargs.get('post_data',None)
if json:
return req.post(url,data=data).json()
else:
return req.post(url,data=data)
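# Hypothetical example (requires valid Basecamp credentials to be configured):
#   send_request(None, account_num='2361076', call='projects')
# builds the URL via make_api_url() and returns the GET response parsed as JSON.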
| bsd-3-clause | -6,049,897,316,050,391,000 | 27.693333 | 84 | 0.560409 | false |
kaushik94/sympy | sympy/assumptions/satask.py | 2 | 5183 | from __future__ import print_function, division
from sympy import Symbol, S
from sympy.assumptions.ask_generated import get_all_known_facts
from sympy.assumptions.assume import global_assumptions, AppliedPredicate
from sympy.assumptions.sathandlers import fact_registry
from sympy.core import oo
from sympy.logic.inference import satisfiable
from sympy.assumptions.cnf import CNF, EncodedCNF
def satask(proposition, assumptions=True, context=global_assumptions,
use_known_facts=True, iterations=oo):
props = CNF.from_prop(proposition)
_props = CNF.from_prop(~proposition)
if context:
tmp = CNF()
context = tmp.extend(context)
assumptions = CNF.from_prop(assumptions)
sat = get_all_relevant_facts(props, assumptions, context,
use_known_facts=use_known_facts, iterations=iterations)
if context:
sat.add_from_cnf(context)
sat.add_from_cnf(assumptions)
return check_satisfiability(props, _props, sat)
def check_satisfiability(prop, _prop, factbase):
sat_true = factbase.copy()
sat_false = factbase.copy()
sat_true.add_from_cnf(prop)
sat_false.add_from_cnf(_prop)
can_be_true = satisfiable(sat_true)
can_be_false = satisfiable(sat_false)
if can_be_true and can_be_false:
return None
if can_be_true and not can_be_false:
return True
if not can_be_true and can_be_false:
return False
if not can_be_true and not can_be_false:
# TODO: Run additional checks to see which combination of the
# assumptions, global_assumptions, and relevant_facts are
# inconsistent.
raise ValueError("Inconsistent assumptions")
def get_relevant_facts(proposition, assumptions=None,
context=None, exprs=None,
relevant_facts=None):
newexprs = set()
if not assumptions:
assumptions = CNF({S.true})
if not relevant_facts:
relevant_facts = set()
def find_symbols(pred):
if isinstance(pred, CNF):
symbols = set()
for a in pred.all_predicates():
symbols |= find_symbols(a)
return symbols
if isinstance(pred.args, AppliedPredicate):
return {pred.args[0]}
return pred.atoms(Symbol)
if not exprs:
req_keys = find_symbols(proposition)
keys = proposition.all_predicates()
# XXX: We need this since True/False are not Basic
lkeys = set()
lkeys |= assumptions.all_predicates()
if context:
lkeys |= context.all_predicates()
lkeys = lkeys - {S.true, S.false}
tmp_keys = None
while tmp_keys != set():
tmp = set()
for l in lkeys:
syms = find_symbols(l)
if (syms & req_keys) != set():
tmp |= syms
tmp_keys = tmp - req_keys
req_keys |= tmp_keys
keys |= {l for l in lkeys if find_symbols(l) & req_keys != set()}
exprs = {key.args[0] if isinstance(key, AppliedPredicate) else key for key in keys}
return exprs, relevant_facts
for expr in exprs:
for fact in fact_registry[expr.func]:
cnf_fact = CNF.to_CNF(fact)
newfact = cnf_fact.rcall(expr)
relevant_facts = relevant_facts._and(newfact)
newexprs |= set([key.args[0] for key in newfact.all_predicates()
if isinstance(key, AppliedPredicate)])
return newexprs - exprs, relevant_facts
def get_all_relevant_facts(proposition, assumptions=True,
context=global_assumptions, use_known_facts=True, iterations=oo):
# The relevant facts might introduce new keys, e.g., Q.zero(x*y) will
# introduce the keys Q.zero(x) and Q.zero(y), so we need to run it until
# we stop getting new things. Hopefully this strategy won't lead to an
# infinite loop in the future.
i = 0
relevant_facts = CNF()
exprs = None
all_exprs = set()
while exprs != set():
exprs, relevant_facts = get_relevant_facts(proposition,
assumptions, context, exprs=exprs,
relevant_facts=relevant_facts)
all_exprs |= exprs
i += 1
if i >= iterations:
break
if use_known_facts:
known_facts_CNF = CNF()
known_facts_CNF.add_clauses(get_all_known_facts())
kf_encoded = EncodedCNF()
kf_encoded.from_cnf(known_facts_CNF)
def translate_literal(lit, delta):
if lit > 0:
return lit + delta
else:
return lit - delta
def translate_data(data, delta):
return [{translate_literal(i, delta) for i in clause} for clause in data]
data = []
symbols = []
n_lit = len(kf_encoded.symbols)
for i, expr in enumerate(all_exprs):
symbols += [pred(expr) for pred in kf_encoded.symbols]
data += translate_data(kf_encoded.data, i * n_lit)
encoding = dict(list(zip(symbols, range(1, len(symbols)+1))))
ctx = EncodedCNF(data, encoding)
else:
ctx = EncodedCNF()
ctx.add_from_cnf(relevant_facts)
return ctx
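if __name__ == '__main__':
    # Hedged demo (not part of the original module): mirrors the Q.zero(x*y)
    # example mentioned in the comment inside get_all_relevant_facts() above.
    from sympy import Q, symbols
    x, y = symbols('x y')
    print(satask(Q.zero(x*y), Q.zero(x)))  # expected: True
    print(satask(Q.positive(x)))           # expected: None (not enough information)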
| bsd-3-clause | -4,291,670,985,093,664,300 | 31.803797 | 91 | 0.612001 | false |
umaptechnologies/must | details/factories.py | 1 | 4044 | import inspect
from class_pattern import ClassPattern
from primitive_musts import SafeObject
class Factory(object):
''' WRITEME '''
def __init__(self, obj_constructor, constructor_args, product_pattern, universe, known_parameters):
self._obj_constructor = obj_constructor
self._constructor_args = constructor_args
self._factory_header = constructor_args
self._product_pattern = product_pattern
self._universe = universe
self._known_parameters = known_parameters
def make(self, *args):
arg_index = 0
dependencies = []
for i in range(len(self._constructor_args)):
a = self._constructor_args[i]
if a in self._factory_header:
dependencies.append(args[arg_index])
arg_index += 1
else:
namehint = str(self._obj_constructor)+' needs '+('an' if a[0] in 'aeiou' else 'a')+' "'+a+'" that'
dependencies.append(self._universe.create_with_namehint(namehint, self._product_pattern._constructor.param_signatures[i].get_param_mold()))
# TODO: Incorporate self._known_parameters
result = self._obj_constructor(*dependencies)
result.must_return = lambda x: SafeObject()
return result
def must_make(self, obj_type, parameters):
new_factory_header = parameters.split(', ')
assert self._factory_header == self._constructor_args or new_factory_header == self._factory_header, "Factory parameters cannot be %s; already specified as %s." % (new_factory_header, self._factory_header)
self._factory_header = new_factory_header
return self
def that_must_make(self, obj_type, parameters):
return self.must_make(obj_type, parameters)
def and_must_make(self, obj_type, parameters):
return self.must_make(obj_type, parameters)
def must(self, action, taking='', returning=''):
return self
def must_have(self, *attributes):
return self
def must_use(self, **known_parameters):
return self
def that_must(self, action, taking='', returning=''):
return self.must(action, taking, returning)
def that_must_have(self, *attributes):
return self.must_have(*attributes)
def that_must_use(self, **known_parameters):
return self.must_use(**known_parameters)
def and_must(self, action, taking='', returning=''):
return self.must(action, taking, returning)
def and_must_have(self, *attributes):
return self.must_have(*attributes)
def and_must_use(self, **known_parameters):
return self.must_use(**known_parameters)
def __str__(self):
result = str(self._obj_constructor)+" factory("
result += ', '.join(self._constructor_args)
result += ")"
return result
class FactoryPattern(object):
''' WRITEME '''
def __init__(self, constructor, ignore_warnings=False):
self._constructor = constructor
self._constructor_args = inspect.getargspec(constructor.__init__).args[1:] # Ignore 'self'
self._product = ClassPattern(constructor)
def reflects_class(self, possible_class):
return False
def create(self, universe, aliases, known_parameters):
return Factory(self._constructor, self._constructor_args, self._product, universe, known_parameters)
def matches(self, requirements, aliases):
is_factory = requirements.type == 'factory'
has_parameters = self.has_parameters(requirements.parameters)
product_matches = (requirements.product is None) or \
(self._product.matches(requirements.product, aliases))
return is_factory and has_parameters and product_matches
def has_parameters(self, parameters):
return all([x in self._constructor_args for x in parameters])
def __str__(self):
result = str(self._constructor)+" factory("
result += ', '.join(self._constructor_args)
result += ")"
return result
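# --- Hedged usage sketch (not part of the original module) ---
# `Widget`, `universe` and `aliases` are placeholders: FactoryPattern wraps a
# class, and the Factory it creates resolves any constructor arguments missing
# from make() by asking the universe for matching objects.
#
#   pattern = FactoryPattern(Widget)
#   factory = pattern.create(universe, aliases, known_parameters={})
#   factory = factory.must_make(Widget, 'size')  # make() will now expect `size`
#   widget = factory.make(some_size)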
| apache-2.0 | 4,343,050,456,781,097,000 | 37.514286 | 213 | 0.638229 | false |
mjirik/io3d | io3d/fsbrowser.py | 1 | 17535 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from loguru import logger
import glob
import numpy as np
import os
# TODO remove cv2 - done
import matplotlib.pyplot as plt
from fnmatch import fnmatch
try:
import pydicom as pdicom
except ImportError:
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
import dicom as pdicom
    logger.debug("dicom imported - it would be better to use pydicom")
from os import listdir
from os.path import isfile, join
from . import datareader
from skimage import io
# TODO - PyQt5 - done
from PyQt5.QtWidgets import QFileDialog, QLabel, QVBoxLayout
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import Qt
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
# FileSystemBrowser("c:/jkdfaldkfj/asdfasjfh")
class FileSystemBrowser:
def __init__(self, path=None):
self.path = path
self.preview_size = [100, 100]
self.nova_promenna = 5
pass
    # Method for displaying the image preview only - it would be called only if there is an image to show
def get_path_info_preview(self, path):
path_lower = path.lower()
# name
name = os.path.basename(os.path.normpath(path))
name_final = "name: " + name
path_sl = path + "/"
if ".jpg" in path_lower:
preview = "Used path leads to current image."
img = io.imread(path)
io.imshow(img)
io.show()
elif ".png" in path_lower:
preview = "Used path leads to current image."
img = io.imread(path)
io.imshow(img)
io.show()
elif ".dcm" in path_lower:
preview = "Used path leads to current image."
ds = pdicom.dcmread(path)
plt.imshow(ds.pixel_array, cmap=plt.cm.bone)
else:
preview = "Preview of files in dir: " + name
only_files = [f for f in listdir(path) if isfile(join(path, f))]
for x in only_files:
if (".dcm" or ".Dcm" or ".DCM") in x:
ending = os.path.basename(os.path.normpath(path_sl + x))
preview_path = path_sl + ending
ds = pdicom.dcmread(preview_path)
plt.imshow(ds.pixel_array, cmap=plt.cm.bone)
break
elif (".jpg" or ".Jpg" or ".JPG") in x:
ending = os.path.basename(os.path.normpath(path_sl + x))
preview_path = path_sl + ending
img = io.imread(preview_path)
io.imshow(img)
io.show()
break
elif (".png" or ".Png" or ".PNG") in x:
ending = os.path.basename(os.path.normpath(path_sl + x))
preview_path = path_sl + ending
img = io.imread(preview_path)
io.imshow(img)
io.show()
break
else:
None
break
    # It really has to be (self, path) here. Self is the reference to the parent object; the remaining parameters follow.
# def get_path_info(path): #(self, path)?
def get_path_info(self, path):
try:
path_sl = path + "/"
res_last = path[-1]
if res_last == "/":
path_sl = path
else:
path_sl = path + "/"
# name
name = os.path.basename(os.path.normpath(path))
name_final = "name: " + name
# type
type_ = os.path.isdir(path)
if type_ == 1:
type_res = "type: .dir"
if type_ == 0:
type_res = "type: " + name
# text - files, series, files
serie_counter = 0
study_counter = 0
all_names = []
for root, dirs, files in os.walk(path):
for d in dirs:
all_names.append(d.lower())
for f in files:
all_names.append(f.lower())
# lowercase - should be able to count all series,studies..
for i in all_names:
if "serie" in i:
serie_counter += 1
if "study" in i:
study_counter += 1
filescounter = sum([len(files) for r, d, files in os.walk(path)])
text = (
"Study: "
+ str(study_counter)
+ " Series: "
+ str(serie_counter)
+ " Files: "
+ str(filescounter)
)
path_lower = path.lower()
# preview - forced path,some pic. from serie?
if ".jpg" in path_lower:
preview = "Used path leads to current image."
elif ".png" in path_lower:
preview = "Used path leads to current image."
elif ".dcm" in path_lower:
preview = "Used path leads to current image."
else:
preview = "Preview of files in dir: " + name
only_files = [f for f in listdir(path) if isfile(join(path, f))]
for x in only_files:
if (".dcm" or ".Dcm" or ".DCM") in x:
print("dcm files")
break
elif (".jpg" or ".Jpg" or ".JPG") in x:
print("jpf files")
break
elif (".png" or ".Png" or ".PNG") in x:
print("png files")
break
else:
None
break
# add required endings..
# import io3d.datareader
# io3d.datareader.read(file_path)
# add required endings..
# path
text_path = "path: " + path
            # The following function can be used for directory analysis
# import io3d.dcmreaddata
# dd = io3d.dcmreaddata.DicomDirectory(dirpath=path)
# dd.get_stats_of_series_in_dir()
# dd = dcmreaddata.DicomDirectory(self.path)
# stats = dd.get_stats_of_series_in_dir()
# studies_and_series = dd.get_stats_of_studies_and_series_in_dir()
# import pydicom
# pydicom.read_file(stats[7].dcmfilelist[0])
# np.ndarray.resize()
# JPG
# import SimpleITK as Sitk
# image = Sitk.ReadImage(datapath)
# data3d = dcmtools.get_pixel_array_from_sitk(image)
# TODO
acquid = 0
modality = 0
path = text_path
name = name_final
retval = [name, type_res, preview, text, acquid, modality, path]
# "acquisition_date": ["2015-02-16", "2015-02-16"],
# "modality": "MRI",
# print(retval)
# print(retval[0])
# print(retval[1])
# print(retval[2])
# print(retval[3])
# print(retval[4])
# print(retval[5])
# print(retval[6])
except:
print("$Error$")
return None
return retval
def get_dir_list(self):
from . import dcmreaddata
# datareader.read()
# TODO check the design of output structure
retval = [
{
"name": "Study0545",
"type": "dir",
"preview": np.zeros(self.preview_size),
"text": "1 study, 3 series, 18321 files, acquisition_date=2017-02-16 to 2017-02-19",
"acquisition_date": ["2015-02-16", "2015-02-16"],
"modality": "MRI",
"path": "C:/data/Study0545",
},
{
"name": "Serie54864",
"type": "serie",
"preview": np.zeros(self.preview_size),
"text": "3 series, 18321 files, acquisition_date=2017-02-16 to 2017-02-19",
"acquisition_date": ["2015-02-16", "2015-02-16"],
"modality": "MRI",
"path": "c:/data/",
},
{ # maybe signle file make no sense
"name": "first.mhd",
"type": "file",
"preview": np.zeros(self.preview_size),
"text": "[1x512x512], voxelsize_mm=[5.0, 0.5, 0.5], acquisition_date=2015-08-16",
"voxelsize_mm": [5.0, 0.5, 0.5],
"acquisition_date": "2015-08-16",
"modality": "CT",
},
]
return retval
# def file_anonymization(self, filename, output_filename=None):
# pass
def recursive_anonymization(self, path, output_path=None):
dirlist = glob.glob(path)
pass
def getOpenFileName(path, *other_params):
    # TODO: implement based on fsbrowser_test.py:test_devel_qt_dialog_fsbrowser()
filename = ""
return filename
# Widget - dcm browser
# dcm preview widget + dir/img info widget
# getOpenFileName - fcn. to get path of chosen file
class DCMage(QFileDialog):
def __init__(self, *args, **kwargs):
QFileDialog.__init__(self, *args, **kwargs)
self.setOption(QFileDialog.DontUseNativeDialog, True)
box = QVBoxLayout()
self.setFixedSize(self.width() + 450, self.height() + 500)
self.mpPreview = QLabel("Preview", self)
self.mpPreview.setFixedSize(500, 500)
self.mpPreview.setAlignment(Qt.AlignCenter)
self.mpPreview.setObjectName("DCMage")
box.addWidget(self.mpPreview)
box.addStretch()
self.layout().addLayout(box, 1, 3, 1, 1)
self.mpPreview_1 = QLabel("Preview", self)
self.mpPreview_1.setFixedSize(500, 500)
self.mpPreview_1.setAlignment(Qt.AlignCenter)
self.mpPreview_1.setObjectName("DCMage")
box.addWidget(self.mpPreview_1)
box.addStretch()
self.layout().addLayout(box, 3, 3, 1, 1)
self.currentChanged.connect(self.onChange)
self.fileSelected.connect(self.getOpenFileName)
self._fileSelected = None
def dcm2png(self, path):
ds1 = pdicom.read_file(path, force=True)
x = plt.imsave("tempfile.png", ds1.pixel_array, cmap=plt.cm.gray)
img = io.imread("tempfile.png")
def onChange_text(self, path):
path_l = path.lower()
if ".dcm" in path_l:
temp_text = self.get_path_info(path_l)
self.mpPreview_1.setText(temp_text)
elif "study" in path_l:
temp_text = self.get_path_info(path_l)
self.mpPreview_1.setText(temp_text)
elif "serie" in path_l:
temp_text = self.get_path_info(path_l)
self.mpPreview_1.setText(temp_text)
elif "case" in path_l:
temp_text = self.get_path_info(path_l)
self.mpPreview_1.setText(temp_text)
elif "series" in path_l:
temp_text = self.get_path_info(path_l)
self.mpPreview_1.setText(temp_text)
else:
temp_text = "go to dir with dcm files"
def onChange(self, path):
self._fileSelected = path
path_l = path.lower()
self.onChange_text(path_l)
if ".dcm" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
self.get_path_info(path_l)
elif "image_" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
self.get_path_info(path_l)
elif "study" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
self.get_path_info(path_l)
elif "serie" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
elif "case" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
elif "series" in path_l:
try:
self.dcm2png(path)
except:
print("no dcm to display")
self.get_path_info(path_l)
else:
self.mpPreview.setText("Preview")
pixmap = QPixmap("tempfile.png")
if pixmap.isNull():
self.mpPreview.setText("Preview")
else:
self.mpPreview.setPixmap(
pixmap.scaled(
self.mpPreview.width(),
self.mpPreview.height(),
Qt.KeepAspectRatio,
Qt.SmoothTransformation,
)
)
# self.get_path_info("tempfile.png")
try:
os.remove("tempfile.png")
except:
print("")
def getOpenFileName(self, file):
self.show()
self.exec_()
temp = self._fileSelected
# print(temp)
return temp
def get_path_info(self, path):
        # QLabel has trouble with very long text (it is recommended for non-editable text only);
        # the per-line text length used below may need to be edited
if len(path) >= 50 & len(path) < 100:
path1 = path[:50]
path2 = path[50:100]
path_formated = path1 + "\n" + path2
# prepared cases for longer paths...
elif len(path) >= 100 & len(path) < 150:
path1 = path[:50]
path2 = path[50:100]
path3 = path[100:150]
path_formated = path1 + "\n" + path2 + "\n" + path3
elif len(path) >= 150 & len(path) < 200:
path1 = path[:50]
path2 = path[50:100]
path3 = path[100:150]
path4 = path[150:200]
path_formated = path1 + "\n" + path2 + "\n" + path3 + "\n" + path4
elif len(path) >= 240 & len(path) < 300:
path1 = path[:60]
path2 = path[60:120]
path3 = path[120:180]
path4 = path[180:240]
path5 = path[240:300]
path_formated = (
path1 + "\n" + path2 + "\n" + path3 + "\n" + path4 + "\n" + path5
)
else:
print("too long path")
path_formated = path
try:
path_sl = path + "/"
res_last = path[-1]
if res_last == "/":
path_sl = path
else:
path_sl = path + "/"
# name
name = os.path.basename(os.path.normpath(path))
name_final = "name: " + name + "\n"
# type
type_ = os.path.isdir(path)
if type_ == 1:
type_res = "type: .dir" + "\n"
if type_ == 0:
type_res = "type: " + name + "\n"
# text - files, series, files
serie_counter = 0
study_counter = 0
all_names = []
counter_fail = 0
for root, dirs, files in os.walk(path):
for d in dirs:
all_names.append(d.lower())
# TODO fix limit
for f in files:
all_names.append(f.lower())
# lowercase - should be able to count all series,studies..
for i in all_names:
if "serie" in i:
serie_counter += 1
if "study" in i:
study_counter += 1
filescounter = sum([len(files) for r, d, files in os.walk(path)])
text = (
"Study: "
+ str(study_counter)
+ "\n"
+ " Series: "
+ str(serie_counter)
+ " Files: "
+ str(filescounter)
+ "\n"
)
path_lower = path.lower()
# preview - forced path,some pic. from serie?
if ".jpg" in path_lower:
preview = "image."
elif ".png" in path_lower:
preview = "image."
elif ".dcm" in path_lower:
preview = "image."
else:
preview = "Type: " + name
only_files = [f for f in listdir(path) if isfile(join(path, f))]
for x in only_files:
if (".dcm" or ".Dcm" or ".DCM") in x:
print("dcm files")
break
elif (".jpg" or ".Jpg" or ".JPG") in x:
print("jpf files")
break
elif (".png" or ".Png" or ".PNG") in x:
print("png files")
break
else:
None
break
text_path = "path: " + path
acquid = 0
modality = 0
path = text_path
name = name_final
retval = [name, type_res, preview, text, acquid, modality, path_formated]
# retval = [path_formated, path_formated1]
retval_str = "".join(map(str, retval))
# "acquisition_date": ["2015-02-16", "2015-02-16"],
# "modality": "MRI",
return retval_str
except:
print("$$$")
return None
return None
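# --- Hedged usage sketch (not part of the original module) ---
# The path is a placeholder; get_path_info() accepts a directory or a single
# .dcm/.png/.jpg file and returns [name, type, preview, text, acquid,
# modality, path]. DCMage needs a running QApplication before it is shown.
#
#   browser = FileSystemBrowser("/path/to/dicom/study")
#   info = browser.get_path_info(browser.path)
#
#   app = QApplication([])
#   dialog = DCMage()
#   chosen_file = dialog.getOpenFileName(None)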
| mit | -8,339,100,194,311,851,000 | 31.814607 | 116 | 0.47315 | false |
sfu-discourse-lab/SFU_Comment_Extractor | Source_Code/CSV_creation/duplicate_threads.py | 1 | 2389 | import pandas as pd
import re
import ast
import multiprocessing as mp
from multiprocessing import cpu_count
import sys
def check_match(thread_df):
pat = "source2_\d+_\d+"
for i, row in thread_df.iterrows():
duplicate = ast.literal_eval(row.duplicate_flag)
if not duplicate['exact_match']:
return False
return re.findall(pat, " ".join(duplicate['exact_match']))
def thread_length(orig_length, comment_id, threads_df, orig_comment_id):
orig_df = threads_df[threads_df.comment_counter.str.contains(orig_comment_id + "$|" + orig_comment_id + "_")]
for id in comment_id:
counter = 0
temp_df = threads_df[threads_df.comment_counter.str.contains(id + "$|" + id + "_")]
if len(temp_df) == orig_length:
for i, row in orig_df.iterrows():
match_list = ast.literal_eval(row.duplicate_flag)
if re.findall(id + "$|" + id + "_", " ".join(match_list['exact_match'])):
counter += 1
if counter == orig_length:
return id
return False
def parallelize(data, func):
cores = cpu_count()
df_list = []
for i, df_article_id in data.groupby('article_id'):
df_list.append(df_article_id)
print("Dataframes list prepared.")
pool = mp.Pool(cores)
data = pd.concat(pool.map(func, df_list))
pool.close()
pool.join()
return data
def remove_duplicate_threads(threads_df):
    pattern = r"source1_\d+_\d+$"
source1_df = threads_df[threads_df['comment_counter'].str.contains(pattern)]
root_comments = list(source1_df.comment_counter)
for comment in root_comments:
thread = threads_df[threads_df.comment_counter.str.contains(comment + "$|" + comment + "_")]
if thread.empty:
continue
match = check_match(thread)
if match:
match_id = thread_length(len(thread), match, threads_df, comment)
if match_id:
threads_df = threads_df[~threads_df['comment_counter'].str.contains(match_id + "$|" + match_id + "_")]
return threads_df
def main():
articles_df = pd.DataFrame.from_csv(sys.argv[1], encoding="ISO-8859-1", index_col=None)
df_processed = parallelize(articles_df, remove_duplicate_threads)
df_processed.to_csv("duplicates_removed.csv", index=False)
if __name__ == "__main__":
main()
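# --- Hedged usage note (not part of the original script) ---
# The script expects the comments CSV as its only argument (the file name
# below is an example) and writes the result to duplicates_removed.csv:
#
#   python duplicate_threads.py source_comments.csv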
| mit | -2,227,418,396,662,079,500 | 30.434211 | 118 | 0.609041 | false |
MartinThoma/algorithms | ML/50-mlps/05-keras-cnn/main.py | 1 | 1763 | #!/usr/bin/env python
# internal modules
import hasy_tools
import numpy as np
# 3rd party modules
from keras.callbacks import CSVLogger, ModelCheckpoint
from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D
from keras.models import Sequential
# Load the data
data = hasy_tools.load_data()
x_train = data["x_train"]
y_train = data["y_train"]
x_validate = data["x_train"]
y_validate = data["y_train"]
x_test = data["x_test"]
y_test = data["y_test"]
# One-Hot encoding
y_train = np.eye(hasy_tools.n_classes)[y_train.squeeze()]
y_validate = np.eye(hasy_tools.n_classes)[y_validate.squeeze()]
y_test = np.eye(hasy_tools.n_classes)[y_test.squeeze()]
# Preprocessing
x_train = hasy_tools.preprocess(x_train)
x_validate = hasy_tools.preprocess(x_validate)
x_test = hasy_tools.preprocess(x_test)
# Define the model
model = Sequential()
model.add(Conv2D(16, (3, 3)))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(256, activation="tanh"))
model.add(Dropout(0.25)) # Drop 25% of the units
model.add(Dense(256, activation="tanh"))
model.add(Dense(hasy_tools.n_classes, activation="softmax"))
# Compile model
model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
# Fit the model
csv_logger = CSVLogger("log.csv", append=True, separator=";")
checkpointer = ModelCheckpoint(
filepath="checkpoint.h5", verbose=1, period=10, save_best_only=True
)
model.fit(
x_train,
y_train,
validation_data=(x_validate, y_validate),
epochs=700,
batch_size=128,
callbacks=[csv_logger, checkpointer],
)
# Serialize model
model.save("model.h5")
# evaluate the model
scores = model.evaluate(x_test, y_test)
print("\n{}: {:.2f}%".format(model.metrics_names[1], scores[1] * 100))
| mit | 5,841,673,969,354,927,000 | 26.984127 | 86 | 0.713556 | false |
leiferikb/bitpop | src/v8/tools/js2c.py | 1 | 15907 | #!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This is a utility for converting JavaScript source code into C-style
# char arrays. It is used for embedded JavaScript code in the V8
# library.
import os, re, sys, string
import optparse
import jsmin
import bz2
import textwrap
class Error(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
def ToCArray(byte_sequence):
result = []
for chr in byte_sequence:
result.append(str(ord(chr)))
joined = ", ".join(result)
return textwrap.fill(joined, 80)
def RemoveCommentsAndTrailingWhitespace(lines):
lines = re.sub(r'//.*\n', '\n', lines) # end-of-line comments
lines = re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', lines) # comments.
lines = re.sub(r'\s+\n+', '\n', lines) # trailing whitespace
return lines
def ReadFile(filename):
file = open(filename, "rt")
try:
lines = file.read()
finally:
file.close()
return lines
EVAL_PATTERN = re.compile(r'\beval\s*\(')
WITH_PATTERN = re.compile(r'\bwith\s*\(')
def Validate(lines):
# Because of simplified context setup, eval and with is not
# allowed in the natives files.
if EVAL_PATTERN.search(lines):
raise Error("Eval disallowed in natives.")
if WITH_PATTERN.search(lines):
raise Error("With statements disallowed in natives.")
# Pass lines through unchanged.
return lines
def ExpandConstants(lines, constants):
for key, value in constants:
lines = key.sub(str(value), lines)
return lines
def ExpandMacroDefinition(lines, pos, name_pattern, macro, expander):
pattern_match = name_pattern.search(lines, pos)
while pattern_match is not None:
# Scan over the arguments
height = 1
start = pattern_match.start()
end = pattern_match.end()
assert lines[end - 1] == '('
last_match = end
arg_index = [0] # Wrap state into array, to work around Python "scoping"
mapping = { }
def add_arg(str):
# Remember to expand recursively in the arguments
replacement = expander(str.strip())
mapping[macro.args[arg_index[0]]] = replacement
arg_index[0] += 1
while end < len(lines) and height > 0:
# We don't count commas at higher nesting levels.
if lines[end] == ',' and height == 1:
add_arg(lines[last_match:end])
last_match = end + 1
elif lines[end] in ['(', '{', '[']:
height = height + 1
elif lines[end] in [')', '}', ']']:
height = height - 1
end = end + 1
# Remember to add the last match.
add_arg(lines[last_match:end-1])
result = macro.expand(mapping)
# Replace the occurrence of the macro with the expansion
lines = lines[:start] + result + lines[end:]
pattern_match = name_pattern.search(lines, start + len(result))
return lines
def ExpandMacros(lines, macros):
# We allow macros to depend on the previously declared macros, but
  # we don't allow self-dependencies or recursion.
for name_pattern, macro in reversed(macros):
def expander(s):
return ExpandMacros(s, macros)
lines = ExpandMacroDefinition(lines, 0, name_pattern, macro, expander)
return lines
class TextMacro:
def __init__(self, args, body):
self.args = args
self.body = body
def expand(self, mapping):
result = self.body
for key, value in mapping.items():
result = result.replace(key, value)
return result
class PythonMacro:
def __init__(self, args, fun):
self.args = args
self.fun = fun
def expand(self, mapping):
args = []
for arg in self.args:
args.append(mapping[arg])
return str(self.fun(*args))
CONST_PATTERN = re.compile(r'^const\s+([a-zA-Z0-9_]+)\s*=\s*([^;]*);$')
MACRO_PATTERN = re.compile(r'^macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
PYTHON_MACRO_PATTERN = re.compile(r'^python\s+macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*=\s*([^;]*);$')
def ReadMacros(lines):
constants = []
macros = []
for line in lines.split('\n'):
hash = line.find('#')
if hash != -1: line = line[:hash]
line = line.strip()
if len(line) is 0: continue
const_match = CONST_PATTERN.match(line)
if const_match:
name = const_match.group(1)
value = const_match.group(2).strip()
constants.append((re.compile("\\b%s\\b" % name), value))
else:
macro_match = MACRO_PATTERN.match(line)
if macro_match:
name = macro_match.group(1)
args = [match.strip() for match in macro_match.group(2).split(',')]
body = macro_match.group(3).strip()
macros.append((re.compile("\\b%s\\(" % name), TextMacro(args, body)))
else:
python_match = PYTHON_MACRO_PATTERN.match(line)
if python_match:
name = python_match.group(1)
args = [match.strip() for match in python_match.group(2).split(',')]
body = python_match.group(3).strip()
fun = eval("lambda " + ",".join(args) + ': ' + body)
macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun)))
else:
raise Error("Illegal line: " + line)
return (constants, macros)
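# --- Hedged illustration (not part of the original script) ---
# ReadMacros() accepts three kinds of definition lines; the names below are
# invented examples of the syntax matched by CONST_PATTERN, MACRO_PATTERN and
# PYTHON_MACRO_PATTERN above:
#
#   const kMaxValue = 0x3fffffff;
#   macro IS_NULL(arg) = (arg === null);
#   python macro CHAR_CODE(str) = ord(str[1]);
#
# Text macros are substituted literally; python macros are evaluated at build
# time with the captured argument strings.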
INLINE_MACRO_PATTERN = re.compile(r'macro\s+([a-zA-Z0-9_]+)\s*\(([^)]*)\)\s*\n')
INLINE_MACRO_END_PATTERN = re.compile(r'endmacro\s*\n')
def ExpandInlineMacros(lines):
pos = 0
while True:
macro_match = INLINE_MACRO_PATTERN.search(lines, pos)
if macro_match is None:
# no more macros
return lines
name = macro_match.group(1)
args = [match.strip() for match in macro_match.group(2).split(',')]
end_macro_match = INLINE_MACRO_END_PATTERN.search(lines, macro_match.end());
if end_macro_match is None:
raise Error("Macro %s unclosed" % name)
body = lines[macro_match.end():end_macro_match.start()]
# remove macro definition
lines = lines[:macro_match.start()] + lines[end_macro_match.end():]
name_pattern = re.compile("\\b%s\\(" % name)
macro = TextMacro(args, body)
    # advance position to where the macro definition was
pos = macro_match.start()
def non_expander(s):
return s
lines = ExpandMacroDefinition(lines, pos, name_pattern, macro, non_expander)
HEADER_TEMPLATE = """\
// Copyright 2011 Google Inc. All Rights Reserved.
// This file was generated from .js source files by GYP. If you
// want to make changes to this file you should either change the
// javascript source files or the GYP script.
#include "v8.h"
#include "natives.h"
#include "utils.h"
namespace v8 {
namespace internal {
%(sources_declaration)s\
%(raw_sources_declaration)s\
template <>
int NativesCollection<%(type)s>::GetBuiltinsCount() {
return %(builtin_count)i;
}
template <>
int NativesCollection<%(type)s>::GetDebuggerCount() {
return %(debugger_count)i;
}
template <>
int NativesCollection<%(type)s>::GetIndex(const char* name) {
%(get_index_cases)s\
return -1;
}
template <>
int NativesCollection<%(type)s>::GetRawScriptsSize() {
return %(raw_total_length)i;
}
template <>
Vector<const char> NativesCollection<%(type)s>::GetRawScriptSource(int index) {
%(get_raw_script_source_cases)s\
return Vector<const char>("", 0);
}
template <>
Vector<const char> NativesCollection<%(type)s>::GetScriptName(int index) {
%(get_script_name_cases)s\
return Vector<const char>("", 0);
}
template <>
Vector<const byte> NativesCollection<%(type)s>::GetScriptsSource() {
return Vector<const byte>(sources, %(total_length)i);
}
template <>
void NativesCollection<%(type)s>::SetRawScriptsSource(Vector<const char> raw_source) {
ASSERT(%(raw_total_length)i == raw_source.length());
raw_sources = raw_source.start();
}
} // internal
} // v8
"""
SOURCES_DECLARATION = """\
static const byte sources[] = { %s };
"""
RAW_SOURCES_COMPRESSION_DECLARATION = """\
static const char* raw_sources = NULL;
"""
RAW_SOURCES_DECLARATION = """\
static const char* raw_sources = reinterpret_cast<const char*>(sources);
"""
GET_INDEX_CASE = """\
if (strcmp(name, "%(id)s") == 0) return %(i)i;
"""
GET_RAW_SCRIPT_SOURCE_CASE = """\
if (index == %(i)i) return Vector<const char>(raw_sources + %(offset)i, %(raw_length)i);
"""
GET_SCRIPT_NAME_CASE = """\
if (index == %(i)i) return Vector<const char>("%(name)s", %(length)i);
"""
def BuildFilterChain(macro_filename):
"""Build the chain of filter functions to be applied to the sources.
Args:
macro_filename: Name of the macro file, if any.
Returns:
A function (string -> string) that reads a source file and processes it.
"""
filter_chain = [ReadFile]
if macro_filename:
(consts, macros) = ReadMacros(ReadFile(macro_filename))
filter_chain.append(lambda l: ExpandConstants(l, consts))
filter_chain.append(lambda l: ExpandMacros(l, macros))
filter_chain.extend([
RemoveCommentsAndTrailingWhitespace,
ExpandInlineMacros,
Validate,
jsmin.JavaScriptMinifier().JSMinify
])
def chain(f1, f2):
return lambda x: f2(f1(x))
return reduce(chain, filter_chain)
class Sources:
def __init__(self):
self.names = []
self.modules = []
self.is_debugger_id = []
def IsDebuggerFile(filename):
return filename.endswith("-debugger.js")
def IsMacroFile(filename):
return filename.endswith("macros.py")
def PrepareSources(source_files):
"""Read, prepare and assemble the list of source files.
Args:
sources: List of Javascript-ish source files. A file named macros.py
will be treated as a list of macros.
Returns:
An instance of Sources.
"""
macro_file = None
macro_files = filter(IsMacroFile, source_files)
assert len(macro_files) in [0, 1]
if macro_files:
source_files.remove(macro_files[0])
macro_file = macro_files[0]
filters = BuildFilterChain(macro_file)
# Sort 'debugger' sources first.
source_files = sorted(source_files,
lambda l,r: IsDebuggerFile(r) - IsDebuggerFile(l))
result = Sources()
for source in source_files:
try:
lines = filters(source)
except Error as e:
raise Error("In file %s:\n%s" % (source, str(e)))
result.modules.append(lines);
is_debugger = IsDebuggerFile(source)
result.is_debugger_id.append(is_debugger);
name = os.path.basename(source)[:-3]
result.names.append(name if not is_debugger else name[:-9]);
return result
def BuildMetadata(sources, source_bytes, native_type, omit):
"""Build the meta data required to generate a libaries file.
Args:
sources: A Sources instance with the prepared sources.
source_bytes: A list of source bytes.
(The concatenation of all sources; might be compressed.)
native_type: The parameter for the NativesCollection template.
omit: bool, whether we should omit the sources in the output.
Returns:
A dictionary for use with HEADER_TEMPLATE.
"""
total_length = len(source_bytes)
raw_sources = "".join(sources.modules)
# The sources are expected to be ASCII-only.
assert not filter(lambda value: ord(value) >= 128, raw_sources)
# Loop over modules and build up indices into the source blob:
get_index_cases = []
get_script_name_cases = []
get_raw_script_source_cases = []
offset = 0
for i in xrange(len(sources.modules)):
native_name = "native %s.js" % sources.names[i]
d = {
"i": i,
"id": sources.names[i],
"name": native_name,
"length": len(native_name),
"offset": offset,
"raw_length": len(sources.modules[i]),
}
get_index_cases.append(GET_INDEX_CASE % d)
get_script_name_cases.append(GET_SCRIPT_NAME_CASE % d)
get_raw_script_source_cases.append(GET_RAW_SCRIPT_SOURCE_CASE % d)
offset += len(sources.modules[i])
assert offset == len(raw_sources)
# If we have the raw sources we can declare them accordingly.
have_raw_sources = source_bytes == raw_sources and not omit
raw_sources_declaration = (RAW_SOURCES_DECLARATION
if have_raw_sources else RAW_SOURCES_COMPRESSION_DECLARATION)
metadata = {
"builtin_count": len(sources.modules),
"debugger_count": sum(sources.is_debugger_id),
"sources_declaration": SOURCES_DECLARATION % ToCArray(source_bytes),
"sources_data": ToCArray(source_bytes) if not omit else "",
"raw_sources_declaration": raw_sources_declaration,
"raw_total_length": sum(map(len, sources.modules)),
"total_length": total_length,
"get_index_cases": "".join(get_index_cases),
"get_raw_script_source_cases": "".join(get_raw_script_source_cases),
"get_script_name_cases": "".join(get_script_name_cases),
"type": native_type,
}
return metadata
def CompressMaybe(sources, compression_type):
"""Take the prepared sources and generate a sequence of bytes.
Args:
sources: A Sources instance with the prepared sourced.
compression_type: string, describing the desired compression.
Returns:
A sequence of bytes.
"""
sources_bytes = "".join(sources.modules)
if compression_type == "off":
return sources_bytes
elif compression_type == "bz2":
return bz2.compress(sources_bytes)
else:
raise Error("Unknown compression type %s." % compression_type)
def JS2C(source, target, native_type, compression_type, raw_file, omit):
sources = PrepareSources(source)
sources_bytes = CompressMaybe(sources, compression_type)
metadata = BuildMetadata(sources, sources_bytes, native_type, omit)
# Optionally emit raw file.
if raw_file:
output = open(raw_file, "w")
output.write(sources_bytes)
output.close()
# Emit resulting source file.
output = open(target, "w")
output.write(HEADER_TEMPLATE % metadata)
output.close()
def main():
parser = optparse.OptionParser()
parser.add_option("--raw", action="store",
help="file to write the processed sources array to.")
parser.add_option("--omit", dest="omit", action="store_true",
help="Omit the raw sources from the generated code.")
parser.set_usage("""js2c out.cc type compression sources.js ...
out.cc: C code to be generated.
type: type parameter for NativesCollection template.
compression: type of compression used. [off|bz2]
sources.js: JS internal sources or macros.py.""")
(options, args) = parser.parse_args()
JS2C(args[3:], args[0], args[1], args[2], options.raw, options.omit)
if __name__ == "__main__":
main()
| gpl-3.0 | -8,353,512,724,255,907,000 | 29.947471 | 101 | 0.663293 | false |
drawquest/drawquest-web | website/canvas/migrations/0177_add_multi_col_parent_comment_id_visibility_id_index_actually.py | 1 | 21697 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.execute("create index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);")
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.apiapp': {
'Meta': {'object_name': 'APIApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'canvas.apiauthtoken': {
'Meta': {'unique_together': "(('user', 'app'),)", 'object_name': 'APIAuthToken'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.APIApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_groups'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['auth.User']"})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': "orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'epic_message': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.commentstickerlog': {
'Meta': {'object_name': 'CommentStickerLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.emailunsubscribe': {
'Meta': {'object_name': 'EmailUnsubscribe'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.externalcontent': {
'Meta': {'object_name': 'ExternalContent'},
'_data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'external_content'", 'to': "orm['canvas.Comment']"}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'null': 'True', 'blank': 'True'})
},
'canvas.facebookinvite': {
'Meta': {'object_name': 'FacebookInvite'},
'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'unique_together': "(('user', 'category'),)", 'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': "orm['auth.User']"})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.remixplugin': {
'Meta': {'object_name': 'RemixPlugin'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
's3md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']", 'null': 'True'}),
'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']", 'null': 'True'}),
'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}),
'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': "orm['auth.User']"})
},
'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_warnings'", 'to': "orm['auth.User']"}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.welcomeemailrecipient': {
'Meta': {'object_name': 'WelcomeEmailRecipient'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
| bsd-3-clause | -5,407,122,022,166,186,000 | 79.062731 | 197 | 0.545513 | false |
Mekhantev/Interview | tests/test_stack.py | 1 | 3785 | from unittest import TestCase
from structures.stack import *
class TestStack(TestCase):
def test_push(self):
stack = Stack()
for i in range(10):
stack.push(i)
for i in range(9, -1, -1):
self.assertEqual(stack.pop(), i)
def test_pop(self):
stack = Stack()
for i in range(5):
stack.push(i)
for i in range(4, -1, -1):
self.assertEqual(stack.pop(), i)
self.assertRaises(EmptyError, stack.pop)
def test_peek(self):
stack = Stack()
for i in range(5):
stack.push(i)
self.assertEqual(stack.peek(), 4)
for i in range(4, -1, -1):
self.assertEqual(stack.pop(), i)
self.assertRaises(EmptyError, stack.peek)
def test_min(self):
stack = Stack()
for i in (2, 4, 5):
stack.push(i)
self.assertEqual(stack.min(), 2)
stack.push(1)
self.assertEqual(stack.min(), 1)
for i in range(4):
stack.pop()
self.assertRaises(EmptyError, stack.min)
class TestSetOfStacks(TestCase):
def test_push(self):
stack = SetOfStacks()
for i in range(10):
stack.push(i)
self.assertEqual([stack.pop() for i in range(10)],
[i for i in range(9, -1, -1)])
def test_pop(self):
stack = SetOfStacks()
for i in range(10):
stack.push(i)
self.assertEqual([stack.pop() for i in range(10)],
[i for i in range(9, -1, -1)])
self.assertRaises(EmptyError, stack.pop)
def test_peek(self):
stack = SetOfStacks()
for i in range(10):
stack.push(i)
self.assertEqual(stack.peek(), 9)
self.assertEqual([stack.pop() for i in range(10)],
[i for i in range(9, -1, -1)])
self.assertRaises(EmptyError, stack.peek)
class TestStackSort(TestCase):
def test_sort(self):
stack = [2, 4, 2, 9, 1, 7]
result = sort(stack)
self.assertEqual(result, sorted(stack))
class TestFixedTripleStack(TestCase):
def test_push(self):
stack = FixedTripleStack()
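        # one tuple of pushed values per sub-stack (indexes 0, 1 and 2)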
values = ((0, 1, 2, 3, 2, 3),
(2, 8, 6, 9, 7, 2, 7),
(9, 2, 3, 1, 5, 6, 2, 4, 1, 5))
for i, t in enumerate(values):
for y in t:
stack.push(i, y)
self.assertEqual(stack.pop(0), 3)
for i in range(4):
stack.pop(1)
self.assertEqual(stack.pop(1), 6)
self.assertRaises(OutOfSpaceError, stack.push, 2, 1)
def test_pop(self):
stack = FixedTripleStack()
values = ((0, 1, 2, 3, 2, 3),
(2, 8, 6, 9, 7, 2, 7),
(9, 2, 3, 1, 5, 6))
for i, t in enumerate(values):
for y in t:
stack.push(i, y)
self.assertEqual(stack.pop(2), 6)
for i in range(7):
stack.pop(1)
self.assertRaises(EmptyError, stack.pop, 1)
def test_peek(self):
stack = FixedTripleStack()
values = ((0, 1, 2, 3, 2, 3),
(2, 8, 6, 9, 7, 2, 7),
(9, 2, 3, 1, 5, 6))
for i, t in enumerate(values):
for y in t:
stack.push(i, y)
self.assertEqual(stack.peek(0), 3)
for i in range(6):
stack.pop(2)
self.assertRaises(EmptyError, stack.peek, 2)
class TestHanoiTower(TestCase):
def test_move_disks(self):
expected_result = [1, 2, 5, 6, 8, 9, 12, 14]
source = list(expected_result)
destination = []
buffer = []
move_disks(len(source), source, destination, buffer)
self.assertEqual(destination, expected_result) | mit | -7,892,123,095,954,620,000 | 29.532258 | 60 | 0.50753 | false |
grimoirelab/GrimoireELK | grimoire_elk/raw/supybot.py | 1 | 1347 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Alvaro del Castillo San Felix <[email protected]>
#
from .elastic import ElasticOcean
class SupybotOcean(ElasticOcean):
"""MediaWiki Ocean feeder"""
@classmethod
def get_perceval_params_from_url(cls, url):
# In the url the uri and the data dir are included
params = url.split()
return params
@classmethod
def get_arthur_params_from_url(cls, url):
        """ Get the arthur params given a URL for the data source """
        # In the url the uri and the dirpath are included
        params = url.split()
        params = {"uri": params[0], "dirpath": params[1]}
return params
| gpl-3.0 | -1,495,727,651,173,500,000 | 30.325581 | 70 | 0.69265 | false |
fastflo/emma | emmalib/providers/sqlite/test.py | 1 | 1782 | # -*- coding: utf-8 -*-
# emma
#
# Copyright (C) 2006 Florian Schmidt ([email protected])
# 2014 Nickolay Karnaukhov ([email protected])
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import sqlite3
conn = sqlite3.connect(database='/home/nick/test_database.sqlite')
print conn
cur = conn.cursor()
print cur.execute("SELECT * FROM sqlite_master ORDER BY name")
print cur.description
res = cur.fetchall()
for row in res:
print row
# from SQLiteHost import SQLiteHost
#
# host = SQLiteHost(None, None, '/home/nick/test.sqlite')
# host.connect()
#
# host.databases['dummydb'].refresh()
# print host.databases['dummydb'].tables
#
# table = host.databases['dummydb'].tables['aaa']
# table.refresh()
#
# print "---------------------------"
# print "Table:"
# print table.__dict__
#
# print "---------------------------"
# print "Table fields:"
# for f in table.fields:
# print f.__dict__
#
# print "---------------------------"
# print "Table indexes:"
# for i in table.indexes:
# print i.__dict__
| gpl-2.0 | 3,862,581,033,829,721,000 | 31.4 | 75 | 0.640853 | false |
jbeyerstedt/RIOT-OTA-update | examples/ota_update/test2.py | 1 | 6712 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Jannik Beyerstedt <[email protected]>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
# module for integration in tests.py. No standalone use intended
import subprocess
import time
import sys
import os
import signal
from nbstreamreader import NonBlockingStreamReader as NBSR
from test_ethos_helpers import ethos_command
nbsr = None
ethos = None
def kill_ethos(ethos):
# kill the ethos process properly
os.killpg(os.getpgid(ethos.pid), signal.SIGTERM)
### call first to prepare and setup things
def prepare(tty_out):
global nbsr
global ethos
print("(Step 0) flashing with test firmware")
subprocess.call("FW_VERS=0x2 FW_VERS_2=0x3 make merge-test-hex >" + tty_out, shell=True)
    ## flash the device with factory-hex
if subprocess.call("FW_VERS=0x1 make flash-test >" + tty_out, shell=True):
return -1
time.sleep(1)
## start ethos console
ethos = subprocess.Popen("make ethos 2>/dev/null", stdout=subprocess.PIPE, stdin=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(1)
nbsr = NBSR(ethos.stdout)
# get first diagnostic lines from ethos console
ret_val = ethos_command(nbsr, ethos, "/dist/tools/ethos")
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] ethos not properly started")
kill_ethos(ethos)
return -1
ret_val, answer = ethos_command(nbsr, ethos, "command not found", command="h")
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] ethos shell does not answer correctly")
print(answer)
kill_ethos(ethos)
return -1
print(" [OK] both slots populated, ethos console started\n")
return 0
### Test 2a (update file with invalid file signature)
def do_part_a(tty_out):
global nbsr
global ethos
subprocess.call("cp -p fw_update-0xabc0123456789def-0x4-s1.bin fw_update-orig-0x4-s1", shell=True)
print("(Part A) testing FW update file signature validation")
    # truncate the last 16 bytes of the vers 4, slot 1 update file to break its signature
subprocess.call("cat fw_update-orig-0x4-s1 | head -c -16 >fw_update-0xabc0123456789def-0x4-s1.bin", shell=True)
if subprocess.call("FW_VERS=0x4 make flash-updatefile-slot1 >" + tty_out, shell=True):
kill_ethos(ethos)
return -1
time.sleep(1)
## check running FW version
ret_val, answer = ethos_command(nbsr, ethos, "FW version 3, slot 2", command="fw_info")
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] wrong firmware version or slot started")
print("dumping fetched answer from device:\n" + answer)
kill_ethos(ethos)
return -1
print(" [OK] correct inital FW running")
## start update
ret_val, answer = ethos_command(nbsr, ethos, "[ota_file] INFO incorrect decrypted hash", command="ota_install", timeout=5)
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] detection of invalid signature not successful")
print("dumping fetched answer from device:\n\n" + answer)
kill_ethos(ethos)
return -1
print(" ==>[OK] broken file signature successfully detected\n")
# tidy up
subprocess.call("rm fw_update-0xabc0123456789def-0x4-s1.bin", shell=True)
subprocess.call("mv fw_update-orig-0x4-s1 fw_update-0xabc0123456789def-0x4-s1.bin", shell=True)
return 0
### Test 2b (update file with invalid hw_id)
def do_part_b(tty_out):
global nbsr
global ethos
print("(Part B) testing hardware ID validation")
if subprocess.call("HW_ID=0xbaadf00dbaadf00d FW_VERS=0x4 make flash-updatefile-slot1 >" + tty_out, shell=True):
kill_ethos(ethos)
return -1
time.sleep(1)
## check running FW version
ret_val, answer = ethos_command(nbsr, ethos, "FW version 3, slot 2", command="fw_info")
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] TODO")
print("dumping fetched answer from device:\n" + answer)
kill_ethos(ethos)
return -1
print(" [OK] correct inital FW running")
## start update
ret_val, answer = ethos_command(nbsr, ethos, "[ota_updater] ERROR update file is invalid", command="ota_install", timeout=5)
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] detection of invalid HW_ID not successful")
print("dumping fetched answer from device:\n\n" + answer)
kill_ethos(ethos)
return -1
print(" ==>[OK] file with wrong hardware id successfully detected\n")
return 0
### Test 2c (update file with lower fw_vers)
def do_part_c(tty_out):
global nbsr
global ethos
print("(Part C) testing FW update file signature validation")
if subprocess.call("FW_VERS=0x1 make flash-updatefile-slot1 >" + tty_out, shell=True):
kill_ethos(ethos)
return -1
time.sleep(1)
## check running FW version
ret_val, answer = ethos_command(nbsr, ethos, "FW version 3, slot 2", command="fw_info")
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] TODO")
print("dumping fetched answer from device:\n" + answer)
kill_ethos(ethos)
return -1
print(" [OK] correct inital FW running")
## start update
ret_val, answer = ethos_command(nbsr, ethos, "[ota_updater] ERROR update file is invalid", command="ota_install", timeout=5)
if ret_val < 0:
print(" [ERROR] no answer from ethos")
kill_ethos(ethos)
return -1
elif ret_val == 0:
print(" [ERROR] detection of downgrade attempt not successful")
print("dumping fetched answer from device:\n\n" + answer)
kill_ethos(ethos)
return -1
print(" ==>[OK] file with lower FW version successfully detected\n")
return 0
### call last to tidy up afterwards
def finish(tty_out):
global nbsr
global ethos
kill_ethos(ethos)
print("(Finish) tidying up done")
return 0
| lgpl-2.1 | 2,028,957,324,872,644,600 | 30.218605 | 135 | 0.639005 | false |
arbrandes/hastexo-xblock | tests/unit/test_tasks.py | 1 | 59280 | import copy
import socket
from unittest import TestCase
from mock import Mock, patch
from hastexo.models import Stack
from hastexo.provider import ProviderException
from hastexo.common import (
get_stack,
update_stack,
update_stack_fields,
RemoteExecException,
)
from hastexo.tasks import (
PING_COMMAND,
LaunchStackTask,
SuspendStackTask,
DeleteStackTask,
CheckStudentProgressTask,
)
from celery.exceptions import SoftTimeLimitExceeded
from django.db.utils import OperationalError
class HastexoTestCase(TestCase):
def setUp(self):
self.stack_name = "bogus_stack_name"
self.stack_user_name = "bogus_stack_user_name"
self.stack_ip = "127.0.0.1"
self.stack_key = u"bogus_stack_key"
self.stack_password = "bogus_stack_password"
self.protocol = "ssh"
self.port = None
self.stack_run = "bogus_run"
self.course_id = "bogus_course_id"
self.student_id = "bogus_student_id"
self.providers = [
{"name": "provider1",
"capacity": 1,
"template": "tmpl1",
"environment": "env1"},
{"name": "provider2",
"capacity": 2,
"template": "tmpl2",
"environment": "env2"},
{"name": "provider3",
"capacity": -1,
"template": "tmpl3",
"environment": "env3"}
]
self.hook_script = "bogus_hook_script"
self.hook_events = {
"suspend": True,
"resume": True,
"delete": True
}
self.read_from_contentstore = "bogus_content"
# Mock settings
self.settings = {
"sleep_timeout": 0,
"delete_attempts": 2,
}
# Create a set of mock stacks to be returned by the provider mock.
self.stacks = {}
self.stack_states = (
"CREATE_IN_PROGRESS",
"CREATE_FAILED",
"CREATE_COMPLETE",
"SUSPEND_IN_PROGRESS",
"SUSPEND_FAILED",
"SUSPEND_COMPLETE",
"RESUME_IN_PROGRESS",
"RESUME_FAILED",
"RESUME_COMPLETE",
"DELETE_IN_PROGRESS",
"DELETE_FAILED",
"DELETE_COMPLETE"
)
for state in self.stack_states:
self.stacks[state] = {
"status": state,
"outputs": {
"public_ip": self.stack_ip,
"private_key": self.stack_key,
"password": self.stack_password
}
}
# Clear database
Stack.objects.all().delete()
# Create stack in the database
stack, _ = Stack.objects.get_or_create(
student_id=self.student_id,
course_id=self.course_id,
name=self.stack_name,
status="LAUNCH_PENDING",
protocol=self.protocol,
port=self.port,
run=self.stack_run,
user=self.stack_user_name,
providers=self.providers,
hook_script=self.hook_script,
hook_events=self.hook_events,
)
stack.save()
# Run kwargs
self.kwargs = {
"stack_id": stack.id,
"reset": False
}
# Patchers
patchers = {
"os": patch("hastexo.tasks.os"),
"socket": patch("hastexo.tasks.socket"),
"Provider": patch("hastexo.tasks.Provider"),
"settings": patch.dict("hastexo.common.DEFAULT_SETTINGS",
self.settings),
"ssh_to": patch("hastexo.tasks.ssh_to"),
"read_from_contentstore": patch(
"hastexo.tasks.read_from_contentstore"),
"remote_exec": patch("hastexo.tasks.remote_exec"),
}
self.mocks = {}
for mock_name, patcher in patchers.items():
self.mocks[mock_name] = patcher.start()
self.addCleanup(patcher.stop)
self.mocks["os"].system.return_value = 0
self.mocks["read_from_contentstore"].return_value = \
self.read_from_contentstore
self.mocks["remote_exec"].return_value = 0
# Set up mock providers
self.mock_providers = []
for p in self.providers:
m = Mock()
m.name = p["name"]
m.capacity = p["capacity"]
m.template = p["template"]
m.environment = p["environment"]
self.mock_providers.append(m)
self.mocks["Provider"].init.side_effect = self.mock_providers
def get_ssh_to_mock(self):
return self.mocks["ssh_to"]
def get_socket_mock(self):
return self.mocks["socket"].socket.return_value
def get_stack(self, prop=None):
return get_stack(self.stack_name, self.course_id, self.student_id,
prop)
def update_stack(self, data):
update_stack(self.stack_name, self.course_id, self.student_id, data)
def create_stack(self, name, course_id, student_id, data):
stack, _ = Stack.objects.get_or_create(
student_id=student_id,
course_id=course_id,
name=name)
update_stack_fields(stack, data)
stack.save()
class TestLaunchStackTask(HastexoTestCase):
def test_create_stack(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.assertEqual(stack.error_msg, u"")
provider.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
ping_command = PING_COMMAND % (0, self.stack_ip)
self.mocks["os"].system.assert_called_with(ping_command)
self.mocks["ssh_to"].assert_called_with(
self.stack_user_name,
self.stack_ip,
self.stack_key
)
self.assertFalse(self.mocks["remote_exec"].called)
def test_create_stack_has_no_ip(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
create_complete_stack = copy.deepcopy(self.stacks["CREATE_COMPLETE"])
create_complete_stack["outputs"]["public_ip"] = None
provider1.create_stack.side_effect = [
create_complete_stack
]
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
provider1.delete_stack.assert_called()
def test_provider_error_on_first_provider(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
ProviderException()
]
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
def test_provider_error_on_all_providers(self):
# Setup
for m in self.mock_providers:
m.get_stack.side_effect = [
ProviderException()
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
self.assertNotEqual(stack.error_msg, u"")
def test_provider_error_on_create(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"],
]
provider1.create_stack.side_effect = [
ProviderException()
]
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"],
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"],
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
def test_provider_error_on_reset(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [self.stacks["CREATE_FAILED"]]
provider.delete_stack.side_effect = [ProviderException()]
self.update_stack({
"provider": self.providers[0]["name"],
"status": "CREATE_FAILED"
})
self.kwargs["reset"] = True
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
self.assertNotEqual(stack.error_msg, u"")
def test_provider_error_on_resume(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
ProviderException()
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
self.assertEqual(stack.provider, self.providers[1]["name"])
provider.resume_stack.assert_called()
def test_provider_error_on_cleanup_delete(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider1.create_stack.side_effect = [
ProviderException()
]
provider1.delete_stack.side_effect = [
ProviderException()
]
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"],
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"],
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
provider1.delete_stack.assert_called()
def test_timeout_on_cleanup_delete(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider1.create_stack.side_effect = [
ProviderException()
]
provider1.delete_stack.side_effect = [
SoftTimeLimitExceeded()
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
self.assertEqual(stack.provider, "")
provider1.delete_stack.assert_called()
def test_provider_error_on_cleanup_resume(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
ProviderException()
]
provider.suspend_stack.side_effect = [
ProviderException()
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
self.assertEqual(stack.provider, self.providers[1]["name"])
provider.suspend_stack.assert_called()
def test_infinite_capacity(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
self.providers[0]["capacity"] = -1
self.update_stack({"providers": self.providers})
provider.capacity = -1
data = {
"provider": self.providers[0]["name"],
"status": "CREATE_COMPLETE"
}
for i in range(0, 10):
name = "stack_%d" % i
student_id = "student_%d" % i
self.create_stack(name, self.course_id, student_id, data)
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.assertEqual(stack.error_msg, u"")
provider.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
def test_use_next_provider_if_first_is_disabled(self):
# Setup
self.providers[0]["capacity"] = 0
self.update_stack({"providers": self.providers})
provider1 = self.mock_providers[0]
provider1.capacity = 0
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
provider2.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
def test_use_next_provider_if_first_is_full(self):
# Setup
capacity = 2
self.providers[0]["capacity"] = capacity
self.update_stack({"providers": self.providers})
provider1 = self.mock_providers[0]
provider1.capacity = capacity
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
data = {
"provider": self.providers[0]["name"],
"status": "CREATE_COMPLETE"
}
for i in range(0, capacity):
name = "stack_%d" % i
student_id = "student_%d" % i
self.create_stack(name, self.course_id, student_id, data)
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
provider2.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
def test_all_providers_full(self):
# Setup
capacity = 2
for i, p in enumerate(self.providers):
p["capacity"] = capacity
self.mock_providers[i].capacity = capacity
data = {
"provider": p["name"],
"status": "CREATE_COMPLETE"
}
for j in range(0, capacity):
name = "stack_%d_%d" % (i, j)
student_id = "student_%d_%d" % (i, j)
self.create_stack(name, self.course_id, student_id, data)
self.update_stack({"providers": self.providers})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
self.assertEqual(stack.provider, "")
for m in self.mock_providers:
m.create_stack.assert_not_called()
def test_use_next_provider_if_create_fails(self):
# Setup
provider1 = self.mock_providers[0]
provider1.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider1.create_stack.side_effect = [
ProviderException()
]
provider2 = self.mock_providers[1]
provider2.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider2.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertEqual(stack.error_msg, u"")
provider1.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
provider2.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
def test_dont_use_next_provider_if_timeout(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
SoftTimeLimitExceeded
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
self.assertEqual(stack.provider, "")
def test_timeout_on_get_stack(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
SoftTimeLimitExceeded
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
self.assertEqual(stack.provider, "")
def test_create_failure_on_all_providers(self):
# Setup
for m in self.mock_providers:
m.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
m.create_stack.side_effect = [
ProviderException()
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
self.assertNotEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
for m in self.mock_providers:
m.create_stack.assert_called_with(
self.stack_name,
self.stack_run
)
def test_reset_stack(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_FAILED"],
self.stacks["DELETE_COMPLETE"],
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
self.update_stack({
"provider": self.providers[0]["name"],
"status": "CREATE_FAILED"
})
self.kwargs["reset"] = True
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
provider.delete_stack.assert_called_with(
self.stack_name
)
provider.create_stack.assert_called()
self.assertEqual(stack.status, "CREATE_COMPLETE")
def test_dont_reset_new_stack(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
self.kwargs["reset"] = True
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
provider.delete_stack.assert_not_called()
provider.create_stack.assert_called()
self.assertEqual(stack.status, "CREATE_COMPLETE")
def test_reset_timeout_on_delete(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_FAILED"],
]
provider.delete_stack.side_effect = [
SoftTimeLimitExceeded
]
self.update_stack({
"provider": self.providers[0]["name"],
"status": "CREATE_FAILED"
})
self.kwargs["reset"] = True
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
self.assertEqual(stack.provider, "")
def test_resume_suspended_stack(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="resume"
)
@patch.object(LaunchStackTask,
'update_stack',
side_effect=[OperationalError,
OperationalError,
None])
def test_resume_suspended_stack_transient_operational_error(self,
update_stack_patch): # noqa: E501
"""
Try to resume a previously suspended stack, but simulate a
database error, only on the first three calls, to
LaunchStackTask.update_stack(). Such an error should cause the
stack update to be retried. When the error does not persist on
the fourth try, the task should succeed.
"""
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# The update_stack() method would have to be called 3 times (2
# failures with an OperationalError, then 1 success).
self.assertEqual(update_stack_patch.call_count, 3)
# Fetch stack
stack = self.get_stack()
# Assertions
# self.assertEqual(stack.status, "RESUME_COMPLETE")
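        # (the status itself is not asserted: update_stack() is patched in this
        # test, so the stack row in the database never actually gets updated)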
self.assertEqual(stack.provider, self.providers[1]["name"])
@patch.object(LaunchStackTask,
'update_stack',
side_effect=[OperationalError,
OperationalError,
OperationalError])
def test_resume_suspended_stack_persistent_operational_error(self,
update_stack_patch): # noqa: E501
"""
Try to resume a previously suspended stack, but simulate a
persistent database error in the process. Such an error should cause
the task to time out.
"""
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
with self.assertRaises(OperationalError):
LaunchStackTask().run(**self.kwargs)
# The update_stack() method would have to be called 3 times
# (all failures with an OperationalError).
self.assertEqual(update_stack_patch.call_count, 3)
# Fetch stack
stack = self.get_stack()
# Assertions
# Whatever happened in the database could have caused the
# stack status to be anything *except* successful resume.
self.assertNotEqual(stack.status, "RESUME_COMPLETE")
# Regardless, the database information about the stack
# provider should still be unchanged.
self.assertEqual(stack.provider, self.providers[1]["name"])
def test_resumed_stack_has_no_ip(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
resume_complete_stack = copy.deepcopy(self.stacks["RESUME_COMPLETE"])
resume_complete_stack["outputs"]["public_ip"] = None
provider.resume_stack.side_effect = [
resume_complete_stack
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
self.assertEqual(stack.provider, self.providers[1]["name"])
provider.suspend_stack.assert_called()
def test_timeout_resuming_stack(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
SoftTimeLimitExceeded
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
self.assertEqual(stack.provider, self.providers[1]["name"])
def test_resume_hook_empty(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE",
"hook_events": {},
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertFalse(self.mocks["remote_exec"].called)
def test_resume_hook_exception(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
self.mocks["remote_exec"].side_effect = Exception()
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertTrue(self.mocks["remote_exec"].called)
def test_resume_hook_failure(self):
# Setup
provider = self.mock_providers[1]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[1]["name"],
"status": "SUSPEND_COMPLETE"
})
self.mocks["remote_exec"].side_effect = RemoteExecException
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_COMPLETE")
self.assertEqual(stack.provider, self.providers[1]["name"])
self.assertTrue(self.mocks["remote_exec"].called)
def test_error_waiting_for_stack_to_change_state_on_resume(self):
# Setup
provider = self.mock_providers[2]
provider.get_stack.side_effect = [
self.stacks["RESUME_IN_PROGRESS"],
ProviderException()
]
self.update_stack({
"provider": self.providers[2]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
self.assertEqual(stack.provider, self.providers[2]["name"])
def test_error_waiting_for_stack_to_change_state_on_create(self):
# Setup
provider = self.mock_providers[2]
provider.get_stack.side_effect = [
self.stacks["DELETE_IN_PROGRESS"],
ProviderException()
]
self.update_stack({
"provider": self.providers[2]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
self.assertEqual(stack.provider, "")
def test_resume_suspending_stack(self):
# Setup
provider = self.mock_providers[2]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_IN_PROGRESS"],
self.stacks["SUSPEND_IN_PROGRESS"],
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.update_stack({
"provider": self.providers[2]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_COMPLETE")
self.assertEqual(stack.provider, self.providers[2]["name"])
provider.resume_stack.assert_called_with(
self.stack_name
)
def test_delete_stack_on_create_failed(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
ProviderException()
]
self.providers = [self.providers[0]]
self.update_stack({"providers": self.providers})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
provider.delete_stack.assert_called_with(
self.stack_name, False
)
def test_cleanup_timeout_on_create_failed(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
ProviderException()
]
self.providers = [self.providers[0]]
self.update_stack({"providers": self.providers})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
provider.delete_stack.assert_called_with(
self.stack_name, False
)
def test_dont_delete_manually_resumed_stack_on_verify_failure(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.mocks["ssh_to"].side_effect = Exception()
self.update_stack({"provider": self.providers[0]["name"]})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
provider.delete_stack.assert_not_called()
def test_eoferror_does_not_constitute_verify_failure(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
ssh = self.mocks["ssh_to"]
ssh.connect.side_effect = [
EOFError,
True
]
self.update_stack({"provider": self.providers[0]["name"]})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_COMPLETE")
def test_ssh_bombs_out(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
self.mocks["ssh_to"].side_effect = Exception()
self.update_stack({"provider": self.providers[0]["name"]})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "CREATE_FAILED")
def test_dont_wait_forever_for_ping(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
system = self.mocks["os"].system
system.side_effect = SoftTimeLimitExceeded
self.update_stack({"provider": self.providers[0]["name"]})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
system.assert_called()
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
def test_dont_wait_forever_for_ssh(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
self.mocks["ssh_to"].side_effect = SoftTimeLimitExceeded
self.update_stack({"provider": self.providers[0]["name"]})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertTrue(self.mocks["ssh_to"].called)
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
def test_dont_wait_forever_for_rdp(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
s = self.get_socket_mock()
s.connect.side_effect = [
socket.timeout,
socket.timeout,
socket.timeout,
SoftTimeLimitExceeded
]
self.update_stack({"provider": self.providers[0]["name"]})
self.protocol = "rdp"
self.update_stack({"protocol": self.protocol})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
s.connect.assert_called_with((self.stack_ip, 3389))
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
def test_dont_wait_forever_for_rdp_on_custom_port(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
s = self.get_socket_mock()
s.connect.side_effect = [
socket.timeout,
socket.timeout,
socket.timeout,
SoftTimeLimitExceeded
]
self.protocol = "rdp"
self.port = 3390
self.update_stack({
"provider": self.providers[0]["name"],
"protocol": self.protocol,
"port": self.port,
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
s.connect.assert_called_with((self.stack_ip, self.port))
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
def test_dont_wait_forever_for_suspension(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_IN_PROGRESS"],
self.stacks["SUSPEND_IN_PROGRESS"],
SoftTimeLimitExceeded
]
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
provider.delete_stack.assert_not_called()
def test_cleanup_on_timeout(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
provider.create_stack.side_effect = [
SoftTimeLimitExceeded
]
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
provider.delete_stack.assert_called_with(
self.stack_name, False
)
self.assertEqual(stack.status, "LAUNCH_TIMEOUT")
def test_resume_failed(self):
# Setup
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
ProviderException()
]
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_COMPLETE"
})
# Run
LaunchStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
class TestSuspendStackTask(HastexoTestCase):
def test_suspend_up_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
provider.suspend_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="suspend"
)
def test_suspend_suspend_failed_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_FAILED"]
]
provider.suspend_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="suspend"
)
def test_suspend_hook_empty(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING",
"hook_events": {},
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
provider.suspend_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.mocks["remote_exec"].assert_not_called()
def test_suspend_even_if_hook_fails(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.mocks["remote_exec"].side_effect = [
RemoteExecException("error message")
]
provider.suspend_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="suspend"
)
def test_suspend_even_if_hook_exception(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.mocks["remote_exec"].side_effect = Exception("")
provider.suspend_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="suspend"
)
def test_dont_suspend_deleted_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
provider.suspend_stack.assert_not_called()
self.mocks["remote_exec"].assert_not_called()
def test_dont_suspend_failed_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_FAILED"]
]
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "RESUME_FAILED")
self.assertEqual(stack.error_msg, u"")
provider.suspend_stack.assert_not_called()
self.mocks["remote_exec"].assert_not_called()
def test_mark_failed_on_exception(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = Exception()
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_FAILED")
self.assertNotEqual(stack.error_msg, u"")
    def test_dont_wait_for_suspension_forever(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "SUSPEND_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
provider.suspend_stack.side_effect = SoftTimeLimitExceeded
# Run
SuspendStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "SUSPEND_FAILED")
self.assertNotEqual(stack.error_msg, u"")
class TestDeleteStackTask(HastexoTestCase):
def test_delete_suspended_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_called()
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="delete"
)
provider.delete_stack.assert_called()
def test_delete_up_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["CREATE_COMPLETE"]
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_not_called()
provider.delete_stack.assert_called()
self.mocks["remote_exec"].assert_called_with(
self.mocks["ssh_to"].return_value,
self.read_from_contentstore,
params="delete"
)
def test_delete_failed_stack(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["DELETE_FAILED"]
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_not_called()
self.mocks["remote_exec"].assert_not_called()
provider.delete_stack.assert_called()
def test_delete_suspended_stack_even_if_resume_fails(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = Exception()
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_called()
provider.delete_stack.assert_called()
self.mocks["remote_exec"].assert_not_called()
def test_delete_hook_empty(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING",
"hook_events": {},
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_not_called()
self.mocks["remote_exec"].assert_not_called()
provider.delete_stack.assert_called()
def test_delete_suspended_stack_even_if_hook_fails(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.mocks["remote_exec"].side_effect = [
RemoteExecException("error message")
]
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_called()
self.mocks["remote_exec"].assert_called()
provider.delete_stack.assert_called()
def test_delete_suspended_stack_even_if_hook_exception(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"]
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"]
]
self.mocks["remote_exec"].side_effect = Exception("")
provider.delete_stack.side_effect = [
self.stacks["DELETE_COMPLETE"]
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
provider.resume_stack.assert_called()
self.mocks["remote_exec"].assert_called()
provider.delete_stack.assert_called()
def test_retry_on_exception(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"],
self.stacks["SUSPEND_COMPLETE"],
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"],
self.stacks["RESUME_COMPLETE"],
]
provider.delete_stack.side_effect = [
Exception(""),
self.stacks["DELETE_COMPLETE"],
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_COMPLETE")
self.assertEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, u"")
def test_mark_failed_after_attempts(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"],
self.stacks["SUSPEND_COMPLETE"],
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"],
self.stacks["RESUME_COMPLETE"],
]
provider.delete_stack.side_effect = [
Exception(""),
Exception(""),
]
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_FAILED")
self.assertNotEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
def test_dont_wait_forever_for_deletion(self):
# Setup
self.update_stack({
"provider": self.providers[0]["name"],
"status": "DELETE_PENDING"
})
provider = self.mock_providers[0]
provider.get_stack.side_effect = [
self.stacks["SUSPEND_COMPLETE"],
]
provider.resume_stack.side_effect = [
self.stacks["RESUME_COMPLETE"],
]
provider.delete_stack.side_effect = SoftTimeLimitExceeded
# Run
DeleteStackTask().run(**self.kwargs)
# Fetch stack
stack = self.get_stack()
# Assertions
self.assertEqual(stack.status, "DELETE_FAILED")
self.assertNotEqual(stack.error_msg, u"")
self.assertEqual(stack.provider, self.providers[0]["name"])
class TestCheckStudentProgressTask(HastexoTestCase):
def test_check_student_progress_failure(self):
# Setup
stderr_fail_1 = "single line"
stderr_fail_2 = "line 1\nline 2"
stderr_fail_3 = ""
self.mocks["remote_exec"].side_effect = [
0,
RemoteExecException(stderr_fail_1),
RemoteExecException(stderr_fail_2),
RemoteExecException(stderr_fail_3),
]
tests = [
"test pass",
"test fail",
"test fail",
"test fail"
]
kwargs = {
"tests": tests,
"stack_ip": self.stack_ip,
"stack_key": self.stack_key,
"stack_user_name": self.stack_user_name
}
# Run
res = CheckStudentProgressTask().run(**kwargs)
# Assertions
self.assertEqual(res["status"], "CHECK_PROGRESS_COMPLETE")
self.assertEqual(res["pass"], 1)
self.assertEqual(res["total"], 4)
self.assertEqual(res["errors"], ["single line", "line 1\nline 2"])
def test_check_student_progress_success(self):
# Setup
self.mocks["remote_exec"].return_value = 0
tests = [
"test pass",
"test pass",
"test pass"
]
kwargs = {
"tests": tests,
"stack_ip": self.stack_ip,
"stack_key": self.stack_key,
"stack_user_name": self.stack_user_name
}
# Run
res = CheckStudentProgressTask().run(**kwargs)
# Assertions
self.assertEqual(res["status"], "CHECK_PROGRESS_COMPLETE")
self.assertEqual(res["pass"], 3)
self.assertEqual(res["total"], 3)
self.assertEqual(res["errors"], [])
| agpl-3.0 | 6,325,054,464,297,661,000 | 29.493827 | 99 | 0.555111 | false |
juancarlospaco/unicodemoticon | unicodemoticon/__main__.py | 1 | 1742 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from datetime import datetime
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication, QStyle
from anglerfish import (make_logger, check_encoding,
make_post_exec_msg, set_process_name,
set_single_instance, set_process_priority)
try:
import qdarkstyle # https://github.com/ColinDuquesnoy/QDarkStyleSheet
except ImportError: # sudo pip3 install qdarkstyle
qdarkstyle = None # 100% optional
# if this script is executed directly: make relative imports work
if not __package__:
from pathlib import Path
parent_dir = Path(__file__).absolute().parent
sys.path.insert(0, str(parent_dir))
import unicodemoticon # noqa
__package__ = str("unicodemoticon")
from . import MainWidget # lint:ok noqa pragma:nocover
start_time = datetime.now()
def main(args=sys.argv):
make_logger("unicodemoticon", emoji=True)
lock = set_single_instance("unicodemoticon")
check_encoding()
set_process_name("unicodemoticon")
set_process_priority()
app = QApplication(args)
app.setApplicationName("unicodemoticon")
app.setOrganizationName("unicodemoticon")
app.setOrganizationDomain("unicodemoticon")
app.instance().setQuitOnLastWindowClosed(False) # no quit on dialog quit
if qdarkstyle:
app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
icon = QIcon(app.style().standardPixmap(QStyle.SP_FileIcon))
app.setWindowIcon(icon)
mainwindow = MainWidget()
mainwindow.show()
mainwindow.hide()
make_post_exec_msg(start_time)
sys.exit(app.exec())
# may be unicodemoticon.__main__
if __name__.endswith("__main__"):
main()
| gpl-3.0 | 616,482,973,865,316,100 | 28.525424 | 77 | 0.691734 | false |
cerisola/fiscomp | percolation/analysis/common.py | 1 | 1592 | import numpy as np
import scipy.stats as stats
import scipy.integrate as integrate
Z_normal = { None: 1, '90': 1.644854, '95': 1.959964, '99': 2.575829, '99.9': 3.290527, '99.99': 3.890592 }
# % Generic % #
def mean(v):
return np.mean(v)
def var(v):
return np.var(v, ddof=1)
def std(v):
return np.std(v, ddof=1)
def sem(v, ci=None):
Z = Z_normal[ci]
return Z*stats.sem(v)
def cdf_mean(F, x):
return (1 - integrate.simps(y=F, x=x))
def cdf_var(F, x):
return (2*integrate.simps(y=x*(1-F), x=x) - cdf_mean(F, x)**2)
def cdf_std(F, x):
return np.sqrt(cdf_var(F, x))
# % Binomial Distribution Aux % #
def binomial_var(p, n):
return n*p*(1-p)
def binomial_std(p, n):
return np.sqrt(n*p*(1 - p))
def binomial_sem(p, n, ci=None):
Z = Z_normal[ci]
return Z*np.sqrt(p*(1 - p)/n)
def binomial_ci_wald(p, n, ci=None):
Z = Z_normal[ci]
normal_stderr = Z*np.sqrt(p*(1 - p)/n)
p_min = p - normal_stderr
p_max = p + normal_stderr
return p_min, p_max
def binomial_ci_wilson(p, n, ci=None):
Z = Z_normal[ci]
p_min = (2*n*p + Z**2 - (Z*np.sqrt(Z**2 - 1/n + 4*n*p*(1-p) + (4*p - 2)) + 1))/(2*(n + Z**2))
p_max = (2*n*p + Z**2 + (Z*np.sqrt(Z**2 - 1/n + 4*n*p*(1-p) - (4*p - 2)) + 1))/(2*(n + Z**2))
p_min = np.maximum(0, p_min)
p_max = np.minimum(1, p_max)
return p_min, p_max
# % Utility function to apply the above functions to lists of arrays of different sizes % #
def listmap(func, v, args=None):
return np.array([func(v[idx], **args) if args else func(v[idx]) for idx in range(len(v))])
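# A small usage sketch of the helpers above (the sample arrays below are just
# made-up examples, not part of the analysis data):
#
#   samples = [np.random.rand(10), np.random.rand(25), np.random.rand(50)]
#   means = listmap(mean, samples)                     # one mean per sample
#   sems = listmap(sem, samples, args={'ci': '95'})    # 95% CI standard errors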
| mit | 1,011,829,247,394,902,300 | 20.808219 | 107 | 0.56407 | false |
balancehero/kinsumer | kinsumer/config.py | 1 | 2778 | """:mod:`kinsumer.config` --- Implements the configuration related objects
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import errno
import os
import types
from typing import Dict, Any
from typeguard import typechecked
from werkzeug.datastructures import ImmutableDict
from werkzeug.utils import import_string
class ConfigAttribute(object):
"""Make an attribute forward to the config"""
def __init__(self, name: str, get_converter=None):
self.__name__ = name
self.get_converter = get_converter
def __get__(self, obj: object, type_=None) -> object:
if obj is None:
return self
rv = obj.config[self.__name__]
if self.get_converter is not None:
rv = self.get_converter(rv)
return rv
def __set__(self, obj: object, value: object) -> None:
obj.config[self.__name__] = value
class Config(dict):
@typechecked
def __init__(self, root_path: str, defaults: ImmutableDict = None) -> None:
super().__init__(defaults or {})
self.root_path = root_path
@typechecked
def from_envvar(self, variable_name: str, silent: bool = False) -> bool:
rv = os.environ.get(variable_name)
if not rv:
if silent:
                return False
raise RuntimeError()
return self.from_pyfile(rv, silent=silent)
@typechecked
def from_pyfile(self, filename: str, silent: bool = False) -> bool:
filename = os.path.join(self.root_path, filename)
d = types.ModuleType('config')
d.__file__ = filename
try:
with open(filename, mode='rb') as config_file:
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
self.from_object(d)
return True
@typechecked
def from_object(self, obj) -> None:
if isinstance(obj, str):
obj = import_string(obj)
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
@typechecked
def get_namespace(self,
namespace: str,
lowercase=True,
trim_namespace=True) -> Dict[str, Any]:
rv = {}
for k, v in self.items():
if not k.startswith(namespace):
continue
if trim_namespace:
key = k[len(namespace):]
else:
key = k
if lowercase:
key = key.lower()
rv[key] = v
return rv
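# A minimal usage sketch (the settings class and key names below are just
# example values, not something kinsumer itself defines):
#
#     class DefaultSettings:
#         KINESIS_STREAM_NAME = 'events'
#         KINESIS_BATCH_SIZE = 100
#
#     config = Config('/etc/kinsumer')
#     config.from_object(DefaultSettings)
#     config.get_namespace('KINESIS_')
#     # -> {'stream_name': 'events', 'batch_size': 100}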
| mit | -2,695,724,280,624,929,000 | 30.213483 | 79 | 0.537437 | false |
cligs/tmw | tmw_config.py | 1 | 8798 | #!/usr/bin/env python3
# Filename: my_tmw.py
# Author: #cf
# Version 0.2.0 (2015-08-27)
##################################################################
### CONFIG FILE for: Topic Modeling Workflow (tmw) ###
##################################################################
# Used in the following paper:
# Christof Schoech, "Topic Modeling French Crime Fiction",
# presented at the Digital Humanities Conference, Sydney, 2015.
# For information on requirements and usage, see the README file.
# This config file is structured as follows:
# 1. Preprocessing Texts
# 2. Topic Modeling
# 3. Posprocessing Data
# 4. Visualization
# 5. Other / Obsolete
import tmw
#print(help(topmod))
### Set the general working directory.
wdir = "/home/.../" # end with slash.
################################
### PREPROCESSING TEXTS ###
################################
### tei5reader_fulldocs (standard option)
### Extract selected plain text from XML/TEI files.
inpath = wdir + "master/*.xml"
outfolder = wdir + "1_txt/"
#tmw.tei5reader_fulldocs(inpath,outfolder)
### segmenter
### Split entire texts into smaller segments.
inpath = wdir + "1_txt/*.txt"
outfolder = wdir + "2_segs/"
target = 600
sizetolerancefactor = 1.1 # 1 = exact target; >1 = with some tolerance (1.1 = +/- 10%).
preserveparagraphs = True # True|False
#tmw.segmenter(inpath, outfolder, target, sizetolerancefactor, preserveparagraphs)
### segments_to_bins: inpath, outfile
### Currently not implemented any more / yet.
### pretokenize
### Perform some preliminary tokenization.
inpath = wdir + "2_segs/*.txt"
outfolder = wdir + "3_tokens/"
substitutionsFile = "./extras/fr_pretokenize_subs.csv"
#tmw.pretokenize(inpath, substitutionsFile, outfolder)
### call_treetagger
### Perform lemmatization and POS tagging.
infolder = wdir + "3_tokens/"
outfolder = wdir + "4_tagged/"
tagger = "/home/christof/Programs/TreeTagger/cmd/tree-tagger-french"
#tmw.call_treetagger(infolder, outfolder, tagger)
### make_lemmatext
### Extract selected lemmata from tagged text.
inpath = wdir + "4_tagged/*.trt"
outfolder = wdir + "5_lemmata/"
mode = "frN" # frN=nouns, esN=nouns, frNV=nouns+verbs, frNVAA=nouns+verbs+adj+adverbs
stoplist_errors = "./extras/fr_stopwords_errors.txt" # in tmw folder
#tmw.make_lemmatext(inpath, outfolder, mode, stoplist_errors)
################################
### TOPIC MODELING ###
################################
### call_mallet_import
### Imports text data into the Mallet corpus format.
mallet_path = "/home/christof/Programs/Mallet/bin/mallet"
infolder = wdir + "5_lemmata/"
outfolder = wdir + "6_mallet/"
outfile = outfolder + "corpus.mallet"
stoplist_project = "./extras/fr_stopwords_project.txt" # in tmw folder
#tmw.call_mallet_import(mallet_path, infolder, outfolder, outfile, stoplist_project)
### call_mallet_model
### Performs the actual topic modeling.
mallet_path = "/home/christof/Programs/Mallet/bin/mallet"
inputfile = wdir + "6_mallet/corpus.mallet"
outfolder = wdir + "6_mallet/"
num_topics = "250"
optimize_interval = "100"
num_iterations = "5000"
num_top_words = "200"
doc_topics_max = num_topics
num_threads = "4"
#tmw.call_mallet_modeling(mallet_path, inputfile, outfolder, num_topics, optimize_interval, num_iterations, num_top_words, doc_topics_max)
################################
### POSTPROCESSING DATA ###
################################
### create_mastermatrix
### Creates the mastermatrix with all information in one place.
corpuspath = wdir+"/2_segs/*.txt"
outfolder = wdir+"7_aggregates/"
mastermatrixfile = "mastermatrix.csv"
metadatafile = wdir+"/metadata.csv"
topics_in_texts = wdir+"/6_mallet/topics-in-texts.csv"
number_of_topics = 250
#tmw.create_mastermatrix(corpuspath, outfolder, mastermatrixfile, metadatafile, topics_in_texts, number_of_topics)
### calculate_averageTopicScores
### Based on the mastermatrix, calculates various average topic score datasets.
mastermatrixfile = wdir+"/7_aggregates/mastermatrix.csv"
outfolder = wdir+"7_aggregates/"
# targets: one or several:author|decade|subgenre|author-gender|idno|segmentID|narration
targets = ["author-name", "author-gender", "title", "decade", "subgenre",
"idno", "segmentID", "narration", "protagonist-policier"]
#tmw.calculate_averageTopicScores(mastermatrixfile, targets, outfolder)
### save_firstWords
### Saves the first words of each topic to a separate file.
topicWordFile = wdir+"6_mallet/topics-with-words.csv"
outfolder = wdir+"7_aggregates/"
filename = "firstWords.csv"
#tmw.save_firstWords(topicWordFile, outfolder, filename)
################################
### VISUALIZATION ###
################################
### make_wordle_from_mallet
### Creates a wordle for each topic.
word_weights_file = wdir + "6_mallet/" + "word-weights.txt"
topics = 250
words = 40
outfolder = wdir + "8_visuals/wordles/"
font_path = "/home/christof/.fonts/AlegreyaSans-Regular.otf"
dpi = 300
#tmw.make_wordle_from_mallet(word_weights_file,topics,words,outfolder,font_path,dpi)
### crop_images
### Crops the wordle image files, use if needed.
inpath = wdir + "8_visuals/wordles/*.png"
outfolder = wdir + "8_visuals/wordles/"
left = 225 # image start at the left
upper = 210 # image start at the top
right = 2225 # image end on the right
lower = 1310 # image end at the bottom
#tmw.crop_images(inpath, outfolder, left, upper, right, lower)
### plot_topTopics
### For each item from a category, creates a barchart of the top topics.
averageDatasets = wdir+"/7_aggregates/avg*.csv"
firstWordsFile = wdir+"/7_aggregates/firstWords.csv"
numberOfTopics = 250 # must be actual number of topics modeled.
targetCategories = ["author-name", "author-gender", "decade", "subgenre", "title"]
# one or several: "author-name", "author-gender", "decade", "subgenre", "title"
topTopicsShown = 30
fontscale = 1.0
height = 0 # 0=automatic and variable
dpi = 300
outfolder = wdir+"/8_visuals/topTopics/"
#tmw.plot_topTopics(averageDatasets, firstWordsFile, numberOfTopics, targetCategories, topTopicsShown, fontscale, height, dpi, outfolder)
### plot_topItems
### For each topic, creates a barchart with top items from a category.
averageDatasets = wdir+"/7_aggregates/avg*.csv"
outfolder = wdir+"/8_visuals/topItems/"
firstWordsFile = wdir+"/7_aggregates/firstWords.csv"
numberOfTopics = 250 # must be actual number of topics modeled.
targetCategories = ["author-name", "subgenre", "title", "decade", "author-gender"]
# choose one or several from: author-name, decade, subgenre, gender, idno, title, segmentID
topItemsShown = 30
fontscale = 0.8
height = 0 # 0=automatic and flexible
dpi = 300
#tmw.plot_topItems(averageDatasets, outfolder, firstWordsFile, numberOfTopics, targetCategories, topItemsShown, fontscale, height, dpi)
### plot_distinctiveness_heatmap
### For each category, make a heatmap of most distinctive topics.
averageDatasets = wdir+"/7_aggregates/avg*.csv"
firstWordsFile = wdir+"/7_aggregates/firstWords.csv"
outfolder = wdir+"/8_visuals/distinctiveness/"
targetCategories = ["author-name", "decade", "subgenre", "gender"]
# one or several: "author-name", "decade", "subgenre", "gender", "idno", "title"
numberOfTopics = 250 # must be actual number of topics modeled.
topTopicsShown = 20
fontscale = 1.0
dpi = 300
#tmw.plot_distinctiveness_heatmap(averageDatasets, firstWordsFile, outfolder, targetCategories, numberOfTopics, topTopicsShown, fontscale, dpi)
### plot_topicsOverTime
### Creates lineplots or areaplots for topic development over time.
averageDatasets = wdir+"/7_aggregates/avgtopicscores_by-decade.csv"
firstWordsFile = wdir+"/7_aggregates/firstWords.csv"
outfolder = wdir+"/8_visuals/overTime/"
numberOfTopics = 250 # must be actual number of topics modeled.
fontscale = 1.0
dpi = 300
height = 0 # for lineplot; 0=automatic
mode = "line" # area|line for areaplot or lineplot
topics = ["48","67","199"] # list of one or several topics
#tmw.plot_topicsOverTime(averageDatasets, firstWordsFile, outfolder, numberOfTopics, fontscale, dpi, height, mode, topics)
################################
### OTHER/OBSOLETE ###
################################
### 5c show segment
## To read a specific segment, better than looking in the folder.
segmentID = "rf0546§000083"
outfolder = wdir+"/9_sel-segs/"
#tmw.show_segment(wdir,segmentID, outfolder)
### 6b - create_topicscores_lineplot
inpath = wdir + "7_aggregates/*-lp.csv" # narrow down as needed
outfolder = wdir + "8_visuals/lineplots/"
topicwordfile = wdir + "6_mallet/topics-with-words.csv"
dpi = 300
height = 0.050
genres = ["detection","noir"] # User: set depending on metadata. Available: noir, detection, criminel, experim., archq., blanche, neopl., susp.
#tmw.create_topicscores_lineplot(inpath,outfolder,topicwordfile,dpi,height,genres)
| mit | 6,944,172,194,449,163,000 | 36.918103 | 143 | 0.69285 | false |
schristakidis/p2ner | p2ner/components/plugin/holepuncher/holepuncher/holepuncher.py | 1 | 8114 | from p2ner.core.namespace import Namespace, initNS
# Copyright 2012 Loris Corazza, Sakis Christakidis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import p2ner.util.utilities as util
from messages.messages import PunchMessage,PunchReplyMessage,KeepAliveMessage,AskServerPunchMessage,StartPunchingMessage
from twisted.internet import reactor,task,defer
from p2ner.core.pipeline import Pipeline
from p2ner.core.components import loadComponent
from time import time
from p2ner.base.ControlMessage import MessageSent, MessageError
from p2ner.base.Consts import MessageCodes as MSG
class HolePuncher(Namespace):
@initNS
def __init__(self):
self.peers=[]
self.registerMessages()
self.constructPipe()
self.loopingCall = task.LoopingCall(self.sendKeepAlive)
self.loopingCall.start(30)
self.checkPeers={}
self.mcount=0
self.requestingPeers=[]
def registerMessages(self):
self.messages = []
self.messages.append(PunchMessage())
self.messages.append(PunchReplyMessage())
self.messages.append(KeepAliveMessage())
self.messages.append(AskServerPunchMessage())
self.messages.append(StartPunchingMessage())
def constructPipe(self):
self.holePipe=self.trafficPipe
def check(self,msg,content,peer,d,pipe):
if not peer:
return
#print 'checkinggggggg ',peer
toCheck=[]
send=True
if not isinstance(peer, (list, tuple)):
peer=[peer]
for p in peer:
if not self.netChecker.hpunching and not p.hpunch:
p.conOk=True
elif p.conOk:
if msg.code!=MSG.KEEP_ALIVE:
p.lastSend=time()
elif p.conProb:
print "can't connect to peer ",p," as determined from previous try"
elif p.hpunch or self.netChecker.hpunching and p.dataPort:
send=False
pr=[i for i in peer if i!=p]
if self.checkPeers.has_key(p):
self.checkPeers[p].append({'msg':(msg,content,peer,d,pipe),'peers':pr,'id':self.mcount})
else:
self.checkPeers[p]=[{'msg':(msg,content,peer,d,pipe),'peers':pr,'id':self.mcount}]
if not p in self.requestingPeers:
toCheck.append(p)
self.mcount+=1
#print 'to check ',toCheck
if send:
if len(peer)==1:
peer=peer[0]
pipe._send(msg,content,peer,d)
else:
for p in toCheck:
reactor.callLater(0.1,self.startPunching,p)
def sendKeepAlive(self):
for p in self.peers:
print p, p.lastSend,time()-p.lastSend
oldPeers=[p for p in self.peers if p.lastSend and time()-p.lastSend>=60]
for p in oldPeers:
p.conOk=False
self.peers=[p for p in self.peers if not p.lastSend or time()-p.lastSend<60]
for p in self.peers:
print 'sending keep allive to ',p
KeepAliveMessage.send(p, self.controlPipe,self.keepAliveFailed)
KeepAliveMessage.send(p, self.holePipe,self.keepAliveFailed)
servers=[s.server for s in self.root.getAllStreams()]
try:
nat=self.root.netChecker.nat
except:
nat=False
if nat:
for p in servers:
KeepAliveMessage.send(p, self.controlPipe,self.keepAliveFailed)
def startPunching(self,peer):
if True:# peer.hpunch:
print 'sending ask server punch message to ',peer.learnedFrom,' for ',peer
AskServerPunchMessage.send(peer,peer.learnedFrom,self.controlPipe,self._startPunching,self.failedInterPunch,peer)
else:
self._startPunching(None,peer)
def failedInterPunch(self,server,peer):
print 'failed to start punching with ',peer,' through ',server
self.punchingFailed(peer)
def _startPunching(self,server,peer,init=True):
print 'punchingggggggggggggggggggggggg',peer
if not init:
self.requestingPeers.append(peer)
PunchMessage.send(peer,'port', self.controlPipe,self.punchingFailed)
PunchMessage.send(peer, 'dataPort', self.holePipe,self.punchingFailed)
def receivedReply(self,peer,port):
if port=='port':
peer.portOk=True
else:
peer.dataPortOk=True
if peer.portOk and peer.dataPortOk:
peer.lastSend=0
self.peers.append(peer)
peer.conOk=True
print 'okkkkkkkkkkkk ',peer
try:
self.requestingPeers.remove(peer)
except:
pass
self.sendMessage(peer)
def sendMessage(self,peer):
clean=[]
#print 'should send message'
if not peer in self.checkPeers.keys():
#print 'returning'
return
for m in self.checkPeers[peer]:
send=True
for p in m['peers']:
if not p.conOk:
send=False
break
msg=m['msg']
#print msg
if send:
print 'sending'
peer=msg[2]
if len(peer)==1:
peer=peer[0]
print peer
msg[-1]._send(msg[0],msg[1],peer,msg[3])
clean.append(m)
if clean:
self.cleanCheckPeers(peer,clean)
def cleanCheckPeers(self,peer,clean):
self.checkPeers[peer]=[m for m in self.checkPeers[peer] if m not in clean]
if not self.checkPeers[peer]:
self.checkPeers.pop(peer)
for m in clean:
id=m['id']
for p in m['peers']:
self.checkPeers[p]=[i for i in self.checkPeers[p] if i['id']!=id]
if not self.checkPeers[p]:
self.checkPeers.pop(p)
def punchingFailed(self,peer):
print "hole punching failed for ",peer
self.log.error("hole punching failed for %s",peer)
peer.conProb=True
try:
actions=self.checkPeers.pop(peer)
except:
return
for m in actions:
m['msg'][3].errback(defer.failure.Failure(MessageError(peer)))
id=m['id']
peers=[p for p in m['peers'] if p!=peer]
for p in peers:
for m1 in self.checkPeers[p]:
                    m1['peers'].remove(peer)
send=False
if peers:
send=True
for p in peers:
if not p.sendOk:
send=False
break
if send:
self.sendIdMessage(m)
def sendIdMessage(self,m):
id=m['id']
msg=m['msg']
peer=msg[2]
if len(peer)==1:
peer=peer[0]
msg[-1]._send(msg[0],msg[1],peer,msg[3])
clean=[]
for p in self.checkPeers.keys():
self.checkPeers[p]=[m1 for m1 in self.checkPeers[p] if m1['id']==id]
if not self.checkPeers[p]:
clean.append(p)
for p in clean:
self.checkPeers.pop(p)
def punchingRecipientFailed(self,peer):
peer.conProb=True
print "hole punching in recipient failed for ",peer
self.log.error("hole punching in recipient failed for %s",peer)
def keepAliveFailed(self,peer):
print "keep alive failed for ",peer
self.log.error("keep alive failed for %s",peer)
| apache-2.0 | 1,949,153,275,522,752,800 | 32.390947 | 125 | 0.576165 | false |
wowkin2/react-redux-api | apps/courses.py | 1 | 3362 | from flask_restful import reqparse, abort, Resource
from common import api, db
from constants import HttpStatus, EMPTY_JSON
from helpers import handle_bson
COLL_COURSES = 'courses'
course_parser = reqparse.RequestParser()
course_parser.add_argument('id', required=True)
course_parser.add_argument('title')
course_parser.add_argument('watchHref')
course_parser.add_argument('authorId')
course_parser.add_argument('category')
course_parser.add_argument('length')
class Course(Resource):
@staticmethod
def get(course_id):
course = db[COLL_COURSES].find_one({'id': course_id})
if course:
return handle_bson(course), HttpStatus.OK
else:
abort(HttpStatus.NOT_FOUND, message='Course "{}" not found'.format(course_id))
@staticmethod
def delete(course_id):
db[COLL_COURSES].remove({'id': course_id}, multi=False)
return EMPTY_JSON, HttpStatus.NO_CONTENT
@staticmethod
def post():
args = course_parser.parse_args()
course = {
'id': args.get('id'),
'authorId': args.get('authorId'),
'category': args.get('category'),
'watchHref': args.get('watchHref'),
'title': args.get('title'),
'length': args.get('length'),
}
if db[COLL_COURSES].find_one({'id': args.get('id')}) is None:
db[COLL_COURSES].insert_one(course)
return handle_bson(course), HttpStatus.CREATED
else:
return handle_bson(course), HttpStatus.CONFLICT
@staticmethod
def put(course_id):
args = course_parser.parse_args()
course = {
'id': args.get('id'),
'authorId': args.get('authorId'),
'category': args.get('category'),
'watchHref': args.get('watchHref'),
'title': args.get('title'),
'length': args.get('length'),
}
db[COLL_COURSES].update_one({'id': course_id}, {'$set': course}, upsert=True)
return handle_bson(course), HttpStatus.OK
class Courses(Resource):
@staticmethod
def get():
courses = list(db[COLL_COURSES].find({}))
return {'courses': handle_bson(courses)}, HttpStatus.OK
api.add_resource(Course, '/api/course', '/api/course/<course_id>')
api.add_resource(Courses, '/api/courses', '/api/courses/')
# @app.route('/api/courses', methods=['GET', 'POST'])
# def courses_handler():
# with open('courses.json', 'r') as f:
# courses = json.loads(f.read())
#
# if request.method == 'POST':
# new_course = request.json
# if new_course.get('id'):
# if new_course.get('id') in [x['id'] for x in courses]:
# # Update existing
# for course in courses:
# if course['id'] == new_course['id']:
# course.update(new_course)
# break
# else:
# # Add new
# courses.append(new_course)
#
# with open('courses.json', 'w') as f:
# f.write(json.dumps(courses, indent=4, separators=(',', ': ')))
#
# return Response(
# json.dumps(courses),
# mimetype='application/json',
# headers={
# 'Cache-Control': 'no-cache',
# 'Access-Control-Allow-Origin': '*'
# }
# )
| mit | 4,885,421,960,994,691,000 | 31.019048 | 90 | 0.560976 | false |
Clinical-Developers/Clinical_Developer_Challenges | HCAs-Nurses/Solution.py | 1 | 1260 | # This is a maths problem not primarily a progamming one.
# As such the solution function call might look like this: This is probably the most compact/clever way to express it in Python.
def staffing(staff_units, patients):
HCA, nurse = 2*staff_units-patients/2, patients/2-staff_units
if HCA < 0 or nurse < 0 or not HCA == int(HCA) or not nurse == int(nurse):
return "No solutions"
return HCA, nurse
'''
So the equation is balanced as follows
with HCAs/nurses expressed as x and y respectively and staff_units and patients expressed as s and p respectively:
x = s*2 - p/2
y = p/2-s
But there is no need to work them both out. Once you have calculated the HCAs, for instance, you can just do:
y = s-x
since you know that s-x must leave only remainder y. If it doesn't then you have a problem and the equation can't be solved!
'''
# Programmatically this can be expressed more clearly as follows (this is my preferred option as it is much easier to read):
def staffing(staff_units, patients):
HCA = 2*staff_units-patients/2
    nurse = staff_units - HCA
if HCA < 0 or nurse < 0 or HCA != int(HCA):
return "No solutions"
return HCA, nurse
# if you still don't believe me check out this repl: https://repl.it/Kewn/3
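# A quick worked example to sanity-check the formulas: with staff_units=5 and
# patients=16 we get HCA = 2*5 - 16/2 = 2 and nurse = 16/2 - 5 = 3, which fits:
# 2 + 3 = 5 staff units and 2*2 + 3*4 = 16 patients (the algebra implies each
# HCA covers 2 patients and each nurse covers 4).
#   staffing(5, 16)  # -> (2.0, 3.0) under Python 3 division
#   staffing(5, 17)  # -> "No solutions" (no whole-number split exists)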
| gpl-3.0 | -7,423,222,444,443,985,000 | 36.058824 | 128 | 0.706349 | false |
gooddata/openstack-nova | nova/tests/unit/virt/xenapi/test_vm_utils.py | 1 | 104532 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import greenthread
import mock
import os_xenapi
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import fixture as config_fixture
from oslo_utils import fixture as utils_fixture
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from nova.compute import flavors
from nova.compute import power_state
import nova.conf
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields as obj_fields
from nova import test
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
from nova.tests.unit.virt.xenapi import stubs
from nova.virt import hardware
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi.image import utils as image_utils
from nova.virt.xenapi import vm_utils
import time
CONF = nova.conf.CONF
XENSM_TYPE = 'xensm'
ISCSI_TYPE = 'iscsi'
def get_fake_connection_data(sr_type):
fakes = {XENSM_TYPE: {'sr_uuid': 'falseSR',
'name_label': 'fake_storage',
'name_description': 'test purposes',
'server': 'myserver',
'serverpath': '/local/scratch/myname',
'sr_type': 'nfs',
'introduce_sr_keys': ['server',
'serverpath',
'sr_type'],
'vdi_uuid': 'falseVDI'},
ISCSI_TYPE: {'volume_id': 'fake_volume_id',
'target_lun': 1,
'target_iqn': 'fake_iqn:volume-fake_volume_id',
'target_portal': u'localhost:3260',
'target_discovered': False}, }
return fakes[sr_type]
def _fake_noop(*args, **kwargs):
return
class VMUtilsTestBase(stubs.XenAPITestBaseNoDB):
pass
class LookupTestCase(VMUtilsTestBase):
def setUp(self):
super(LookupTestCase, self).setUp()
self.session = mock.Mock()
self.name_label = 'my_vm'
def test_normal(self):
self.session.call_xenapi.return_value = ['x']
result = vm_utils.lookup(self.session, self.name_label)
self.assertEqual('x', result)
self.session.call_xenapi.assert_called_once_with(
"VM.get_by_name_label", self.name_label)
def test_no_result(self):
self.session.call_xenapi.return_value = []
result = vm_utils.lookup(self.session, self.name_label)
self.assertIsNone(result)
self.session.call_xenapi.assert_called_once_with(
"VM.get_by_name_label", self.name_label)
def test_too_many(self):
self.session.call_xenapi.return_value = ['a', 'b']
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label)
self.session.call_xenapi.assert_called_once_with(
"VM.get_by_name_label", self.name_label)
def test_rescue_none(self):
self.session.call_xenapi.side_effect = [[], ['x']]
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('x', result)
self.session.call_xenapi.assert_has_calls([
mock.call("VM.get_by_name_label", self.name_label + '-rescue'),
mock.call("VM.get_by_name_label", self.name_label)])
def test_rescue_found(self):
self.session.call_xenapi.return_value = ['y']
result = vm_utils.lookup(self.session, self.name_label,
check_rescue=True)
self.assertEqual('y', result)
self.session.call_xenapi.assert_called_once_with(
"VM.get_by_name_label", self.name_label + '-rescue')
def test_rescue_too_many(self):
self.session.call_xenapi.return_value = ['a', 'b', 'c']
self.assertRaises(exception.InstanceExists,
vm_utils.lookup,
self.session, self.name_label,
check_rescue=True)
self.session.call_xenapi.assert_called_once_with(
"VM.get_by_name_label", self.name_label + '-rescue')
class GenerateConfigDriveTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, 'safe_find_sr')
@mock.patch.object(vm_utils, "create_vdi", return_value='vdi_ref')
@mock.patch.object(vm_utils.instance_metadata, "InstanceMetadata")
@mock.patch.object(vm_utils.configdrive, 'ConfigDriveBuilder')
@mock.patch.object(vm_utils.utils, 'execute')
@mock.patch.object(vm_utils.volume_utils, 'stream_to_vdi')
@mock.patch.object(vm_utils.os.path, 'getsize', return_value=100)
@mock.patch.object(vm_utils, 'create_vbd', return_value='vbd_ref')
@mock.patch.object(vm_utils.utils, 'tempdir')
def test_no_admin_pass(self, mock_tmpdir, mock_create_vbd, mock_size,
mock_stream, mock_execute, mock_builder,
mock_instance_metadata, mock_create_vdi,
mock_find_sr):
mock_tmpdir.return_value.__enter__.return_value = '/mock'
with mock.patch.object(six.moves.builtins, 'open') as mock_open:
mock_open.return_value.__enter__.return_value = 'open_fd'
vm_utils.generate_configdrive('session', 'context', 'instance',
'vm_ref', 'userdevice',
'network_info')
mock_size.assert_called_with('/mock/configdrive.vhd')
mock_open.assert_called_with('/mock/configdrive.vhd')
mock_execute.assert_called_with('qemu-img', 'convert', '-Ovpc',
'/mock/configdrive',
'/mock/configdrive.vhd')
mock_instance_metadata.assert_called_with(
'instance', content=None, extra_md={},
network_info='network_info', request_context='context')
mock_stream.assert_called_with('session', 'instance', 'vhd',
'open_fd', 100, 'vdi_ref')
@mock.patch.object(vm_utils, "destroy_vdi")
@mock.patch.object(vm_utils, 'safe_find_sr')
@mock.patch.object(vm_utils, "create_vdi", return_value='vdi_ref')
@mock.patch.object(vm_utils.instance_metadata, "InstanceMetadata",
side_effect=test.TestingException)
def test_vdi_cleaned_up(self, mock_instance_metadata, mock_create,
mock_find_sr, mock_destroy):
self.assertRaises(test.TestingException, vm_utils.generate_configdrive,
'session', None, None, None, None, None)
mock_destroy.assert_called_once_with('session', 'vdi_ref')
class XenAPIGetUUID(VMUtilsTestBase):
@mock.patch.object(vm_utils, '_get_sys_hypervisor_uuid',
return_value='2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f')
def test_get_this_vm_uuid_new_kernel(self, mock_get_sys_hypervisor_uuid):
result = vm_utils.get_this_vm_uuid(None)
self.assertEqual('2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f', result)
mock_get_sys_hypervisor_uuid.assert_called_once_with()
@mock.patch('nova.virt.xenapi.vm_utils._get_sys_hypervisor_uuid',
side_effect=IOError(13, 'Permission denied'))
@mock.patch('nova.privsep.xenapi.xenstore_read',
side_effect=[('27', ''),
('/vm/2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f', '')])
def test_get_this_vm_uuid_old_kernel_reboot(self, fake_read, fake_uuid):
result = vm_utils.get_this_vm_uuid(None)
self.assertEqual('2f46f0f5-f14c-ef1b-1fac-9eeca0888a3f', result)
fake_read.assert_has_calls([
mock.call('domid'),
mock.call('/local/domain/27/vm')])
fake_uuid.assert_called_once_with()
class FakeSession(object):
def call_xenapi(self, *args):
pass
def call_plugin(self, *args):
pass
def call_plugin_serialized(self, plugin, fn, *args, **kwargs):
pass
def call_plugin_serialized_with_retry(self, plugin, fn, num_retries,
callback, *args, **kwargs):
pass
class FetchVhdImageTestCase(VMUtilsTestBase):
def setUp(self):
super(FetchVhdImageTestCase, self).setUp()
self.context = context.get_admin_context()
self.context.auth_token = 'auth_token'
self.session = FakeSession()
self.instance = {"uuid": "uuid"}
self.image_handler = image_utils.get_image_handler(
CONF.xenserver.image_handler)
self.flags(group='glance', api_servers=['http://localhost:9292'])
make_uuid_stack_patcher = mock.patch.object(
vm_utils, '_make_uuid_stack', return_value=["uuid_stack"])
self.addCleanup(make_uuid_stack_patcher.stop)
self.mock_make_uuid_stack = make_uuid_stack_patcher.start()
get_sr_path_patcher = mock.patch.object(
vm_utils, 'get_sr_path', return_value='sr_path')
self.addCleanup(get_sr_path_patcher.stop)
self.mock_get_sr_path = get_sr_path_patcher.start()
def _stub_glance_download_vhd(self, raise_exc=None):
call_plugin_patcher = mock.patch.object(
self.session, 'call_plugin_serialized_with_retry')
self.addCleanup(call_plugin_patcher.stop)
self.mock_call_plugin = call_plugin_patcher.start()
if raise_exc:
self.mock_call_plugin.side_effect = raise_exc
else:
self.mock_call_plugin.return_value = {'root': {'uuid': 'vdi'}}
def _assert_make_uuid_stack_and_get_sr_path(self):
self.mock_make_uuid_stack.assert_called_once_with()
self.mock_get_sr_path.assert_called_once_with(self.session)
def _assert_call_plugin_serialized_with_retry(self):
self.mock_call_plugin.assert_called_once_with(
'glance.py',
'download_vhd2',
0,
mock.ANY,
mock.ANY,
extra_headers={'X-Auth-Token': 'auth_token',
'X-Roles': '',
'X-Tenant-Id': None,
'X-User-Id': None,
'X-Identity-Status': 'Confirmed'},
image_id='image_id',
uuid_stack=["uuid_stack"],
sr_path='sr_path')
@mock.patch.object(vm_utils, '_check_vdi_size')
@mock.patch.object(vm_utils, '_scan_sr')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value="sr")
def test_fetch_vhd_image_works_with_glance(self, mock_safe_find_sr,
mock_scan_sr,
mock_check_vdi_size):
self._stub_glance_download_vhd()
result = vm_utils._fetch_vhd_image(self.context, self.session,
self.instance, 'image_id',
self.image_handler)
self.assertEqual("vdi", result['root']['uuid'])
mock_safe_find_sr.assert_called_once_with(self.session)
mock_scan_sr.assert_called_once_with(self.session, "sr")
mock_check_vdi_size.assert_called_once_with(self.context, self.session,
self.instance, "vdi")
self._assert_call_plugin_serialized_with_retry()
self._assert_make_uuid_stack_and_get_sr_path()
@mock.patch.object(vm_utils, 'destroy_vdi',
side_effect=exception.StorageError(reason=""))
@mock.patch.object(FakeSession, 'call_xenapi', return_value="ref")
@mock.patch.object(
vm_utils, '_check_vdi_size',
side_effect=exception.FlavorDiskSmallerThanImage(flavor_size=0,
image_size=1))
@mock.patch.object(vm_utils, '_scan_sr')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value="sr")
def test_fetch_vhd_image_cleans_up_vdi_on_fail(
self, mock_safe_find_sr, mock_scan_sr, mock_check_vdi_size,
mock_call_xenapi, mock_destroy_vdi):
self._stub_glance_download_vhd()
self.assertRaises(exception.FlavorDiskSmallerThanImage,
vm_utils._fetch_vhd_image, self.context, self.session,
self.instance, 'image_id', self.image_handler)
mock_safe_find_sr.assert_called_once_with(self.session)
mock_scan_sr.assert_called_once_with(self.session, "sr")
mock_check_vdi_size.assert_called_once_with(self.context, self.session,
self.instance, "vdi")
mock_call_xenapi.assert_called_once_with("VDI.get_by_uuid", "vdi")
mock_destroy_vdi.assert_called_once_with(self.session, "ref")
self._assert_call_plugin_serialized_with_retry()
self._assert_make_uuid_stack_and_get_sr_path()
def test_fetch_vhd_image_download_exception(self):
self._stub_glance_download_vhd(raise_exc=RuntimeError)
self.assertRaises(RuntimeError, vm_utils._fetch_vhd_image,
self.context, self.session, self.instance, 'image_id',
self.image_handler)
self._assert_call_plugin_serialized_with_retry()
self._assert_make_uuid_stack_and_get_sr_path()
class TestImageCompression(VMUtilsTestBase):
def test_image_compression(self):
# Testing for nova.conf, too low, negative, and a correct value.
self.assertIsNone(vm_utils.get_compression_level())
self.flags(image_compression_level=6, group='xenserver')
self.assertEqual(vm_utils.get_compression_level(), 6)
class ResizeHelpersTestCase(VMUtilsTestBase):
def setUp(self):
super(ResizeHelpersTestCase, self).setUp()
self.context = context.RequestContext('user', 'project')
@mock.patch('nova.privsep.fs.ext_journal_disable')
@mock.patch('nova.privsep.fs.ext_journal_enable')
@mock.patch('nova.privsep.fs.resize_partition')
@mock.patch('nova.privsep.fs.resize2fs')
@mock.patch('nova.privsep.fs.e2fsck')
def test_resize_part_and_fs_down_succeeds(
self, mock_fsck, mock_resize2fs, mock_resize,
mock_disable_journal, mock_enable_journal):
dev_path = '/dev/fake'
partition_path = '%s1' % dev_path
vm_utils._resize_part_and_fs('fake', 0, 20, 10, 'boot')
mock_fsck.assert_has_calls([
mock.call(partition_path)])
mock_resize2fs.assert_has_calls([
mock.call(partition_path, [0], size='10s')])
mock_resize.assert_has_calls([
mock.call(dev_path, 0, 9, True)])
mock_disable_journal.assert_has_calls([
mock.call(partition_path)])
mock_enable_journal.assert_has_calls([
mock.call(partition_path)])
@mock.patch.object(vm_utils.LOG, 'debug')
def test_log_progress_if_required(self, mock_debug):
current = timeutils.utcnow()
time_fixture = self.useFixture(utils_fixture.TimeFixture(current))
time_fixture.advance_time_seconds(
vm_utils.PROGRESS_INTERVAL_SECONDS + 1)
vm_utils._log_progress_if_required(1, current, 2)
mock_debug.assert_called_once_with(
"Sparse copy in progress, %(complete_pct).2f%% complete. "
"%(left)s bytes left to copy",
{"complete_pct": 50.0, "left": 1})
@mock.patch.object(vm_utils.LOG, 'debug')
def test_log_progress_if_not_required(self, mock_debug):
current = timeutils.utcnow()
time_fixture = self.useFixture(utils_fixture.TimeFixture(current))
time_fixture.advance_time_seconds(
vm_utils.PROGRESS_INTERVAL_SECONDS - 1)
vm_utils._log_progress_if_required(1, current, 2)
mock_debug.assert_not_called()
@mock.patch('nova.privsep.fs.ext_journal_disable')
@mock.patch('nova.privsep.fs.resize2fs',
side_effect=processutils.ProcessExecutionError)
@mock.patch('nova.privsep.fs.e2fsck')
def test_resize_part_and_fs_down_fails_disk_too_big(
self, mock_fsck, mock_resize2fs, mock_disable_journal):
self.assertRaises(exception.ResizeError,
vm_utils._resize_part_and_fs,
"fake", 0, 20, 10, "boot")
mock_fsck.assert_has_calls([mock.call('/dev/fake1')])
@mock.patch('nova.privsep.fs.ext_journal_disable')
@mock.patch('nova.privsep.fs.ext_journal_enable')
@mock.patch('nova.privsep.fs.resize_partition')
@mock.patch('nova.privsep.fs.resize2fs')
@mock.patch('nova.privsep.fs.e2fsck')
def test_resize_part_and_fs_up_succeeds(
self, mock_fsck, mock_resize2fs, mock_resize,
mock_disable_journal, mock_enable_journal):
dev_path = '/dev/fake'
partition_path = '%s1' % dev_path
vm_utils._resize_part_and_fs('fake', 0, 20, 30, '')
mock_fsck.assert_has_calls([
mock.call(partition_path)])
mock_resize2fs.assert_has_calls([
mock.call(partition_path, [0])])
mock_resize.assert_has_calls([
mock.call(dev_path, 0, 29, False)])
mock_disable_journal.assert_has_calls([
mock.call(partition_path)])
mock_enable_journal.assert_has_calls([
mock.call(partition_path)])
def test_resize_disk_throws_on_zero_size(self):
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=0)
self.assertRaises(exception.ResizeError, vm_utils.resize_disk,
"session", "instance", "vdi_ref", flavor)
def test_auto_config_disk_returns_early_on_zero_size(self):
vm_utils.try_auto_configure_disk("bad_session", "bad_vdi_ref", 0)
class CheckVDISizeTestCase(VMUtilsTestBase):
def setUp(self):
super(CheckVDISizeTestCase, self).setUp()
self.context = 'fakecontext'
self.session = 'fakesession'
self.instance = objects.Instance(uuid=uuids.fake)
self.flavor = objects.Flavor()
self.vdi_uuid = 'fakeuuid'
self.stub_out('nova.objects.Instance.get_flavor',
lambda *a, **kw: self.flavor)
@mock.patch.object(vm_utils, '_get_vdi_chain_size',
return_value=1073741824)
def test_not_too_large(self, mock_get_vdi_chain_size):
self.flavor.root_gb = 1
vm_utils._check_vdi_size(self.context, self.session, self.instance,
self.vdi_uuid)
mock_get_vdi_chain_size.assert_called_once_with(self.session,
self.vdi_uuid)
@mock.patch.object(vm_utils, '_get_vdi_chain_size',
return_value=11811160065) # 10GB overhead allowed
def test_too_large(self, mock_get_vdi_chain_size):
self.flavor.root_gb = 1
self.assertRaises(exception.FlavorDiskSmallerThanImage,
vm_utils._check_vdi_size, self.context,
self.session, self.instance, self.vdi_uuid)
mock_get_vdi_chain_size.assert_called_once_with(self.session,
self.vdi_uuid)
def test_zero_root_gb_disables_check(self):
self.flavor.root_gb = 0
vm_utils._check_vdi_size(self.context, self.session, self.instance,
self.vdi_uuid)
class GetInstanceForVdisForSrTestCase(VMUtilsTestBase):
def setUp(self):
super(GetInstanceForVdisForSrTestCase, self).setUp()
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(disable_process_locking=True,
group='oslo_concurrency')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='http://localhost',
connection_password='test_pass',
group='xenserver')
def test_get_instance_vdis_for_sr(self):
vm_ref = fake.create_vm("foo", "Running")
sr_ref = fake.create_sr()
vdi_1 = fake.create_vdi('vdiname1', sr_ref)
vdi_2 = fake.create_vdi('vdiname2', sr_ref)
for vdi_ref in [vdi_1, vdi_2]:
fake.create_vbd(vm_ref, vdi_ref)
stubs.stubout_session(self, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
result = list(vm_utils.get_instance_vdis_for_sr(
driver._session, vm_ref, sr_ref))
self.assertEqual([vdi_1, vdi_2], result)
def test_get_instance_vdis_for_sr_no_vbd(self):
vm_ref = fake.create_vm("foo", "Running")
sr_ref = fake.create_sr()
stubs.stubout_session(self, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
result = list(vm_utils.get_instance_vdis_for_sr(
driver._session, vm_ref, sr_ref))
self.assertEqual([], result)
class VMRefOrRaiseVMFoundTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, 'lookup', return_value='ignored')
def test_lookup_call(self, mock_lookup):
vm_utils.vm_ref_or_raise('session', 'somename')
mock_lookup.assert_called_once_with('session', 'somename')
@mock.patch.object(vm_utils, 'lookup', return_value='vmref')
def test_return_value(self, mock_lookup):
self.assertEqual(
'vmref', vm_utils.vm_ref_or_raise('session', 'somename'))
mock_lookup.assert_called_once_with('session', 'somename')
class VMRefOrRaiseVMNotFoundTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, 'lookup', return_value=None)
def test_exception_raised(self, mock_lookup):
self.assertRaises(
exception.InstanceNotFound,
lambda: vm_utils.vm_ref_or_raise('session', 'somename')
)
mock_lookup.assert_called_once_with('session', 'somename')
@mock.patch.object(vm_utils, 'lookup', return_value=None)
def test_exception_msg_contains_vm_name(self, mock_lookup):
try:
vm_utils.vm_ref_or_raise('session', 'somename')
except exception.InstanceNotFound as e:
self.assertIn('somename', six.text_type(e))
mock_lookup.assert_called_once_with('session', 'somename')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='safe_find_sr')
class CreateCachedImageTestCase(VMUtilsTestBase):
def setUp(self):
super(CreateCachedImageTestCase, self).setUp()
self.session = self.get_fake_session()
@mock.patch.object(vm_utils, '_clone_vdi', return_value='new_vdi_ref')
def test_cached(self, mock_clone_vdi, mock_safe_find_sr):
self.session.call_xenapi.side_effect = ['ext', {'vdi_ref': 2},
None, None, None, 'vdi_uuid']
self.assertEqual((False, {'root': {'uuid': 'vdi_uuid', 'file': None}}),
vm_utils._create_cached_image('context', self.session,
'instance', 'name', 'uuid',
vm_utils.ImageType.DISK_VHD,
'image_handler'))
@mock.patch.object(vm_utils, '_safe_copy_vdi', return_value='new_vdi_ref')
def test_no_cow(self, mock_safe_copy_vdi, mock_safe_find_sr):
self.flags(use_cow_images=False)
self.session.call_xenapi.side_effect = ['ext', {'vdi_ref': 2},
None, None, None, 'vdi_uuid']
self.assertEqual((False, {'root': {'uuid': 'vdi_uuid', 'file': None}}),
vm_utils._create_cached_image('context', self.session,
'instance', 'name', 'uuid',
vm_utils.ImageType.DISK_VHD,
'image_handler'))
def test_no_cow_no_ext(self, mock_safe_find_sr):
self.flags(use_cow_images=False)
self.session.call_xenapi.side_effect = ['non-ext', {'vdi_ref': 2},
'vdi_ref', None, None, None,
'vdi_uuid']
self.assertEqual((False, {'root': {'uuid': 'vdi_uuid', 'file': None}}),
vm_utils._create_cached_image('context', self.session,
'instance', 'name', 'uuid',
vm_utils.ImageType.DISK_VHD,
'image_handler'))
@mock.patch.object(vm_utils, '_clone_vdi', return_value='new_vdi_ref')
@mock.patch.object(vm_utils, '_fetch_image',
return_value={'root': {'uuid': 'vdi_uuid',
'file': None}})
def test_noncached(self, mock_fetch_image, mock_clone_vdi,
mock_safe_find_sr):
self.session.call_xenapi.side_effect = ['ext', {}, 'cache_vdi_ref',
None, None, None, None, None,
None, None, 'vdi_uuid']
self.assertEqual((True, {'root': {'uuid': 'vdi_uuid', 'file': None}}),
vm_utils._create_cached_image('context', self.session,
'instance', 'name', 'uuid',
vm_utils.ImageType.DISK_VHD,
'image_handler'))
class DestroyCachedImageTestCase(VMUtilsTestBase):
def setUp(self):
super(DestroyCachedImageTestCase, self).setUp()
self.session = self.get_fake_session()
@mock.patch.object(vm_utils, '_find_cached_images')
@mock.patch.object(vm_utils, 'destroy_vdi')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(time, 'time')
def test_destroy_cached_image_out_of_keep_days(self,
mock_time,
mock_walk_vdi_chain,
mock_destroy_vdi,
mock_find_cached_images):
fake_cached_time = '0'
mock_find_cached_images.return_value = {'fake_image_id': {
'vdi_ref': 'fake_vdi_ref', 'cached_time': fake_cached_time}}
self.session.call_xenapi.return_value = 'fake_uuid'
mock_walk_vdi_chain.return_value = ('just_one',)
mock_time.return_value = 2 * 3600 * 24
fake_keep_days = 1
expected_return = set()
expected_return.add('fake_uuid')
uuid_return = vm_utils.destroy_cached_images(self.session,
'fake_sr_ref', False, False, fake_keep_days)
mock_find_cached_images.assert_called_once()
mock_walk_vdi_chain.assert_called_once()
mock_time.assert_called()
mock_destroy_vdi.assert_called_once()
self.assertEqual(expected_return, uuid_return)
@mock.patch.object(vm_utils, '_find_cached_images')
@mock.patch.object(vm_utils, 'destroy_vdi')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(time, 'time')
def test_destroy_cached_image(self, mock_time, mock_walk_vdi_chain,
mock_destroy_vdi, mock_find_cached_images):
fake_cached_time = '0'
mock_find_cached_images.return_value = {'fake_image_id': {
'vdi_ref': 'fake_vdi_ref', 'cached_time': fake_cached_time}}
self.session.call_xenapi.return_value = 'fake_uuid'
mock_walk_vdi_chain.return_value = ('just_one',)
mock_time.return_value = 2 * 3600 * 24
fake_keep_days = 1
expected_return = set()
expected_return.add('fake_uuid')
uuid_return = vm_utils.destroy_cached_images(self.session,
'fake_sr_ref', False, False, fake_keep_days)
mock_find_cached_images.assert_called_once()
mock_walk_vdi_chain.assert_called_once()
mock_destroy_vdi.assert_called_once()
self.assertEqual(expected_return, uuid_return)
@mock.patch.object(vm_utils, '_find_cached_images')
@mock.patch.object(vm_utils, 'destroy_vdi')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(time, 'time')
def test_destroy_cached_image_cached_time_not_exceed(
self, mock_time, mock_walk_vdi_chain,
mock_destroy_vdi, mock_find_cached_images):
fake_cached_time = '0'
mock_find_cached_images.return_value = {'fake_image_id': {
'vdi_ref': 'fake_vdi_ref', 'cached_time': fake_cached_time}}
self.session.call_xenapi.return_value = 'fake_uuid'
mock_walk_vdi_chain.return_value = ('just_one',)
mock_time.return_value = 1 * 3600 * 24
fake_keep_days = 2
expected_return = set()
uuid_return = vm_utils.destroy_cached_images(self.session,
'fake_sr_ref', False, False, fake_keep_days)
mock_find_cached_images.assert_called_once()
mock_walk_vdi_chain.assert_called_once()
mock_destroy_vdi.assert_not_called()
self.assertEqual(expected_return, uuid_return)
@mock.patch.object(vm_utils, '_find_cached_images')
@mock.patch.object(vm_utils, 'destroy_vdi')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(time, 'time')
def test_destroy_cached_image_no_cached_time(
self, mock_time, mock_walk_vdi_chain,
mock_destroy_vdi, mock_find_cached_images):
mock_find_cached_images.return_value = {'fake_image_id': {
'vdi_ref': 'fake_vdi_ref', 'cached_time': None}}
self.session.call_xenapi.return_value = 'fake_uuid'
mock_walk_vdi_chain.return_value = ('just_one',)
fake_keep_days = 2
expected_return = set()
uuid_return = vm_utils.destroy_cached_images(self.session,
'fake_sr_ref', False, False, fake_keep_days)
mock_find_cached_images.assert_called_once()
mock_walk_vdi_chain.assert_called_once()
mock_destroy_vdi.assert_not_called()
self.assertEqual(expected_return, uuid_return)
@mock.patch.object(vm_utils, 'is_vm_shutdown', return_value=True)
class ShutdownTestCase(VMUtilsTestBase):
def test_hardshutdown_should_return_true_when_vm_is_shutdown(
self, mock_is_vm_shutdown):
session = FakeSession()
instance = "instance"
vm_ref = "vm-ref"
self.assertTrue(vm_utils.hard_shutdown_vm(
session, instance, vm_ref))
mock_is_vm_shutdown.assert_called_once_with(session, vm_ref)
def test_cleanshutdown_should_return_true_when_vm_is_shutdown(
self, mock_is_vm_shutdown):
session = FakeSession()
instance = "instance"
vm_ref = "vm-ref"
self.assertTrue(vm_utils.clean_shutdown_vm(
session, instance, vm_ref))
mock_is_vm_shutdown.assert_called_once_with(session, vm_ref)
@mock.patch.object(FakeSession, 'call_xenapi', return_value='vbd_ref')
class CreateVBDTestCase(VMUtilsTestBase):
def setUp(self):
super(CreateVBDTestCase, self).setUp()
self.session = FakeSession()
self.vbd_rec = self._generate_vbd_rec()
def _generate_vbd_rec(self):
vbd_rec = {}
vbd_rec['VM'] = 'vm_ref'
vbd_rec['VDI'] = 'vdi_ref'
vbd_rec['userdevice'] = '0'
vbd_rec['bootable'] = False
vbd_rec['mode'] = 'RW'
vbd_rec['type'] = 'disk'
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
return vbd_rec
def test_create_vbd_default_args(self, mock_call_xenapi):
result = vm_utils.create_vbd(self.session, "vm_ref", "vdi_ref", 0)
self.assertEqual(result, "vbd_ref")
mock_call_xenapi.assert_called_once_with('VBD.create', self.vbd_rec)
def test_create_vbd_osvol(self, mock_call_xenapi):
result = vm_utils.create_vbd(self.session, "vm_ref", "vdi_ref", 0,
osvol=True)
self.assertEqual(result, "vbd_ref")
mock_call_xenapi.assert_has_calls([
mock.call('VBD.create', self.vbd_rec),
mock.call('VBD.add_to_other_config', "vbd_ref", "osvol", "True")])
def test_create_vbd_extra_args(self, mock_call_xenapi):
self.vbd_rec['VDI'] = 'OpaqueRef:NULL'
self.vbd_rec['type'] = 'a'
self.vbd_rec['mode'] = 'RO'
self.vbd_rec['bootable'] = True
self.vbd_rec['empty'] = True
self.vbd_rec['unpluggable'] = False
result = vm_utils.create_vbd(self.session, "vm_ref", None, 0,
vbd_type="a", read_only=True, bootable=True,
empty=True, unpluggable=False)
self.assertEqual(result, "vbd_ref")
mock_call_xenapi.assert_called_once_with('VBD.create', self.vbd_rec)
@mock.patch.object(vm_utils, 'create_vbd', return_value='vbd_ref')
def test_attach_cd(self, mock_create_vbd, mock_call_xenapi):
mock_call_xenapi.return_value = None
result = vm_utils.attach_cd(self.session, "vm_ref", "vdi_ref", 1)
self.assertEqual(result, "vbd_ref")
mock_create_vbd.assert_called_once_with(
self.session, "vm_ref", None, 1, vbd_type='cd', read_only=True,
bootable=True, empty=True, unpluggable=False)
mock_call_xenapi.assert_called_once_with('VBD.insert', 'vbd_ref',
'vdi_ref')
class UnplugVbdTestCase(VMUtilsTestBase):
@mock.patch.object(greenthread, 'sleep')
def test_unplug_vbd_works(self, mock_sleep):
session = self.get_fake_session()
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
vm_utils.unplug_vbd(session, vbd_ref, vm_ref)
session.call_xenapi.assert_called_once_with('VBD.unplug', vbd_ref)
self.assertEqual(0, mock_sleep.call_count)
def test_unplug_vbd_raises_unexpected_error(self):
session = self.get_fake_session()
session.XenAPI.Failure = fake.Failure
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
session.call_xenapi.side_effect = test.TestingException()
self.assertRaises(test.TestingException, vm_utils.unplug_vbd,
session, vm_ref, vbd_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def test_unplug_vbd_already_detached_works(self):
error = "DEVICE_ALREADY_DETACHED"
session = self.get_fake_session(error)
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
vm_utils.unplug_vbd(session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def test_unplug_vbd_already_raises_unexpected_xenapi_error(self):
session = self.get_fake_session("")
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
self.assertRaises(exception.StorageError, vm_utils.unplug_vbd,
session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
def _test_uplug_vbd_retries(self, mock_sleep, error):
session = self.get_fake_session(error)
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
self.assertRaises(exception.StorageError, vm_utils.unplug_vbd,
session, vm_ref, vbd_ref)
self.assertEqual(11, session.call_xenapi.call_count)
self.assertEqual(10, mock_sleep.call_count)
def _test_uplug_vbd_retries_with_neg_val(self):
session = self.get_fake_session()
self.flags(num_vbd_unplug_retries=-1, group='xenserver')
vbd_ref = "vbd_ref"
vm_ref = 'vm_ref'
vm_utils.unplug_vbd(session, vbd_ref, vm_ref)
self.assertEqual(1, session.call_xenapi.call_count)
@mock.patch.object(greenthread, 'sleep')
def test_uplug_vbd_retries_on_rejected(self, mock_sleep):
self._test_uplug_vbd_retries(mock_sleep,
"DEVICE_DETACH_REJECTED")
@mock.patch.object(greenthread, 'sleep')
def test_uplug_vbd_retries_on_internal_error(self, mock_sleep):
self._test_uplug_vbd_retries(mock_sleep,
"INTERNAL_ERROR")
@mock.patch.object(greenthread, 'sleep')
def test_uplug_vbd_retries_on_missing_pv_drivers_error(self, mock_sleep):
self._test_uplug_vbd_retries(mock_sleep,
"VM_MISSING_PV_DRIVERS")
class VDIOtherConfigTestCase(VMUtilsTestBase):
"""Tests to ensure that the code is populating VDI's `other_config`
attribute with the correct metadta.
"""
def setUp(self):
super(VDIOtherConfigTestCase, self).setUp()
class _FakeSession(object):
def call_xenapi(self, operation, *args, **kwargs):
# VDI.add_to_other_config -> VDI_add_to_other_config
method = getattr(self, operation.replace('.', '_'), None)
if method:
return method(*args, **kwargs)
self.operation = operation
self.args = args
self.kwargs = kwargs
self.session = _FakeSession()
self.context = context.get_admin_context()
self.fake_instance = {'uuid': 'aaaa-bbbb-cccc-dddd',
'name': 'myinstance'}
def test_create_vdi(self):
# Some images are registered with XenServer explicitly by calling
# `create_vdi`
vm_utils.create_vdi(self.session, 'sr_ref', self.fake_instance,
'myvdi', 'root', 1024, read_only=True)
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, self.session.args[0]['other_config'])
@mock.patch.object(vm_utils, '_fetch_image',
return_value={'root': {'uuid': 'fake-uuid'}})
def test_create_image(self, mock_vm_utils):
# Other images are registered implicitly when they are dropped into
# the SR by a dom0 plugin or some other process
self.flags(cache_images='none', group='xenserver')
other_config = {}
def VDI_add_to_other_config(ref, key, value):
other_config[key] = value
# Stubbing on the session object and not class so we don't pollute
# other tests
self.session.VDI_add_to_other_config = VDI_add_to_other_config
self.session.VDI_get_other_config = lambda vdi: {}
vm_utils.create_image(self.context, self.session, self.fake_instance,
'myvdi', 'image1', vm_utils.ImageType.DISK_VHD,
'image_handler')
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, other_config)
@mock.patch.object(os_xenapi.client.vm_management, 'receive_vhd')
@mock.patch.object(vm_utils, 'scan_default_sr')
@mock.patch.object(vm_utils, 'get_sr_path')
def test_import_migrated_vhds(self, mock_sr_path, mock_scan_sr,
mock_recv_vhd):
# Migrated images should preserve the `other_config`
other_config = {}
def VDI_add_to_other_config(ref, key, value):
other_config[key] = value
# Stubbing on the session object and not class so we don't pollute
# other tests
self.session.VDI_add_to_other_config = VDI_add_to_other_config
self.session.VDI_get_other_config = lambda vdi: {}
mock_sr_path.return_value = {'root': {'uuid': 'aaaa-bbbb-cccc-dddd'}}
vm_utils._import_migrated_vhds(self.session, self.fake_instance,
"disk_label", "root", "vdi_label")
expected = {'nova_disk_type': 'root',
'nova_instance_uuid': 'aaaa-bbbb-cccc-dddd'}
self.assertEqual(expected, other_config)
mock_scan_sr.assert_called_once_with(self.session)
mock_recv_vhd.assert_called_with(
self.session, "disk_label",
{'root': {'uuid': 'aaaa-bbbb-cccc-dddd'}}, mock.ANY)
mock_sr_path.assert_called_once_with(self.session)
class GenerateDiskTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, 'vdi_attached')
@mock.patch('nova.privsep.fs.mkfs',
                side_effect=test.TestingException())
@mock.patch.object(vm_utils, '_get_dom0_ref', return_value='dom0_ref')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
@mock.patch.object(vm_utils, 'create_vdi', return_value='vdi_ref')
@mock.patch.object(vm_utils, 'create_vbd')
def test_generate_disk_with_no_fs_given(self, mock_create_vbd,
mock_create_vdi, mock_findsr,
mock_dom0ref, mock_mkfs,
mock_attached_here):
session = self.get_fake_session()
vdi_ref = mock.MagicMock()
mock_attached_here.return_value = vdi_ref
instance = {'uuid': 'fake_uuid'}
vm_utils._generate_disk(session, instance, 'vm_ref', '2',
'name', 'user', 10, None, None)
mock_attached_here.assert_called_once_with(session, 'vdi_ref',
read_only=False,
dom0=True)
mock_create_vbd.assert_called_with(session, 'vm_ref', 'vdi_ref', '2',
bootable=False)
@mock.patch.object(vm_utils, 'vdi_attached')
@mock.patch('nova.privsep.fs.mkfs')
@mock.patch.object(vm_utils, '_get_dom0_ref', return_value='dom0_ref')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
@mock.patch.object(vm_utils, 'create_vdi', return_value='vdi_ref')
@mock.patch.object(vm_utils.utils, 'make_dev_path',
return_value='/dev/fake_devp1')
@mock.patch.object(vm_utils, 'create_vbd')
def test_generate_disk_swap(self, mock_create_vbd, mock_make_path,
mock_create_vdi,
mock_findsr, mock_dom0ref, mock_mkfs,
mock_attached_here):
session = self.get_fake_session()
vdi_dev = mock.MagicMock()
mock_attached_here.return_value = vdi_dev
vdi_dev.__enter__.return_value = 'fakedev'
instance = {'uuid': 'fake_uuid'}
vm_utils._generate_disk(session, instance, 'vm_ref', '2',
'name', 'user', 10, 'swap',
'swap-1')
mock_attached_here.assert_any_call(session, 'vdi_ref',
read_only=False,
dom0=True)
# As swap is supported in dom0, mkfs will run there
session.call_plugin_serialized.assert_any_call(
'partition_utils.py', 'mkfs', 'fakedev', '1', 'swap', 'swap-1')
mock_create_vbd.assert_called_with(session, 'vm_ref', 'vdi_ref', '2',
bootable=False)
@mock.patch.object(vm_utils, 'vdi_attached')
@mock.patch('nova.privsep.fs.mkfs')
@mock.patch.object(vm_utils, '_get_dom0_ref', return_value='dom0_ref')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
@mock.patch.object(vm_utils, 'create_vdi', return_value='vdi_ref')
@mock.patch.object(vm_utils.utils, 'make_dev_path',
return_value='/dev/fake_devp1')
@mock.patch.object(vm_utils, 'create_vbd')
def test_generate_disk_ephemeral(self, mock_create_vbd, mock_make_path,
mock_create_vdi, mock_findsr,
mock_dom0ref, mock_mkfs,
mock_attached_here):
session = self.get_fake_session()
vdi_ref = mock.MagicMock()
mock_attached_here.return_value = vdi_ref
instance = {'uuid': 'fake_uuid'}
vm_utils._generate_disk(session, instance, 'vm_ref', '2',
'name', 'ephemeral', 10, 'ext4',
'ephemeral-1')
mock_attached_here.assert_any_call(session, 'vdi_ref',
read_only=False,
dom0=True)
# As ext4 is not supported in dom0, mkfs will run in domU
mock_attached_here.assert_any_call(session, 'vdi_ref',
read_only=False)
mock_mkfs.assert_called_with('ext4', '/dev/fake_devp1',
'ephemeral-1')
mock_create_vbd.assert_called_with(session, 'vm_ref', 'vdi_ref', '2',
bootable=False)
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
@mock.patch.object(vm_utils, 'create_vdi', return_value='vdi_ref')
@mock.patch.object(vm_utils, '_get_dom0_ref',
                       side_effect=test.TestingException())
@mock.patch.object(vm_utils, 'safe_destroy_vdis')
def test_generate_disk_ensure_cleanup_called(self, mock_destroy_vdis,
mock_dom0ref,
mock_create_vdi,
mock_findsr):
session = self.get_fake_session()
instance = {'uuid': 'fake_uuid'}
self.assertRaises(test.TestingException, vm_utils._generate_disk,
session, instance, None, '2', 'name', 'user', 10,
None, None)
mock_destroy_vdis.assert_called_once_with(session, ['vdi_ref'])
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
@mock.patch.object(vm_utils, 'create_vdi', return_value='vdi_ref')
@mock.patch.object(vm_utils, 'vdi_attached')
@mock.patch.object(vm_utils, '_get_dom0_ref', return_value='dom0_ref')
@mock.patch.object(vm_utils, 'create_vbd')
def test_generate_disk_ephemeral_no_vmref(self, mock_create_vbd,
mock_dom0_ref,
mock_attached_here,
mock_create_vdi,
mock_findsr):
session = self.get_fake_session()
vdi_ref = mock.MagicMock()
mock_attached_here.return_value = vdi_ref
instance = {'uuid': 'fake_uuid'}
vdi_ref = vm_utils._generate_disk(
session, instance,
None, None, 'name', 'user', 10, None, None)
mock_attached_here.assert_called_once_with(session, 'vdi_ref',
read_only=False, dom0=True)
self.assertFalse(mock_create_vbd.called)
@mock.patch.object(vm_utils, '_generate_disk')
class GenerateEphemeralTestCase(VMUtilsTestBase):
def setUp(self):
super(GenerateEphemeralTestCase, self).setUp()
self.session = "session"
self.instance = "instance"
self.vm_ref = "vm_ref"
self.name_label = "name"
self.ephemeral_name_label = "name ephemeral"
self.userdevice = 4
self.fs_label = "ephemeral"
def test_get_ephemeral_disk_sizes_simple(self, mock_generate_disk):
result = vm_utils.get_ephemeral_disk_sizes(20)
expected = [20]
self.assertEqual(expected, list(result))
def test_get_ephemeral_disk_sizes_three_disks_2000(self,
mock_generate_disk):
result = vm_utils.get_ephemeral_disk_sizes(4030)
expected = [2000, 2000, 30]
self.assertEqual(expected, list(result))
def test_get_ephemeral_disk_sizes_two_disks_1024(self, mock_generate_disk):
result = vm_utils.get_ephemeral_disk_sizes(2048)
expected = [1024, 1024]
self.assertEqual(expected, list(result))
def test_generate_ephemeral_adds_one_disk(self, mock_generate_disk):
mock_generate_disk.return_value = self.userdevice
vm_utils.generate_ephemeral(
self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 20)
mock_generate_disk.assert_called_once_with(
self.session, self.instance, self.vm_ref, str(self.userdevice),
self.ephemeral_name_label, 'ephemeral', 20480, None, self.fs_label)
def test_generate_ephemeral_adds_multiple_disks(self, mock_generate_disk):
mock_generate_disk.side_effect = [self.userdevice,
self.userdevice + 1,
self.userdevice + 2]
vm_utils.generate_ephemeral(
self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 4030)
mock_generate_disk.assert_has_calls([
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice), self.ephemeral_name_label,
'ephemeral', 2048000, None, self.fs_label),
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice + 1),
self.ephemeral_name_label + " (1)",
'ephemeral', 2048000, None, self.fs_label + "1"),
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice + 2),
self.ephemeral_name_label + " (2)",
'ephemeral', 30720, None, self.fs_label + "2")])
@mock.patch.object(vm_utils, 'safe_destroy_vdis')
def test_generate_ephemeral_cleans_up_on_error(
self, mock_safe_destroy_vdis, mock_generate_disk):
mock_generate_disk.side_effect = [self.userdevice,
self.userdevice + 1,
exception.NovaException]
self.assertRaises(
exception.NovaException, vm_utils.generate_ephemeral,
self.session, self.instance, self.vm_ref,
str(self.userdevice), self.name_label, 4096)
mock_safe_destroy_vdis.assert_called_once_with(self.session, [4, 5])
mock_generate_disk.assert_has_calls([
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice), self.ephemeral_name_label,
'ephemeral', 1048576, None, self.fs_label),
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice + 1),
self.ephemeral_name_label + " (1)",
'ephemeral', 1048576, None, self.fs_label + "1"),
mock.call(self.session, self.instance, self.vm_ref,
str(self.userdevice + 2),
"name ephemeral (2)",
'ephemeral', 1048576, None, 'ephemeral2')])
@mock.patch.object(vm_utils, '_write_partition')
@mock.patch.object(vm_utils.utils, 'temporary_chown')
@mock.patch.object(vm_utils.utils, 'make_dev_path', return_value='some_path')
class StreamDiskTestCase(VMUtilsTestBase):
def setUp(self):
super(StreamDiskTestCase, self).setUp()
# NOTE(matelakat): This might hide the fail reason, as test runners
# are unhappy with a mocked out open.
self.image_service_func = mock.Mock()
def test_non_ami(self, mock_make_dev_path, mock_temporary_chown,
mock_write_partition):
mock_temporary_chown.return_value.__enter__.return_value = None
mock_open = mock.mock_open()
with mock.patch.object(six.moves.builtins, 'open', mock_open):
vm_utils._stream_disk("session", self.image_service_func,
vm_utils.ImageType.KERNEL, None, 'dev')
mock_make_dev_path.assert_called_once_with('dev')
mock_temporary_chown.assert_called_once_with('some_path')
mock_write_partition.assert_not_called()
mock_open.assert_called_once_with('some_path', 'wb')
fake_file = mock_open()
fake_file.seek.assert_called_once_with(0)
self.image_service_func.assert_called_once_with(fake_file)
def test_ami_disk(self, mock_make_dev_path, mock_temporary_chown,
mock_write_partition):
mock_temporary_chown.return_value.__enter__.return_value = None
mock_open = mock.mock_open()
with mock.patch.object(six.moves.builtins, 'open', mock_open):
vm_utils._stream_disk("session", self.image_service_func,
vm_utils.ImageType.DISK, 100, 'dev')
mock_write_partition.assert_called_once_with("session", 100, 'dev')
mock_make_dev_path.assert_called_once_with('dev')
mock_temporary_chown.assert_called_once_with('some_path')
mock_open.assert_called_once_with('some_path', 'wb')
fake_file = mock_open()
fake_file.seek.assert_called_once_with(vm_utils.MBR_SIZE_BYTES)
self.image_service_func.assert_called_once_with(fake_file)
@mock.patch('os_xenapi.client.session.XenAPISession.call_xenapi')
@mock.patch.object(vm_utils, 'safe_find_sr', return_value='sr_ref')
class VMUtilsSRPath(VMUtilsTestBase):
def setUp(self):
super(VMUtilsSRPath, self).setUp()
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(disable_process_locking=True,
group='oslo_concurrency')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='http://localhost',
connection_password='test_pass',
group='xenserver')
stubs.stubout_session(self, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
self.session = driver._session
self.session.is_local_connection = False
def test_defined(self, mock_safe_find_sr, mock_call_xenapi):
self.session.host_ref = "host_ref"
mock_call_xenapi.return_value = {'pbd_ref': {'device_config':
{'path': 'sr_path'}}}
self.assertEqual('sr_path', vm_utils.get_sr_path(self.session))
mock_safe_find_sr.assert_called_once_with(self.session)
mock_call_xenapi.assert_called_once_with(
'PBD.get_all_records_where',
'field "host"="host_ref" and field "SR"="sr_ref"')
def test_default(self, mock_safe_find_sr, mock_call_xenapi):
self.session.host_ref = "host_ref"
mock_call_xenapi.side_effect = [
{'pbd_ref': {'device_config': {}}},
{'uuid': 'sr_uuid', 'type': 'ext'}]
self.assertEqual('/var/run/sr-mount/sr_uuid',
vm_utils.get_sr_path(self.session))
mock_safe_find_sr.assert_called_once_with(self.session)
mock_call_xenapi.assert_has_calls([
mock.call('PBD.get_all_records_where',
'field "host"="host_ref" and field "SR"="sr_ref"'),
mock.call("SR.get_record", "sr_ref")])
class CreateKernelRamdiskTestCase(VMUtilsTestBase):
def setUp(self):
super(CreateKernelRamdiskTestCase, self).setUp()
self.context = "context"
self.session = FakeSession()
self.instance = {"kernel_id": None, "ramdisk_id": None}
self.name_label = "name"
self.stub_out('os_xenapi.client.session.XenAPISession.call_xenapi',
lambda *a, **k: None)
def test_create_kernel_and_ramdisk_no_create(self):
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual((None, None), result)
@mock.patch.object(uuidutils, 'generate_uuid',
side_effect=['fake_uuid1', 'fake_uuid2'])
@mock.patch.object(os_xenapi.client.disk_management,
'create_kernel_ramdisk')
def test_create_kernel_and_ramdisk_create_both_cached(
self, mock_ramdisk, mock_generate_uuid):
kernel_id = "kernel"
ramdisk_id = "ramdisk"
self.instance["kernel_id"] = kernel_id
self.instance["ramdisk_id"] = ramdisk_id
mock_ramdisk.side_effect = ["k", "r"]
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual(("k", "r"), result)
mock_generate_uuid.assert_has_calls([mock.call(), mock.call()])
@mock.patch.object(uuidutils, 'generate_uuid', return_value='fake_uuid1')
@mock.patch.object(vm_utils, '_fetch_disk_image',
return_value={"kernel": {"file": "k"}})
@mock.patch.object(os_xenapi.client.disk_management,
'create_kernel_ramdisk')
def test_create_kernel_and_ramdisk_create_kernel_not_cached(
self, mock_ramdisk, mock_fetch_disk_image, mock_generate_uuid):
kernel_id = "kernel"
self.instance["kernel_id"] = kernel_id
mock_ramdisk.return_value = ""
result = vm_utils.create_kernel_and_ramdisk(self.context,
self.session, self.instance, self.name_label)
self.assertEqual(("k", None), result)
mock_generate_uuid.assert_called_once_with()
mock_ramdisk.assert_called_once_with(self.session, kernel_id,
'fake_uuid1')
mock_fetch_disk_image.assert_called_once_with(
self.context, self.session, self.instance, self.name_label,
kernel_id, 0)
@mock.patch.object(uuidutils, 'generate_uuid')
@mock.patch.object(vm_utils, '_fetch_disk_image')
def _test_create_kernel_image(self, cache_images, mock_fetch_disk_image,
mock_generate_uuid):
kernel_id = "kernel"
self.instance["kernel_id"] = kernel_id
self.flags(cache_images=cache_images, group='xenserver')
if cache_images == 'all':
mock_generate_uuid.return_value = 'fake_uuid1'
else:
mock_fetch_disk_image.return_value = {
"kernel": {"file": "new_image", "uuid": None}}
result = vm_utils._create_kernel_image(self.context,
self.session,
self.instance,
self.name_label,
kernel_id, 0)
if cache_images == 'all':
self.assertEqual(result, {"kernel":
{"file": "cached_image", "uuid": None}})
mock_generate_uuid.assert_called_once_with()
mock_fetch_disk_image.assert_not_called()
else:
self.assertEqual(result, {"kernel":
{"file": "new_image", "uuid": None}})
mock_fetch_disk_image.assert_called_once_with(
self.context, self.session, self.instance, self.name_label,
kernel_id, 0)
mock_generate_uuid.assert_not_called()
@mock.patch.object(os_xenapi.client.disk_management,
'create_kernel_ramdisk')
def test_create_kernel_image_cached_config(self, mock_ramdisk):
mock_ramdisk.return_value = "cached_image"
self._test_create_kernel_image('all')
mock_ramdisk.assert_called_once_with(self.session, "kernel",
"fake_uuid1")
def test_create_kernel_image_uncached_config(self):
self._test_create_kernel_image('none')
class ScanSrTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, "_scan_sr")
@mock.patch.object(vm_utils, "safe_find_sr")
def test_scan_default_sr(self, mock_safe_find_sr, mock_scan_sr):
mock_safe_find_sr.return_value = "sr_ref"
self.assertEqual("sr_ref", vm_utils.scan_default_sr("fake_session"))
mock_scan_sr.assert_called_once_with("fake_session", "sr_ref")
def test_scan_sr_works(self):
session = mock.Mock()
vm_utils._scan_sr(session, "sr_ref")
session.call_xenapi.assert_called_once_with('SR.scan', "sr_ref")
def test_scan_sr_unknown_error_fails_once(self):
session = mock.Mock()
session.XenAPI.Failure = fake.Failure
session.call_xenapi.side_effect = test.TestingException
self.assertRaises(test.TestingException,
vm_utils._scan_sr, session, "sr_ref")
session.call_xenapi.assert_called_once_with('SR.scan', "sr_ref")
@mock.patch.object(greenthread, 'sleep')
def test_scan_sr_known_error_retries_then_throws(self, mock_sleep):
session = mock.Mock()
class FakeException(Exception):
details = ['SR_BACKEND_FAILURE_40', "", "", ""]
session.XenAPI.Failure = FakeException
session.call_xenapi.side_effect = FakeException
self.assertRaises(FakeException,
vm_utils._scan_sr, session, "sr_ref")
session.call_xenapi.assert_called_with('SR.scan', "sr_ref")
self.assertEqual(4, session.call_xenapi.call_count)
mock_sleep.assert_has_calls([mock.call(2), mock.call(4), mock.call(8)])
@mock.patch.object(greenthread, 'sleep')
def test_scan_sr_known_error_retries_then_succeeds(self, mock_sleep):
session = mock.Mock()
class FakeException(Exception):
details = ['SR_BACKEND_FAILURE_40', "", "", ""]
session.XenAPI.Failure = FakeException
def fake_call_xenapi(*args):
fake_call_xenapi.count += 1
if fake_call_xenapi.count != 2:
raise FakeException()
fake_call_xenapi.count = 0
session.call_xenapi.side_effect = fake_call_xenapi
vm_utils._scan_sr(session, "sr_ref")
session.call_xenapi.assert_called_with('SR.scan', "sr_ref")
self.assertEqual(2, session.call_xenapi.call_count)
mock_sleep.assert_called_once_with(2)
@mock.patch.object(flavors, 'extract_flavor',
return_value={
'memory_mb': 1024,
'vcpus': 1,
'vcpu_weight': 1.0,
})
class CreateVmTestCase(VMUtilsTestBase):
def test_vss_provider(self, mock_extract):
self.flags(vcpu_pin_set="2,3")
session = self.get_fake_session()
instance = objects.Instance(uuid=uuids.nova_uuid,
os_type="windows",
system_metadata={})
with mock.patch.object(instance, 'get_flavor') as get:
get.return_value = objects.Flavor._from_db_object(
None, objects.Flavor(), test_flavor.fake_flavor)
vm_utils.create_vm(session, instance, "label",
"kernel", "ramdisk")
vm_rec = {
'VCPUs_params': {'cap': '0', 'mask': '2,3', 'weight': '1'},
'PV_args': '',
'memory_static_min': '0',
'ha_restart_priority': '',
'HVM_boot_policy': 'BIOS order',
'PV_bootloader': '', 'tags': [],
'VCPUs_max': '4',
'memory_static_max': '1073741824',
'actions_after_shutdown': 'destroy',
'memory_dynamic_max': '1073741824',
'user_version': '0',
'xenstore_data': {'vm-data/allowvssprovider': 'false'},
'blocked_operations': {},
'is_a_template': False,
'name_description': '',
'memory_dynamic_min': '1073741824',
'actions_after_crash': 'destroy',
'memory_target': '1073741824',
'PV_ramdisk': '',
'PV_bootloader_args': '',
'PCI_bus': '',
'other_config': {'nova_uuid': uuids.nova_uuid},
'name_label': 'label',
'actions_after_reboot': 'restart',
'VCPUs_at_startup': '4',
'HVM_boot_params': {'order': 'dc'},
'platform': {'nx': 'true', 'pae': 'true', 'apic': 'true',
'timeoffset': '0', 'viridian': 'true',
'acpi': 'true'},
'PV_legacy_args': '',
'PV_kernel': '',
'affinity': '',
'recommendations': '',
'ha_always_run': False
}
session.call_xenapi.assert_called_once_with("VM.create", vm_rec)
def test_invalid_cpu_mask_raises(self, mock_extract):
self.flags(vcpu_pin_set="asdf")
session = mock.Mock()
instance = objects.Instance(uuid=uuids.fake, system_metadata={})
with mock.patch.object(instance, 'get_flavor') as get:
get.return_value = objects.Flavor._from_db_object(
None, objects.Flavor(), test_flavor.fake_flavor)
self.assertRaises(exception.Invalid,
vm_utils.create_vm,
session, instance, "label",
"kernel", "ramdisk")
def test_destroy_vm(self, mock_extract):
session = mock.Mock()
instance = objects.Instance(uuid=uuids.fake)
vm_utils.destroy_vm(session, instance, "vm_ref")
session.VM.destroy.assert_called_once_with("vm_ref")
def test_destroy_vm_silently_fails(self, mock_extract):
session = mock.Mock()
exc = test.TestingException()
session.XenAPI.Failure = test.TestingException
session.VM.destroy.side_effect = exc
instance = objects.Instance(uuid=uuids.fake)
vm_utils.destroy_vm(session, instance, "vm_ref")
session.VM.destroy.assert_called_once_with("vm_ref")
class DetermineVmModeTestCase(VMUtilsTestBase):
def _fake_object(self, updates):
return fake_instance.fake_instance_obj(None, **updates)
def test_determine_vm_mode_returns_xen_mode(self):
instance = self._fake_object({"vm_mode": "xen"})
self.assertEqual(obj_fields.VMMode.XEN,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_mode(self):
instance = self._fake_object({"vm_mode": "hvm"})
self.assertEqual(obj_fields.VMMode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_xen_for_linux(self):
instance = self._fake_object({"vm_mode": None, "os_type": "linux"})
self.assertEqual(obj_fields.VMMode.XEN,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_for_windows(self):
instance = self._fake_object({"vm_mode": None, "os_type": "windows"})
self.assertEqual(obj_fields.VMMode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_hvm_by_default(self):
instance = self._fake_object({"vm_mode": None, "os_type": None})
self.assertEqual(obj_fields.VMMode.HVM,
vm_utils.determine_vm_mode(instance, None))
def test_determine_vm_mode_returns_xen_for_VHD(self):
instance = self._fake_object({"vm_mode": None, "os_type": None})
self.assertEqual(obj_fields.VMMode.XEN,
vm_utils.determine_vm_mode(instance, vm_utils.ImageType.DISK_VHD))
def test_determine_vm_mode_returns_xen_for_DISK(self):
instance = self._fake_object({"vm_mode": None, "os_type": None})
self.assertEqual(obj_fields.VMMode.XEN,
vm_utils.determine_vm_mode(instance, vm_utils.ImageType.DISK))
class CallXenAPIHelpersTestCase(VMUtilsTestBase):
def test_vm_get_vbd_refs(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vm_get_vbd_refs(session, "vm_ref"))
session.call_xenapi.assert_called_once_with("VM.get_VBDs", "vm_ref")
def test_vbd_get_rec(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vbd_get_rec(session, "vbd_ref"))
session.call_xenapi.assert_called_once_with("VBD.get_record",
"vbd_ref")
def test_vdi_get_rec(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vdi_get_rec(session, "vdi_ref"))
session.call_xenapi.assert_called_once_with("VDI.get_record",
"vdi_ref")
def test_vdi_snapshot(self):
session = mock.Mock()
session.call_xenapi.return_value = "foo"
self.assertEqual("foo", vm_utils._vdi_snapshot(session, "vdi_ref"))
session.call_xenapi.assert_called_once_with("VDI.snapshot",
"vdi_ref", {})
def test_vdi_get_virtual_size(self):
session = mock.Mock()
session.call_xenapi.return_value = "123"
self.assertEqual(123, vm_utils._vdi_get_virtual_size(session, "ref"))
session.call_xenapi.assert_called_once_with("VDI.get_virtual_size",
"ref")
@mock.patch.object(vm_utils, '_get_resize_func_name')
def test_vdi_resize(self, mock_get_resize_func_name):
session = mock.Mock()
mock_get_resize_func_name.return_value = "VDI.fake"
vm_utils._vdi_resize(session, "ref", 123)
session.call_xenapi.assert_called_once_with("VDI.fake", "ref", "123")
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_works(self, mock_get_size, mock_resize):
mock_get_size.return_value = (1024 ** 3) - 1
instance = {"uuid": "a"}
vm_utils.update_vdi_virtual_size("s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
mock_resize.assert_called_once_with("s", "ref", 1024 ** 3)
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_skips_resize_down(self, mock_get_size,
mock_resize):
mock_get_size.return_value = 1024 ** 3
instance = {"uuid": "a"}
vm_utils.update_vdi_virtual_size("s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, '_vdi_resize')
@mock.patch.object(vm_utils, '_vdi_get_virtual_size')
def test_update_vdi_virtual_size_raise_if_disk_big(self, mock_get_size,
mock_resize):
mock_get_size.return_value = 1024 ** 3 + 1
instance = {"uuid": "a"}
self.assertRaises(exception.ResizeError,
vm_utils.update_vdi_virtual_size,
"s", instance, "ref", 1)
mock_get_size.assert_called_once_with("s", "ref")
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, '_vdi_get_rec')
@mock.patch.object(vm_utils, '_vbd_get_rec')
@mock.patch.object(vm_utils, '_vm_get_vbd_refs')
class GetVdiForVMTestCase(VMUtilsTestBase):
def test_get_vdi_for_vm_safely(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_rec):
session = "session"
vm_get_vbd_refs.return_value = ["a", "b"]
vbd_get_rec.return_value = {'userdevice': '0', 'VDI': 'vdi_ref'}
vdi_get_rec.return_value = {}
result = vm_utils.get_vdi_for_vm_safely(session, "vm_ref")
self.assertEqual(('vdi_ref', {}), result)
vm_get_vbd_refs.assert_called_once_with(session, "vm_ref")
vbd_get_rec.assert_called_once_with(session, "a")
vdi_get_rec.assert_called_once_with(session, "vdi_ref")
def test_get_vdi_for_vm_safely_fails(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_rec):
session = "session"
vm_get_vbd_refs.return_value = ["a", "b"]
vbd_get_rec.return_value = {'userdevice': '0', 'VDI': 'vdi_ref'}
self.assertRaises(exception.NovaException,
vm_utils.get_vdi_for_vm_safely,
session, "vm_ref", userdevice='1')
self.assertEqual([], vdi_get_rec.call_args_list)
self.assertEqual(2, len(vbd_get_rec.call_args_list))
@mock.patch.object(vm_utils, '_vdi_get_uuid')
@mock.patch.object(vm_utils, '_vbd_get_rec')
@mock.patch.object(vm_utils, '_vm_get_vbd_refs')
class GetAllVdiForVMTestCase(VMUtilsTestBase):
def _setup_get_all_vdi_uuids_for_vm(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
def fake_vbd_get_rec(session, vbd_ref):
return {'userdevice': vbd_ref, 'VDI': "vdi_ref_%s" % vbd_ref}
def fake_vdi_get_uuid(session, vdi_ref):
return vdi_ref
vm_get_vbd_refs.return_value = ["0", "2"]
vbd_get_rec.side_effect = fake_vbd_get_rec
vdi_get_uuid.side_effect = fake_vdi_get_uuid
def test_get_all_vdi_uuids_for_vm_works(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
self._setup_get_all_vdi_uuids_for_vm(vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid)
result = vm_utils.get_all_vdi_uuids_for_vm('session', "vm_ref")
expected = ['vdi_ref_0', 'vdi_ref_2']
self.assertEqual(expected, list(result))
def test_get_all_vdi_uuids_for_vm_finds_none(self, vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid):
self._setup_get_all_vdi_uuids_for_vm(vm_get_vbd_refs,
vbd_get_rec, vdi_get_uuid)
result = vm_utils.get_all_vdi_uuids_for_vm('session', "vm_ref",
min_userdevice=1)
expected = ["vdi_ref_2"]
self.assertEqual(expected, list(result))
class GetAllVdisTestCase(VMUtilsTestBase):
def test_get_all_vdis_in_sr(self):
def fake_get_rec(record_type, ref):
if ref == "2":
return "vdi_rec_2"
session = mock.Mock()
session.call_xenapi.return_value = ["1", "2"]
session.get_rec.side_effect = fake_get_rec
sr_ref = "sr_ref"
actual = list(vm_utils._get_all_vdis_in_sr(session, sr_ref))
self.assertEqual(actual, [('2', 'vdi_rec_2')])
session.call_xenapi.assert_called_once_with("SR.get_VDIs", sr_ref)
class SnapshotAttachedHereTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, '_snapshot_attached_here_impl')
def test_snapshot_attached_here(self, mock_impl):
def fake_impl(session, instance, vm_ref, label, userdevice,
post_snapshot_callback):
self.assertEqual("session", session)
self.assertEqual("instance", instance)
self.assertEqual("vm_ref", vm_ref)
self.assertEqual("label", label)
self.assertEqual('0', userdevice)
self.assertIsNone(post_snapshot_callback)
yield "fake"
mock_impl.side_effect = fake_impl
with vm_utils.snapshot_attached_here("session", "instance", "vm_ref",
"label") as result:
self.assertEqual("fake", result)
mock_impl.assert_called_once_with("session", "instance", "vm_ref",
"label", '0', None)
@mock.patch.object(vm_utils, '_delete_snapshots_in_vdi_chain')
@mock.patch.object(vm_utils, 'safe_destroy_vdis')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(vm_utils, '_wait_for_vhd_coalesce')
@mock.patch.object(vm_utils, '_vdi_get_uuid')
@mock.patch.object(vm_utils, '_vdi_snapshot')
@mock.patch.object(vm_utils, 'get_vdi_for_vm_safely')
def test_snapshot_attached_here_impl(self, mock_get_vdi_for_vm_safely,
mock_vdi_snapshot, mock_vdi_get_uuid,
mock_wait_for_vhd_coalesce, mock_walk_vdi_chain,
mock_safe_destroy_vdis, mock_delete_snapshots_in_vdi_chain):
session = "session"
instance = {"uuid": "uuid"}
mock_callback = mock.Mock()
mock_get_vdi_for_vm_safely.return_value = ("vdi_ref",
{"SR": "sr_ref",
"uuid": "vdi_uuid"})
mock_vdi_snapshot.return_value = "snap_ref"
mock_vdi_get_uuid.return_value = "snap_uuid"
mock_walk_vdi_chain.return_value = [{"uuid": "a"}, {"uuid": "b"}]
try:
with vm_utils.snapshot_attached_here(session, instance, "vm_ref",
"label", '2', mock_callback) as result:
self.assertEqual(["a", "b"], result)
raise test.TestingException()
self.assertTrue(False)
except test.TestingException:
pass
mock_get_vdi_for_vm_safely.assert_called_once_with(session, "vm_ref",
'2')
mock_vdi_snapshot.assert_called_once_with(session, "vdi_ref")
mock_wait_for_vhd_coalesce.assert_called_once_with(session, instance,
"sr_ref", "vdi_ref", ['a', 'b'])
mock_vdi_get_uuid.assert_called_once_with(session, "snap_ref")
mock_walk_vdi_chain.assert_has_calls([mock.call(session, "vdi_uuid"),
mock.call(session, "snap_uuid")])
mock_callback.assert_called_once_with(
task_state="image_pending_upload")
mock_safe_destroy_vdis.assert_called_once_with(session, ["snap_ref"])
mock_delete_snapshots_in_vdi_chain.assert_called_once_with(session,
instance, ['a', 'b'], "sr_ref")
@mock.patch.object(greenthread, 'sleep')
def test_wait_for_vhd_coalesce_leaf_node(self, mock_sleep):
instance = {"uuid": "fake"}
vm_utils._wait_for_vhd_coalesce("session", instance,
"sr_ref", "vdi_ref", ["uuid"])
self.assertFalse(mock_sleep.called)
@mock.patch.object(vm_utils, '_count_children')
@mock.patch.object(greenthread, 'sleep')
def test_wait_for_vhd_coalesce_parent_snapshot(self, mock_sleep,
mock_count):
mock_count.return_value = 2
instance = {"uuid": "fake"}
vm_utils._wait_for_vhd_coalesce("session", instance,
"sr_ref", "vdi_ref", ["uuid1", "uuid2"])
self.assertFalse(mock_sleep.called)
self.assertTrue(mock_count.called)
@mock.patch.object(greenthread, 'sleep')
@mock.patch.object(vm_utils, '_get_vhd_parent_uuid')
@mock.patch.object(vm_utils, '_count_children')
@mock.patch.object(vm_utils, '_scan_sr')
def test_wait_for_vhd_coalesce_raises(self, mock_scan_sr,
mock_count, mock_get_vhd_parent_uuid, mock_sleep):
mock_count.return_value = 1
instance = {"uuid": "fake"}
self.assertRaises(exception.NovaException,
vm_utils._wait_for_vhd_coalesce, "session", instance,
"sr_ref", "vdi_ref", ["uuid1", "uuid2"])
self.assertTrue(mock_count.called)
self.assertEqual(20, mock_sleep.call_count)
self.assertEqual(20, mock_scan_sr.call_count)
@mock.patch.object(greenthread, 'sleep')
@mock.patch.object(vm_utils, '_get_vhd_parent_uuid')
@mock.patch.object(vm_utils, '_count_children')
@mock.patch.object(vm_utils, '_scan_sr')
def test_wait_for_vhd_coalesce_success(self, mock_scan_sr,
mock_count, mock_get_vhd_parent_uuid, mock_sleep):
mock_count.return_value = 1
instance = {"uuid": "fake"}
mock_get_vhd_parent_uuid.side_effect = ["bad", "uuid2"]
vm_utils._wait_for_vhd_coalesce("session", instance,
"sr_ref", "vdi_ref", ["uuid1", "uuid2"])
self.assertEqual(1, mock_sleep.call_count)
self.assertEqual(2, mock_scan_sr.call_count)
@mock.patch.object(vm_utils, '_get_all_vdis_in_sr')
def test_count_children(self, mock_get_all_vdis_in_sr):
vdis = [('child1', {'sm_config': {'vhd-parent': 'parent1'}}),
('child2', {'sm_config': {'vhd-parent': 'parent2'}}),
('child3', {'sm_config': {'vhd-parent': 'parent1'}})]
mock_get_all_vdis_in_sr.return_value = vdis
self.assertEqual(2, vm_utils._count_children('session',
'parent1', 'sr'))
class ImportMigratedDisksTestCase(VMUtilsTestBase):
@mock.patch.object(vm_utils, '_import_migrate_ephemeral_disks')
@mock.patch.object(vm_utils, '_import_migrated_root_disk')
def test_import_all_migrated_disks(self, mock_root, mock_ephemeral):
session = "session"
instance = "instance"
mock_root.return_value = "root_vdi"
mock_ephemeral.return_value = ["a", "b"]
result = vm_utils.import_all_migrated_disks(session, instance)
expected = {'root': 'root_vdi', 'ephemerals': ["a", "b"]}
self.assertEqual(expected, result)
mock_root.assert_called_once_with(session, instance)
mock_ephemeral.assert_called_once_with(session, instance)
@mock.patch.object(vm_utils, '_import_migrate_ephemeral_disks')
@mock.patch.object(vm_utils, '_import_migrated_root_disk')
def test_import_all_migrated_disks_import_root_false(self, mock_root,
mock_ephemeral):
session = "session"
instance = "instance"
mock_root.return_value = "root_vdi"
mock_ephemeral.return_value = ["a", "b"]
result = vm_utils.import_all_migrated_disks(session, instance,
import_root=False)
expected = {'root': None, 'ephemerals': ["a", "b"]}
self.assertEqual(expected, result)
self.assertEqual(0, mock_root.call_count)
mock_ephemeral.assert_called_once_with(session, instance)
@mock.patch.object(vm_utils, '_import_migrated_vhds')
def test_import_migrated_root_disk(self, mock_migrate):
mock_migrate.return_value = "foo"
instance = {"uuid": "uuid", "name": "name"}
result = vm_utils._import_migrated_root_disk("s", instance)
self.assertEqual("foo", result)
mock_migrate.assert_called_once_with("s", instance, "uuid", "root",
"name")
@mock.patch.object(vm_utils, '_import_migrated_vhds')
def test_import_migrate_ephemeral_disks(self, mock_migrate):
mock_migrate.return_value = "foo"
instance = objects.Instance(id=1, uuid=uuids.fake)
instance.old_flavor = objects.Flavor(ephemeral_gb=4000)
result = vm_utils._import_migrate_ephemeral_disks("s", instance)
self.assertEqual({'4': 'foo', '5': 'foo'}, result)
inst_uuid = instance.uuid
inst_name = instance.name
expected_calls = [mock.call("s", instance,
"%s_ephemeral_1" % inst_uuid,
"ephemeral",
"%s ephemeral (1)" % inst_name),
mock.call("s", instance,
"%s_ephemeral_2" % inst_uuid,
"ephemeral",
"%s ephemeral (2)" % inst_name)]
self.assertEqual(expected_calls, mock_migrate.call_args_list)
@mock.patch.object(vm_utils, 'get_ephemeral_disk_sizes')
def test_import_migrate_ephemeral_disks_use_old_flavor(self,
mock_get_sizes):
mock_get_sizes.return_value = []
instance = objects.Instance(id=1, uuid=uuids.fake, ephemeral_gb=2000)
instance.old_flavor = objects.Flavor(ephemeral_gb=4000)
vm_utils._import_migrate_ephemeral_disks("s", instance)
mock_get_sizes.assert_called_once_with(4000)
@mock.patch.object(os_xenapi.client.vm_management, 'receive_vhd')
@mock.patch.object(vm_utils, '_set_vdi_info')
@mock.patch.object(vm_utils, 'scan_default_sr')
@mock.patch.object(vm_utils, 'get_sr_path')
def test_import_migrated_vhds(self, mock_get_sr_path, mock_scan_sr,
mock_set_info, mock_recv_vhd):
session = mock.Mock()
instance = {"uuid": "uuid"}
mock_recv_vhd.return_value = {"root": {"uuid": "a"}}
session.call_xenapi.return_value = "vdi_ref"
mock_get_sr_path.return_value = "sr_path"
result = vm_utils._import_migrated_vhds(session, instance,
'chain_label', 'disk_type', 'vdi_label')
expected = {'uuid': "a", 'ref': "vdi_ref"}
self.assertEqual(expected, result)
mock_get_sr_path.assert_called_once_with(session)
mock_recv_vhd.assert_called_once_with(session, 'chain_label',
'sr_path', mock.ANY)
mock_scan_sr.assert_called_once_with(session)
session.call_xenapi.assert_called_once_with('VDI.get_by_uuid', 'a')
mock_set_info.assert_called_once_with(session, 'vdi_ref', 'disk_type',
'vdi_label', 'disk_type', instance)
def test_get_vhd_parent_uuid_rec_provided(self):
session = mock.Mock()
vdi_ref = 'vdi_ref'
vdi_rec = {'sm_config': {}}
self.assertIsNone(vm_utils._get_vhd_parent_uuid(session,
vdi_ref,
vdi_rec))
self.assertFalse(session.call_xenapi.called)
class MigrateVHDTestCase(VMUtilsTestBase):
def _assert_transfer_called(self, session, label):
session.call_plugin_serialized.assert_called_once_with(
'migration.py', 'transfer_vhd', instance_uuid=label, host="dest",
vdi_uuid="vdi_uuid", sr_path="sr_path", seq_num=2)
@mock.patch.object(os_xenapi.client.vm_management, 'transfer_vhd')
def test_migrate_vhd_root(self, mock_trans_vhd):
session = mock.Mock()
instance = {"uuid": "a"}
vm_utils.migrate_vhd(session, instance, "vdi_uuid", "dest",
"sr_path", 2)
mock_trans_vhd.assert_called_once_with(session, "a",
"dest", "vdi_uuid", "sr_path",
2)
@mock.patch.object(os_xenapi.client.vm_management, 'transfer_vhd')
def test_migrate_vhd_ephemeral(self, mock_trans_vhd):
session = mock.Mock()
instance = {"uuid": "a"}
vm_utils.migrate_vhd(session, instance, "vdi_uuid", "dest",
"sr_path", 2, 2)
mock_trans_vhd.assert_called_once_with(session, "a_ephemeral_2",
"dest", "vdi_uuid", "sr_path",
2)
@mock.patch.object(os_xenapi.client.vm_management, 'transfer_vhd')
def test_migrate_vhd_converts_exceptions(self, mock_trans_vhd):
session = mock.Mock()
session.XenAPI.Failure = test.TestingException
mock_trans_vhd.side_effect = test.TestingException()
instance = {"uuid": "a"}
self.assertRaises(exception.MigrationError, vm_utils.migrate_vhd,
session, instance, "vdi_uuid", "dest", "sr_path", 2)
mock_trans_vhd.assert_called_once_with(session, "a",
"dest", "vdi_uuid", "sr_path",
2)
class StripBaseMirrorTestCase(VMUtilsTestBase):
def test_strip_base_mirror_from_vdi_works(self):
session = mock.Mock()
vm_utils._try_strip_base_mirror_from_vdi(session, "vdi_ref")
session.call_xenapi.assert_called_once_with(
"VDI.remove_from_sm_config", "vdi_ref", "base_mirror")
def test_strip_base_mirror_from_vdi_hides_error(self):
session = mock.Mock()
session.XenAPI.Failure = test.TestingException
session.call_xenapi.side_effect = test.TestingException()
vm_utils._try_strip_base_mirror_from_vdi(session, "vdi_ref")
session.call_xenapi.assert_called_once_with(
"VDI.remove_from_sm_config", "vdi_ref", "base_mirror")
@mock.patch.object(vm_utils, '_try_strip_base_mirror_from_vdi')
def test_strip_base_mirror_from_vdis(self, mock_strip):
def call_xenapi(method, arg):
if method == "VM.get_VBDs":
return ['VBD_ref_1', 'VBD_ref_2']
if method == "VBD.get_VDI":
return 'VDI' + arg[3:]
return "Unexpected call_xenapi: %s.%s" % (method, arg)
session = mock.Mock()
session.call_xenapi.side_effect = call_xenapi
vm_utils.strip_base_mirror_from_vdis(session, "vm_ref")
expected = [mock.call('VM.get_VBDs', "vm_ref"),
mock.call('VBD.get_VDI', "VBD_ref_1"),
mock.call('VBD.get_VDI', "VBD_ref_2")]
self.assertEqual(expected, session.call_xenapi.call_args_list)
expected = [mock.call(session, "VDI_ref_1"),
mock.call(session, "VDI_ref_2")]
self.assertEqual(expected, mock_strip.call_args_list)
class DeviceIdTestCase(VMUtilsTestBase):
def test_device_id_is_none_if_not_specified_in_meta_data(self):
image_meta = objects.ImageMeta.from_dict({})
session = mock.Mock()
session.product_version = (6, 1, 0)
self.assertIsNone(vm_utils.get_vm_device_id(session, image_meta))
def test_get_device_id_if_hypervisor_version_is_greater_than_6_1(self):
image_meta = objects.ImageMeta.from_dict(
{'properties': {'xenapi_device_id': '0002'}})
session = mock.Mock()
session.product_version = (6, 2, 0)
self.assertEqual(2,
vm_utils.get_vm_device_id(session, image_meta))
session.product_version = (6, 3, 1)
self.assertEqual(2,
vm_utils.get_vm_device_id(session, image_meta))
def test_raise_exception_if_device_id_not_supported_by_hyp_version(self):
image_meta = objects.ImageMeta.from_dict(
{'properties': {'xenapi_device_id': '0002'}})
session = mock.Mock()
session.product_version = (6, 0)
exc = self.assertRaises(exception.NovaException,
vm_utils.get_vm_device_id, session, image_meta)
self.assertEqual("Device id 2 specified is not supported by "
"hypervisor version (6, 0)", exc.message)
session.product_version = ('6a')
exc = self.assertRaises(exception.NovaException,
vm_utils.get_vm_device_id, session, image_meta)
self.assertEqual("Device id 2 specified is not supported by "
"hypervisor version 6a", exc.message)
class CreateVmRecordTestCase(VMUtilsTestBase):
@mock.patch.object(flavors, 'extract_flavor')
def test_create_vm_record_linux(self, mock_extract_flavor):
instance = objects.Instance(uuid=uuids.nova_uuid,
os_type="linux")
self._test_create_vm_record(mock_extract_flavor, instance, False)
@mock.patch.object(flavors, 'extract_flavor')
def test_create_vm_record_windows(self, mock_extract_flavor):
instance = objects.Instance(uuid=uuids.nova_uuid,
os_type="windows")
with mock.patch.object(instance, 'get_flavor') as get:
get.return_value = objects.Flavor._from_db_object(
None, objects.Flavor(), test_flavor.fake_flavor)
self._test_create_vm_record(mock_extract_flavor, instance, True)
def _test_create_vm_record(self, mock_extract_flavor, instance,
is_viridian):
session = self.get_fake_session()
flavor = {"memory_mb": 1024, "vcpus": 1, "vcpu_weight": 2}
mock_extract_flavor.return_value = flavor
with mock.patch.object(instance, 'get_flavor') as get:
get.return_value = objects.Flavor(memory_mb=1024,
vcpus=1,
vcpu_weight=2)
vm_utils.create_vm(session, instance, "name", "kernel", "ramdisk",
device_id=2)
is_viridian_str = str(is_viridian).lower()
expected_vm_rec = {
'VCPUs_params': {'cap': '0', 'weight': '2'},
'PV_args': '',
'memory_static_min': '0',
'ha_restart_priority': '',
'HVM_boot_policy': 'BIOS order',
'PV_bootloader': '',
'tags': [],
'VCPUs_max': '1',
'memory_static_max': '1073741824',
'actions_after_shutdown': 'destroy',
'memory_dynamic_max': '1073741824',
'user_version': '0',
'xenstore_data': {'vm-data/allowvssprovider': 'false'},
'blocked_operations': {},
'is_a_template': False,
'name_description': '',
'memory_dynamic_min': '1073741824',
'actions_after_crash': 'destroy',
'memory_target': '1073741824',
'PV_ramdisk': '',
'PV_bootloader_args': '',
'PCI_bus': '',
'other_config': {'nova_uuid': uuids.nova_uuid},
'name_label': 'name',
'actions_after_reboot': 'restart',
'VCPUs_at_startup': '1',
'HVM_boot_params': {'order': 'dc'},
'platform': {'nx': 'true', 'pae': 'true', 'apic': 'true',
'timeoffset': '0', 'viridian': is_viridian_str,
'acpi': 'true', 'device_id': '0002'},
'PV_legacy_args': '',
'PV_kernel': '',
'affinity': '',
'recommendations': '',
'ha_always_run': False}
session.call_xenapi.assert_called_with('VM.create', expected_vm_rec)
def test_list_vms(self):
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(disable_process_locking=True,
group='oslo_concurrency')
self.flags(instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
self.flags(connection_url='http://localhost',
connection_password='test_pass',
group='xenserver')
fake.create_vm("foo1", "Halted")
vm_ref = fake.create_vm("foo2", "Running")
stubs.stubout_session(self, fake.SessionBase)
driver = xenapi_conn.XenAPIDriver(False)
result = list(vm_utils.list_vms(driver._session))
# Will have 3 VMs - but one is Dom0 and one is not running on the host
self.assertEqual(len(driver._session.call_xenapi('VM.get_all')), 3)
self.assertEqual(len(result), 1)
result_keys = [key for (key, value) in result]
self.assertIn(vm_ref, result_keys)
class ChildVHDsTestCase(test.NoDBTestCase):
all_vdis = [
("my-vdi-ref",
{"uuid": "my-uuid", "sm_config": {},
"is_a_snapshot": False, "other_config": {}}),
("non-parent",
{"uuid": "uuid-1", "sm_config": {},
"is_a_snapshot": False, "other_config": {}}),
("diff-parent",
{"uuid": "uuid-1", "sm_config": {"vhd-parent": "other-uuid"},
"is_a_snapshot": False, "other_config": {}}),
("child",
{"uuid": "uuid-child", "sm_config": {"vhd-parent": "my-uuid"},
"is_a_snapshot": False, "other_config": {}}),
("child-snap",
{"uuid": "uuid-child-snap", "sm_config": {"vhd-parent": "my-uuid"},
"is_a_snapshot": True, "other_config": {}}),
]
@mock.patch.object(vm_utils, '_get_all_vdis_in_sr')
def test_child_vhds_defaults(self, mock_get_all):
mock_get_all.return_value = self.all_vdis
result = vm_utils._child_vhds("session", "sr_ref", ["my-uuid"])
self.assertJsonEqual(['uuid-child', 'uuid-child-snap'], result)
@mock.patch.object(vm_utils, '_get_all_vdis_in_sr')
def test_child_vhds_only_snapshots(self, mock_get_all):
mock_get_all.return_value = self.all_vdis
result = vm_utils._child_vhds("session", "sr_ref", ["my-uuid"],
old_snapshots_only=True)
self.assertEqual(['uuid-child-snap'], result)
@mock.patch.object(vm_utils, '_get_all_vdis_in_sr')
def test_child_vhds_chain(self, mock_get_all):
mock_get_all.return_value = self.all_vdis
result = vm_utils._child_vhds("session", "sr_ref",
["my-uuid", "other-uuid"], old_snapshots_only=True)
self.assertEqual(['uuid-child-snap'], result)
def test_is_vdi_a_snapshot_works(self):
vdi_rec = {"is_a_snapshot": True,
"other_config": {}}
self.assertTrue(vm_utils._is_vdi_a_snapshot(vdi_rec))
def test_is_vdi_a_snapshot_base_images_false(self):
vdi_rec = {"is_a_snapshot": True,
"other_config": {"image-id": "fake"}}
self.assertFalse(vm_utils._is_vdi_a_snapshot(vdi_rec))
def test_is_vdi_a_snapshot_false_for_non_snapshot(self):
vdi_rec = {"is_a_snapshot": False,
"other_config": {}}
self.assertFalse(vm_utils._is_vdi_a_snapshot(vdi_rec))
class RemoveOldSnapshotsTestCase(test.NoDBTestCase):
@mock.patch.object(vm_utils, 'get_vdi_for_vm_safely')
@mock.patch.object(vm_utils, '_walk_vdi_chain')
@mock.patch.object(vm_utils, '_delete_snapshots_in_vdi_chain')
def test_remove_old_snapshots(self, mock_delete, mock_walk, mock_get):
instance = {"uuid": "fake"}
mock_get.return_value = ("ref", {"uuid": "vdi", "SR": "sr_ref"})
mock_walk.return_value = [{"uuid": "uuid1"}, {"uuid": "uuid2"}]
vm_utils.remove_old_snapshots("session", instance, "vm_ref")
mock_delete.assert_called_once_with("session", instance,
["uuid1", "uuid2"], "sr_ref")
mock_get.assert_called_once_with("session", "vm_ref")
mock_walk.assert_called_once_with("session", "vdi")
@mock.patch.object(vm_utils, '_child_vhds')
def test_delete_snapshots_in_vdi_chain_no_chain(self, mock_child):
instance = {"uuid": "fake"}
vm_utils._delete_snapshots_in_vdi_chain("session", instance,
["uuid"], "sr")
self.assertFalse(mock_child.called)
@mock.patch.object(vm_utils, '_child_vhds')
def test_delete_snapshots_in_vdi_chain_no_snapshots(self, mock_child):
instance = {"uuid": "fake"}
mock_child.return_value = []
vm_utils._delete_snapshots_in_vdi_chain("session", instance,
["uuid1", "uuid2"], "sr")
mock_child.assert_called_once_with("session", "sr", ["uuid2"],
old_snapshots_only=True)
@mock.patch.object(vm_utils, '_scan_sr')
@mock.patch.object(vm_utils, 'safe_destroy_vdis')
@mock.patch.object(vm_utils, '_child_vhds')
def test_delete_snapshots_in_vdi_chain_calls_destroy(self, mock_child,
mock_destroy, mock_scan):
instance = {"uuid": "fake"}
mock_child.return_value = ["suuid1", "suuid2"]
session = mock.Mock()
session.VDI.get_by_uuid.side_effect = ["ref1", "ref2"]
vm_utils._delete_snapshots_in_vdi_chain(session, instance,
["uuid1", "uuid2"], "sr")
mock_child.assert_called_once_with(session, "sr", ["uuid2"],
old_snapshots_only=True)
session.VDI.get_by_uuid.assert_has_calls([
mock.call("suuid1"), mock.call("suuid2")])
mock_destroy.assert_called_once_with(session, ["ref1", "ref2"])
mock_scan.assert_called_once_with(session, "sr")
class ResizeFunctionTestCase(test.NoDBTestCase):
def _call_get_resize_func_name(self, brand, version):
session = mock.Mock()
session.product_brand = brand
session.product_version = version
return vm_utils._get_resize_func_name(session)
def _test_is_resize(self, brand, version):
result = self._call_get_resize_func_name(brand, version)
self.assertEqual("VDI.resize", result)
def _test_is_resize_online(self, brand, version):
result = self._call_get_resize_func_name(brand, version)
self.assertEqual("VDI.resize_online", result)
def test_xenserver_5_5(self):
self._test_is_resize_online("XenServer", (5, 5, 0))
def test_xenserver_6_0(self):
self._test_is_resize("XenServer", (6, 0, 0))
def test_xcp_1_1(self):
self._test_is_resize_online("XCP", (1, 1, 0))
def test_xcp_1_2(self):
self._test_is_resize("XCP", (1, 2, 0))
def test_xcp_2_0(self):
self._test_is_resize("XCP", (2, 0, 0))
def test_random_brand(self):
self._test_is_resize("asfd", (1, 1, 0))
def test_default(self):
self._test_is_resize(None, None)
def test_empty(self):
self._test_is_resize("", "")
class VMInfoTests(VMUtilsTestBase):
def setUp(self):
super(VMInfoTests, self).setUp()
self.session = mock.Mock()
def test_get_power_state_valid(self):
# Save on test setup calls by having these simple tests in one method
self.session.call_xenapi.return_value = "Running"
self.assertEqual(vm_utils.get_power_state(self.session, "ref"),
power_state.RUNNING)
self.session.call_xenapi.return_value = "Halted"
self.assertEqual(vm_utils.get_power_state(self.session, "ref"),
power_state.SHUTDOWN)
self.session.call_xenapi.return_value = "Paused"
self.assertEqual(vm_utils.get_power_state(self.session, "ref"),
power_state.PAUSED)
self.session.call_xenapi.return_value = "Suspended"
self.assertEqual(vm_utils.get_power_state(self.session, "ref"),
power_state.SUSPENDED)
self.session.call_xenapi.return_value = "Crashed"
self.assertEqual(vm_utils.get_power_state(self.session, "ref"),
power_state.CRASHED)
def test_get_power_state_invalid(self):
self.session.call_xenapi.return_value = "Invalid"
self.assertRaises(KeyError,
vm_utils.get_power_state, self.session, "ref")
_XAPI_record = {'power_state': 'Running',
'memory_static_max': str(10 << 10),
'memory_dynamic_max': str(9 << 10),
'VCPUs_max': '5'}
def test_compile_info(self):
def call_xenapi(method, *args):
if method.startswith('VM.get_') and args[0] == 'dummy':
return self._XAPI_record[method[7:]]
self.session.call_xenapi.side_effect = call_xenapi
info = vm_utils.compile_info(self.session, "dummy")
self.assertEqual(hardware.InstanceInfo(state=power_state.RUNNING),
info)
| apache-2.0 | 6,270,026,338,861,540,000 | 42.446384 | 79 | 0.574169 | false |
bewareoftheapp/fluxapp | user/migrations/0001_initial.py | 1 | 1219 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-09-18 23:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import user.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Department',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='RegistrationToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(default=user.models._generate_token, max_length=40)),
('email', models.EmailField(max_length=254)),
],
),
]
| mit | -2,177,746,098,601,376,000 | 32.861111 | 121 | 0.607055 | false |
tkchafin/mrbait | tests/benchmark_MIS.py | 1 | 5580 | #!/usr/bin/python
import networkx as nx
import time
import networkx.algorithms.approximation as nxaa
import matplotlib.pyplot as plt
import numpy as np
from networkx.utils import powerlaw_sequence
"""Code for ATTEMPTING to approximate the maximal independent set in a graph
of conflicting sequences (e.g. aligned > threshold in pairwise alignment).
Unfortunately, this problem is NP-hard and can't be done efficiently... """
"""Conclusions: My naive version seems to be faster somehow."""
#TODO: Test that nodes retained in NAIVE definitely don't have any edges!!!
"""RESULTS: Mine is slightly faster and finds more nodes at smaller network sizes.
But the approximate algorithm scales better.
Approximate, Nodes=10, 100 reps
Average number of nodes: 5.07
24 ms
Approximate, Nodes=100, 100 reps
Average number of nodes: 56.09
228 ms
Approximate, Nodes=1000, 100 reps
Average number of nodes: 632.4
3529 ms
Approximate, Nodes=10000, 100 reps
Average number of nodes: 6828.18
95647 ms
------
Naive, Nodes=10, 100 reps
Average number of nodes: 5.62
Average number of edges: 0.0
40 ms
Naive, Nodes=100, 100 reps
Average number of nodes: 62.5
Average number of edges: 0.0
344 ms
Naive, Nodes=1000, 100 reps
Average number of nodes: 676.74
Average number of edges: 0.0
4313 ms
Approximate, Nodes=10000, 100 reps
Average number of nodes: 6796.16
93200 ms
"""
def time_me(method):
def wrapper(*args, **kw):
startTime = int(round(time.time() * 1000))
result = method(*args, **kw)
endTime = int(round(time.time() * 1000))
print(endTime - startTime,'ms')
return result
return wrapper
def multiGraphFromList(data):
G = nx.MultiGraph()
G.add_edges_from(data)
return(G)
@time_me
#Function to use the built-in independent set function in networkx
def approximateIndependentSet(nodes, num):
array = np.empty(num)
for i in range(num):
z = nx.utils.create_degree_sequence(nodes,powerlaw_sequence)
G = nx.configuration_model(z)
graph=nx.Graph(G)
graph.remove_edges_from(graph.selfloop_edges())
new = nx.maximal_independent_set(graph, nodes=None)
array[i] = len(new)
avg = np.average(array)
print("Average number of nodes: ",avg)
@time_me
#Function to use VERY SLOW version I made
def naiveIndependentSet(nodes, num):
array = np.empty(num)
edges = np.empty(num)
for i in range(num):
z = nx.utils.create_degree_sequence(nodes,powerlaw_sequence)
G = nx.configuration_model(z)
G=nx.Graph(G)
G.remove_edges_from(G.selfloop_edges())
#Make a copy of graph
C = G.copy()
#Loop through ALL edges
for n in G.edges_iter():
#If either node has been trimmed from Copy, skip.
if C.has_edge(n[0],n[1]):
right = n[1]
left = n[0]
right_n = len(C.neighbors(right))
left_n = len(C.neighbors(left))
#print("Right neighbor <",right,"> has ", right_n, " connections.")
#print("Left neighbor <",left,"> has ", left_n, " connections.")
#Remove right if it has more neighbors, otherwise remove left
if (right_n > left_n):
C.remove_node(right)
else:
C.remove_node(left)
array[i] = C.number_of_nodes()
edges[i] = C.number_of_edges()
avg = np.average(array)
eavg = np.average(edges)
print("Average number of nodes: ",avg)
print("Average number of edges: ",eavg)
#Function to plot a complete graph, coloring a list of 'chosen' or 'excluded' (subset) nodes
def plotColorNodes(G, listnodes):
color_map = []
for node in G:
if node in listnodes:
color_map.append("red")
else:
color_map.append("black")
nx.draw(G, node_color = color_map, with_labels=True)
plt.show()
#Tests of functions
example_10 = [(1,2),(2,4),(1,3),(1,7),(3,2),(1,4),(5,6),(6,8),(3,7),(4,8),(9,10)]
example_100 = [(19,29),(28,48),(17,36),(16,72),(33,2),(1,47),(55,66),(62,87),(53,57),(64,68),(9,100),
(11,22),(24,46),(11,32),(89,78),(31,24),(19,45),(54,6),(16,88),(3,7),(4,88),(95,43),
(11,28),(27,4),(1,38),(13,7),(3,2),(1,48),(49,57),(61,8),(98,79),(81,80),(97,100),
(12,29),(26,4),(1,37),(1,71),(39,2),(1,47),(50,58),(36,8),(63,78),(24,82),(96,100),
(13,30),(25,4),(78,36),(12,7),(40,2),(1,46),(56,59),(61,99),(3,77),(4,83),(95,11),
(14,12),(24,4),(1,35),(14,15),(3,2),(1,42),(55,60),(6,100),(3,76),(4,84),(92,94),
(15,2),(23,4),(2,31),(1,71),(3,2),(1,43),(51,6),(63,64),(70,7),(4,85),(90,93),
(16,23),(21,34),(14,32),(12,7),(12,13),(1,41),(52,61),(62,8),(71,72),(4,86),(91,10),
(17,21),(22,64),(27,33),(14,7),(83,72),(1,45),(53,69),(65,8),(74,73),(4,87),(89,10),
(18,22),(20,4),(59,34),(1,45),(91,75),(19,44),(54,67),(66,68),(31,75),(45,18),(90,10)
]
G10 = multiGraphFromList(example_10)
G100 = multiGraphFromList(example_100)
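#Illustrative usage of the added helper above (kept commented out, like the
#benchmark calls below); converting to a simple Graph mirrors approximateIndependentSet
# simple_G10 = nx.Graph(G10)
# print(is_independent_set(simple_G10, nx.maximal_independent_set(simple_G10)))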
# print("Approximate, Nodes=10, 100 reps")
# approximateIndependentSet(10,100)
# print("Approximate, Nodes=100, 100 reps")
# approximateIndependentSet(100,100)
# print("Approximate, Nodes=1000, 100 reps")
# approximateIndependentSet(1000,100)
# print("Approximate, Nodes=10000, 100 reps")
# approximateIndependentSet(10000,100)
# print("\n------\n")
#
# print("Naive, Nodes=10, 100 reps")
# naiveIndependentSet(10,100)
# print("Naive, Nodes=100, 100 reps")
# naiveIndependentSet(100,100)
# print("Naive, Nodes=1000, 100 reps")
# naiveIndependentSet(1000,100)
# print("Approximate, Nodes=10000, 100 reps")
# approximateIndependentSet(10000,100)
| gpl-3.0 | 7,596,444,725,467,537,000 | 31.823529 | 101 | 0.62957 | false |
skapfer/rubber | src/depend.py | 1 | 11127 | """
This module contains code for handling dependency graphs.
"""
# vim: noet:ts=4
import logging
msg = logging.getLogger (__name__)
import os.path
import subprocess
import rubber.contents
from rubber.util import _
class MakeError (Exception):
def __init__ (self, msg, errors):
super (MakeError, self).__init__ (msg)
self.msg = msg
self.errors = errors
# Dictionary allowing a Node to be found by one of its products.
# It should not be used outside this module.
_producer = {}
def clean_all_products ():
"""Clean all products of all recipes."""
for path in _producer:
if os.path.exists (path):
msg.info (_("removing %s"), path)
os.remove (path)
def save_cache (cache_path, final):
msg.debug (_('Creating or overwriting cache file %s') % cache_path)
with open (cache_path, 'tw') as f:
for node in final.all_producers ():
if node.snapshots is not None:
f.write (node.primary_product ())
f.write ('\n')
for i in range (len (node.sources)):
f.write (' ')
f.write (rubber.contents.cs2str (node.snapshots [i]))
f.write (' ')
f.write (node.sources [i])
f.write ('\n')
def load_cache (cache_path):
msg.debug (_('Reading external cache file %s') % cache_path)
with open (cache_path) as f:
line = f.readline ()
while line:
product = line [:-1]
sources = []
snapshots = []
while True:
line = f.readline ()
if not line.startswith (' '): # Including end of file.
break
limit = 2 + rubber.contents.cs_str_len
snapshots.append (rubber.contents.str2cs (line [2:limit]))
sources.append (line [limit + 1:-1])
try:
node = _producer [product]
except KeyError:
msg.debug (_('%s: no such recipe anymore') % product)
else:
if node.sources != sources:
msg.debug (_('%s: depends on %s not anymore on %s'), product,
" ".join (node.sources), " ".join (sources))
elif node.snapshots is not None:
# FIXME: this should not happen. See cweb-latex test.
msg.debug (_('%s: rebuilt before cache read'), product)
else:
msg.debug (_('%s: using cached checksums'), product)
node.snapshots = snapshots
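# Layout sketch of the cache file written by save_cache () and parsed by
# load_cache () above (names and checksums are purely illustrative, checksums
# abbreviated): one line per product, followed by two-space indented lines
# holding the checksum and name of each source.
#
#	report.pdf
#	  3f2a... report.tex
#	  91c0... figure1.pdf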
class Node (object):
"""
This is the base class to represent dependency nodes. It provides the base
functionality of date checking and recursive making, supposing the
existence of a method `run()' in the object.
"""
def __init__ (self):
"""
		Initialize an empty node: no product, no sources and no snapshots yet.
		Products and sources are registered later via add_product () and
		add_source (); sources without a producer simply remain leaves of the graph.
"""
self.product = None
# All prerequisites for this recipe.
self.sources = []
# A snapshot of each source as they were used during last
# successful build, or None if no build has been attempted
# yet. The order in the list is the one in self.sources,
# which does not change during build.
self.snapshots = None
# making is the lock guarding against making a node while making it
self.making = False
def all_producers (self):
def rec (node):
if not node.making:
node.making = True
try:
yield node
for source in node.sources:
try:
child = _producer [source]
except KeyError:
pass
else:
yield from rec (child)
finally:
					node.making = False
yield from rec (self)
def all_leaves (self):
"""Show sources that are not produced."""
# We need to build a set in order to remove duplicates.
result = set ()
def rec (node):
if not node.making:
node.making = True
try:
for source in node.sources:
if source in _producer:
rec (_producer [source])
else:
result.add (source)
finally:
					node.making = False
rec (self)
return result
def add_source (self, name):
"""
Register a new source for this node. If the source is unknown, a leaf
node is made for it.
"""
# Do nothing when the name is already listed.
# The same source may be inserted many times in the same
# document (an image containing a logo for example).
if name not in self.sources:
self.sources.append (name)
def remove_source (self, name):
"""
Remove a source for this node.
"""
# Fail if the name is not listed.
self.sources.remove (name)
def products (self):
"""An iterable with all all products for this recipe.
This function is not efficient, but called only once by
cmdline.py with a specific command-line option."""
return (key for key, value in _producer.items () if value is self)
def add_product (self, name):
"""
Register a new product for this node.
"""
# TODO: why does this break? assert name not in _producer, name
_producer [name] = self
if self.product is None:
self.product = name
def primary_product (self):
return self.product
def replace_product (self, name):
"""Trick for latex.py"""
# TODO: why does this break? assert name not in _producer, name
del _producer [self.product]
self.product = name
_producer [name] = self
def make (self):
"""
Make the destination file. This recursively makes all dependencies,
then compiles the target if dependencies were modified. The return
value is
- False when nothing had to be done
- True when something was recompiled (among all dependencies)
MakeError is raised in case of error.
"""
# The recurrence is similar to all_producers, except that we
# try each compilations a few times.
pp = self.primary_product ()
if self.making:
msg.debug (_("%s: cyclic dependency, pruning"), pp)
return False
rv = False
self.making = True
try:
for patience in range (5):
msg.debug (_('%s: made from %s attempt %i'),
self.product, ','.join (self.sources),
patience)
# make our sources
for source in self.sources:
try:
dep = _producer [source]
except KeyError:
msg.debug (_("%s: needs %s, leaf"), pp, source)
else:
msg.debug (_("%s: needs %s, making %s"), pp, source,
dep.primary_product ())
rv = dep.make () or rv
# Once all dependent recipes have been run, check the
# state of the sources on disk.
snapshots = tuple (map (rubber.contents.snapshot, self.sources))
missing = ','.join (
self.sources [i] for i in range (len (snapshots))
if snapshots [i] == rubber.contents.NO_SUCH_FILE)
if missing:
if isinstance (self, rubber.converters.latex.LaTeXDep) \
and self.snapshots is None \
and patience == 0:
msg.debug (_("%s: missing %s, but first LaTeX run"), pp, missing)
else:
msg.debug (_("%s: missing %s, pruning"), pp, missing)
return rv
if self.snapshots is None:
msg.debug (_("%s: first attempt or --force, building"), pp)
else:
# There has already been a successful build.
changed = ','.join (
self.sources [i] for i in range (len (snapshots))
if self.snapshots [i] != snapshots [i])
if not changed:
msg.debug (_("%s: sources unchanged since last build"), pp)
return rv
msg.debug (_("%s: some sources changed: %s"), pp, changed)
if not self.run ():
raise MakeError (_("Recipe for {} failed").format (pp),
self.get_errors ())
# Build was successful.
self.snapshots = snapshots
rv = True
# Patience exhausted.
raise MakeError (_("Contents of {} do not settle").format (pp),
self.get_errors ())
finally:
self.making = False
def run (self):
"""
This method is called when a node has to be (re)built. It is supposed
to rebuild the files of this node, returning true on success and false
on failure. It must be redefined by derived classes.
"""
return False
def get_errors (self):
"""
Report the errors that caused the failure of the last call to run, as
an iterable object.
"""
return []
def clean (self):
"""
Remove additional files for this recipe.
Nothing recursive happens here.
Files registered as products are removed by rubber.clean ().
"""
class Shell (Node):
"""
This class specializes Node for generating files using shell commands.
"""
def __init__ (self, command):
super ().__init__ ()
self.command = command
self.stdout = None
def run (self):
msg.info(_("running: %s") % ' '.join(self.command))
process = subprocess.Popen (self.command,
stdin=subprocess.DEVNULL,
stdout=self.stdout)
if process.wait() != 0:
msg.error(_("execution of %s failed") % self.command[0])
return False
return True
class Pipe (Shell):
"""
This class specializes Node for generating files using the stdout of shell commands.
The 'product' will receive the stdout of 'command'.
"""
def __init__ (self, command, product):
super ().__init__ (command)
self.add_product (product)
def run (self):
with open (self.primary_product (), 'bw') as self.stdout:
ret = super (Pipe, self).run ()
return ret
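# Illustrative use of the classes above (command and file name are made up):
# a Pipe recipe captures the stdout of a shell command into its product, and
# make () decides whether the command needs to be rerun at all.
#
#	listing = Pipe (['ls', '-l'], 'listing.txt')
#	listing.make ()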
| gpl-2.0 | 1,698,966,926,278,784,000 | 35.126623 | 89 | 0.512717 | false |
bavardage/qtile | libqtile/widget/base.py | 1 | 5948 | from .. import command, utils, bar
LEFT = object()
CENTER = object()
class _Drawer:
"""
A helper class for drawing and text layout.
"""
_fallbackFont = "-*-fixed-bold-r-normal-*-15-*-*-*-c-*-*-*"
def __init__(self, qtile, window):
self.qtile, self.window = qtile, window
self.win = window.window
self.gc = self.win.create_gc()
self.colormap = qtile.display.screen().default_colormap
self.background, self.foreground = None, None
@utils.LRUCache(100)
def color(self, color):
return self.colormap.alloc_named_color(color).pixel
def setFont(self, font):
f = self.qtile.display.open_font(font)
if not f:
self.qtile.log.add("Could not open font %s, falling back."%font)
f = self.qtile.display.open_font(self._fallbackFont)
self.font = f
self.gc.change(font=f)
@utils.LRUCache(100)
def text_extents(self, font, i):
return font.query_text_extents(i)
def textsize(self, font, *text):
"""
Return a textheight, textwidth tuple, for a box large enough to
enclose any of the passed strings.
"""
textheight, textwidth = 0, 0
for i in text:
data = self.text_extents(font, i)
if data.font_ascent > textheight:
textheight = data.font_ascent
if data.overall_width > textwidth:
textwidth = data.overall_width
return textheight, textwidth
def change(self, **kwargs):
newargs = kwargs.copy()
newargs.pop("background", None)
newargs.pop("foreground", None)
if kwargs.has_key("background") and self.background != kwargs["background"]:
self.background = kwargs["background"]
newargs["background"] = self.color(kwargs["background"])
if kwargs.has_key("foreground") and self.background != kwargs["foreground"]:
self.background = kwargs["foreground"]
newargs["foreground"] = self.color(kwargs["foreground"])
if newargs:
self.gc.change(**newargs)
def textbox(self, text, x, y, width, height, padding = 0,
alignment=LEFT, background=None, **attrs):
"""
Draw text in the specified box using the current font. Text is
        centered vertically, and left-aligned or centered horizontally depending on ``alignment``.
:background Fill box with the specified color first.
:padding Padding to the left of the text.
"""
text = text or " "
if background:
self.rectangle(x, y, width, height, background)
attrs["background"] = background
if attrs:
self.change(**attrs)
textheight, textwidth = self.textsize(self.font, text)
y = y + textheight + (height - textheight)/2
if alignment == LEFT:
x = x + padding
else:
x = x + (width - textwidth)/2
self.win.draw_text(self.gc, x, y, text)
def rectangle(self, x, y, width, height, fillColor=None, borderColor=None, borderWidth=1):
if fillColor:
self.change(foreground=fillColor)
self.win.fill_rectangle(self.gc, x, 0, width, height)
if borderColor:
self.change(
foreground=borderColor,
line_width=borderWidth
)
self.win.rectangle(self.gc, x, 0, width, height)
class _Widget(command.CommandObject):
"""
Each widget must set its own width attribute when the _configure method
is called. If this is set to the special value bar.STRETCH, the bar itself
will set the width to the maximum remaining space, after all other
widgets have been configured. Only ONE widget per bar can have the
bar.STRETCH width set.
The offset attribute is set by the Bar after all widgets have been
configured.
"""
font = "-*-luxi mono-*-r-*-*-12-*-*-*-*-*-*-*"
width = None
offset = None
name = None
@property
def win(self):
return self.bar.window.window
@property
def colormap(self):
return self.qtile.display.screen().default_colormap
def _configure(self, qtile, bar, event, theme):
self.qtile, self.bar, self.event, self.theme = qtile, bar, event, theme
self._drawer = _Drawer(qtile, self.bar.window)
self._drawer.setFont(self.font)
def clear(self):
self._drawer.rectangle(
self.offset, 0, self.width, self.bar.size,
self.bar.background
)
def info(self):
return dict(
name = self.__class__.__name__,
offset = self.offset,
width = self.width,
)
def click(self, x, y):
pass
def get(self, q, name):
"""
Utility function for quick retrieval of a widget by name.
"""
w = q.widgetMap.get(name)
if not w:
raise command.CommandError("No such widget: %s"%name)
return w
def _items(self, name):
if name == "bar":
return True, None
def _select(self, name, sel):
if name == "bar":
return self.bar
def cmd_info(self):
"""
Info for this object.
"""
return dict(name=self.name)
class _TextBox(_Widget):
PADDING = 5
def __init__(self, text=" ", width=bar.STRETCH):
self.width = width
self.text = text
def _configure(self, qtile, bar, event, theme):
_Widget._configure(self, qtile, bar, event, theme)
if theme.font:
self.font = theme.font
def draw(self):
self._drawer.textbox(
self.text,
self.offset, 0, self.width, self.bar.size,
padding = self.PADDING,
foreground=self.theme.fg_normal,
background=self.theme.bg_normal,
)
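# Illustrative sketch only (not part of this module): a minimal concrete widget
# built on _TextBox, following the width/offset contract documented in _Widget.
# The fixed width of 100 pixels is an arbitrary example value.
#
#   class ClockBox(_TextBox):
#       def __init__(self):
#           _TextBox.__init__(self, "00:00", width=100)
#       def update(self, text):
#           self.text = text
#           self.clear()
#           self.draw()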
| mit | 11,768,831,008,628,596 | 31.502732 | 94 | 0.566745 | false |
TribeMedia/synapse | synapse/rest/client/versions.py | 2 | 1082 | # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.http.servlet import RestServlet
import logging
import re
logger = logging.getLogger(__name__)
class VersionsRestServlet(RestServlet):
PATTERNS = [re.compile("^/_matrix/client/versions$")]
def on_GET(self, request):
return (200, {
"versions": [
"r0.0.1",
"r0.1.0",
"r0.2.0",
]
})
def register_servlets(http_server):
VersionsRestServlet().register(http_server)
| apache-2.0 | -1,936,070,100,155,265,500 | 27.473684 | 74 | 0.669131 | false |
elopio/snapcraft | snapcraft/internal/lxd/_cleanbuilder.py | 1 | 2770 | #!/usr/bin/python3
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import petname
import subprocess
from ._containerbuild import Containerbuild
from snapcraft.internal.errors import ContainerConnectionError
logger = logging.getLogger(__name__)
class Cleanbuilder(Containerbuild):
def __init__(self, *, output=None, source, project_options,
metadata=None, remote=None):
container_name = petname.Generate(3, '-')
super().__init__(output=output, source=source,
project_options=project_options, metadata=metadata,
container_name=container_name, remote=remote)
def _ensure_container(self):
try:
subprocess.check_call([
'lxc', 'launch', '-e', self._image, self._container_name])
except subprocess.CalledProcessError as e:
raise ContainerConnectionError('Failed to setup container')
self._configure_container()
self._wait_for_network()
self._container_run(['apt-get', 'update'])
# Because of https://bugs.launchpad.net/snappy/+bug/1628289
# Needed to run snapcraft as a snap and build-snaps
self._container_run(['apt-get', 'install', 'squashfuse', '-y'])
self._inject_snapcraft(new_container=True)
def _setup_project(self):
logger.info('Setting up container with project assets')
tar_filename = self._source
# os.sep needs to be `/` and on Windows it will be set to `\`
dst = '{}/{}'.format(self._project_folder,
os.path.basename(tar_filename))
self._container_run(['mkdir', self._project_folder])
self._push_file(tar_filename, dst)
self._container_run(['tar', 'xvf', os.path.basename(tar_filename)],
cwd=self._project_folder)
def _finish(self):
# os.sep needs to be `/` and on Windows it will be set to `\`
src = '{}/{}'.format(self._project_folder, self._snap_output)
self._pull_file(src, self._snap_output)
logger.info('Retrieved {}'.format(self._snap_output))
| gpl-3.0 | 4,744,793,001,829,084,000 | 40.343284 | 76 | 0.646209 | false |
i-wind/pyqt_todo | db/model.py | 1 | 2729 | #!/usr/bin/env python
# -*- coding: utf8 -*-
"""
@script : model.py
@created : 2012-11-04 01:48:15.090
@changed : 2012-11-08 10:26:47.237
@creator : mkpy.py --version 0.0.27
@author : Igor A.Vetrov <[email protected]>
@about : model of TODO application
"""
from __future__ import print_function
from argparse import ArgumentParser
from .sqlite import Table, Field
__revision__ = 11
__project__ = "Todo"
def getRevision():
"""Callback method for -r/--revision option"""
return str(__revision__)
class Priority(Table):
"""Priority model class"""
_fields = [
( "code" , Field(fieldtype="integer", notnull=True, primary=True) ),
( "name" , Field(notnull=True) ),
( "created", Field(fieldtype="timestamp", default="(datetime('now', 'localtime'))") ),
]
def __init__(self, db):
self.__class__._tableName = __project__ + self.__class__.__name__
super(Priority, self).__init__(db)
def setDefaults(self):
self.exec( "insert into {} (code, name) values(?, ?)".format(self._tableName), (1, "Low") )
self.exec( "insert into {} (code, name) values(?, ?)".format(self._tableName), (2, "Medium") )
self.exec( "insert into {} (code, name) values(?, ?)".format(self._tableName), (3, "High") )
self.db.commit()
def getCode(self, name):
row = self.select( "select code from {} where name=?;".format(self._tableName), (name,) )[0]
return row["code"]
def getName(self, _id):
return self.getValue(_id, "name")[0]
def listNames(self):
rows = self.select( "select name from {};".format(self._tableName) )
return [row["name"] for row in rows]
class Task(Table):
"""Task model class"""
_fields = [
( "name" , Field(notnull=True) ),
( "priority" , Field(fieldtype="integer", default=2, foreignkey="TodoPriority(code)") ),
( "deadline" , Field(fieldtype="date", notnull=True, default="(date('now', 'localtime'))") ),
# status may be 0 or 1, if 1 - task completed
( "status" , Field(fieldtype="integer", default=0, index=True) ),
( "completed", Field(fieldtype="timestamp") ),
( "created" , Field(fieldtype="timestamp", default="(datetime('now', 'localtime'))") ),
]
def __init__(self, db):
self.__class__._tableName = __project__ + self.__class__.__name__
super(Task, self).__init__(db)
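def _exampleUsage(db):
    """Illustrative only, not used by the application: wire the model classes
    above to an open database handle accepted by the Table base class."""
    priorities = Priority(db)
    priorities.setDefaults()
    tasks = Task(db)
    return priorities.getCode("High"), tasks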
if __name__ == '__main__':
# setup global parser
parser = ArgumentParser(description='Program description goes here...')
parser.add_argument('-r', '--revision', action='version', version='%(prog)s revision: ' + getRevision())
args = parser.parse_args()
# end of model.py
| mit | -2,385,009,913,102,353,000 | 30.011364 | 108 | 0.584463 | false |
UltrosBot/Ultros3K | tests/storage/config/test_ini.py | 1 | 3888 | # coding=utf-8
import os
import secrets
import shutil
import tempfile
from nose.tools import assert_equal, assert_true, assert_raises
from unittest import TestCase
from ultros.core.storage.config.ini import INIConfig
from ultros.core.storage.manager import StorageManager
__author__ = "Gareth Coles"
class TestINI(TestCase):
def setUp(self):
self.directory = os.path.join(tempfile.gettempdir(), secrets.token_urlsafe(10))
if not os.path.exists(self.directory):
os.mkdir(self.directory)
self.config_dir = os.path.join(self.directory, "config")
self.data_dir = os.path.join(self.directory, "data")
if not os.path.exists(self.config_dir):
os.mkdir(self.config_dir)
if not os.path.exists(self.data_dir):
os.mkdir(self.data_dir)
current_dir = os.path.dirname(__file__)
tests_dir = os.path.join(current_dir, "../../")
shutil.copy(os.path.join(tests_dir, "files/test.ini"), os.path.join(self.config_dir, "test.ini"))
self.manager = StorageManager(
ultros=None,
config_location=self.config_dir,
data_location=self.data_dir
)
def tearDown(self):
self.manager.shutdown()
del self.manager
if os.path.exists(self.directory):
shutil.rmtree(self.directory)
def test_ini(self):
"""
INI config testing
"""
def _config_object() -> INIConfig:
return self.manager.get_config(
"test.ini", None
)
config_obj = _config_object()
sections = config_obj.sections()
options = config_obj.options("Test")
assert_equal(
sections,
["Test"],
"Expected a single Test section, got {}".format(sections)
)
assert_equal(
options,
["test", "herp", "int", "float", "boolean", "other_boolean"],
"Expected the following options: ['test', 'herp', 'int', 'float', 'boolean', 'other_boolean'], got {}".format(options)
)
assert_equal(
config_obj.items("Test"),
[
("test", "test"),
("herp", "derp"),
("int", "1"),
("float", "1.1"),
("boolean", "true"),
("other_boolean", "false"),
]
)
assert_equal(
dict(config_obj.items("Test")),
{
"test": "test",
"herp": "derp",
"int": "1",
"float": "1.1",
"boolean": "true",
"other_boolean": "false"
}
)
assert_equal(
config_obj.get("Test", "test"),
"test"
)
assert_equal(
config_obj.get_int("Test", "int"),
1
)
assert_equal(
config_obj.get_float("Test", "float"),
1.1
)
assert_equal(
config_obj.get_boolean("Test", "boolean"),
True
)
assert_equal(
config_obj.get_boolean("Test", "other_boolean"),
False
)
with assert_raises(ValueError):
config_obj.get_int("Test", "herp")
with assert_raises(ValueError):
config_obj.get_float("Test", "herp")
with assert_raises(ValueError):
config_obj.get_boolean("Test", "herp")
assert_equal(
config_obj["Test":"herp"],
"derp"
)
assert_equal(
config_obj["Test"]["herp"],
"derp"
)
assert_true(
"Test" in config_obj
)
assert_equal(
len(config_obj),
2 # Default section is always counted whether it exists or not
)
| artistic-2.0 | 5,767,956,039,330,450,000 | 24.083871 | 130 | 0.495628 | false |
vandenheuvel/tribler | Tribler/community/market/core/ttl.py | 1 | 1473 | class Ttl(object):
"""
    The time to live is used for keeping track of how many nodes have relayed this message.
    The number of relaying nodes should be kept low to prevent flooding of the overlay network.
Two was chosen because it provides the best balance between flooding the network and still
reaching enough nodes to find good trades
"""
DEFAULT = 2
def __init__(self, ttl):
"""
:param ttl: Integer representation of a time to live
:type ttl: int
:raises ValueError: Thrown when one of the arguments are invalid
"""
super(Ttl, self).__init__()
if not isinstance(ttl, int):
raise ValueError("Time to live must be an int")
if ttl < 0:
raise ValueError("Time to live must be greater than zero")
self._ttl = ttl
@classmethod
def default(cls):
"""
Create a time to live with the default value
:return: The ttl
:rtype: Ttl
"""
return cls(cls.DEFAULT)
def is_alive(self):
"""
        Check if the ttl is still high enough to be sent on
:return: True if it is alive, False otherwise
:rtype: bool
"""
return self._ttl > 0
def make_hop(self):
"""
Makes a hop by reducing the ttl by 1, to simulate the message being relayed through a node
"""
self._ttl -= 1
def __int__(self):
return self._ttl
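def _example_relay_hop(ttl):
    """
    Illustrative helper, not part of the original module: decrement a Ttl and
    report whether the message may still be forwarded to another node.
    :param ttl: the Ttl attached to the relayed message
    :type ttl: Ttl
    :rtype: bool
    """
    ttl.make_hop()
    return ttl.is_alive()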
| lgpl-3.0 | -1,377,557,892,165,886,200 | 26.792453 | 98 | 0.583842 | false |
trevor/calendarserver | txdav/caldav/datastore/scheduling/test/test_utils.py | 1 | 7711 | ##
# Copyright (c) 2013-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Tests for txdav.caldav.datastore.utils
"""
from pycalendar.datetime import DateTime
from twisted.internet.defer import inlineCallbacks
from twisted.trial import unittest
from txdav.caldav.datastore.scheduling.utils import getCalendarObjectForRecord, \
extractEmailDomain, uidFromCalendarUserAddress
from txdav.common.datastore.test.util import populateCalendarsFrom, CommonCommonTests
now = DateTime.getToday().getYear()
ORGANIZER_ICS = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.1//EN
CALSCALE:GREGORIAN
BEGIN:VEVENT
CREATED:20100303T181216Z
UID:685BC3A1-195A-49B3-926D-388DDACA78A6
TRANSP:OPAQUE
SUMMARY:Ancient event
DTSTART:%(year)s0307T111500Z
DURATION:PT1H
DTSTAMP:20100303T181220Z
ORGANIZER:urn:uuid:user01
ATTENDEE;PARTSTAT=ACCEPTED:urn:uuid:user01
ATTENDEE;PARTSTAT=ACCEPTED:urn:uuid:user02
SEQUENCE:2
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % {"year": now + 1}
ATTENDEE_ICS = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Apple Inc.//iCal 4.0.1//EN
CALSCALE:GREGORIAN
BEGIN:VEVENT
CREATED:20100303T181216Z
UID:685BC3A1-195A-49B3-926D-388DDACA78A6
TRANSP:OPAQUE
SUMMARY:Ancient event
DTSTART:%(year)s0307T111500Z
DURATION:PT1H
DTSTAMP:20100303T181220Z
ORGANIZER:urn:uuid:user01
ATTENDEE;PARTSTAT=ACCEPTED:urn:uuid:user01
ATTENDEE;PARTSTAT=ACCEPTED:urn:uuid:user02
SEQUENCE:2
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % {"year": now + 1}
class RecipientCopy(CommonCommonTests, unittest.TestCase):
"""
Tests for deleting events older than a given date
"""
metadata = {
"accessMode": "PUBLIC",
"isScheduleObject": True,
"scheduleTag": "abc",
"scheduleEtags": (),
"hasPrivateComment": False,
}
requirements = {
"user01" : {
"calendar1" : {
"1.ics" : (ORGANIZER_ICS, metadata,),
}
},
"user02" : {
"calendar2" : {
"2.ics" : (ATTENDEE_ICS, metadata,),
},
"calendar3" : {
"3.ics" : (ATTENDEE_ICS, metadata,),
}
}
}
@inlineCallbacks
def setUp(self):
yield super(RecipientCopy, self).setUp()
yield self.buildStoreAndDirectory()
yield self.populate()
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements, self.storeUnderTest())
self.notifierFactory.reset()
def storeUnderTest(self):
"""
Create and return a L{CalendarStore} for testing.
"""
return self._sqlCalendarStore
@inlineCallbacks
def test_getCalendarObjectForRecord(self):
"""
Test that L{txdav.caldav.datastore.scheduling.utils.getCalendarObjectForRecord} detects and removes
resources with duplicate UIDs in the same calendar home.
"""
# Check that expected resources are present
txn = self.transactionUnderTest()
for home_uid, calendar_name, resource_name in (
("user01", "calendar1", "1.ics",),
("user02", "calendar2", "2.ics",),
("user02", "calendar3", "3.ics",),
):
resource = (yield self.calendarObjectUnderTest(txn, name=resource_name, calendar_name=calendar_name, home=home_uid))
self.assertNotEqual(resource, None)
yield self.commit()
# Look up resource by UID in home where only one exists
principal = yield self.directory.recordWithUID(u"user01")
txn = self.transactionUnderTest()
resource = (yield getCalendarObjectForRecord(txn, principal, "685BC3A1-195A-49B3-926D-388DDACA78A6"))
self.assertEqual(resource.name(), "1.ics")
self.assertEqual(resource._parentCollection.name(), "calendar1")
self.assertEqual(resource._parentCollection.viewerHome().uid(), "user01")
yield self.commit()
# Check that expected resources are still present
txn = self.transactionUnderTest()
for home_uid, calendar_name, resource_name in (
("user01", "calendar1", "1.ics",),
("user02", "calendar2", "2.ics",),
("user02", "calendar3", "3.ics",),
):
resource = (yield self.calendarObjectUnderTest(txn, name=resource_name, calendar_name=calendar_name, home=home_uid))
self.assertNotEqual(resource, None)
yield self.commit()
# Look up resource by UID in home where two exists
principal = yield self.directory.recordWithUID("user02")
txn = self.transactionUnderTest()
resource = (yield getCalendarObjectForRecord(txn, principal, "685BC3A1-195A-49B3-926D-388DDACA78A6"))
self.assertTrue(resource.name() in ("2.ics", "3.ics",))
self.assertTrue(resource._parentCollection.name() in ("calendar2", "calendar3",))
self.assertEqual(resource._parentCollection.viewerHome().uid(), "user02")
yield self.commit()
# Check that expected resources are still present, but the duplicate missing
txn = self.transactionUnderTest()
resource = (yield self.calendarObjectUnderTest(txn, name="1.ics", calendar_name="calendar1", home="user01"))
self.assertNotEqual(resource, None)
resource2 = (yield self.calendarObjectUnderTest(txn, name="2.ics", calendar_name="calendar2", home="user02"))
resource3 = (yield self.calendarObjectUnderTest(txn, name="3.ics", calendar_name="calendar3", home="user02"))
self.assertTrue((resource2 is not None) ^ (resource3 is not None))
yield self.commit()
# Look up resource where principal exists but home does not
principal = yield self.directory.recordWithUID("user102") # ASKCYRUS: but user102 doesn't exist
txn = self.transactionUnderTest()
resource = (yield getCalendarObjectForRecord(txn, principal, "685BC3A1-195A-49B3-926D-388DDACA78A6"))
self.assertTrue(resource is None)
yield self.commit()
def test_uidFromCalendarUserAddress(self):
"""
Test that L{uidFromCalendarUserAddress} returns the expected results.
"""
data = (
("urn:x-uid:foobar", "foobar"),
("urn:uuid:foobar", "foobar"),
("urn:uuid:49DE7436-F01C-4AD8-B685-A94303F40301", "49DE7436-F01C-4AD8-B685-A94303F40301"),
("/principals/__uids__/foobar", "foobar"),
("/principals/users/foobar", None),
("/principals/groups/foobar", None),
("mailto:[email protected]", None),
)
for cuaddr, uid in data:
self.assertEqual(uidFromCalendarUserAddress(cuaddr), uid)
def test_extractEmailDomain(self):
"""
Test that L{extractEmailDomain} returns the expected results.
"""
data = (
("mailto:[email protected]", "example.com"),
("mailto:[email protected]?subject=bar", "example.com"),
("mailto:foo", ""),
("mailto:foo@", ""),
("http://foobar.com", ""),
)
for mailto, domain in data:
self.assertEqual(extractEmailDomain(mailto), domain)
| apache-2.0 | 683,524,874,323,820,200 | 33.734234 | 128 | 0.652315 | false |
n-west/gnuradio-utils | json_plotter.py | 1 | 6162 | #!/usr/bin/python
import json
import re
import copy
import argparse
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
def read_results(input_file, kernels_regexp_list):
'''
Read JSON output of volk_profile returning dict of results with filename
    being the top level keys. Each value is a dict keyed by kernel name, where
    each entry contains the results of that specific kernel
'''
res = {}
for file_name in input_file:
# read every input file
with open(file_name, 'r') as f:
# read all results in this file
tmp_res = json.load(f)['volk_tests']
tmp_res_keep = {}
for kernel in tmp_res:
# filter through kernels we care about
for regexp in kernels_regexp_list:
if re.search(regexp, kernel['name']):
tmp_res_keep[kernel['name']] = kernel
res[file_name] = tmp_res_keep
return res
def normalize_a_file(results, test_case, protokernel_normalizer, file_normalizer):
'''
    Normalize each protokernel time in test_case by the time of protokernel_normalizer taken from file_normalizer.
'''
normalized_results = {}
for kernel in results[test_case].keys():
#normalized_results[kernel] = {'results':{}}
normalized_results[kernel] = copy.deepcopy(results[test_case][kernel])
for protokernel in results[test_case][kernel]['results'].keys():
this_val = results[test_case][kernel]['results'][protokernel]['time']
try:
normalize_val = results[file_normalizer][kernel]['results'][protokernel_normalizer]['time']
except KeyError as e:
print "kernel {0} has no '{1}' key to normalize by in {2}".format(kernel, protokernel_normalizer, test_case)
raise e
normalized_results[kernel]['results'][protokernel]['time'] = this_val / normalize_val
normalized_results[kernel]['results'][protokernel]['units'] = 'speedup'
return normalized_results
def normalize_results(results, protokernel_normalizer, file_normalizer):
'''
Normalize all results by given normalizers. There's no error
checking, so this will likely fail hard if given bogus names
'''
normalized_results = {}
if protokernel_normalizer is not None and file_normalizer is not None:
# normalize all kernels by the named protokernel in the given file
# e.g. usage: comparing speedup of different compiler flags on same hw
for test_case in results.keys():
normalized_results[test_case] = normalize_a_file(results, test_case, protokernel_normalizer, file_normalizer)
elif protokernel_normalizer is not None:
        # normalize all kernels in each file by the named protokernel. If
        # multiple files have been given then each file is normalized
        # independently
# e.g. usage: aggregating results of different hw
for test_case in results.keys():
normalized_results[test_case] = normalize_a_file(results, test_case, protokernel_normalizer, test_case)
elif file_normalizer is not None:
# normalize all protokernels by the same protokernels of the named file
# e.g. usage: very similar to first case
# todo...
pass
else:
# do nothing
normalized_results = results
return normalized_results
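# Example invocations covering the normalization modes handled above
# (illustrative; the .json file names and the 'generic' protokernel name are
# placeholders for real volk_profile output):
# ./json_plotter.py --file results.json
# ./json_plotter.py --file a.json b.json --protokernel_normalizer generic
# ./json_plotter.py --file a.json b.json --protokernel_normalizer generic --file_normalizer a.json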
def plot_results(results):
'''
Actually plot the results now
'''
# Set up the figure window
# we create a subgraph for each kernel, so we need to count nkernels first
# and create a gridspec of nkernels x 1
fig = plt.figure(figsize=[6,10])
width = 0.12
colors = ['orange', 'red', 'blue', 'green', 'purple', 'yellow', 'black', ]
sub_graphs = set()
for test_case in results.keys():
for kernel in results[test_case].keys():
sub_graphs.add(kernel)
nkernels = len(sub_graphs)
axarray = gridspec.GridSpec(nkernels, 1, hspace=1.25)
# Next, for each kernel create lists of rectangles and labels
ii = 0
ax0 = plt.subplot(axarray[ii])
for kernel in sub_graphs:
protokernel_times = []
protokernel_names = []
for test_case in results.keys():
protokernels = results[test_case][kernel]['results'].keys()
for pk in protokernels:
protokernel_times.append(results[test_case][kernel]['results'][pk]['time'])
protokernel_names.append(test_case + ":" + pk)
num_protokernels = protokernel_names.__len__()
# create the subplot for this kernel
ax = plt.subplot(axarray[ii], sharex=ax0)
plt.sca(ax) # hilight the new axis
# actually plot the data
rects = ax.barh( np.arange(num_protokernels)/20.0, protokernel_times, height=0.05, color=colors)
# write the labels on the y-axis
plt.yticks((np.arange(num_protokernels)+0.25)/20.0, protokernel_names, rotation=0)
plt.title(kernel)
plt.ylim([0, num_protokernels/20.0])
ii += 1
ax0.set_xlim(0, 1.1)
plt.show()
print rects
def parse_options():
parser = argparse.ArgumentParser(description="Plot the results of volk profiling")
parser.add_argument('--kernels', dest='kernels', type=str, nargs='+', default=['.*'],
help='The VOLK kernels to plot using a space seperated list of regexps (--list-kernels for a list of options)')
parser.add_argument("--file", dest="infile", type=str, nargs="+",
help="The input file(s) to process")
parser.add_argument("--protokernel_normalizer", dest="protokernel_normalizer", type=str,
help="The protokernel name to normalize by (usually generic)")
parser.add_argument("--file_normalizer", dest="file_normalizer", type=str,
help="The file name to normalize by (usually generic)")
return parser.parse_args()
if __name__ == '__main__':
options = parse_options()
res = read_results(options.infile, options.kernels)
res = normalize_results(res, options.protokernel_normalizer, options.file_normalizer)
print res
plot_results(res)
| gpl-2.0 | -3,680,296,431,761,844,000 | 41.205479 | 135 | 0.632262 | false |
synctree/synctree-awsebcli | ebcli/lib/aws.py | 1 | 10403 | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import time
import random
import warnings
import os
import botocore
import botocore.session
import botocore.exceptions
from botocore.loaders import Loader
from cement.utils.misc import minimal_logger
from ebcli import __version__
from ..objects.exceptions import ServiceError, NotAuthorizedError, \
CredentialsError, NoRegionError, ValidationError, \
InvalidProfileError, ConnectionError, AlreadyExistsError, NotFoundError
from .utils import static_var
from .botopatch import apply_patches
LOG = minimal_logger(__name__)
BOTOCORE_DATA_FOLDER_NAME = 'botocoredata'
_api_clients = {}
_profile = None
_profile_env_var = 'AWS_EB_PROFILE'
_id = None
_key = None
_region_name = None
_verify_ssl = True
_endpoint_url = None
_debug = False
apply_patches()
def _flush():
# Should be used for resetting tests only
global _api_clients, _profile, _id, _key, _region_name, _verify_ssl
_api_clients = {}
_get_botocore_session.botocore_session = None
_profile = None
_id = None
_key = None
_region_name = None
_verify_ssl = True
def set_session_creds(id, key):
global _api_clients, _id, _key
_id = id
_key = key
# invalidate all old clients
_api_clients = {}
def set_profile(profile):
global _profile, _api_clients
_profile = profile
# Invalidate session and old clients
_get_botocore_session.botocore_session = None
_api_clients = {}
def set_region(region_name):
global _region_name
_region_name = region_name
# Invalidate session and old clients
_get_botocore_session.botocore_session = None
_api_clients = {}
def set_endpoint_url(endpoint_url):
global _endpoint_url
_endpoint_url = endpoint_url
def no_verify_ssl():
global _verify_ssl
_verify_ssl = False
def set_profile_override(profile):
global _profile_env_var
set_profile(profile)
_profile_env_var = None
def set_debug():
global _debug
_debug = True
def _set_user_agent_for_session(session):
session.user_agent_name = 'eb-cli'
session.user_agent_version = __version__
def _get_data_loader():
# Creates a botocore data loader that loads custom data files
# FIRST, creating a precedence for custom files.
data_folder = os.path.join(os.path.dirname(os.path.realpath(__file__)),
BOTOCORE_DATA_FOLDER_NAME)
return Loader(extra_search_paths=[data_folder, Loader.BUILTIN_DATA_PATH],
include_default_search_paths=False)
def _get_client(service_name):
aws_access_key_id = _id
aws_secret_key = _key
if service_name in _api_clients:
return _api_clients[service_name]
session = _get_botocore_session()
if service_name == 'elasticbeanstalk':
endpoint_url = _endpoint_url
else:
endpoint_url = None
try:
LOG.debug('Creating new Botocore Client for ' + str(service_name))
client = session.create_client(service_name,
endpoint_url=endpoint_url,
# region_name=_region_name,
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_key,
verify=_verify_ssl)
except botocore.exceptions.ProfileNotFound as e:
raise InvalidProfileError(e)
LOG.debug('Successfully created session for ' + service_name)
_api_clients[service_name] = client
return client
@static_var('botocore_session', None)
def _get_botocore_session():
global _region_name
if _get_botocore_session.botocore_session is None:
LOG.debug('Creating new Botocore Session')
LOG.debug('Botocore version: {0}'.format(botocore.__version__))
session = botocore.session.get_session({
'profile': (None, _profile_env_var, _profile, None),
})
if _region_name or not session.get_config_variable('region'):
session.set_config_variable('region', _region_name)
_region_name = session.get_config_variable('region')
session.register_component('data_loader', _get_data_loader())
_set_user_agent_for_session(session)
_get_botocore_session.botocore_session = session
if _debug:
session.set_debug_logger()
return _get_botocore_session.botocore_session
def get_region_name():
return _region_name
def make_api_call(service_name, operation_name, **operation_options):
try:
client = _get_client(service_name)
except botocore.exceptions.UnknownEndpointError as e:
raise NoRegionError(e)
except botocore.exceptions.PartialCredentialsError as e:
LOG.debug('Credentials incomplete')
raise CredentialsError('Your credentials are not complete. Error: {0}'
.format(e))
except botocore.exceptions.NoRegionError:
raise NoRegionError()
if not _verify_ssl:
warnings.filterwarnings("ignore")
operation = getattr(client, operation_name)
region = _region_name
if not region:
region = 'default'
MAX_ATTEMPTS = 10
attempt = 0
while True:
attempt += 1
if attempt > 1:
LOG.debug('Retrying -- attempt #' + str(attempt))
delay = _get_delay(attempt)
time.sleep(delay)
try:
LOG.debug('Making api call: (' +
service_name + ', ' + operation_name +
') to region: ' + region + ' with args:' + str(operation_options))
response_data = operation(**operation_options)
status = response_data['ResponseMetadata']['HTTPStatusCode']
LOG.debug('API call finished, status = ' + str(status))
if response_data:
LOG.debug('Response: ' + str(response_data))
return response_data
except botocore.exceptions.ClientError as e:
response_data = e.response
LOG.debug('Response: ' + str(response_data))
status = response_data['ResponseMetadata']['HTTPStatusCode']
LOG.debug('API call finished, status = ' + str(status))
try:
message = str(response_data['Error']['Message'])
except KeyError:
message = ""
if status == 400:
# Convert to correct 400 error
error = _get_400_error(response_data, message)
if isinstance(error, ThrottlingError):
LOG.debug('Received throttling error')
if attempt > MAX_ATTEMPTS:
raise MaxRetriesError('Max retries exceeded for '
'throttling error')
else:
raise error
elif status == 403:
LOG.debug('Received a 403')
if not message:
message = 'Are your permissions correct?'
raise NotAuthorizedError('Operation Denied. ' + message)
elif status == 404:
LOG.debug('Received a 404')
raise NotFoundError(message)
elif status == 409:
LOG.debug('Received a 409')
raise AlreadyExistsError(message)
elif status in (500, 503, 504):
LOG.debug('Received 5XX error')
if attempt > MAX_ATTEMPTS:
raise MaxRetriesError('Max retries exceeded for '
'service error (5XX)')
else:
raise ServiceError('API Call unsuccessful. '
'Status code returned ' + str(status))
except botocore.exceptions.NoCredentialsError:
LOG.debug('No credentials found')
raise CredentialsError('Operation Denied. You appear to have no'
' credentials')
except botocore.exceptions.PartialCredentialsError as e:
LOG.debug('Credentials incomplete')
raise CredentialsError(str(e))
except (botocore.exceptions.ValidationError,
botocore.exceptions.ParamValidationError) as e:
raise ValidationError(str(e))
except botocore.exceptions.BotoCoreError as e:
LOG.error('Botocore Error')
raise
except IOError as error:
if hasattr(error.args[0], 'reason') and str(error.args[0].reason) == \
'[Errno -2] Name or service not known':
raise ConnectionError()
LOG.error('Error while contacting Elastic Beanstalk Service')
LOG.debug('error:' + str(error))
raise ServiceError(error)
def _get_delay(attempt_number):
if attempt_number == 1:
return 0
# Exponential backoff
rand_int = random.randrange(0, 2**attempt_number)
delay = rand_int * 0.05 # delay time is 50 ms
LOG.debug('Sleeping for ' + str(delay) + ' seconds.')
return delay
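# Illustrative worst-case sleep ranges produced by _get_delay above
# (random 50 ms slots): attempt 2 -> 0 to 0.15 s, attempt 3 -> 0 to 0.35 s,
# attempt 10 -> 0 to roughly 51 s.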
def _get_400_error(response_data, message):
code = response_data['Error']['Code']
LOG.debug('Received a 400 Error')
if code == 'InvalidParameterValue':
return InvalidParameterValueError(message)
elif code == 'InvalidQueryParameter':
return InvalidQueryParameterError(message)
elif code.startswith('Throttling'):
return ThrottlingError(message)
elif code.startswith('ResourceNotFound'):
return NotFoundError(message)
else:
# Not tracking this error
return ServiceError(message, code=code)
class InvalidParameterValueError(ServiceError):
pass
class InvalidQueryParameterError(ServiceError):
pass
class ThrottlingError(ServiceError):
pass
class MaxRetriesError(ServiceError):
pass
| apache-2.0 | -8,400,662,935,267,153,000 | 31.108025 | 88 | 0.612804 | false |
dekked/dynamodb-mock | ddbmock/database/storage/sqlite.py | 1 | 3773 | # -*- coding: utf-8 -*-
from ..item import Item
from ddbmock import config
import sqlite3, cPickle as pickle
# I know, using global "variable" for this kind of state *is* bad. But it helps
# keeping execution times to a sane value. In particular, this allows to use
# in-memory version of sqlite
conn = sqlite3.connect(config.STORAGE_SQLITE_FILE)
class Store(object):
def __init__(self, name):
""" Initialize the sqlite store
By contract, we know the table name will only contain alphanum chars,
'_', '.' or '-' so that this is ~ safe
:param name: Table name.
"""
conn.execute('''CREATE TABLE IF NOT EXISTS `{}` (
`hash_key` blob NOT NULL,
`range_key` blob NOT NULL,
`data` blob NOT NULL,
PRIMARY KEY (`hash_key`,`range_key`)
);'''.format(name))
conn.commit()
self.name = name
def truncate(self):
"""Perform a full table cleanup. Might be a good idea in tests :)"""
conn.execute('DELETE FROM `{}`'.format(self.name))
conn.commit()
def _get_by_hash_range(self, hash_key, range_key):
request = conn.execute('''SELECT `data` FROM `{}`
WHERE `hash_key`=? AND `range_key`=?'''
.format(self.name),
(hash_key, range_key))
item = request.fetchone()
if item is None:
raise KeyError("No item found at ({}, {})".format(hash_key, range_key))
return pickle.loads(str(item[0]))
def _get_by_hash(self, hash_key):
items = conn.execute('''SELECT * FROM `{}`
WHERE `hash_key`=? '''.format(self.name),
(hash_key, ))
ret = {item[1]:pickle.loads(str(item[2])) for item in items}
if not ret:
raise KeyError("No item found at hash_key={}".format(hash_key))
return ret
def __getitem__(self, (hash_key, range_key)):
"""Get item at (``hash_key``, ``range_key``) or the dict at ``hash_key`` if
``range_key`` is None.
:param key: (``hash_key``, ``range_key``) Tuple. If ``range_key`` is None, all keys under ``hash_key`` are returned
:return: Item or item dict
:raise: KeyError
"""
if range_key is None:
return self._get_by_hash(hash_key)
return self._get_by_hash_range(hash_key, range_key)
def __setitem__(self, (hash_key, range_key), item):
"""Set the item at (``hash_key``, ``range_key``). Both keys must be
defined and valid. By convention, ``range_key`` may be ``False`` to
indicate a ``hash_key`` only key.
:param key: (``hash_key``, ``range_key``) Tuple.
:param item: the actual ``Item`` data structure to store
"""
db_item = buffer(pickle.dumps(item, 2))
conn.execute('''INSERT OR REPLACE INTO `{}` (`hash_key`,`range_key`, `data`)
VALUES (?, ?, ?)'''.format(self.name),
(hash_key, range_key, db_item))
conn.commit()
def __delitem__(self, (hash_key, range_key)):
"""Delete item at key (``hash_key``, ``range_key``)
:raises: KeyError if not found
"""
conn.execute('DELETE FROM `{}` WHERE `hash_key`=? AND `range_key`=?'
.format(self.name), (hash_key, range_key))
def __iter__(self):
""" Iterate all over the table, abstracting the ``hash_key`` and
``range_key`` complexity. Mostly used for ``Scan`` implementation.
"""
items = conn.execute('SELECT `data` FROM `{}`'.format(self.name))
for item in items:
yield pickle.loads(str(item[0]))
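def _example_round_trip(item):
    """Illustrative only (not part of the original module): store and read back
    an item under a hash-only key, using False as the conventional range key."""
    store = Store('ExampleTable')
    store[('hash-1', False)] = item
    return store[('hash-1', False)]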
| lgpl-3.0 | 7,474,325,584,043,636,000 | 37.111111 | 123 | 0.536973 | false |
uclouvain/osis_louvain | base/business/proposal_xls.py | 1 | 4218 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.utils.translation import ugettext_lazy as _
from osis_common.document import xls_build
from base.business.learning_unit import get_entity_acronym
from base.business.xls import get_name_or_username
WORKSHEET_TITLE = 'Proposals'
XLS_FILENAME = 'Proposals'
XLS_DESCRIPTION = "List_proposals"
PROPOSAL_TITLES = [str(_('requirement_entity_small')), str(_('code')), str(_('title')), str(_('type')),
str(_('proposal_type')), str(_('proposal_status')), str(_('folder_num')),
str(_('type_declaration_vacant')), str(_('periodicity')), str(_('credits')),
str(_('allocation_entity_small')), str(_('proposal_date'))]
def prepare_xls_content(proposals):
return [extract_xls_data_from_proposal(proposal) for proposal in proposals]
def extract_xls_data_from_proposal(proposal):
return [get_entity_acronym(proposal.learning_unit_year.entities.get('REQUIREMENT_ENTITY')),
proposal.learning_unit_year.acronym,
proposal.learning_unit_year.complete_title,
xls_build.translate(proposal.learning_unit_year.learning_container_year.container_type),
xls_build.translate(proposal.type),
xls_build.translate(proposal.state),
proposal.folder,
xls_build.translate(proposal.learning_unit_year.learning_container_year.type_declaration_vacant),
xls_build.translate(proposal.learning_unit_year.periodicity),
proposal.learning_unit_year.credits,
get_entity_acronym(proposal.learning_unit_year.entities.get('ALLOCATION_ENTITY')),
proposal.date.strftime('%d-%m-%Y')]
def prepare_xls_parameters_list(user, working_sheets_data):
return {xls_build.LIST_DESCRIPTION_KEY: _(XLS_DESCRIPTION),
xls_build.FILENAME_KEY: _(XLS_FILENAME),
xls_build.USER_KEY: get_name_or_username(user),
xls_build.WORKSHEETS_DATA:
[{xls_build.CONTENT_KEY: working_sheets_data,
xls_build.HEADER_TITLES_KEY: PROPOSAL_TITLES,
xls_build.WORKSHEET_TITLE_KEY: _(WORKSHEET_TITLE),
}
]}
def create_xls(user, proposals, filters):
working_sheets_data = prepare_xls_content(proposals)
return xls_build.generate_xls(
xls_build.prepare_xls_parameters_list(working_sheets_data, configure_parameters(user)), filters)
def create_xls_proposal(user, proposals, filters):
return xls_build.generate_xls(prepare_xls_parameters_list(prepare_xls_content(proposals),
configure_parameters(user)), filters)
def configure_parameters(user):
return {xls_build.DESCRIPTION: XLS_DESCRIPTION,
xls_build.USER: get_name_or_username(user),
xls_build.FILENAME: XLS_FILENAME,
xls_build.HEADER_TITLES: PROPOSAL_TITLES,
xls_build.WS_TITLE: WORKSHEET_TITLE}
| agpl-3.0 | -4,826,060,251,302,619,000 | 45.855556 | 109 | 0.64904 | false |
digitalocean/netbox | netbox/dcim/tests/test_models.py | 1 | 20944 | from django.core.exceptions import ValidationError
from django.test import TestCase
from circuits.models import *
from dcim.choices import *
from dcim.models import *
from tenancy.models import Tenant
class RackGroupTestCase(TestCase):
def test_change_rackgroup_site(self):
"""
Check that all child RackGroups and Racks get updated when a RackGroup is moved to a new Site. Topology:
Site A
- RackGroup A1
- RackGroup A2
- Rack 2
- Rack 1
"""
site_a = Site.objects.create(name='Site A', slug='site-a')
site_b = Site.objects.create(name='Site B', slug='site-b')
rackgroup_a1 = RackGroup(site=site_a, name='RackGroup A1', slug='rackgroup-a1')
rackgroup_a1.save()
rackgroup_a2 = RackGroup(site=site_a, parent=rackgroup_a1, name='RackGroup A2', slug='rackgroup-a2')
rackgroup_a2.save()
rack1 = Rack.objects.create(site=site_a, group=rackgroup_a1, name='Rack 1')
rack2 = Rack.objects.create(site=site_a, group=rackgroup_a2, name='Rack 2')
powerpanel1 = PowerPanel.objects.create(site=site_a, rack_group=rackgroup_a1, name='Power Panel 1')
# Move RackGroup A1 to Site B
rackgroup_a1.site = site_b
rackgroup_a1.save()
# Check that all objects within RackGroup A1 now belong to Site B
self.assertEqual(RackGroup.objects.get(pk=rackgroup_a1.pk).site, site_b)
self.assertEqual(RackGroup.objects.get(pk=rackgroup_a2.pk).site, site_b)
self.assertEqual(Rack.objects.get(pk=rack1.pk).site, site_b)
self.assertEqual(Rack.objects.get(pk=rack2.pk).site, site_b)
self.assertEqual(PowerPanel.objects.get(pk=powerpanel1.pk).site, site_b)
class RackTestCase(TestCase):
def setUp(self):
self.site1 = Site.objects.create(
name='TestSite1',
slug='test-site-1'
)
self.site2 = Site.objects.create(
name='TestSite2',
slug='test-site-2'
)
self.group1 = RackGroup.objects.create(
name='TestGroup1',
slug='test-group-1',
site=self.site1
)
self.group2 = RackGroup.objects.create(
name='TestGroup2',
slug='test-group-2',
site=self.site2
)
self.rack = Rack.objects.create(
name='TestRack1',
facility_id='A101',
site=self.site1,
group=self.group1,
u_height=42
)
self.manufacturer = Manufacturer.objects.create(
name='Acme',
slug='acme'
)
self.device_type = {
'ff2048': DeviceType.objects.create(
manufacturer=self.manufacturer,
model='FrameForwarder 2048',
slug='ff2048'
),
'cc5000': DeviceType.objects.create(
manufacturer=self.manufacturer,
model='CurrentCatapult 5000',
slug='cc5000',
u_height=0
),
}
self.role = {
'Server': DeviceRole.objects.create(
name='Server',
slug='server',
),
'Switch': DeviceRole.objects.create(
name='Switch',
slug='switch',
),
'Console Server': DeviceRole.objects.create(
name='Console Server',
slug='console-server',
),
'PDU': DeviceRole.objects.create(
name='PDU',
slug='pdu',
),
}
def test_rack_device_outside_height(self):
rack1 = Rack(
name='TestRack2',
facility_id='A102',
site=self.site1,
u_height=42
)
rack1.save()
device1 = Device(
name='TestSwitch1',
device_type=DeviceType.objects.get(manufacturer__slug='acme', slug='ff2048'),
device_role=DeviceRole.objects.get(slug='switch'),
site=self.site1,
rack=rack1,
position=43,
face=DeviceFaceChoices.FACE_FRONT,
)
device1.save()
with self.assertRaises(ValidationError):
rack1.clean()
def test_rack_group_site(self):
rack_invalid_group = Rack(
name='TestRack2',
facility_id='A102',
site=self.site1,
u_height=42,
group=self.group2
)
rack_invalid_group.save()
with self.assertRaises(ValidationError):
rack_invalid_group.clean()
def test_mount_single_device(self):
device1 = Device(
name='TestSwitch1',
device_type=DeviceType.objects.get(manufacturer__slug='acme', slug='ff2048'),
device_role=DeviceRole.objects.get(slug='switch'),
site=self.site1,
rack=self.rack,
position=10,
face=DeviceFaceChoices.FACE_REAR,
)
device1.save()
# Validate rack height
self.assertEqual(list(self.rack.units), list(reversed(range(1, 43))))
# Validate inventory (front face)
rack1_inventory_front = self.rack.get_rack_units(face=DeviceFaceChoices.FACE_FRONT)
self.assertEqual(rack1_inventory_front[-10]['device'], device1)
del(rack1_inventory_front[-10])
for u in rack1_inventory_front:
self.assertIsNone(u['device'])
# Validate inventory (rear face)
rack1_inventory_rear = self.rack.get_rack_units(face=DeviceFaceChoices.FACE_REAR)
self.assertEqual(rack1_inventory_rear[-10]['device'], device1)
del(rack1_inventory_rear[-10])
for u in rack1_inventory_rear:
self.assertIsNone(u['device'])
def test_mount_zero_ru(self):
pdu = Device.objects.create(
name='TestPDU',
device_role=self.role.get('PDU'),
device_type=self.device_type.get('cc5000'),
site=self.site1,
rack=self.rack,
position=None,
face='',
)
self.assertTrue(pdu)
def test_change_rack_site(self):
"""
Check that child Devices get updated when a Rack is moved to a new Site.
"""
site_a = Site.objects.create(name='Site A', slug='site-a')
site_b = Site.objects.create(name='Site B', slug='site-b')
manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
device_type = DeviceType.objects.create(
manufacturer=manufacturer, model='Device Type 1', slug='device-type-1'
)
device_role = DeviceRole.objects.create(
name='Device Role 1', slug='device-role-1', color='ff0000'
)
# Create Rack1 in Site A
rack1 = Rack.objects.create(site=site_a, name='Rack 1')
# Create Device1 in Rack1
device1 = Device.objects.create(site=site_a, rack=rack1, device_type=device_type, device_role=device_role)
# Move Rack1 to Site B
rack1.site = site_b
rack1.save()
# Check that Device1 is now assigned to Site B
self.assertEqual(Device.objects.get(pk=device1.pk).site, site_b)
class DeviceTestCase(TestCase):
def setUp(self):
self.site = Site.objects.create(name='Test Site 1', slug='test-site-1')
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
self.device_type = DeviceType.objects.create(
manufacturer=manufacturer, model='Test Device Type 1', slug='test-device-type-1'
)
self.device_role = DeviceRole.objects.create(
name='Test Device Role 1', slug='test-device-role-1', color='ff0000'
)
# Create DeviceType components
ConsolePortTemplate(
device_type=self.device_type,
name='Console Port 1'
).save()
ConsoleServerPortTemplate(
device_type=self.device_type,
name='Console Server Port 1'
).save()
ppt = PowerPortTemplate(
device_type=self.device_type,
name='Power Port 1',
maximum_draw=1000,
allocated_draw=500
)
ppt.save()
PowerOutletTemplate(
device_type=self.device_type,
name='Power Outlet 1',
power_port=ppt,
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A
).save()
InterfaceTemplate(
device_type=self.device_type,
name='Interface 1',
type=InterfaceTypeChoices.TYPE_1GE_FIXED,
mgmt_only=True
).save()
rpt = RearPortTemplate(
device_type=self.device_type,
name='Rear Port 1',
type=PortTypeChoices.TYPE_8P8C,
positions=8
)
rpt.save()
FrontPortTemplate(
device_type=self.device_type,
name='Front Port 1',
type=PortTypeChoices.TYPE_8P8C,
rear_port=rpt,
rear_port_position=2
).save()
DeviceBayTemplate(
device_type=self.device_type,
name='Device Bay 1'
).save()
def test_device_creation(self):
"""
Ensure that all Device components are copied automatically from the DeviceType.
"""
d = Device(
site=self.site,
device_type=self.device_type,
device_role=self.device_role,
name='Test Device 1'
)
d.save()
ConsolePort.objects.get(
device=d,
name='Console Port 1'
)
ConsoleServerPort.objects.get(
device=d,
name='Console Server Port 1'
)
pp = PowerPort.objects.get(
device=d,
name='Power Port 1',
maximum_draw=1000,
allocated_draw=500
)
PowerOutlet.objects.get(
device=d,
name='Power Outlet 1',
power_port=pp,
feed_leg=PowerOutletFeedLegChoices.FEED_LEG_A
)
Interface.objects.get(
device=d,
name='Interface 1',
type=InterfaceTypeChoices.TYPE_1GE_FIXED,
mgmt_only=True
)
rp = RearPort.objects.get(
device=d,
name='Rear Port 1',
type=PortTypeChoices.TYPE_8P8C,
positions=8
)
FrontPort.objects.get(
device=d,
name='Front Port 1',
type=PortTypeChoices.TYPE_8P8C,
rear_port=rp,
rear_port_position=2
)
DeviceBay.objects.get(
device=d,
name='Device Bay 1'
)
def test_multiple_unnamed_devices(self):
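        # Two devices with blank names in the same Site should not trigger a uniqueness violation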
device1 = Device(
site=self.site,
device_type=self.device_type,
device_role=self.device_role,
name=''
)
device1.save()
device2 = Device(
site=device1.site,
device_type=device1.device_type,
device_role=device1.device_role,
name=''
)
device2.full_clean()
device2.save()
self.assertEqual(Device.objects.filter(name='').count(), 2)
def test_device_duplicate_names(self):
device1 = Device(
site=self.site,
device_type=self.device_type,
device_role=self.device_role,
name='Test Device 1'
)
device1.save()
device2 = Device(
site=device1.site,
device_type=device1.device_type,
device_role=device1.device_role,
name=device1.name
)
# Two devices assigned to the same Site and no Tenant should fail validation
with self.assertRaises(ValidationError):
device2.full_clean()
tenant = Tenant.objects.create(name='Test Tenant 1', slug='test-tenant-1')
device1.tenant = tenant
device1.save()
device2.tenant = tenant
# Two devices assigned to the same Site and the same Tenant should fail validation
with self.assertRaises(ValidationError):
device2.full_clean()
device2.tenant = None
# Two devices assigned to the same Site and different Tenants should pass validation
device2.full_clean()
device2.save()
class CableTestCase(TestCase):
def setUp(self):
site = Site.objects.create(name='Test Site 1', slug='test-site-1')
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
devicetype = DeviceType.objects.create(
manufacturer=manufacturer, model='Test Device Type 1', slug='test-device-type-1'
)
devicerole = DeviceRole.objects.create(
name='Test Device Role 1', slug='test-device-role-1', color='ff0000'
)
self.device1 = Device.objects.create(
device_type=devicetype, device_role=devicerole, name='TestDevice1', site=site
)
self.device2 = Device.objects.create(
device_type=devicetype, device_role=devicerole, name='TestDevice2', site=site
)
self.interface1 = Interface.objects.create(device=self.device1, name='eth0')
self.interface2 = Interface.objects.create(device=self.device2, name='eth0')
self.interface3 = Interface.objects.create(device=self.device2, name='eth1')
self.cable = Cable(termination_a=self.interface1, termination_b=self.interface2)
self.cable.save()
self.power_port1 = PowerPort.objects.create(device=self.device2, name='psu1')
self.patch_pannel = Device.objects.create(
device_type=devicetype, device_role=devicerole, name='TestPatchPannel', site=site
)
self.rear_port1 = RearPort.objects.create(device=self.patch_pannel, name='RP1', type='8p8c')
self.front_port1 = FrontPort.objects.create(
device=self.patch_pannel, name='FP1', type='8p8c', rear_port=self.rear_port1, rear_port_position=1
)
self.rear_port2 = RearPort.objects.create(device=self.patch_pannel, name='RP2', type='8p8c', positions=2)
self.front_port2 = FrontPort.objects.create(
device=self.patch_pannel, name='FP2', type='8p8c', rear_port=self.rear_port2, rear_port_position=1
)
self.rear_port3 = RearPort.objects.create(device=self.patch_pannel, name='RP3', type='8p8c', positions=3)
self.front_port3 = FrontPort.objects.create(
device=self.patch_pannel, name='FP3', type='8p8c', rear_port=self.rear_port3, rear_port_position=1
)
self.rear_port4 = RearPort.objects.create(device=self.patch_pannel, name='RP4', type='8p8c', positions=3)
self.front_port4 = FrontPort.objects.create(
device=self.patch_pannel, name='FP4', type='8p8c', rear_port=self.rear_port4, rear_port_position=1
)
self.provider = Provider.objects.create(name='Provider 1', slug='provider-1')
self.circuittype = CircuitType.objects.create(name='Circuit Type 1', slug='circuit-type-1')
self.circuit = Circuit.objects.create(provider=self.provider, type=self.circuittype, cid='1')
self.circuittermination1 = CircuitTermination.objects.create(circuit=self.circuit, site=site, term_side='A')
self.circuittermination2 = CircuitTermination.objects.create(circuit=self.circuit, site=site, term_side='Z')
def test_cable_creation(self):
"""
        When a new Cable is created, it must be cached on both termination points.
"""
interface1 = Interface.objects.get(pk=self.interface1.pk)
interface2 = Interface.objects.get(pk=self.interface2.pk)
self.assertEqual(self.cable.termination_a, interface1)
self.assertEqual(interface1._cable_peer, interface2)
self.assertEqual(self.cable.termination_b, interface2)
self.assertEqual(interface2._cable_peer, interface1)
def test_cable_deletion(self):
"""
When a Cable is deleted, the `cable` field on its termination points must be nullified. The str() method
        should still return the PK of the cable even after the PK has been nullified.
"""
self.cable.delete()
self.assertIsNone(self.cable.pk)
self.assertNotEqual(str(self.cable), '#None')
interface1 = Interface.objects.get(pk=self.interface1.pk)
self.assertIsNone(interface1.cable)
self.assertIsNone(interface1._cable_peer)
interface2 = Interface.objects.get(pk=self.interface2.pk)
self.assertIsNone(interface2.cable)
self.assertIsNone(interface2._cable_peer)
def test_cabletermination_deletion(self):
"""
When a CableTermination object is deleted, its attached Cable (if any) must also be deleted.
"""
self.interface1.delete()
cable = Cable.objects.filter(pk=self.cable.pk).first()
self.assertIsNone(cable)
def test_cable_validates_compatible_types(self):
"""
The clean method should have a check to ensure only compatible port types can be connected by a cable
"""
# An interface cannot be connected to a power port
cable = Cable(termination_a=self.interface1, termination_b=self.power_port1)
with self.assertRaises(ValidationError):
cable.clean()
def test_cable_cannot_have_the_same_terminination_on_both_ends(self):
"""
A cable cannot be made with the same A and B side terminations
"""
cable = Cable(termination_a=self.interface1, termination_b=self.interface1)
with self.assertRaises(ValidationError):
cable.clean()
def test_cable_front_port_cannot_connect_to_corresponding_rear_port(self):
"""
A cable cannot connect a front port to its corresponding rear port
"""
cable = Cable(termination_a=self.front_port1, termination_b=self.rear_port1)
with self.assertRaises(ValidationError):
cable.clean()
def test_cable_cannot_terminate_to_an_existing_connection(self):
"""
Either side of a cable cannot be terminated when that side already has a connection
"""
        # Try to create a second cable using interfaces that already have a cable connected
cable = Cable(termination_a=self.interface2, termination_b=self.interface1)
with self.assertRaises(ValidationError):
cable.clean()
def test_rearport_connections(self):
"""
Test various combinations of RearPort connections.
"""
# Connecting a single-position RearPort to a multi-position RearPort is ok
Cable(termination_a=self.rear_port1, termination_b=self.rear_port2).full_clean()
# Connecting a single-position RearPort to an Interface is ok
Cable(termination_a=self.rear_port1, termination_b=self.interface3).full_clean()
# Connecting a single-position RearPort to a CircuitTermination is ok
Cable(termination_a=self.rear_port1, termination_b=self.circuittermination1).full_clean()
# Connecting a multi-position RearPort to another RearPort with the same number of positions is ok
Cable(termination_a=self.rear_port3, termination_b=self.rear_port4).full_clean()
# Connecting a multi-position RearPort to an Interface is ok
Cable(termination_a=self.rear_port2, termination_b=self.interface3).full_clean()
# Connecting a multi-position RearPort to a CircuitTermination is ok
Cable(termination_a=self.rear_port2, termination_b=self.circuittermination1).full_clean()
# Connecting a two-position RearPort to a three-position RearPort is NOT ok
with self.assertRaises(
ValidationError,
msg='Connecting a 2-position RearPort to a 3-position RearPort should fail'
):
Cable(termination_a=self.rear_port2, termination_b=self.rear_port3).full_clean()
def test_cable_cannot_terminate_to_a_virtual_interface(self):
"""
A cable cannot terminate to a virtual interface
"""
virtual_interface = Interface(device=self.device1, name="V1", type=InterfaceTypeChoices.TYPE_VIRTUAL)
cable = Cable(termination_a=self.interface2, termination_b=virtual_interface)
with self.assertRaises(ValidationError):
cable.clean()
def test_cable_cannot_terminate_to_a_wireless_interface(self):
"""
A cable cannot terminate to a wireless interface
"""
wireless_interface = Interface(device=self.device1, name="W1", type=InterfaceTypeChoices.TYPE_80211A)
cable = Cable(termination_a=self.interface2, termination_b=wireless_interface)
with self.assertRaises(ValidationError):
cable.clean()
| apache-2.0 | 6,810,999,451,422,508,000 | 35.551483 | 116 | 0.603419 | false |
adamwiggins/cocos2d | tools/skeleton/skeleton_editor.py | 2 | 5454 | import os, sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))
#
import math
from math import pi, atan
import cPickle
import glob
from optparse import OptionParser
import pyglet
from pyglet.gl import *
from pyglet.window import key
import cocos
from cocos.director import director
from cocos.sprite import Sprite
from cocos.actions import CallFuncS, CallFunc, IntervalAction
from cocos import euclid
from animator import *
import ui
from cocos.skeleton import Bone, Skeleton, Skin, Animation, Animate, ColorSkin, \
BitmapSkin
class SkinControl(ui.BallWidget):
def __init__(self, skin, idx, bone, delta):
super(SkinControl, self).__init__(7, (0,0,255,255))
self.skin = skin
self.idx = idx
self.bone = bone
self.position = (bone.get_start()+bone.get_end())/2 + delta
def on_dragged(self, dx, dy):
super(SkinControl, self).on_dragged(dx, dy)
self.skin.move(self.idx, dx, dy)
class BonePositionControl(ui.BallWidget):
def __init__(self, bone, delta):
super(BonePositionControl, self).__init__(10, (0,255,0,255))
self.bone = bone
self.position = (bone.get_start()) + delta
def on_dragged(self, dx, dy):
super(BonePositionControl, self).on_dragged(dx, dy)
self.bone.move(dx, dy)
class SkeletonEditorUI(ui.UILayer):
def __init__(self, skeleton, skin):
super(SkeletonEditorUI, self).__init__()
        sk_file = imp.load_source("skeleton", skeleton)
if skin is None:
self.user_skin = None
else:
            skin_data = imp.load_source("skin", skin).skin
self.skin_filename = skin
self.user_skin = skin_data
self.skeleton_file = skeleton
self.skeleton = sk_file.skeleton
self.add_skin_for_skeleton(self.skeleton, (255,255,255,255))
def add_skin_for_skeleton(self, skeleton, color, z=-1, editable=False):
if self.user_skin:
skin = BitmapSkin(skeleton, self.user_skin, color[3])
else:
skin = ColorSkin(skeleton, color)
self.skin = skin
self.add( skin, z=z )
xs, ys = director.get_window_size()
skin.position = xs/2-6, ys/2-11
self.generate_control_points()
def on_key_press(self, k, mod):
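        # Pressing 'S' writes the edited skin parts and the skeleton definition back to their source files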
if k == key.S:
f = open(self.skin_filename, "w")
f.write("\nskin = [\n")
for p in self.skin.skin_parts:
f.write(" %s,\n"%(p,))
f.write(" ]\n")
f.close()
f = open(self.skeleton_file, "w")
f.write("""from cocos.skeleton import Bone, Skeleton\n
def Point2(*args): return args\n
root_bone = %s
skeleton = Skeleton( root_bone )"""%self.skeleton.bone.repr())
f.close()
def update_visual(self):
        self.add_skin_for_skeleton(self.skeleton, (255,255,255,255), -1, True)
def clean_control_points(self):
cps = [ cp for cp in self.get_children() if isinstance(cp, ui.BallWidget) ]
for cp in cps:
self.remove(cp)
def clean_skins(self):
skins = [ cp for cp in self.get_children() if isinstance(cp, Skin) ]
for skin in skins:
self.remove(skin)
def on_mouse_release(self, *args):
if self.dragging:
self.clean_control_points()
self.generate_control_points()
super(SkeletonEditorUI, self).on_mouse_release(*args)
def on_mouse_drag(self, x, y, dx, dy, button, modifiers):
if self.hovering:
cps = [ cp for cp in self.get_children()
if isinstance(cp, ui.BallWidget) and cp != self.hovering ]
for cp in cps:
self.remove(cp)
super(SkeletonEditorUI, self).on_mouse_drag(x, y, dx, dy, button, modifiers)
def generate_control_points(self):
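        # Rebuild the draggable handles: skeleton/bone controls, bone position controls and skin attachment controls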
skinpos = euclid.Point2(*self.skin.position)
for cp in self.skeleton.get_control_points():
if isinstance(cp, Skeleton):
self.add( SkeletonControl(cp, skinpos), z=3 )
else:
self.add( BoneControl(cp, skinpos), z=4 )
bones = self.skeleton.visit_children(lambda bone: (bone.label, bone))
bones = dict(bones)
for bone in bones.values():
self.add( BonePositionControl( bone, skinpos ), z=2 )
for idx, name in self.skin.get_control_points():
self.add( SkinControl(self.skin, idx, bones[name], skinpos ), z=5)
if __name__ == "__main__":
import sys, imp
director.init()
parser = OptionParser()
parser.add_option("-b", "--background", dest="background",
help="use file as background", default=False, metavar="FILE")
parser.add_option("-s", "--scale", dest="scale",
help="scale image by", default=1, metavar="SCALE")
(options, args) = parser.parse_args()
def usage():
return "python animator.py skeleton.py skin.py"
    if len(args) != 2:
        print usage()
        parser.error("incorrect number of arguments")
        sys.exit()
animator = cocos.scene.Scene(SkeletonEditorUI(args[0], args[1]))
if options.background:
background = cocos.sprite.Sprite(options.background)
x,y = director.get_window_size()
animator.add( background, z=-10 )
background.position = x/2, y/2
background.scale = float(options.scale)
director.run(animator)
| bsd-3-clause | 6,242,549,518,128,177,000 | 31.464286 | 84 | 0.595526 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/work_group_v20.py | 1 | 5166 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.external_i_ds_v20 import ExternalIDsV20 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v20 import LastModifiedDateV20 # noqa: F401,E501
from orcid_api_v3.models.work_summary_v20 import WorkSummaryV20 # noqa: F401,E501
class WorkGroupV20(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'last_modified_date': 'LastModifiedDateV20',
'external_ids': 'ExternalIDsV20',
'work_summary': 'list[WorkSummaryV20]'
}
attribute_map = {
'last_modified_date': 'last-modified-date',
'external_ids': 'external-ids',
'work_summary': 'work-summary'
}
def __init__(self, last_modified_date=None, external_ids=None, work_summary=None): # noqa: E501
"""WorkGroupV20 - a model defined in Swagger""" # noqa: E501
self._last_modified_date = None
self._external_ids = None
self._work_summary = None
self.discriminator = None
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if external_ids is not None:
self.external_ids = external_ids
if work_summary is not None:
self.work_summary = work_summary
@property
def last_modified_date(self):
"""Gets the last_modified_date of this WorkGroupV20. # noqa: E501
:return: The last_modified_date of this WorkGroupV20. # noqa: E501
:rtype: LastModifiedDateV20
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this WorkGroupV20.
:param last_modified_date: The last_modified_date of this WorkGroupV20. # noqa: E501
:type: LastModifiedDateV20
"""
self._last_modified_date = last_modified_date
@property
def external_ids(self):
"""Gets the external_ids of this WorkGroupV20. # noqa: E501
:return: The external_ids of this WorkGroupV20. # noqa: E501
:rtype: ExternalIDsV20
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this WorkGroupV20.
:param external_ids: The external_ids of this WorkGroupV20. # noqa: E501
:type: ExternalIDsV20
"""
self._external_ids = external_ids
@property
def work_summary(self):
"""Gets the work_summary of this WorkGroupV20. # noqa: E501
:return: The work_summary of this WorkGroupV20. # noqa: E501
:rtype: list[WorkSummaryV20]
"""
return self._work_summary
@work_summary.setter
def work_summary(self, work_summary):
"""Sets the work_summary of this WorkGroupV20.
:param work_summary: The work_summary of this WorkGroupV20. # noqa: E501
:type: list[WorkSummaryV20]
"""
self._work_summary = work_summary
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(WorkGroupV20, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, WorkGroupV20):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| mit | 2,386,846,838,889,565,700 | 30.120482 | 119 | 0.590205 | false |
argriffing/arbtkf91 | repro/data_source.py | 1 | 1854 | """
yield stuff from a hardcoded data source
"""
from __future__ import print_function, division
import os
import numpy as np
from numpy.testing import assert_array_equal, assert_equal
from Bio import SeqIO
__all__ = ['gen_files', 'gen_sequence_pairs']
#mypath = os.path.realpath('../../stamatakis/benchMark_data')
def _normalized_seq(s):
return ''.join(_normalized_chr(c) for c in s)
def _normalized_chr(c):
if c in 'ACGT':
return c
    elif c.isalpha():
return 'A'
else:
msg = ('weird character:', c)
raise Exception(msg)
def gen_sequence_pairs(fin, force_acgt=False):
# yield (10 choose 2) = 45 nucleotide sequence pairs
fasta_objects = list(SeqIO.parse(fin, 'fasta'))
sequences = [str(x.seq) for x in fasta_objects]
available = len(sequences)
requested = 10
indices = _select_indices(available, requested)
selection = [sequences[i] for i in indices]
assert_equal(len(selection), requested)
k = 0
for i in range(requested):
for j in range(i):
a = selection[i]
b = selection[j]
if force_acgt:
a = _normalized_seq(a)
b = _normalized_seq(b)
yield a, b
k += 1
assert_equal(k, 45)
def gen_files(data_path):
# yield (name, handle) pairs
for filename in os.listdir(data_path):
if 'unaligned' in filename:
fullpath = os.path.join(data_path, filename)
with open(fullpath) as fin:
yield filename, fin
def _select_indices(available, requested):
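    # pick `requested` indices spaced evenly across the available sequences, starting at 0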
incr = available // requested
return [i*incr for i in range(requested)]
def test():
indices = _select_indices(60, 10)
assert_array_equal(indices[:3], [0, 6, 12])
assert_array_equal(indices[-1:], [54])
if __name__ == '__main__':
test()
| gpl-2.0 | 2,098,660,419,289,982,200 | 24.75 | 61 | 0.600863 | false |
Scriptkiddi/Ankipubsub-Client | pubsub/gui/auto_gen/ankipubsub_settings.py | 1 | 2166 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ankipubsub_settings.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(173, 169)
self.Login = QtGui.QPushButton(Form)
self.Login.setGeometry(QtCore.QRect(30, 120, 92, 27))
self.Login.setObjectName(_fromUtf8("Login"))
self.label = QtGui.QLabel(Form)
self.label.setGeometry(QtCore.QRect(10, 20, 171, 16))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(Form)
self.label_2.setGeometry(QtCore.QRect(10, 70, 141, 16))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.username = QtGui.QLineEdit(Form)
self.username.setGeometry(QtCore.QRect(20, 40, 113, 25))
self.username.setObjectName(_fromUtf8("username"))
self.password = QtGui.QLineEdit(Form)
self.password.setGeometry(QtCore.QRect(20, 90, 113, 25))
self.password.setObjectName(_fromUtf8("password"))
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "AnkiPubSub Settings", None))
self.Login.setText(_translate("Form", "Login", None))
self.label.setText(_translate("Form", "Insert your Username:", None))
self.label_2.setText(_translate("Form", "Insert your Password:", None))
self.username.setText(_translate("Form", "Username", None))
self.password.setText(_translate("Form", "Password", None))
| gpl-3.0 | 377,500,660,763,704,260 | 38.381818 | 79 | 0.672207 | false |
shmilee/gdpy3 | src/loaders/tests/test_zipraw.py | 1 | 1387 | # -*- coding: utf-8 -*-
# Copyright (c) 2020 shmilee
import os
import unittest
import tempfile
import zipfile
ZIPFILE = os.path.join(os.path.dirname(__file__), 'raw.zip')
@unittest.skipUnless(zipfile.is_zipfile(ZIPFILE),
"'%s' is not a tar archive!" % ZIPFILE)
class TestZipRawLoader(unittest.TestCase):
'''
Test class ZipRawLoader
'''
def setUp(self):
from ..zipraw import ZipRawLoader
self.tmpfile = tempfile.mktemp(suffix='-test.zip')
with open(self.tmpfile, mode='w') as f:
f.write('test')
self.RawLoader = ZipRawLoader
self.tmpzip = ZIPFILE
def tearDown(self):
if os.path.isfile(self.tmpfile):
os.remove(self.tmpfile)
def test_ziploader_init(self):
with self.assertRaises(ValueError):
loader = self.RawLoader(self.tmpfile)
loader = self.RawLoader(self.tmpzip)
self.assertSetEqual(set(loader.filenames),
{'f1.ignore', 'f1.out', 'd1/f2.out', 'd1/d2/f3.out'})
def test_ziploader_get(self):
loader = self.RawLoader(self.tmpzip)
with loader.get('f1.out') as f1:
self.assertEqual(f1.readlines(), ['test1'])
with loader.get('d1/f2.out') as f2:
self.assertEqual(f2.read(), 'test2')
with self.assertRaises(ValueError):
f2.read()
| mit | -3,828,489,281,951,191,600 | 29.152174 | 81 | 0.596972 | false |
Squishymedia/feedingdb | src/feeddb/feed/migrations/0005_name2title_drop_name.py | 1 | 36453 |
from south.db import db
from django.db import models
from feeddb.feed.models import *
class Migration:
def forwards(self, orm):
# Deleting field 'Trial.name'
db.delete_column('feed_trial', 'name')
# Deleting field 'Experiment.name'
db.delete_column('feed_experiment', 'name')
# Deleting field 'Session.name'
db.delete_column('feed_session', 'name')
# Deleting field 'Study.name'
db.delete_column('feed_study', 'name')
# Changing field 'Trial.title'
# (to signature: django.db.models.fields.CharField(default='new Trial - edit this', max_length=255))
db.alter_column('feed_trial', 'title', orm['feed.trial:title'])
# Changing field 'Experiment.title'
# (to signature: django.db.models.fields.CharField(default='new Experiment - edit this', max_length=255))
db.alter_column('feed_experiment', 'title', orm['feed.experiment:title'])
# Changing field 'Session.title'
# (to signature: django.db.models.fields.CharField(default='new Recording Session - edit this', max_length=255))
db.alter_column('feed_session', 'title', orm['feed.session:title'])
# Changing field 'Study.title'
# (to signature: django.db.models.fields.CharField(max_length=255))
db.alter_column('feed_study', 'title', orm['feed.study:title'])
def backwards(self, orm):
# Adding field 'Trial.name'
db.add_column('feed_trial', 'name', orm['feed.trial:name'])
# Adding field 'Experiment.name'
db.add_column('feed_experiment', 'name', orm['feed.experiment:name'])
# Adding field 'Session.name'
db.add_column('feed_session', 'name', orm['feed.session:name'])
# Adding field 'Study.name'
db.add_column('feed_study', 'name', orm['feed.study:name'])
# Changing field 'Trial.title'
# (to signature: django.db.models.fields.CharField(default='new Trial - edit this', max_length=255, null=True))
db.alter_column('feed_trial', 'title', orm['feed.trial:title'])
# Changing field 'Experiment.title'
# (to signature: django.db.models.fields.CharField(default='new Experiment - edit this', max_length=255, null=True))
db.alter_column('feed_experiment', 'title', orm['feed.experiment:title'])
# Changing field 'Session.title'
# (to signature: django.db.models.fields.CharField(default='new Recording Session - edit this', max_length=255, null=True))
db.alter_column('feed_session', 'title', orm['feed.session:title'])
# Changing field 'Study.title'
# (to signature: django.db.models.fields.CharField(max_length=255, null=True))
db.alter_column('feed_study', 'title', orm['feed.study:title'])
models = {
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'feed.anteriorposterioraxis': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anteriorposterioraxis_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.behavior': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'behavior_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.channel': {
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'channel_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rate': ('django.db.models.fields.IntegerField', [], {}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.channellineup': {
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Channel']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'channellineup_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Session']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.depthaxis': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'depthaxis_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.developmentstage': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'developmentstage_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.dorsalventralaxis': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dorsalventralaxis_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.electrodetype': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'electrodetype_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.emgchannel': {
'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'emg_filtering': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Emgfiltering']"}),
'emg_unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Emgunit']"}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.EmgSensor']"})
},
'feed.emgelectrode': {
'axisap': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.AnteriorPosteriorAxis']", 'null': 'True', 'blank': 'True'}),
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'axisdv': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DorsalVentralAxis']", 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emgelectrode_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'electrode_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.ElectrodeType']", 'null': 'True', 'blank': 'True'}),
'emg_filtering': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Emgfiltering']"}),
'emg_unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Emgunit']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Muscle']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rate': ('django.db.models.fields.IntegerField', [], {}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Setup']"}),
'side': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Side']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.emgfiltering': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emgfiltering_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.emgsensor': {
'axisap': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.AnteriorPosteriorAxis']", 'null': 'True', 'blank': 'True'}),
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'axisdv': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DorsalVentralAxis']", 'null': 'True', 'blank': 'True'}),
'electrode_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.ElectrodeType']", 'null': 'True', 'blank': 'True'}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Muscle']"}),
'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'}),
'side': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Side']"})
},
'feed.emgsetup': {
'preamplifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
'feed.emgunit': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emgunit_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.experiment': {
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'experiment_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impl_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Study']"}),
'subj_age': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subj_devstage': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DevelopmentStage']"}),
'subj_tooth': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_weight': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Subject']"}),
'subject_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "'new Experiment - edit this'", 'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.illustration': {
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'illustration_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Experiment']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Setup']", 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Subject']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.muscle': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'muscle_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.restraint': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'restraint_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.sensor': {
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sensor_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.session': {
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['feed.Channel']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'session_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'subj_anesthesia_sedation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_restraint': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Restraint']"}),
'title': ('django.db.models.fields.CharField', [], {'default': "'new Recording Session - edit this'", 'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.setup': {
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'setup_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'technique': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Technique']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.side': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'side_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.sonochannel': {
'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'crystal1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals1_related'", 'to': "orm['feed.SonoSensor']"}),
'crystal2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals2_related'", 'to': "orm['feed.SonoSensor']"}),
'sono_unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Sonounit']"})
},
'feed.sonosensor': {
'axisap': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.AnteriorPosteriorAxis']", 'null': 'True', 'blank': 'True'}),
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'axisdv': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.DorsalVentralAxis']", 'null': 'True', 'blank': 'True'}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Muscle']"}),
'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'}),
'side': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Side']"})
},
'feed.sonosetup': {
'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'}),
'sonomicrometer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'feed.sonounit': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sonounit_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.study': {
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'approval_secured': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'study_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'funding_agency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.studyprivate': {
'approval': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'studyprivate_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'funding': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lab': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pi': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Study']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.subject': {
'breed': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subject_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Study']"}),
'taxon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Taxon']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.taxon': {
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taxon_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'genus': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'species': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.technique': {
'controlled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'technique_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'deprecated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
'feed.trial': {
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'behavior_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'behavior_primary': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Behavior']"}),
'behavior_secondary': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'claimed_duration': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '4', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'trial_related'", 'blank': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'food_property': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_size': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feed.Session']"}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_treatment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "'new Trial - edit this'", 'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {}),
'waveform_picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['feed']
| gpl-3.0 | -1,834,002,616,285,841,700 | 81.659864 | 189 | 0.551093 | false |
dashea/redhat-upgrade-tool | redhat_upgrade_tool/sysprep.py | 1 | 8052 | # sysprep.py - utility functions for system prep
#
# Copyright (C) 2012 Red Hat Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Will Woods <[email protected]>
import os, glob
from shutil import copy2
from . import _
from . import cachedir, packagedir, packagelist, update_img_dir
from . import upgradeconf, upgradelink, upgraderoot
from .media import write_systemd_unit
from .util import listdir, mkdir_p, rm_f, rm_rf, is_selinux_enabled, kernelver
from .conf import Config
from . import boot
import logging
log = logging.getLogger(__package__+".sysprep")
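# systemd requires-directory for system-upgrade.target; mount units written here are pulled in during the upgrade boot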
upgrade_target_requires = "/lib/systemd/system/system-upgrade.target.requires"
def setup_cleanup_post():
'''Set a flag in upgrade.conf to be read by preupgrade-assistant,
    signalling it to clean up old packages in the post scripts.'''
with Config(upgradeconf) as conf:
conf.set('postupgrade', 'cleanup', 'True')
def link_pkgs(pkgs):
'''link the named pkgs into packagedir, overwriting existing files.
also removes any .rpm files in packagedir that aren't in pkgs.
finally, write a list of packages to upgrade and a list of dirs
to clean up after successful upgrade.'''
log.info("linking required packages into packagedir")
log.info("packagedir = %s", packagedir)
mkdir_p(packagedir)
pkgbasenames = set()
for pkg in pkgs:
pkgpath = pkg.localPkg()
if pkg.remote_url.startswith("file://"):
pkgbasename = "media/%s" % pkg.relativepath
pkgbasenames.add(pkgbasename)
continue
if not os.path.exists(pkgpath):
log.warning("%s missing", pkgpath)
continue
pkgbasename = os.path.basename(pkgpath)
pkgbasenames.add(pkgbasename)
target = os.path.join(packagedir, pkgbasename)
if os.path.exists(target) and os.lstat(pkgpath) == os.lstat(target):
log.info("%s already in packagedir", pkgbasename)
continue
else:
if os.path.isdir(target):
log.info("deleting weirdo directory named %s", pkgbasename)
rm_rf(target)
elif os.path.exists(target):
os.remove(target)
try:
os.link(pkgpath, target)
except OSError as e:
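            # errno 18 == EXDEV (cross-device link): fall back to copying when packagedir is on a different filesystem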
if e.errno == 18:
copy2(pkgpath, target)
else:
raise
# remove spurious / leftover RPMs
for f in os.listdir(packagedir):
if f.endswith(".rpm") and f not in pkgbasenames:
os.remove(os.path.join(packagedir, f))
# write packagelist
with open(packagelist, 'w') as outf:
outf.writelines(p+'\n' for p in pkgbasenames)
# write cleanup data
with Config(upgradeconf) as conf:
# packagedir should probably be last, since it contains upgradeconf
cleanupdirs = [cachedir, packagedir]
conf.set("cleanup", "dirs", ';'.join(cleanupdirs))
def setup_upgradelink():
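    '''(re)create the upgrade symlink so that it points at packagedir'''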
log.info("setting up upgrade symlink: %s->%s", upgradelink, packagedir)
try:
os.remove(upgradelink)
except OSError:
pass
os.symlink(packagedir, upgradelink)
def setup_media_mount(mnt):
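    '''mount the upgrade media below the upgrade symlink using a generated systemd mount unit'''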
# make a "media" subdir where all the packages are
mountpath = os.path.join(upgradelink, "media")
log.info("setting up mount for %s at %s", mnt.dev, mountpath)
mkdir_p(mountpath)
# make a directory to place a unit
mkdir_p(upgrade_target_requires)
# make a modified mnt entry that puts it at mountpath
mediamnt = mnt._replace(rawmnt=mountpath)
# finally, write out a systemd unit to mount media there
unit = write_systemd_unit(mediamnt, upgrade_target_requires)
log.info("wrote %s", unit)
def setup_upgraderoot():
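    '''create the upgrade root directory unless it already exists'''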
if os.path.isdir(upgraderoot):
log.info("upgrade root dir %s already exists", upgraderoot)
return
else:
log.info("creating upgraderoot dir: %s", upgraderoot)
os.makedirs(upgraderoot, 0755)
def prep_upgrade(pkgs):
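    '''prepare the upgrade: link packages, create the magic symlink and the upgrade root dir'''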
# put packages in packagedir (also writes packagelist)
link_pkgs(pkgs)
# make magic symlink
setup_upgradelink()
# make dir for upgraderoot
setup_upgraderoot()
def modify_bootloader(kernel, initrd):
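    '''add a boot entry that boots into system-upgrade.target (with SELinux relaxed)'''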
log.info("adding new boot entry")
args = ["upgrade", "systemd.unit=system-upgrade.target"]
if not is_selinux_enabled():
args.append("selinux=0")
else:
# BLERG. SELinux enforcing will cause problems if the new policy
# disallows something that the previous system did differently.
# See https://bugzilla.redhat.com/bugzilla/show_bug.cgi?id=896010
args.append("enforcing=0")
boot.add_entry(kernel, initrd, banner=_("System Upgrade"), kargs=args)
def prep_boot(kernel, initrd):
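    '''prepare initrd, kernel module dir and bootloader for the upgrade reboot'''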
# check for systems that need mdadm.conf
if boot.need_mdadmconf():
log.info("appending /etc/mdadm.conf to initrd")
boot.initramfs_append_files(initrd, "/etc/mdadm.conf")
# look for updates, and add them to initrd if found
updates = []
try:
updates = list(listdir(update_img_dir))
except (IOError, OSError) as e:
log.info("can't list update img dir %s: %s", update_img_dir, e.strerror)
if updates:
log.info("found updates in %s, appending to initrd", update_img_dir)
boot.initramfs_append_images(initrd, updates)
# make a dir in /lib/modules to hold a copy of the new kernel's modules
# (the initramfs will copy/bind them into place when we reboot)
kv = kernelver(kernel)
if kv:
moddir = os.path.join("/lib/modules", kv)
log.info("creating module dir %s", moddir)
mkdir_p(moddir)
else:
log.warn("can't determine version of kernel image '%s'", kernel)
# set up the boot args
modify_bootloader(kernel, initrd)
def reset_boot():
'''reset bootloader to previous default and remove our boot entry'''
conf = Config(upgradeconf)
kernel = conf.get("boot", "kernel")
if kernel:
boot.remove_entry(kernel)
def remove_boot():
'''remove boot images'''
conf = Config(upgradeconf)
kernel = conf.get("boot", "kernel")
initrd = conf.get("boot", "initrd")
if kernel:
rm_f(kernel)
if initrd:
rm_f(initrd)
def remove_cache():
'''remove our cache dirs'''
conf = Config(upgradeconf)
cleanup = conf.get("cleanup", "dirs") or ''
cleanup = cleanup.split(';')
cleanup += [cachedir, packagedir] # just to be sure
for d in cleanup:
log.info("removing %s", d)
rm_rf(d)
def misc_cleanup(clean_all_repos=True):
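    '''remove the upgrade symlink, the upgrade dirs and the redhat-upgrade-* repo files'''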
log.info("removing symlink %s", upgradelink)
rm_f(upgradelink)
for d in (upgraderoot, upgrade_target_requires):
log.info("removing %s", d)
rm_rf(d)
repodir = '/etc/yum.repos.d'
log.info("removing repo files")
# If clean_all_repos is false, leave behind the repos with regular
# URLs and just clean the ones with file:// URLs (i.e., repos added
# for upgrades from cdrom or other non-network sources)
for repo in glob.glob(repodir + '/redhat-upgrade-*.repo'):
rmrepo=True
if not clean_all_repos:
with open(repo, "r") as repofile:
for line in repofile:
if line.strip().startswith('baseurl') and 'file://' not in line:
rmrepo=False
break
if rmrepo:
rm_rf(repo)
| gpl-2.0 | 5,812,619,654,188,032,000 | 34.315789 | 84 | 0.643939 | false |
jefftc/changlab | Betsy/Betsy/modules/preprocess_mas5.py | 1 | 1766 | from Module import AbstractModule
class Module(AbstractModule):
def __init__(self):
AbstractModule.__init__(self)
def run(
self, network, antecedents, out_attributes, user_options, num_cores,
outfile):
"""preprocess the inputfile with MAS5
using preprocess.py will generate a output file"""
import os
import subprocess
from Betsy import module_utils as mlib
in_data = antecedents
#preprocess the cel file to text signal file
PREPROCESS_BIN = mlib.get_config("preprocess", which_assert_file=True)
#PREPROCESS_path = config.preprocess
#PREPROCESS_BIN = module_utils.which(PREPROCESS_path)
#assert PREPROCESS_BIN, 'cannot find the %s' % PREPROCESS_path
command = ['python', PREPROCESS_BIN, 'MAS5', in_data.identifier]
process = subprocess.Popen(
command, shell=False,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
error_message = process.communicate()[1]
if error_message:
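            # R writes "Loading required package: Biobase" to stderr on startup; only treat other messages as errors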
if not "Loading required package: Biobase" in error_message:
raise ValueError(error_message)
outputfiles = os.listdir(".")
outputfile = None
for i in outputfiles:
if i.endswith('.mas5') and not i.endswith('.l2.mas5'):
outputfile = i
assert outputfile, "No output file created."
os.rename(outputfile, outfile)
def name_outfile(self, antecedents, user_options):
#from Betsy import module_utils
#original_file = module_utils.get_inputid(antecedents.identifier)
#filename = 'signal_mas5_' + original_file + '.jeffs'
#return filename
return "signal.txt"
| mit | -8,856,985,929,441,454,000 | 35.791667 | 78 | 0.620612 | false |
DedMemez/ODS-August-2017 | coghq/DistributedCogHQExteriorDoor.py | 1 | 1122 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.coghq.DistributedCogHQExteriorDoor
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM
import DistributedCogHQDoor
from toontown.hood import ZoneUtil
from BossLobbyGui import BossLobbyGui
class DistributedCogHQExteriorDoor(DistributedCogHQDoor.DistributedCogHQDoor):
def __init__(self, cr):
DistributedCogHQDoor.DistributedCogHQDoor.__init__(self, cr)
self.lobbyGui = None
return
def selectLobby(self, avId):
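        # pop up the boss lobby GUI; the player's selection is returned via sendConfirmation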
print '********\nCreating Lobby GUI...\n********'
self.lobbyGui = BossLobbyGui(self.sendConfirmation, avId)
self.lobbyGui.loadFrame(0)
def sendConfirmation(self, avId, status):
self.lobbyGui.destroy()
self.lobbyGui = None
print '********\nGUI Complete.\nSending Confirmation...\n********'
self.sendUpdate('confirmEntrance', [avId, status])
return | apache-2.0 | -3,317,920,898,135,819,300 | 38.142857 | 102 | 0.704991 | false |
MissionCriticalCloud/marvin | marvin/cloudstackAPI/updateNetwork.py | 1 | 11415 | """Updates a network"""
from baseCmd import *
from baseResponse import *
class updateNetworkCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "true"
"""the ID of the network"""
"""Required"""
self.id = None
self.typeInfo['id'] = 'uuid'
"""Force update even if CIDR type is different"""
self.changecidr = None
self.typeInfo['changecidr'] = 'boolean'
"""an optional field, in case you want to set a custom id to the resource. Allowed to Root Admins only"""
self.customid = None
self.typeInfo['customid'] = 'string'
"""an optional field, whether to the display the network to the end user or not."""
self.displaynetwork = None
self.typeInfo['displaynetwork'] = 'boolean'
"""the new display text for the network"""
self.displaytext = None
self.typeInfo['displaytext'] = 'string'
"""The first DNS server of the network"""
self.dns1 = None
self.typeInfo['dns1'] = 'string'
"""The second DNS server of the network"""
self.dns2 = None
self.typeInfo['dns2'] = 'string'
"""CIDR for guest VMs, CloudStack allocates IPs to guest VMs only from this CIDR"""
self.guestvmcidr = None
self.typeInfo['guestvmcidr'] = 'string'
"""IP exclusion list for private networks"""
self.ipexclusionlist = None
self.typeInfo['ipexclusionlist'] = 'string'
"""the new name for the network"""
self.name = None
self.typeInfo['name'] = 'string'
"""network domain"""
self.networkdomain = None
self.typeInfo['networkdomain'] = 'string'
"""network offering ID"""
self.networkofferingid = None
self.typeInfo['networkofferingid'] = 'uuid'
self.required = ["id", ]
class updateNetworkResponse (baseResponse):
typeInfo = {}
def __init__(self):
"""the id of the network"""
self.id = None
self.typeInfo['id'] = 'string'
"""the owner of the network"""
self.account = None
self.typeInfo['account'] = 'string'
"""ACL Id associated with the VPC network"""
self.aclid = None
self.typeInfo['aclid'] = 'string'
"""acl type - access type to the network"""
self.acltype = None
self.typeInfo['acltype'] = 'string'
"""Broadcast domain type of the network"""
self.broadcastdomaintype = None
self.typeInfo['broadcastdomaintype'] = 'string'
"""broadcast uri of the network. This parameter is visible to ROOT admins only"""
self.broadcasturi = None
self.typeInfo['broadcasturi'] = 'string'
"""list networks available for vm deployment"""
self.canusefordeploy = None
self.typeInfo['canusefordeploy'] = 'boolean'
"""Cloudstack managed address space, all CloudStack managed VMs get IP address from CIDR"""
self.cidr = None
self.typeInfo['cidr'] = 'string'
"""an optional field, whether to the display the network to the end user or not."""
self.displaynetwork = None
self.typeInfo['displaynetwork'] = 'boolean'
"""the displaytext of the network"""
self.displaytext = None
self.typeInfo['displaytext'] = 'string'
"""the first DNS for the network"""
self.dns1 = None
self.typeInfo['dns1'] = 'string'
"""the second DNS for the network"""
self.dns2 = None
self.typeInfo['dns2'] = 'string'
"""the domain name of the network owner"""
self.domain = None
self.typeInfo['domain'] = 'string'
"""the domain id of the network owner"""
self.domainid = None
self.typeInfo['domainid'] = 'string'
"""the network's gateway"""
self.gateway = None
self.typeInfo['gateway'] = 'string'
"""the cidr of IPv6 network"""
self.ip6cidr = None
self.typeInfo['ip6cidr'] = 'string'
"""the gateway of IPv6 network"""
self.ip6gateway = None
self.typeInfo['ip6gateway'] = 'string'
"""list of ip addresses and/or ranges of addresses to be excluded from the network for assignment"""
self.ipexclusionlist = None
self.typeInfo['ipexclusionlist'] = 'string'
"""true if network is default, false otherwise"""
self.isdefault = None
self.typeInfo['isdefault'] = 'boolean'
"""list networks that are persistent"""
self.ispersistent = None
self.typeInfo['ispersistent'] = 'boolean'
"""true if network is system, false otherwise"""
self.issystem = None
self.typeInfo['issystem'] = 'boolean'
"""the name of the network"""
self.name = None
self.typeInfo['name'] = 'string'
"""the network's netmask"""
self.netmask = None
self.typeInfo['netmask'] = 'string'
"""the network CIDR of the guest network configured with IP reservation. It is the summation of CIDR and RESERVED_IP_RANGE"""
self.networkcidr = None
self.typeInfo['networkcidr'] = 'string'
"""the network domain"""
self.networkdomain = None
self.typeInfo['networkdomain'] = 'string'
"""availability of the network offering the network is created from"""
self.networkofferingavailability = None
self.typeInfo['networkofferingavailability'] = 'string'
"""true if network offering is ip conserve mode enabled"""
self.networkofferingconservemode = None
self.typeInfo['networkofferingconservemode'] = 'boolean'
"""display text of the network offering the network is created from"""
self.networkofferingdisplaytext = None
self.typeInfo['networkofferingdisplaytext'] = 'string'
"""network offering id the network is created from"""
self.networkofferingid = None
self.typeInfo['networkofferingid'] = 'string'
"""name of the network offering the network is created from"""
self.networkofferingname = None
self.typeInfo['networkofferingname'] = 'string'
"""the physical network id"""
self.physicalnetworkid = None
self.typeInfo['physicalnetworkid'] = 'string'
"""the project name of the address"""
self.project = None
self.typeInfo['project'] = 'string'
"""the project id of the ipaddress"""
self.projectid = None
self.typeInfo['projectid'] = 'string'
"""related to what other network configuration"""
self.related = None
self.typeInfo['related'] = 'string'
"""the network's IP range not to be used by CloudStack guest VMs and can be used for non CloudStack purposes"""
self.reservediprange = None
self.typeInfo['reservediprange'] = 'string'
"""true network requires restart"""
self.restartrequired = None
self.typeInfo['restartrequired'] = 'boolean'
"""true if network supports specifying ip ranges, false otherwise"""
self.specifyipranges = None
self.typeInfo['specifyipranges'] = 'boolean'
"""state of the network"""
self.state = None
self.typeInfo['state'] = 'string'
"""true if network can span multiple zones"""
self.strechedl2subnet = None
self.typeInfo['strechedl2subnet'] = 'boolean'
"""true if users from subdomains can access the domain level network"""
self.subdomainaccess = None
self.typeInfo['subdomainaccess'] = 'boolean'
"""the traffic type of the network"""
self.traffictype = None
self.typeInfo['traffictype'] = 'string'
"""the type of the network"""
self.type = None
self.typeInfo['type'] = 'string'
"""The vlan of the network. This parameter is visible to ROOT admins only"""
self.vlan = None
self.typeInfo['vlan'] = 'string'
"""VPC the network belongs to"""
self.vpcid = None
self.typeInfo['vpcid'] = 'string'
"""zone id of the network"""
self.zoneid = None
self.typeInfo['zoneid'] = 'string'
"""the name of the zone the network belongs to"""
self.zonename = None
self.typeInfo['zonename'] = 'string'
"""If a network is enabled for 'streched l2 subnet' then represents zones on which network currently spans"""
self.zonesnetworkspans = None
self.typeInfo['zonesnetworkspans'] = 'set'
"""the list of services"""
self.service = []
"""the list of resource tags associated with network"""
self.tags = []
class capability:
def __init__(self):
""""can this service capability value can be choosable while creatine network offerings"""
self.canchooseservicecapability = None
""""the capability name"""
self.name = None
""""the capability value"""
self.value = None
class provider:
def __init__(self):
""""uuid of the network provider"""
self.id = None
""""true if individual services can be enabled/disabled"""
self.canenableindividualservice = None
""""the destination physical network"""
self.destinationphysicalnetworkid = None
""""the provider name"""
self.name = None
""""the physical network this belongs to"""
self.physicalnetworkid = None
""""services for this provider"""
self.servicelist = None
""""state of the network provider"""
self.state = None
class service:
def __init__(self):
""""the service name"""
self.name = None
""""the list of capabilities"""
self.capability = []
""""can this service capability value can be choosable while creatine network offerings"""
self.canchooseservicecapability = None
""""the capability name"""
self.name = None
""""the capability value"""
self.value = None
""""the service provider name"""
self.provider = []
""""uuid of the network provider"""
self.id = None
""""true if individual services can be enabled/disabled"""
self.canenableindividualservice = None
""""the destination physical network"""
self.destinationphysicalnetworkid = None
""""the provider name"""
self.name = None
""""the physical network this belongs to"""
self.physicalnetworkid = None
""""services for this provider"""
self.servicelist = None
""""state of the network provider"""
self.state = None
class tags:
def __init__(self):
""""the account associated with the tag"""
self.account = None
""""customer associated with the tag"""
self.customer = None
""""the domain associated with the tag"""
self.domain = None
""""the ID of the domain associated with the tag"""
self.domainid = None
""""tag key name"""
self.key = None
""""the project name where tag belongs to"""
self.project = None
""""the project id the tag belongs to"""
self.projectid = None
""""id of the resource"""
self.resourceid = None
""""resource type"""
self.resourcetype = None
""""tag value"""
self.value = None
| apache-2.0 | -8,208,851,891,698,272,000 | 40.061151 | 133 | 0.599124 | false |
python-bonobo/bonobo | bonobo/errors.py | 1 | 1729 | from bonobo.util import get_name
class InactiveIOError(IOError):
    """Raised when an I/O operation is attempted on an input or output that is not active."""
class InactiveReadableError(InactiveIOError):
pass
class InactiveWritableError(InactiveIOError):
pass
class ValidationError(RuntimeError):
def __init__(self, inst, message):
super(ValidationError, self).__init__(
"Validation error in {class_name}: {message}".format(class_name=type(inst).__name__, message=message)
)
class ProhibitedOperationError(RuntimeError):
pass
class ConfigurationError(Exception):
pass
class UnrecoverableError(Exception):
"""Flag for errors that must interrupt the workflow, either because they will happen for sure on each node run, or
because you know that your transformation has no point continuing running after a bad event."""
class AbstractError(UnrecoverableError, NotImplementedError):
"""Abstract error is a convenient error to declare a method as "being left as an exercise for the reader"."""
def __init__(self, method):
super().__init__(
"Call to abstract method {class_name}.{method_name}(...): missing implementation.".format(
class_name=get_name(method.__self__), method_name=get_name(method)
)
)
class UnrecoverableTypeError(UnrecoverableError, TypeError):
pass
class UnrecoverableAttributeError(UnrecoverableError, AttributeError):
pass
class UnrecoverableValueError(UnrecoverableError, ValueError):
pass
class UnrecoverableRuntimeError(UnrecoverableError, RuntimeError):
pass
class UnrecoverableNotImplementedError(UnrecoverableError, NotImplementedError):
pass
class MissingServiceImplementationError(UnrecoverableError, KeyError):
pass
| apache-2.0 | 5,472,049,810,391,471,000 | 24.426471 | 118 | 0.727588 | false |
seppius-xbmc-repo/ru | plugin.video.shura.tv/canal_list.py | 1 | 6531 | # -*- coding: utf-8 -*-
dc={
"2x2" : "d001",
"дождь" : "d002",
"домашний" : "d003",
"звезда" : "d004",
"карусель" : "d005",
"мир" : "d006",
"нтв" : "d007",
"первый канал" : "d008",
"перец" : "d009",
"пятый канал" : "d010",
"рен тв" : "d011",
"россия 1" : "d012",
"россия 2" : "d013",
"россия 24" : "d014",
"россия к" : "d015",
"ртр" : "d016",
"стс" : "d017",
"тв 3" : "d018",
"твц" : "d019",
"тнт" : "d020",
"amedia premium" : "amediahd",
"diva universal" : "d022",
"fox" : "d023",
"fox life" : "d024",
"тв21" : "d025",
"rtvi" : "rtvi",
"tv 1000 action" : "d0065",
"tv 1000 east" : "d0064",
"tv 1000 русское кино" : "d0063",
"дом кино" : "d026",
"индия" : "d027",
"кинолюкс" : "d028",
"кино плюс" : "kinoplusntv",
"много tv" : "mnogotv",
"наше любимое кино" : "nahse_lybimoe_kino",
"кинохит" : "d029",
"наше кино" : "d030",
"наше новое кино" : "d031",
"премьера" : "d032",
"cartoon network" : "cartoon_network",
"disney" : "d033",
"gulli" : "d034",
"jimjam" : "d035",
"nick jr." : "d036",
"nickelodeon" : "d037",
"tiji" : "d038",
"детский мир/телеклуб" : "detskiy_mir",
"моя дитина" : "maja_detina",
"авто плюс" : "d041",
"еда" : "d042",
"кухня" : "d043",
"музыка первого" : "d044",
"ост тв" : "d045",
"шансон" : "d046",
"ю тв" : "d047",
"24 док" : "d048",
"24 техно" : "d049",
"animal planet" : "d050",
"amazing life" : "amazing_life",
"da vinci learning" : "d051",
"discovery" : "d052",
"discovery science" : "d053",
"discovery world" : "d054",
"national geographic" : "d055",
"nat geo wild" : "nat_geo_wild",
"investigation discovery" : "id_investigation_discovery",
"загородная жизнь" : "d056",
"наука 2.0" : "d057",
"рбк" : "d058",
"euronews" : "euronews",
"history" : "history",
"eurosport" : "d059",
"eurosport 2" : "d060",
"viasat sport" : "d061",
"нтв+ баскетбол" : "d062",
"боец" : "d063",
"бойцовский клуб" : "d064",
"кхл тв" : "d065",
"нтв+ наш футбол" : "d066",
"нтв+ спорт" : "d067",
"нтв+ спорт союз" : "d068",
"нтв+ теннис" : "d069",
"нтв+ футбол" : "d070",
"нтв+ футбол 2" : "d071",
"нтв+ спортхит" : "ntvsporthit",
"охотник и рыболов" : "d072",
"спорт" : "d073",
"спорт 1" : "d074",
"нтв+ спорт плюс" : "d075",
"baltijos tv btv" : "d076",
"lietuvas ritas" : "d077",
"lnk" : "d078",
"lnt" : "d079",
"ltv1 latvia" : "d080",
"ltv7" : "d081",
"tv 1 (lithuania)" : "d082",
"tv3 latvia" : "d083",
"tv5 riga" : "d084",
"1+1" : "d085",
"ictv" : "d086",
"интер" : "d087",
"интер+" : "d088",
"первый деловой" : "d089",
"стб" : "d090",
"comedy tv" : "comedy_tv",
"travel" : "travel_channel",
"ocean tv" : "ocean_tv",
"rtg tv" : "rtg",
"tlc" : "tlc1",
"hd кино" : "d096",
"hd кино 2" : "d097",
"первый hd" : "d098",
"россия hd" : "d099",
"спорт 1 hd" : "d100",
"ru tv" : "d1066",
"европа+" : "d1122",
"иллюзион+" : "d1800_1",
"моя планета" : "d181",
"совершенно секретно" : "d182",
"sony sci fi" : "d101",
"nano tv" : "russkij_illusion",
"русский иллюзион" : "russkij_illusion_1",
"tdk" : "tdk",
"пятница" : "pyatnitsa",
"отр" : "otr",
"x sport" : "xsport",
"история" : "istoriya",
"viasat explorer" : "viasat_explorer",
"viasat history" : "viasat_history",
"viasat nature" : "viasat_nature",
"кто есть кто" : "kto_est_kto",
"мужской" : "muzkoy",
"ностальгия" : "nostalgy",
"парк развлечений" : "park_razvl_1",
"первый образовательный" : "perviy_nac",
"успех" : "uspeh",
"2+2" : "2+2_ua",
"5 канал" : "5_kanal_ua",
"qtv" : "qtv",
"м1" : "m1",
"м2" : "m2",
"нло тв" : "nlo_tv",
"новый канал" : "noviy_kanal",
"нтн" : "ntn_ua",
"первый национальный" : "perviy_nac",
"просвещение" : "procveschenie",
"плюс плюс" : "plus_plus",
"телеканал футбол" : "telekanal_futbol",
"телеканал украина" : "ukraina",
"спорт 1 украина " : "sport1_ua",
"спорт 2 украина " : "sport2_ua",
"твi" : "tbi_ua",
"тонис" : "tonis",
"тет" : "tet_ua",
"news 24" : "telekanal24",
"футбол 1" : "futbol",
"футбол 2" : "futbol_plus",
"armenia tv" : "d0025",
"lider tv" : "lider_tv",
"bbc world" : "bbcworld",
"cnn international" : "cnninternational",
"docubox hd" : "docubox_hd",
"fightbox hd" : "fightbox_hd",
"filmbox hd" : "filmbox_hd",
"fun and fastbox hd" : "funandfastbox_hd",
"киноклуб" : "kinoklub",
"sony turbo" : "sonyturbo",
"tnt film" : "tnt_film",
"tv 1000 comedy hd" : "tv1000_comedy_hd",
"tv 1000 megahit hd" : "tv1000_megahit_hd",
"tv 1000 premium hd" : "tv1000_premium_hd",
"iconcerts":"iconcerts",
"tv 3+" : "tv3plus",
"кинопоказ 1 hd" : "kinopokaz1_hd",
"кинопоказ 2 hd" : "kinopokaz2_hd",
"кинопремиум hd" : "kinopremium",
"наше hd" : "nashehd",
"остросюжетное hd" : "ostrosugetnoe",
"семейное hd" : "semejnoe_hd",
"сериал hd" : "serial_hd",
"телепутешествия hd" : "teletravel_hd",
"motors tv" : "motorstv",
"eureka hd" : "eureka_hd_ru",
"animal family hd" : "animal_family_hd_ru",
"русский роман" : "rus_roman",
"k2" : "k2",
"сарафан тв" : "sarafan",
"das erste" : "das erste",
"kabel eins" : "kabel eins",
"prosieben" : "prosieben",
"rtl" : "rtl television",
"rtl ii" : "rtl2",
"sat.1" : "sat. 1",
"sixx" : "sixx",
"super rtl" : "super rtl",
"vox" : "vox",
"zdf" : "zdf",
"nat geo wild hd" : "natgeo wild hd",
"national geographic hd" : "national geographic hd",
"amc" : "amc",
"amedia" : "amedia1",
"dtx" : "dtx",
"наш футбол" : "наш футбол",
"нтв+ футбол 1" : "нтв+ футбол 1",
"нтв+ футбол 3" : "нтв+ футбол 3",
"kidzone" : "kidzone",
"discovery hd" : "discovery hd",
"тнт comedy" : "tnt_comedy",
"mtv rock" : "mtv_rock",
"кхл тв hd" : "khl_tv",
"матч!" : "matchtv",
"ctc love" : "sts_love",
"мир сериала" : "мир сериала",
"че" : "chetv",
"бобер тв" : "bober_tv",
"кинозал 1" : "icon2",
"кинорейс 1" : "icon2",
"кинорейс 2" : "icon2"
} | gpl-2.0 | 5,042,035,012,302,814,000 | 24.045249 | 57 | 0.589266 | false |
anthonyfok/frescobaldi | frescobaldi_app/quickinsert/__init__.py | 1 | 2553 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
The Quick Insert panel.
"""
import weakref
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeySequence
import actioncollection
import actioncollectionmanager
import panel
class QuickInsertPanel(panel.Panel):
def __init__(self, mainwindow):
super(QuickInsertPanel, self).__init__(mainwindow)
self.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
self.hide()
self.toggleViewAction().setShortcut(QKeySequence("Meta+Alt+I"))
mainwindow.addDockWidget(Qt.LeftDockWidgetArea, self)
self.actionCollection = QuickInsertActions(self)
actioncollectionmanager.manager(mainwindow).addActionCollection(self.actionCollection)
def translateUI(self):
self.setWindowTitle(_("Quick Insert"))
self.toggleViewAction().setText(_("Quick &Insert"))
def createWidget(self):
from . import widget
return widget.QuickInsert(self)
class QuickInsertActions(actioncollection.ShortcutCollection):
"""Manages keyboard shortcuts for the QuickInsert module."""
name = "quickinsert"
def __init__(self, panel):
super(QuickInsertActions, self).__init__(panel.mainwindow())
self.panel = weakref.ref(panel)
def createDefaultShortcuts(self):
self.setDefaultShortcuts('staccato', [QKeySequence('Ctrl+.')])
self.setDefaultShortcuts('spanner_slur', [QKeySequence('Ctrl+(')])
self.setDefaultShortcuts('breathe_rcomma', [QKeySequence("Alt+'")])
def realAction(self, name):
return self.panel().widget().actionForName(name)
def title(self):
return _("Quick Insert")
| gpl-2.0 | -3,089,438,219,552,527,000 | 34.458333 | 94 | 0.712495 | false |