# cherbib/fofix | src/GuitarScene.py
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstilä                                  #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Spikehead777 #
# 2008 Glorandwarf #
# 2008 ShiekOdaSandz #
# 2008 QQStarS #
# 2008 .liquid. #
# 2008 Blazingamer #
# 2008 evilynux <[email protected]> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Scene import Scene, SuppressScene
from Song import Note, TextEvent, PictureEvent, loadSong, Bars, VocalPhrase
from Menu import Menu
from Language import _
import Player
from Player import STAR, KILL, CANCEL, KEY1A
import Dialogs
import Audio
import Stage
import Settings
import Song
from Scorekeeper import ScoreCard
from Shader import shaders
import random
import os
import Log
import locale
from OpenGL.GL import *
class GuitarScene(Scene):
def __init__(self, engine, libraryName, songName):
Scene.__init__(self, engine)
if self.engine.world.sceneName == "GuitarScene": #MFH - dual / triple loading cycle fix
Log.warn("Extra GuitarScene was instantiated, but detected and shut down. Cause unknown.")
raise SuppressScene #stump
else:
self.engine.createdGuitarScene = True
self.engine.world.sceneName = "GuitarScene"
self.playerList = self.players
self.partyMode = False
self.battle = False #QQstarS:new2 Battle
self.battleGH = False #Qstick
self.coOp = False
self.coOpRB = False #akedrou
self.coOpGH = False
self.coOpType = False
self.practiceMode = False
self.bossBattle = False
self.ready = False
Log.debug("GuitarScene init...")
self.coOpPlayerMeter = 0
#MFH - testing new traceback logging:
#raise TypeError
#myfingershurt: new loading place for "loading" screen for song preparation:
#blazingamer new loading phrases
self.sinfo = Song.loadSongInfo(self.engine, songName, library = libraryName)
phrase = self.sinfo.loadingPhrase
if phrase == "":
phrase = random.choice(self.engine.theme.loadingPhrase)
if phrase == "None":
i = random.randint(0,4)
if i == 0:
phrase = _("Let's get this show on the Road")
elif i == 1:
phrase = _("Impress the Crowd")
elif i == 2:
phrase = _("Don't forget to strum!")
elif i == 3:
phrase = _("Rock the house!")
else:
phrase = _("Jurgen is watching")
splash = Dialogs.showLoadingSplashScreen(self.engine, phrase + " \n " + _("Initializing..."))
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Initializing..."))
self.countdownSeconds = 3 #MFH - don't change this initialization value unless you alter the other related variables to match
self.countdown = 100 #MFH - arbitrary value to prevent song from starting right away
self.countdownOK = False
#MFH - retrieve game parameters:
self.gamePlayers = len(self.engine.world.players)
self.gameMode1p = self.engine.world.gameMode
self.gameMode2p = self.engine.world.multiMode
self.lostFocusPause = self.engine.config.get("game", "lost_focus_pause")
if self.sinfo.bossBattle == "True" and self.gameMode1p == 2 and self.gamePlayers == 1:
self.bossBattle = True
self.engine.world.multiMode = 6
self.gameMode2p = 6
self.gamePlayers = 2
if self.gameMode1p == 2:
self.careerMode = True
else:
self.careerMode = False
#MFH - check for party mode
if self.gameMode2p == 2:
self.partyMode = True
self.gamePlayers = 1
self.partySwitch = 0
self.partyTime = self.engine.config.get("game", "party_time")
self.partyPlayer = 0
elif self.gamePlayers > 1:
#MFH - check for battle mode
if self.gameMode2p == 1:
self.battle = True
self.battleGH = False
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
elif self.gameMode2p == 3:
self.battle = False
self.battleGH = False
self.coOp = True
self.coOpRB = False
self.coOpGH = False
self.coOpType = True
elif self.gameMode2p == 4:
self.battle = False
self.battleGH = False
self.coOp = False
self.coOpRB = True
self.coOpGH = False
self.coOpType = True
elif self.gameMode2p == 5:
self.battle = False
self.battleGH = False
self.coOp = False
self.coOpRB = False
self.coOpGH = True
self.coOpType = True
elif self.gameMode2p == 6:
self.battle = False
self.battleGH = True
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
else:
self.battle = False
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
self.splayers = self.gamePlayers #Spikehead777
#myfingershurt: drums :)
self.instruments = [] # akedrou - this combines Guitars, Drums, and Vocalists
self.keysList = []
self.soloKeysList = []
self.soloShifts = []
self.playingVocals = False
self.numberOfGuitars = len(self.playerList)
self.numOfPlayers = len(self.playerList)
self.numOfSingers = 0
self.firstGuitar = None
self.neckrender = []
gNum = 0
for j,player in enumerate(self.playerList):
guitar = True
if player.part.id == Song.VOCAL_PART:
from Vocalist import Vocalist
inst = Vocalist(self.engine, player, False, j)
if self.coOpRB:
inst.coOpRB = True
self.instruments.append(inst)
self.playingVocals = True
self.numOfSingers += 1
self.numberOfGuitars -= 1
guitar = False
elif player.part.id == Song.DRUM_PART:
#myfingershurt: drums :)
from Drum import Drum
inst = Drum(self.engine,player,False,j)
self.instruments.append(inst)
else:
from Guitar import Guitar
bass = False
if player.part.id == Song.BASS_PART:
bass = True
inst = Guitar(self.engine,player,False,j, bass = bass)
self.instruments.append(inst)
if player.part.id == Song.LEAD_PART or player.part.id == Song.GUITAR_PART: #both these selections should get guitar solos
self.instruments[j].canGuitarSolo = True
if player.practiceMode:
self.practiceMode = True
if guitar:
player.guitarNum = gNum
gNum += 1
if self.firstGuitar is None:
self.firstGuitar = j
self.neckrender.append(self.instruments[j].neck)
if self.instruments[j].isDrum:
self.keysList.append(player.drums)
self.soloKeysList.append(player.drumSolo)
self.soloShifts.append(None)
self.instruments[j].keys = player.drums
self.instruments[j].actions = player.drums
else:
self.keysList.append(player.keys)
self.soloKeysList.append(player.soloKeys)
self.soloShifts.append(player.soloShift)
self.instruments[j].keys = player.keys
self.instruments[j].actions = player.actions
else:
self.neckrender.append(None)
self.keysList.append([])
self.soloKeysList.append([])
self.soloShifts.append([])
self.guitars = self.instruments #for compatibility - I'll try to fix this...
#Log.debug("GuitarScene keysList: " + str(self.keysList))
Log.debug("GuitarScene keysList: %s" % str(self.keysList))
#for number formatting with commas for Rock Band:
locale.setlocale(locale.LC_ALL, '') #more compatible
self.visibility = 1.0
self.libraryName = libraryName
self.songName = songName
self.done = False
#try:
# self.sfxChannel = self.engine.audio.getChannel(5)
#except Exception, e:
# Log.warn("GuitarScene.py: Unable to procure sound effect track: %s" % e)
# self.sfxChannel = None
self.lastMultTime = [None for i in self.playerList]
self.cheatCodes = [
#([117, 112, 116, 111, 109, 121, 116, 101, 109, 112, 111], self.toggleAutoPlay), #Jurgen is enabled in the menu -- Spikehead777
([102, 97, 115, 116, 102, 111, 114, 119, 97, 114, 100], self.goToResults)
]
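#the key-code list above is ASCII for "fastforward"; the commented-out
# Jurgen code spells "uptomytempo"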
self.enteredCode = []
self.song = None
#self.finishedProcessingSong = False
#Spikehead777
#self.jurg = self.engine.config.get("game", "jurgtype")
#MFH
#self.jurgenLogic = self.engine.config.get("game", "jurglogic") #logic 0 = original, logic 1 = MFH-1
self.numOfPlayers = len(self.playerList)
self.jurgenLogic = [0 for i in self.playerList]
for i in range(len(self.playerList)):
self.jurgenLogic[i] = self.engine.config.get("game", "jurg_logic_p%d" % i)
self.aiSkill = [0 for i in self.playerList]
self.aiHitPercentage = [0 for i in self.playerList]
self.aiPlayNote = [True for i in self.playerList]
self.jurgBattleWhammyTime = [0 for i in self.playerList]
self.jurgBattleUseTime = [0 for i in self.playerList]
self.aiUseSP = [0 for i in self.playerList]
self.battleItemsHolding = [0 for i in self.playerList]
self.battleTarget = [0 for i in self.playerList]
for i, player in enumerate(self.playerList):
self.battleTarget[i] = i-1
if self.battleTarget[i] == -1:
self.battleTarget[i] = self.numOfPlayers - 1
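#each player initially targets the previous player in the list, wrapping
# around so that player 0 targets the last player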
self.aiSkill[i] = self.engine.config.get("game", "jurg_skill_p%d" % i)
if self.aiSkill[i] == 0:
self.aiHitPercentage[i] = 70 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 1000
self.jurgBattleUseTime[i] = 5000
elif self.aiSkill[i] == 1:
self.aiHitPercentage[i] = 80 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 750
self.jurgBattleUseTime[i] = 2000
elif self.aiSkill[i] == 2:
self.aiHitPercentage[i] = 85 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 750
self.jurgBattleUseTime[i] = 2000
elif self.aiSkill[i] == 3:
self.aiHitPercentage[i] = 90 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 500
self.jurgBattleUseTime[i] = 1000
elif self.aiSkill[i] == 4:
self.aiHitPercentage[i] = 95 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 250
self.jurgBattleUseTime[i] = 1000 #this will be replaced by algorithm
elif self.aiSkill[i] == 5:
self.aiHitPercentage[i] = 100
self.jurgBattleWhammyTime[i] = 200
self.jurgBattleUseTime[i] = 1000 #this will be replaced by algorithm
if self.aiHitPercentage[i] > 100:
self.aiHitPercentage[i] = 100
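#in short: higher jurg_skill raises the base hit percentage (70-100, plus 5
# per difficulty level, capped at 100) and shortens the battle whammy and
# item-use delays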
#self.jurgenText = self.engine.config.get("game", "jurgtext")
self.jurgenText = self.engine.theme.jurgTextPos
if float(self.jurgenText[2]) < 0.00035:
self.jurgenText[2] = 0.00035
if float(self.jurgenText[0]) < 0:
self.jurgenText[0] = 0
if float(self.jurgenText[1]) < 0:
self.jurgenText[1] = 0
self.battleJurgMissTime = [0 for i in self.playerList]
self.whammySavesSP = self.engine.config.get("game", "whammy_saves_starpower") #MFH
self.failingEnabled = self.engine.config.get("coffee", "failingEnabled")
self.timeLeft = None
self.processedFirstNoteYet = False
#MFH - MUST be in front of loadSettings call!
#self.autoPlay = self.engine.config.get("game", "jurgmode")
#if self.autoPlay == 0:
# self.autoPlay = True
#else:
# self.autoPlay = False
self.playerAssist = [0 for i in self.playerList]
for i, player in enumerate(self.playerList):
if self.instruments[i].isDrum:
if player.autoKick:
self.playerAssist[i] = 3
elif not self.instruments[i].isVocal:
self.playerAssist[i] = player.assistMode
if self.playerAssist[i] == 2 and player.getDifficultyInt() > 1:
self.playerAssist[i] = 0
elif self.playerAssist[i] == 1 and player.getDifficultyInt() > 2:
self.playerAssist[i] = 0
for assistMode in self.playerAssist:
if assistMode > 0:
self.assisting = True
break
else:
self.assisting = False
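#note: the else above belongs to the for loop and runs only when no break
# occurred, i.e. when no player has an assist mode enabled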
self.autoPlay = False
self.jurgPlayer = [False for i in self.playerList]
self.jurg = [False for i in self.playerList]
self.customBot = [None for i in self.playerList]
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
continue
if self.engine.config.get("game", "jurg_p%d" % i) == True:
self.jurg[i] = True
self.autoPlay = True
self.lastPickPos = [None for i in self.playerList]
self.lastSongPos = 0.0
self.keyBurstTimeout = [None for i in self.playerList]
self.keyBurstPeriod = 30
self.camera.target = (0.0, 0.0, 4.0)
self.camera.origin = (0.0, 3.0, -3.0)
self.camera.target = (0.0, 1.0, 8.0)
self.camera.origin = (0.0, 2.0, -3.4)
self.targetX = self.engine.theme.povTargetX
self.targetY = self.engine.theme.povTargetY
self.targetZ = self.engine.theme.povTargetZ
self.originX = self.engine.theme.povOriginX
self.originY = self.engine.theme.povOriginY
self.originZ = self.engine.theme.povOriginZ
self.customPOV = False
self.ending = False
povList = [str(self.targetX), str(self.targetY), str(self.targetZ), str(self.originX), str(self.originY), str(self.originZ)]
if "None" not in povList:
self.customPOV = True
Log.debug("All theme POV set. Using custom camera POV.")
self.pause = False
self.failed = False
self.finalFailed = False
self.failEnd = False
self.crowdsCheering = False #akedrou
self.inUnison = [False for i in self.playerList]
self.haveUnison = [False for i in self.playerList]
self.firstUnison = False
self.firstUnisonDone = False
self.unisonActive = False
self.unisonNum = 0
self.unisonEarn = [False for i in self.playerList]
self.starPowersActive = 0
self.playersInGreen = 0
self.crowdFaderVolume = 0.0
self.coOpStarPower = 0
self.coOpStarPowerTimer = 0
self.coOpStarPowerActive = [0 for i in self.playerList]
self.battleSuddenDeath = False
self.failTimer = 0
self.rockTimer = 0 #myfingershurt
self.youRock = False #myfingershurt
self.rockCountdown = 100
self.soloReviewDispDelay = 300
self.baseScore = 50
self.baseSustainScore = .1
self.rockFinished = False
self.spTimes = [[] for i in self.playerList]
self.midiSP = False
self.oBarScale = 0.0 #volshebnyi - overdrive bar scale factor
#self.bossBattle = False
###Capo###
self.firstClap = True
###endCapo###
self.multi = [1 for i in self.playerList]
self.x1 = [0 for i in self.playerList]
self.y1 = [0 for i in self.playerList]
self.x2 = [0 for i in self.playerList]
self.y2 = [0 for i in self.playerList]
self.x3 = [0 for i in self.playerList]
self.y3 = [0 for i in self.playerList]
if self.coOpType:
self.x1.append(0)
self.y1.append(0)
self.x2.append(0)
self.y2.append(0)
self.x3.append(0)
self.y3.append(0)
#MFH - precalculation variable definition
#Get theme
themename = self.engine.data.themeLabel
self.theme = self.engine.data.theme
self.rmtype = self.theme
if self.engine.theme.hopoIndicatorX != None:
self.hopoIndicatorX = self.engine.theme.hopoIndicatorX
else:
self.hopoIndicatorX = .950
if self.engine.theme.hopoIndicatorY != None:
self.hopoIndicatorY = self.engine.theme.hopoIndicatorY
else:
self.hopoIndicatorY = .710
self.hopoIndicatorActiveColor = self.engine.theme.hopoIndicatorActiveColor
self.hopoIndicatorInactiveColor = self.engine.theme.hopoIndicatorInactiveColor
if self.coOpGH:
for instrument in self.instruments:
instrument.starPowerDecreaseDivisor /= self.numOfPlayers
self.rockMax = 30000.0
self.rockMedThreshold = self.rockMax/3.0 #MFH
self.rockHiThreshold = self.rockMax/3.0*2 #MFH
self.rock = [self.rockMax/2 for i in self.playerList]
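#the rock meter runs from 0 to rockMax and every player starts at the
# midpoint; the two thresholds above mark the lower and upper thirds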
self.arrowRotation = [.5 for i in self.playerList]
self.starNotesMissed = [False for i in self.playerList] #MFH
self.notesMissed = [False for i in self.playerList]
self.lessMissed = [False for i in self.playerList]
self.notesHit = [False for i in self.playerList]
self.lessHit = False
self.minBase = 400
self.pluBase = 15
self.minGain = 2
self.pluGain = 7
self.battleMax = 300 #QQstarS:new2 the maximum rock meter gain during battle
self.minusRock = [self.minBase for i in self.playerList]
self.plusRock = [self.pluBase for i in self.playerList]
self.coOpMulti = 1
self.coOpFailDone = [False for i in self.playerList]
if self.coOpRB: #akedrou
self.coOpPlayerMeter = len(self.rock)
self.rock.append(self.rockMax/2)
self.minusRock.append(0.0)
self.plusRock.append(0.0)
self.timesFailed = [0 for i in self.playerList]
if self.coOp or self.coOpGH:
self.coOpPlayerMeter = len(self.rock)-1 #make sure it's the last one
#Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Stage..."))
stage = os.path.join("themes",themename,"stage.ini")
self.stage = Stage.Stage(self, self.engine.resource.fileName(stage))
#Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Settings..."))
self.loadSettings()
self.tsBotNames = [_("KiD"), _("Stump"), _("AkedRobot"), _("Q"), _("MFH"), _("Jurgen")]
#MFH pre-translate text strings:
self.powerUpName = self.engine.theme.power_up_name
if self.battleGH:
self.tsBattleIcons = [None] * 9
self.tsBattleIcons[1] = _("Death Drain")
self.tsBattleIcons[2] = _("Difficulty Up")
self.tsBattleIcons[3] = _("Broken String")
self.tsBattleIcons[4] = _("Whammy")
self.tsBattleIcons[5] = _("Powerup Steal")
self.tsBattleIcons[6] = _("Switch Controls")
self.tsBattleIcons[7] = _("Double Notes")
self.tsBattleIcons[8] = _("Amp Overload")
self.tsNoteStreak = _("%d Note Streak")
self.tsPhraseStreak = _("%d Phrase Streak")
self.tsStarPowerReady = _("%s Ready") % self.powerUpName
self.tsCoOpStarPower = _("Activate %s!") % self.powerUpName
self.tsYouFailedBattle = _("You Failed")
self.tsJurgenIsHere = _("%s is here")
self.tsJurgenWasHere = _("%s was here")
self.tsPercentComplete = _("% Complete")
self.tsHopoIndicator = _("HOPO")
self.tsCompleted = _("COMPLETED")
self.tsPercentOn = _(" % ON ")
self.tsBassGroove = _("BASS GROOVE")
self.tsBassGrooveLabel = _("Bass Groove:")
self.tsHandicapLabel = _("Handicap")
self.tsAvgLabel = _("Avg")
self.tsAccVeryLate = _("Very Late")
self.tsAccLate = _("Late")
self.tsAccSlightlyLate = _("Slightly Late")
self.tsAccExcellentLate = _("-Excellent!")
self.tsAccPerfect = _("Perfect!!")
self.tsAccExcellentEarly= _("+Excellent!")
self.tsAccSlightlyEarly = _("Slightly Early")
self.tsAccEarly = _("Early")
self.tsAccVeryEarly = _("Very Early")
self.msLabel = _("ms")
self.tsSolo = _("Solo!")
self.tsPerfectSolo = _("Perfect Solo!")
self.tsAwesomeSolo = _("Awesome Solo!")
self.tsGreatSolo = _("Great Solo!")
self.tsGoodSolo = _("Good Solo!")
self.tsSolidSolo = _("Solid Solo!")
self.tsOkaySolo = _("Okay Solo")
self.tsMessySolo = _("Messy Solo")
self.tsPtsLabel = _("pts")
self.tsGetReady = _("Get Ready to Rock")
self.tsAsMadeFamousBy = _("as made famous by")
self.tsBy = _("by ")
self.tsFrettedBy = _(" fretted by ")
for player in self.playerList:
player.currentTheme = self.theme
#MFH - precalculate full and player viewports
self.engine.view.setViewport(1,0)
self.wFull, self.hFull = self.engine.view.geometry[2:4]
#Log.debug("GuitarScene wFull = %d, hFull = %d" % (self.wFull, self.hFull) )
self.wPlayer = []
self.hPlayer = []
self.hOffset = []
self.hFontOffset = []
self.stage.wFull = self.wFull #MFH - needed for new stage background handling
self.stage.hFull = self.hFull
#self.fontScreenBottom = 0.75 #from our current viewport's constant 3:4 aspect ratio (which is always stretched to fill the video resolution)
self.fontScreenBottom = self.engine.data.fontScreenBottom
self.oBarScaleCoef = (0.6 + 0.4 * self.numberOfGuitars) * 1.256 * self.hFull / self.wFull #volshebnyi - depends on resolution and number of players
for i, player in enumerate(self.playerList):
if not self.instruments[i].isVocal:
self.engine.view.setViewportHalf(self.numberOfGuitars,player.guitarNum)
w = self.engine.view.geometryAllHalf[self.numberOfGuitars-1,player.guitarNum,2]
h = self.engine.view.geometryAllHalf[self.numberOfGuitars-1,player.guitarNum,3]
else:
w = self.wFull
h = self.hFull
self.wPlayer.append( w )
self.hPlayer.append( h )
self.hOffset.append( h )
self.hFontOffset.append( h )
if not self.instruments[i].isVocal:
self.wPlayer[i] = self.wPlayer[i]*self.numberOfGuitars #QQstarS: set the width to the correct value
if self.numberOfGuitars>1:
self.hPlayer[i] = self.hPlayer[i]*self.numberOfGuitars/1.5 #QQstarS: set the height to the correct value
self.hOffset[i] = self.hPlayer[i]*.4*(self.numberOfGuitars-1)
else:
self.hPlayer[i] = self.hPlayer[i]*self.numberOfGuitars #QQstarS: set the height to the correct value
self.hOffset[i] = 0
self.hFontOffset[i] = -self.hOffset[i]/self.hPlayer[i]*0.752 #QQstarS: font height offset when there are 2 players
self.engine.view.setViewport(1,0)
self.drumMisses = self.engine.config.get("game", "T_sound") #Faaa Drum sound
if not self.engine.data.bassDrumSoundFound:
self.bassKickSoundEnabled = False
#disable drum miss sounds when any tom or cymbal sample is missing
if not self.engine.data.T1DrumSoundFound:
self.drumMisses = 0
if not self.engine.data.T2DrumSoundFound:
self.drumMisses = 0
if not self.engine.data.T3DrumSoundFound:
self.drumMisses = 0
if not self.engine.data.CDrumSoundFound:
self.drumMisses = 0
#MFH - constant definitions, ini value retrievals
self.pitchBendLowestFactor = .90 #stump: perhaps read this from song.ini and fall back on a specific value?
self.lineByLineLyricMaxLineWidth = 0.5
self.lineByLineStartSlopMs = 750
self.digitalKillswitchStarpowerChunkSize = 0.05 * self.engine.audioSpeedFactor
self.digitalKillswitchActiveStarpowerChunkSize = self.digitalKillswitchStarpowerChunkSize / 3.0
# evilynux: was 0.10, now much closer to actual GH3
self.analogKillswitchStarpowerChunkSize = 0.15 * self.engine.audioSpeedFactor
self.analogKillswitchActiveStarpowerChunkSize = self.analogKillswitchStarpowerChunkSize / 3.0
self.rbOverdriveBarGlowFadeInChunk = .07 #this amount added to visibility every run() cycle when fading in - original .2
self.rbOverdriveBarGlowFadeOutChunk = .03 #this amount subtracted from visibility every run() cycle when fading out - original .07
self.crowdCheerFadeInChunk = .02 #added to crowdVolume every run() when fading in
self.crowdCheerFadeOutChunk = .03 #subtracted from crowdVolume every run() on fade out.
self.maxDisplayTextScale = 0.0024 #orig 0.0024
self.displayTextScaleStep2 = 0.00008 #orig 0.00008
self.displayTextScaleStep1 = 0.0001 #orig 0.0001
self.textTimeToDisplay = 100
self.songInfoDisplayScale = self.engine.theme.songInfoDisplayScale
self.songInfoDisplayX = self.engine.theme.songInfoDisplayX #Worldrave - This controls the X position of song info display during countdown
self.songInfoDisplayY = self.engine.theme.songInfoDisplayY #Worldrave - This controls the Y position of song info display during countdown
self.lyricMode = self.engine.config.get("game", "lyric_mode")
self.scriptLyricPos = self.engine.config.get("game", "script_lyric_pos")
self.starClaps = self.engine.config.get("game", "star_claps")
self.rb_sp_neck_glow = self.engine.config.get("game", "rb_sp_neck_glow")
self.accuracy = [0 for i in self.playerList]
self.resumeCountdownEnabled = self.engine.config.get("game", "resume_countdown")
self.resumeCountdown = 0
self.resumeCountdownSeconds = 0
self.pausePos = 0
self.dispAccuracy = [False for i in self.playerList]
self.showAccuracy = self.engine.config.get("game", "accuracy_mode")
self.hitAccuracyPos = self.engine.config.get("game", "accuracy_pos")
self.showUnusedTextEvents = self.engine.config.get("game", "show_unused_text_events")
self.bassKickSoundEnabled = self.engine.config.get("game", "bass_kick_sound")
self.gameTimeMode = self.engine.config.get("game", "game_time")
self.midiLyricsEnabled = self.engine.config.get("game", "rb_midi_lyrics")
self.midiSectionsEnabled = self.engine.config.get("game", "rb_midi_sections") #MFH
if self.numOfPlayers > 1 and self.midiLyricsEnabled == 1:
self.midiLyricsEnabled = 0
if self.numOfPlayers > 1 and self.midiSectionsEnabled == 1:
self.midiSectionsEnabled = 0
self.hopoDebugDisp = self.engine.config.get("game","hopo_debug_disp")
if self.hopoDebugDisp == 1:
for instrument in self.instruments:
if not instrument.isDrum and not instrument.isVocal:
instrument.debugMode = True
self.numDecimalPlaces = self.engine.config.get("game","decimal_places")
self.roundDecimalForDisplay = lambda n: ('%%.%df' % self.numDecimalPlaces) % float(n) #stump
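#e.g. decimal_places = 2 builds '%.2f', so roundDecimalForDisplay(3.14159) yields '3.14'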
self.starScoring = self.engine.config.get("game", "star_scoring")#MFH
self.ignoreOpenStrums = self.engine.config.get("game", "ignore_open_strums") #MFH
self.muteSustainReleases = self.engine.config.get("game", "sustain_muting") #MFH
self.hopoIndicatorEnabled = self.engine.config.get("game", "hopo_indicator") #MFH
self.fontShadowing = self.engine.config.get("game", "in_game_font_shadowing") #MFH
self.muteLastSecond = self.engine.config.get("audio", "mute_last_second") #MFH
self.mutedLastSecondYet = False
self.muteDrumFill = self.engine.config.get("game", "mute_drum_fill") #MFH
self.starScoreUpdates = self.engine.config.get("performance", "star_score_updates") #MFH
self.currentlyAnimating = True
self.missPausesAnim = self.engine.config.get("game", "miss_pauses_anim") #MFH
self.displayAllGreyStars = self.engine.theme.displayAllGreyStars
self.starpowerMode = self.engine.config.get("game", "starpower_mode") #MFH
self.useMidiSoloMarkers = False
self.logMarkerNotes = self.engine.config.get("game", "log_marker_notes")
self.logStarpowerMisses = self.engine.config.get("game", "log_starpower_misses")
self.soloFrameMode = self.engine.config.get("game", "solo_frame")
self.whammyEffect = self.engine.config.get("audio", "whammy_effect")
if self.whammyEffect == 1 and not Audio.pitchBendSupported: #pitchbend
Dialogs.showMessage(self.engine, "Pitchbend module not found! Forcing Killswitch effect.")
self.whammyEffect = 0
shaders.var["whammy"] = self.whammyEffect
self.bigRockEndings = self.engine.config.get("game", "big_rock_endings")
self.showFreestyleActive = self.engine.config.get("debug", "show_freestyle_active")
#stump: continuous star fillup
self.starFillupCenterX = self.engine.theme.starFillupCenterX
self.starFillupCenterY = self.engine.theme.starFillupCenterY
self.starFillupInRadius = self.engine.theme.starFillupInRadius
self.starFillupOutRadius = self.engine.theme.starFillupOutRadius
self.starFillupColor = self.engine.theme.colorToHex(self.engine.theme.starFillupColor)
self.starContinuousAvailable = self.engine.config.get("performance", "star_continuous_fillup") and \
None not in (self.starFillupCenterX, self.starFillupCenterY, self.starFillupInRadius, self.starFillupOutRadius, self.starFillupColor)
self.showBpm = self.engine.config.get("debug", "show_bpm") #MFH
self.logLyricEvents = self.engine.config.get("log", "log_lyric_events")
#self.logTempoEvents = self.engine.config.get("log", "log_tempo_events")
self.vbpmLogicType = self.engine.config.get("debug", "use_new_vbpm_beta")
#MFH - switch to midi lyric mode option
self.midiLyricMode = self.engine.config.get("game", "midi_lyric_mode")
#self.midiLyricMode = 0
self.currentSimpleMidiLyricLine = ""
self.noMoreMidiLineLyrics = False
self.screenCenterX = self.engine.video.screen.get_rect().centerx
self.screenCenterY = self.engine.video.screen.get_rect().centery
#racer: practice beat claps:
self.beatClaps = self.engine.config.get("game", "beat_claps")
self.killDebugEnabled = self.engine.config.get("game", "kill_debug")
#myfingershurt: for checking if killswitch key is analog for whammy
self.whammyVolAdjStep = 0.1
self.analogKillMode = [self.engine.input.getAnalogKill(i) for i in range(self.numOfPlayers)]
self.isKillAnalog = [False for i in self.playerList]
self.isSPAnalog = [False for i in self.playerList]
self.isSlideAnalog = [False for i in self.playerList]
self.whichJoyKill = [0 for i in self.playerList]
self.whichAxisKill = [0 for i in self.playerList]
self.whichJoyStar = [0 for i in self.playerList]
self.whichAxisStar = [0 for i in self.playerList]
self.whichJoySlide = [0 for i in self.playerList]
self.whichAxisSlide = [0 for i in self.playerList]
self.whammyVol = [0.0 for i in self.playerList]
self.starAxisVal = [0.0 for i in self.playerList]
self.starDelay = [0.0 for i in self.playerList]
self.starActive = [False for i in self.playerList]
self.slideValue = [-1 for i in self.playerList]
self.targetWhammyVol = [0.0 for i in self.playerList]
self.defaultWhammyVol = [self.analogKillMode[i]-1.0 for i in range(self.numOfPlayers)] #makes xbox defaults 1.0, PS2 defaults 0.0
for i in range(self.numOfPlayers):
if self.analogKillMode[i] == 3: #XBOX inverted mode
self.defaultWhammyVol[i] = -1.0
self.actualWhammyVol = [self.defaultWhammyVol[i] for i in range(self.numOfPlayers)]
self.lastWhammyVol = [self.defaultWhammyVol[i] for i in range(self.numOfPlayers)]
KillKeyCode = [0 for i in self.playerList]
StarKeyCode = [0 for i in self.playerList]
SlideKeyCode = [0 for i in self.playerList]
self.lastTapText = "tapp: -"
#myfingershurt: auto drum starpower activation option
#self.autoDrumStarpowerActivate = self.engine.config.get("game", "auto_drum_sp")
self.autoDrumStarpowerActivate = self.engine.config.get("game", "drum_sp_mode")
self.analogSlideMode = [self.engine.input.getAnalogSlide(i) for i in range(self.numOfPlayers)]
self.analogSPMode = [self.engine.input.getAnalogSP(i) for i in range(self.numOfPlayers)]
self.analogSPThresh = [self.engine.input.getAnalogSPThresh(i) for i in range(self.numOfPlayers)]
self.analogSPSense = [self.engine.input.getAnalogSPSense(i) for i in range(self.numOfPlayers)]
self.numDrumFills = 0 #MFH - count drum fills to see whether or not we should use auto SP
#MFH - TODO - rewrite in an expandable fashion; requires creation of some new Player object constants that will link to the appropriate player's control based on which player the object is set to
for i, player in enumerate(self.playerList):
if self.analogKillMode[i] > 0:
KillKeyCode[i] = self.controls.getReverseMapping(player.keyList[KILL])
self.isKillAnalog[i], self.whichJoyKill[i], self.whichAxisKill[i] = self.engine.input.getWhammyAxis(KillKeyCode[i])
if self.isKillAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])
except IndexError:
self.isKillAnalog[i] = False
if self.analogSPMode[i] > 0:
StarKeyCode[i] = self.controls.getReverseMapping(player.keyList[STAR])
self.isSPAnalog[i], self.whichJoyStar[i], self.whichAxisStar[i] = self.engine.input.getWhammyAxis(StarKeyCode[i])
if self.isSPAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoyStar[i]].get_axis(self.whichAxisStar[i])
except IndexError:
self.isSPAnalog[i] = False
if player.controlType == 4:
SlideKeyCode[i] = self.controls.getReverseMapping(player.keyList[KEY1A])
self.isSlideAnalog[i], self.whichJoySlide[i], self.whichAxisSlide[i] = self.engine.input.getWhammyAxis(SlideKeyCode[i])
if self.isSlideAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])
except IndexError:
self.isSlideAnalog[i] = False
self.inGameStats = self.engine.config.get("performance","in_game_stats")
self.inGameStars = self.engine.config.get("game","in_game_stars")
self.partialStars = self.engine.config.get("game","partial_stars")
self.guitarSoloAccuracyDisplayMode = self.engine.config.get("game", "gsolo_accuracy_disp")
self.guitarSoloAccuracyDisplayPos = self.engine.config.get("game", "gsolo_acc_pos")
#need a new flag for each player, showing whether or not they've missed a note during a solo section.
#this way we have a backup detection of Perfect Solo in case a note got left out, picks up the other side of the solo slop
self.guitarSoloBroken = [False for i in self.playerList]
self.deadPlayerList = [] #akedrou - keep the order failed.
self.numDeadPlayers = 0
coOpInstruments = []
self.scoring = []
#self.stars = [0,0]
for instrument in self.instruments:
if instrument.isDrum:
this = Song.DRUM_PART
coOpInstruments.append(this)
elif instrument.isBassGuitar:
this = Song.BASS_PART
coOpInstruments.append(this)
elif instrument.isVocal:
this = Song.VOCAL_PART
coOpInstruments.append(this)
else:
this = Song.GUITAR_PART
coOpInstruments.append(this) #while different guitars exist, they don't affect scoring.
self.scoring.append(ScoreCard([this]))
if self.coOpType:
self.coOpScoreCard = ScoreCard(coOpInstruments, coOpType = True)
else:
self.coOpScoreCard = None
self.partialStar = [0 for i in self.playerList]
self.starRatio = [0.0 for i in self.playerList]
self.dispSoloReview = [False for i in self.playerList]
self.soloReviewText = [[] for i in self.playerList]
self.soloReviewCountdown = [0 for i in self.playerList]
self.guitarSoloAccuracy = [0.0 for i in self.playerList]
self.guitarSoloActive = [False for i in self.playerList]
self.currentGuitarSolo = [0 for i in self.playerList]
#guitar solo display initializations
if self.theme == 2:
self.solo_soloFont = self.engine.data.scoreFont
else:
self.solo_soloFont = self.engine.data.font
self.guitarSoloShown = [False for i in self.playerList]
self.currentGuitarSoloLastHitNotes = [1 for i in self.playerList]
self.solo_xOffset = [0.0 for i in self.playerList]
self.solo_yOffset = [0.0 for i in self.playerList]
self.solo_boxXOffset = [0.0 for i in self.playerList]
self.solo_boxYOffset = [0.0 for i in self.playerList]
self.solo_Tw = [0.0 for i in self.playerList]
self.solo_Th = [0.0 for i in self.playerList]
self.solo_soloText = ["solo" for i in self.playerList]
self.soloAcc_Rect = [None for i in self.playerList]
self.solo_txtSize = 0.00250
for i, playa in enumerate(self.playerList):
if self.guitarSoloAccuracyDisplayPos == 0: #right
if self.guitarSoloAccuracyDisplayMode == 1: #percentage only
self.solo_xOffset[i] = 0.890
else:
self.solo_xOffset[i] = 0.950
else:
self.solo_xOffset[i] = 0.150
self.solo_yOffset[i] = 0.320 #last change -.040
#self.totalNotes = [0,0]
#self.totalSingleNotes = [0,0]
self.currentGuitarSoloTotalNotes = [0 for i in self.playerList]
#self.currentGuitarSoloHitNotes = [0,0]
self.guitarSolos = [ [] for i in self.playerList]
guitarSoloStartTime = 0
isGuitarSoloNow = False
guitarSoloNoteCount = 0
lastSoloNoteTime = 0
self.drumStart = False
soloSlop = 100.0
unisonCheck = []
if self.careerMode:
self.failingEnabled = True
self.tut = self.engine.config.get("game", "tut")
#MFH - no Jurgen in Career mode or tutorial mode or practice mode:
if self.careerMode or self.tut or self.playerList[0].practiceMode:
self.autoPlay = False
#force jurgen player 2 (and only player 2) for boss battles
if self.bossBattle:
self.autoPlay = True
self.jurg = [False for i in self.playerList]
self.jurg[1] = True
self.rockFailUp = True #akedrou - fading mech
self.rockFailViz = 0.0
self.failViz = [0.0 for i in self.playerList]
self.phrases = self.engine.config.get("coffee", "game_phrases")#blazingamer
self.starfx = self.engine.config.get("game", "starfx")#blazingamer
smallMult = self.engine.config.get("game","small_rb_mult")
self.rbmfx = False
if smallMult == 2 or (smallMult == 1 and self.engine.theme.smallMult):
self.rbmfx = True
self.boardY = 2
self.rbOverdriveBarGlowVisibility = 0
self.rbOverdriveBarGlowFadeOut = False
self.counting = self.engine.config.get("video", "counting")
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Song..."))
#MFH - this is where song loading originally took place, and the loading screen was spawned.
self.engine.resource.load(self, "song", lambda: loadSong(self.engine, songName, library = libraryName, part = [player.part for player in self.playerList], practiceMode = self.playerList[0].practiceMode, practiceSpeed = self.playerList[0].practiceSpeed), synch = True, onLoad = self.songLoaded)
# glorandwarf: show the loading splash screen and load the song synchronously
#Dialogs.hideLoadingSplashScreen(self.engine, splash)
#splash = None
#splash = Dialogs.showLoadingSplashScreen(self.engine, phrase)
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Preparing Note Phrases..."))
if self.playerList[0].practiceMode or self.song.info.tutorial or self.tut:
self.failingEnabled = False
self.playerList[0].hopoFreq = self.song.info.hopofreq
bassGrooveEnableSet = self.engine.config.get("game", "bass_groove_enable")
if bassGrooveEnableSet == 1 and self.theme == 2:
self.bassGrooveEnabled = True
elif bassGrooveEnableSet == 2 and self.song.midiStyle == Song.MIDI_TYPE_RB:
self.bassGrooveEnabled = True
elif bassGrooveEnableSet == 3:
self.bassGrooveEnabled = True
else:
self.bassGrooveEnabled = False
for i, drum in enumerate(self.instruments):
if not drum.isDrum:
continue
if drum.drumFlip:
for d in range(len(Song.difficulties)):
self.song.tracks[i][d].flipDrums()
for scoreCard in self.scoring:
scoreCard.bassGrooveEnabled = self.bassGrooveEnabled
#MFH - single audio track song detection
self.isSingleAudioTrack = self.song.isSingleAudioTrack
#myfingershurt: also want to go through song and search for guitar solo parts, and count notes in them in each diff.
#MFH - now, handle MIDI starpower / overdrive / other special marker notes:
#MFH - first, count the markers for each instrument. If a particular instrument does not have at least two starpower phrases
# marked, ignore them and force auto-generation of SP paths.
for i in range(self.numOfPlayers): #MFH - count number of drum fills
if self.instruments[i].isDrum: #MFH - count number of drum fill markers
self.numDrumFills = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and (event.number == Song.freestyleMarkingNote) ) ])
Log.debug("Drum part found, scanning for drum fills.... %d freestyle markings found (the last one may be a Big Rock Ending)." % self.numDrumFills)
#MFH - handle early hit window automatic type determination, and how it compares to the forced handicap if not auto
self.effectiveEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
self.forceEarlyHitWindowSetting = self.engine.config.get("handicap", "early_hit_window")
if self.song.info.early_hit_window_size:
Log.debug("song.ini setting found speficying early_hit_window_size - %s" % self.song.info.early_hit_window_size)
if self.song.info.early_hit_window_size.lower() == "none":
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_NONE
elif self.song.info.early_hit_window_size.lower() == "half":
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
#elif self.song.info.early_hit_window_size.lower() == "full":
else: #all other unrecognized cases, default to "full"
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_FULL
else:
Log.debug("No song.ini setting found speficying early_hit_window_size - using automatic detection...")
if self.song.midiStyle == Song.MIDI_TYPE_RB:
Log.debug("Basic RB1/RB2 type MIDI found - early hitwindow of NONE is set as handicap base.")
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_NONE
if self.forceEarlyHitWindowSetting > 0: #MFH - if user is specifying a specific early hitwindow, then calculate handicap...
self.effectiveEarlyHitWindow = self.forceEarlyHitWindowSetting
tempHandicap = 1.00
if self.automaticEarlyHitWindow > self.effectiveEarlyHitWindow: #MFH - positive handicap
tempHandicap += ( (self.automaticEarlyHitWindow - self.effectiveEarlyHitWindow) * 0.05 )
elif self.automaticEarlyHitWindow < self.effectiveEarlyHitWindow: #MFH - negative handicap
tempHandicap -= ( (self.effectiveEarlyHitWindow - self.automaticEarlyHitWindow) * 0.05 )
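#worked example: a song auto-detected as "full" (3) forced to a "none" (1)
# early hit window yields tempHandicap = 1.00 + (3 - 1) * 0.05 = 1.10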
for scoreCard in self.scoring:
scoreCard.earlyHitWindowSizeHandicap = tempHandicap
if self.coOpType:
self.coOpScoreCard.earlyHitWindowSizeHandicap = tempHandicap
#Log.debug("User-forced early hit window setting %d, effective handicap determined: %f" % (self.forceEarlyHitWindowSetting,tempHandicap) ) #MFH - not used atm
else:
#Log.debug("Automatic early hit window mode - automatically-detected setting used: %d" % self.automaticEarlyHitWindow) #MFH - not used atm
self.effectiveEarlyHitWindow = self.automaticEarlyHitWindow
tempEarlyHitWindowSizeFactor = 0.5
if self.effectiveEarlyHitWindow == 1: #none
tempEarlyHitWindowSizeFactor = 0.10 #really, none = about 10%
elif self.effectiveEarlyHitWindow == 2: #half
tempEarlyHitWindowSizeFactor = 0.5
else: #any other value will be full
tempEarlyHitWindowSizeFactor = 1.0
#MFH - TODO - single, global BPM here instead of in instrument objects:
#self.tempoBpm = Song.DEFAULT_BPM
#self.actualBpm = 0.0
#self.targetPeriod = 60000.0 / self.targetBpm
self.disableVBPM = self.engine.config.get("game", "disable_vbpm")
self.currentBpm = Song.DEFAULT_BPM
self.currentPeriod = 60000.0 / self.currentBpm
self.targetBpm = self.currentBpm
self.lastBpmChange = -1.0
self.baseBeat = 0.0
#for guit in self.guitars: #MFH - tell guitar / drum objects which VBPM logic to use
# guit.vbpmLogicType = self.vbpmLogicType
for instrument in self.instruments: #MFH - force update of early hit window
instrument.earlyHitWindowSizeFactor = tempEarlyHitWindowSizeFactor
instrument.actualBpm = 0.0
instrument.currentBpm = Song.DEFAULT_BPM
instrument.setBPM(instrument.currentBpm)
#if self.starpowerMode == 2: #auto-MIDI mode only
self.markSolos = self.engine.config.get("game", "mark_solo_sections")
if self.markSolos == 2:
if self.engine.theme.markSolos == 2:
if self.theme == 2:
self.markSolos = 1
else:
self.markSolos = 0
else:
self.markSolos = self.engine.theme.markSolos
if self.song.hasStarpowerPaths:
for i,guitar in enumerate(self.instruments):
if guitar.isVocal:
continue
#MFH - first, count the SP marker notes!
numOfSpMarkerNotes = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and not event.endMarker and (event.number == Song.overDriveMarkingNote or (event.number == Song.starPowerMarkingNote and self.song.midiStyle == Song.MIDI_TYPE_GH) ) ) ])
#also want to count RB solo sections in this track, if the MIDI type is RB. Then we'll know to activate MIDI guitar solo markers or not
# for this instrument
if self.song.midiStyle == Song.MIDI_TYPE_RB:
numMidiSoloMarkerNotes = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and not event.endMarker and event.number == Song.starPowerMarkingNote ) ])
if numMidiSoloMarkerNotes > 0 and self.markSolos > 0: #if at least 1 solo marked in this fashion, tell that guitar to ignore text solo events
self.useMidiSoloMarkers = True
guitar.useMidiSoloMarkers = True
if self.neckrender[self.playerList[i].guitarNum] is not None:
self.neckrender[self.playerList[i].guitarNum].useMidiSoloMarkers = True
if numOfSpMarkerNotes > 1:
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
markStarpower = False
if event.number == Song.overDriveMarkingNote:
markStarpower = True
if event.number == Song.starPowerMarkingNote:
if self.song.midiStyle == Song.MIDI_TYPE_GH:
markStarpower = True
#else: #RB solo marking!
if markStarpower and self.starpowerMode == 2: #auto-MIDI mode only:
tempStarpowerNoteList = self.song.track[i].getEvents(time, time+event.length)
self.spTimes[i].append((time,time+event.length))
lastSpNoteTime = 0
for spTime, spEvent in tempStarpowerNoteList:
if isinstance(spEvent, Note):
if spTime > lastSpNoteTime:
lastSpNoteTime = spTime
spEvent.star = True
#now, go back and mark all of the last chord as finalStar
# BUT only if not drums! If drums, mark only ONE of the last notes!
#lastChordTime = spTime
oneLastSpNoteMarked = False
for spTime, spEvent in tempStarpowerNoteList:
if isinstance(spEvent, Note):
if spTime == lastSpNoteTime:
if (guitar.isDrum and not oneLastSpNoteMarked) or (not guitar.isDrum):
spEvent.finalStar = True
oneLastSpNoteMarked = True
if self.logMarkerNotes == 1:
Log.debug("GuitarScene: P%d overdrive / starpower phrase marked between %f and %f" % ( i+1, time, time+event.length ) )
if lastSpNoteTime == 0:
Log.warn("This starpower phrase doesn't appear to have any finalStar notes marked... probably will not reward starpower!")
self.midiSP = True
unisonCheck.extend(self.spTimes[i])
elif self.starpowerMode == 2: #this particular instrument has fewer than two starpower phrases marked! Force auto-generation of SP paths.
Log.warn("Instrument %s has fewer than two starpower phrases marked! ...falling back on auto-generated paths for this instrument." % self.playerList[i].part.text)
guitar.starNotesSet = False #fallback on auto generation.
elif self.starpowerMode == 2:
if self.numberOfGuitars > 0:
Log.warn("This song does not appear to have any starpower or overdrive paths marked, falling back on auto-generated paths.")
for instrument in self.instruments:
if instrument.isVocal:
continue
instrument.starNotesSet = False #fallback on auto generation.
if self.useMidiSoloMarkers or self.song.midiStyle == Song.MIDI_TYPE_RB or self.markSolos == 3: #assume RB Midi-types with no solos don't want any, dammit!
self.markSolos = 0
for i, player in enumerate(self.playerList):
if player.guitarNum is not None:
self.instruments[i].markSolos = self.markSolos
if self.neckrender[player.guitarNum] is not None:
self.neckrender[player.guitarNum].markSolos = self.markSolos
self.lastDrumNoteTime = 0.0
self.lastNoteTimes = [0.0 for i in self.playerList]
#self.lastDrumNoteEvent = None
self.drumScoringEnabled = True
#akedrou - moved this to the part where it loads notes...
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
self.song.track[i].removeTempoEvents()
self.song.track[i].markPhrases()
holdingTap = False
holdingTapLength = 0
holdingTapNotes = 0
phraseId = 0
for time, event in self.song.track[i].getAllEvents():
if isinstance(event, VocalPhrase):
if event.tapPhrase:
if not holdingTap:
holdingTap = True
self.instruments[i].tapPartStart.append(phraseId)
holdingTapLength += 1
holdingTapNotes += len(event)
else:
if holdingTap:
self.instruments[i].tapPartLength.append(holdingTapLength)
self.instruments[i].tapNoteTotals.append(holdingTapNotes)
self.instruments[i].tapNoteHits.append(0)
holdingTap = False
holdingTapLength = 0
holdingTapNotes = 0
phraseId += 1
else:
self.instruments[i].totalPhrases = phraseId
if holdingTap:
self.instruments[i].tapPartLength.append(holdingTapLength)
self.instruments[i].tapNoteTotals.append(holdingTapNotes)
self.instruments[i].tapNoteHits.append(0)
else:
#myfingershurt: preventing ever-thickening BPM lines after restarts
self.song.track[i].markBars()
#MFH - should only be done the first time.
if self.hopoStyle > 0 or self.song.info.hopo == "on":
if not self.instruments[i].isDrum and not self.instruments[i].isVocal:
if self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO system
self.song.track[i].markHopoGH2(self.song.info.eighthNoteHopo, self.hopoAfterChord, self.song.info.hopofreq)
elif self.hopoStyle == 1: #RF-Mod style HOPO system
self.song.track[i].markHopoRF(self.song.info.eighthNoteHopo, self.song.info.hopofreq)
#self.song.track[i].removeTempoEvents() #MFH - perform a little event cleanup on these tracks
if self.battleGH and not self.instruments[i].isVocal:
if self.instruments[i].difficulty != 0:
self.song.difficulty[i] = Song.difficulties[self.instruments[i].difficulty-1]
self.song.track[i].markBars()
if self.hopoStyle > 0 or self.song.info.hopo == "on":
if not self.instruments[i].isDrum:
if self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO system
self.song.track[i].markHopoGH2(self.song.info.eighthNoteHopo, self.hopoAfterChord, self.song.info.hopofreq)
elif self.hopoStyle == 1: #RF-Mod style HOPO system
self.song.track[i].markHopoRF(self.song.info.eighthNoteHopo, self.song.info.hopofreq)
#self.song.track[i].removeTempoEvents() #MFH - perform a little event cleanup on these tracks
self.song.difficulty[i] = Song.difficulties[self.instruments[i].difficulty]
#myfingershurt: removing buggy disable stats option
lastTime = 0
for i in range(self.numOfPlayers):
for time, event in self.song.track[i].getAllEvents():
if not isinstance(event, Note) and not isinstance(event, VocalPhrase):
continue
if time + event.length > lastTime:
lastTime = time + event.length
self.lastEvent = lastTime + 1000
self.lastEvent = round(self.lastEvent / 1000) * 1000
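#pad one second past the final note or phrase, then snap to a whole second
# (song positions are in milliseconds)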
#self.notesCum = 0
self.noteLastTime = 0
totalBreNotes = 0
#count / init solos and notes
for i,instrument in enumerate(self.instruments):
#MFH - go through, locate, and mark the last drum note. When this is encountered, drum scoring should be turned off.
lastDrumNoteTime = 0.0
lastDrumNoteEvent = None
for time, event in self.song.track[i].getAllEvents():
if isinstance(event, Note) or isinstance(event, VocalPhrase):
if time >= lastDrumNoteTime:
lastDrumNoteTime = time
lastDrumNoteEvent = event
if instrument.isDrum:
self.lastDrumNoteTime = lastDrumNoteTime
Log.debug("Last drum note located at time = " + str(self.lastDrumNoteTime) )
#self.lastDrumNoteEvent = lastDrumNoteEvent
self.scoring[i].totalStreakNotes = len([1 for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, Note)])
elif instrument.isVocal:
self.scoring[i].totalStreakNotes = len([1 for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, VocalPhrase)])
else:
self.scoring[i].totalStreakNotes = len(set(time for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, Note)))
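#guitar/bass streak counting uses len(set(times)) so that simultaneous notes
# (chords) count once; drums and vocals count every event individually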
#self.song.track[i].allEvents[self.song.track[i].maxIndex][0]
#self.scoring[i].totalStreakNotes = len(set(time for time, event in self.song.track[i].getAllEvents() if isinstance(event, Note)))
self.scoring[i].lastNoteEvent = lastDrumNoteEvent
self.scoring[i].lastNoteTime = lastDrumNoteTime
self.lastNoteTimes[i] = lastDrumNoteTime
if lastDrumNoteEvent:
if isinstance(lastDrumNoteEvent, Note):
Log.debug("Last note (number %d) found for player %d at time %f" % (lastDrumNoteEvent.number, i, lastDrumNoteTime) )
elif isinstance(lastDrumNoteEvent, VocalPhrase):
Log.debug("Last vocal phrase found for player %d at time %f" % (i, lastDrumNoteTime) )
else:
Log.debug("Last note event not found and is None!")
#- #volshebnyi - don't count notes in BRE zones if BRE active
#- if guitar.freestyleEnabled:
#- self.playerList[i].freestyleSkippedNotes = 0
#- for time, event in self.song.midiEventTrack[i].getAllEvents():
#- if isinstance(event, Song.MarkerNote) and not event.endMarker:
#- if (event.number == Song.freestyleMarkingNote):
#- if guitar.isDrum:
#- guitar.drumFillsTotal += 1
#- else:
#- for freestyleTime, event1 in self.song.track[i].getEvents(time, time + event.length):
#- if isinstance(event1, Note):
#- self.playerList[i].freestyleSkippedNotes += 1
#-
#- self.playerList[i].totalStreakNotes -= self.playerList[i].freestyleSkippedNotes
if instrument.isVocal:
self.scoring[i].totalNotes = self.scoring[i].totalStreakNotes - len(instrument.tapNoteTotals)
self.scoring[i].totalPercNotes = sum(instrument.tapNoteTotals)
self.scoring[i].baseScore = (instrument.vocalBaseScore * self.scoring[i].totalNotes) + (self.scoring[i].totalPercNotes * instrument.baseScore)
else:
self.scoring[i].totalNotes = len([1 for Ntime, event in self.song.track[i].getAllEvents() if isinstance(event, Note)])
if self.song.midiEventTrack[i] is not None: # filters out vocals
#MFH - determine which marker is BRE, and count streak notes behind it to remove from the scorecard
if self.song.hasFreestyleMarkings:
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
if (event.number == Song.freestyleMarkingNote):
thisIsABre = False
#if guitar.isDrum and self.song.breMarkerTime: #MFH - must ensure this song HAS a BRE!
# if time > self.song.breMarkerTime:
# thisIsABre = True
#else: #MFH - guitar or bass; no BRE text event marker required
if not instrument.isDrum:
thisIsABre = True
if thisIsABre: #MFH - only deal with guitar/bass BRE notes here. Drum notes will be handled in realtime as they are encountered under a fill or BRE.
breStart = time
breEnd = time + event.length
#if guitar.isDrum: #MFH - count drum notes individually
# numBreStreakNotes = len([1 for time, event in self.song.track[i].getEvents(breStart, breEnd) if isinstance(event, Note)])
#else: #MFH - count guitar / bass notes with grouped chords
numBreStreakNotes = len(set(time for time, event in self.song.track[i].getEvents(breStart, breEnd) if isinstance(event, Note)))
self.scoring[i].totalStreakNotes -= numBreStreakNotes #MFH - remove BRE notes correctly from streak count.
Log.debug("Removed %d streak notes from player %d" % (numBreStreakNotes, i) )
totalBreNotes += numBreStreakNotes
if instrument.useMidiSoloMarkers: #mark using the new MIDI solo marking system
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
if (event.number == Song.starPowerMarkingNote) and (self.song.midiStyle == Song.MIDI_TYPE_RB): #solo marker note.
startTime = time
endTime = time + event.length
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(startTime, endTime) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
Log.debug("P" + str(i+1) + " MIDI " + self.playerList[i].part.text + " Solo found from: " + str(startTime) + " to: " + str(endTime) + ", containing " + str(guitarSoloNoteCount) + " notes." )
elif instrument.markSolos == 1: #mark using the old text-based system
#Ntime now should contain the last note time - this can be used for guitar solo finishing
#MFH - use new self.song.eventTracks[Song.TK_GUITAR_SOLOS] -- retrieve a gsolo on / off combo, then use it to count notes
# just like before, detect if end reached with an open solo - and add a GSOLO OFF event just before the end of the song.
for time, event in self.song.eventTracks[Song.TK_GUITAR_SOLOS].getAllEvents():
if event.text.find("GSOLO") >= 0:
if event.text.find("ON") >= 0:
isGuitarSoloNow = True
guitarSoloStartTime = time
else:
isGuitarSoloNow = False
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, time) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
Log.debug("GuitarScene: Guitar Solo found: " + str(guitarSoloStartTime) + "-" + str(time) + " = " + str(guitarSoloNoteCount) )
if isGuitarSoloNow: #open solo until end - needs end event!
isGuitarSoloNow = False
#guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, time) if isinstance(Gevent, Note)])
#MFH - must find the real "last note" time, requires another iteration...
for lnTime, lnEvent in self.song.track[i].getAllEvents():
if isinstance(lnEvent, Note):
if lnTime > Ntime:
Ntime = lnTime
#Ntime = Ntime + soloSlop
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, Ntime) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
newEvent = TextEvent("GSOLO OFF", 100.0)
#self.song.eventTracks[Song.TK_GUITAR_SOLOS].addEvent(time - soloSlop,newEvent) #adding the missing GSOLO OFF event
self.song.eventTracks[Song.TK_GUITAR_SOLOS].addEvent(Ntime, newEvent) #adding the missing GSOLO OFF event
Log.debug("GuitarScene: Guitar Solo until end of song found - (guitarSoloStartTime - Ntime = guitarSoloNoteCount): " + str(guitarSoloStartTime) + "-" + str(Ntime) + " = " + str(guitarSoloNoteCount) )
self.unisonConfirm = [] #akedrou
self.unisonPlayers = []
self.unisonIndex = 0
if self.coOpRB:
for spNoted in unisonCheck:
if unisonCheck.count(spNoted) > 1:
if not spNoted in self.unisonConfirm:
self.unisonConfirm.append(spNoted)
if len(self.unisonConfirm) > 0:
self.unisonPlayers = [[] for i in self.unisonConfirm]
for i in range(len(self.unisonConfirm)):
for j in range(len(self.spTimes)):
if self.unisonConfirm[i] in self.spTimes[j]:
self.unisonPlayers[i].append(j)
Log.debug("Unisons confirmed: " + str(self.unisonConfirm))
Log.debug("Unisons between: " + str(self.unisonPlayers))
#MFH - handle gathering / sizing / grouping line-by-line lyric display here, during initialization:
self.midiLyricLineEvents = [] #MFH - this is a list of sublists of tuples.
# The tuples will contain (time, event)
# The sublists will contain:
# references to Lyric text events that will be treated as lines
# such that the game can still use song position to determine each text event's color
self.midiLyricLines = [] #MFH - this is a list of text strings
# it will contain a list of the concatenated midi lines for a simpler lyric display mode
self.nextMidiLyricLine = ""
self.lyricHeight = 0
if self.midiLyricsEnabled > 0 and (self.midiLyricMode == 1 or self.midiLyricMode == 2) and not self.playingVocals: #line-by-line lyrics mode is selected and enabled:
lyricFont = self.engine.data.font
if self.theme == 2:
txtSize = 0.00170
else:
txtSize = 0.00175
self.lyricHeight = lyricFont.getStringSize("A", scale = txtSize)[1]
#MFH - now we need an appropriate array to store and organize the lyric events into "lines"
# -- the first attempt at coding this will probably butcher the measures and timing horribly, but at least
# those of us with older systems can read the lyrics without them jumping all over the place.
tempLyricLine = ""
tempLyricLineEvents = []
firstTime = None
for time, event in self.song.eventTracks[Song.TK_LYRICS].getAllEvents():
if not firstTime:
firstTime = time
lastLyricLineContents = tempLyricLine
tempLyricLine = tempLyricLine + " " + event.text
if lyricFont.getStringSize(tempLyricLine, scale = txtSize)[0] > self.lineByLineLyricMaxLineWidth:
self.midiLyricLineEvents.append(tempLyricLineEvents)
self.midiLyricLines.append( (firstTime, lastLyricLineContents) )
          firstTime = time #the overflowing word's time starts the new line
tempLyricLine = event.text
tempLyricLineEvents = []
tempLyricLineEvents.append( (time, event) )
      else: #for-else: runs once the loop finishes, flushing the final accumulated line
if len(self.midiLyricLines) > 0:
self.midiLyricLineEvents.append(tempLyricLineEvents)
self.midiLyricLines.append( (firstTime, tempLyricLine) )
#MFH - test unpacking / decoding the lyrical lines:
for midiLyricSubList in self.midiLyricLineEvents:
if self.logLyricEvents == 1:
Log.debug("...New MIDI lyric line:")
for lyricTuple in midiLyricSubList:
time, event = lyricTuple
if self.logLyricEvents == 1:
Log.debug("MIDI Line-by-line lyric unpack test - time, event = " + str(time) + ", " + event.text )
for lineStartTime, midiLyricSimpleLineText in self.midiLyricLines:
if self.logLyricEvents == 1:
Log.debug("MIDI Line-by-line simple lyric line starting at time: " + str(lineStartTime) + ", " + midiLyricSimpleLineText)
self.numMidiLyricLines = len(self.midiLyricLines)
#self.initializeStarScoringThresholds() #MFH
self.coOpTotalStreakNotes = 0
self.coOpTotalNotes = 0
coOpTotalStreakNotes = 0
coOpTotalNotes = 0
if self.coOpScoreCard:
self.coOpScoreCard.lastNoteTime = max(self.lastNoteTimes)
Log.debug("Last note for co-op mode found at %.2f" % self.coOpScoreCard.lastNoteTime)
for i, scoreCard in enumerate(self.scoring): #accumulate base scoring values for co-op
if self.coOpScoreCard:
self.coOpScoreCard.totalStreakNotes += scoreCard.totalStreakNotes
self.coOpScoreCard.totalNotes += scoreCard.totalNotes
    self.coOpPlayerIndex = self.numOfPlayers
if self.coOpScoreCard:
self.coOpScoreCard.totalStreakNotes -= totalBreNotes
#glorandwarf: need to store the song's beats per second (bps) for later
self.songBPS = self.song.bpm / 60.0
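    # e.g. a 120 BPM song gives songBPS = 2.0 beats per second; the countdowns in
    # resetVariablesToDefaults() and resumeGame() are specified in seconds and
    # converted to beats by multiplying by this value.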
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Graphics..."))
# evilynux - Load stage background(s)
if self.stage.mode == 3:
if Stage.videoAvailable:
songVideo = None
if self.song.info.video is not None:
songVideo = self.song.info.video
songVideoStartTime = self.song.info.video_start_time
songVideoEndTime = self.song.info.video_end_time
if songVideoEndTime == -1:
songVideoEndTime = None
self.stage.loadVideo(self.libraryName, self.songName,
songVideo = songVideo,
songVideoStartTime = songVideoStartTime,
songVideoEndTime = songVideoEndTime)
else:
Log.warn("Video playback is not supported. GStreamer or its python bindings can't be found")
self.engine.config.set("game", "stage_mode", 1)
self.stage.mode = 1
self.stage.load(self.libraryName, self.songName, self.playerList[0].practiceMode)
#MFH - this determination logic should happen once, globally -- not repeatedly.
self.showScriptLyrics = False
if not self.playingVocals:
if self.song.hasMidiLyrics and self.lyricMode == 3: #racer: new option for double lyrics
self.showScriptLyrics = False
elif not self.song.hasMidiLyrics and self.lyricMode == 3: #racer
self.showScriptLyrics = True
elif self.song.info.tutorial:
self.showScriptLyrics = True
elif self.lyricMode == 1 and self.song.info.lyrics: #lyrics: song.ini
self.showScriptLyrics = True
elif self.lyricMode == 2: #lyrics: Auto
self.showScriptLyrics = True
self.ready = True
#lyric sheet!
if not self.playingVocals:
if self.song.hasMidiLyrics and self.midiLyricsEnabled > 0:
if self.midiLyricMode == 0:
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet.png")):
self.lyricSheet = None
else:
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet2.png")):
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet.png")):
self.lyricSheet = None
else:
self.lyricSheet = None
else:
self.lyricSheet = None
if self.lyricSheet:
imgwidth = self.lyricSheet.width1()
self.lyricSheetScaleFactor = 640.000/imgwidth
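      # The 640.000/width factors here (and below) appear to scale each image into a
      # 640-unit-wide virtual screen space; e.g. a 512 px wide sheet would get a
      # scale factor of 1.25. (Inferred from the formula, not a documented API.)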
#brescorebackground.png
if self.engine.loadImgDrawing(self, "breScoreBackground", os.path.join("themes",themename,"brescorebackground.png")):
breScoreBackgroundImgwidth = self.breScoreBackground.width1()
self.breScoreBackgroundWFactor = 640.000/breScoreBackgroundImgwidth
else:
Log.debug("BRE score background image loading problem!")
self.breScoreBackground = None
self.breScoreBackgroundWFactor = None
#brescoreframe.png
if self.engine.loadImgDrawing(self, "breScoreFrame", os.path.join("themes",themename,"brescoreframe.png")):
breScoreFrameImgwidth = self.breScoreFrame.width1()
self.breScoreFrameWFactor = 640.000/breScoreFrameImgwidth
else:
#MFH - fallback on using soloframe.png if no brescoreframe.png is found
if self.engine.loadImgDrawing(self, "breScoreFrame", os.path.join("themes",themename,"soloframe.png")):
breScoreFrameImgwidth = self.breScoreFrame.width1()
self.breScoreFrameWFactor = 640.000/breScoreFrameImgwidth
else:
self.breScoreFrame = None
self.breScoreFrameWFactor = None
if self.engine.loadImgDrawing(self, "soloFrame", os.path.join("themes",themename,"soloframe.png")):
soloImgwidth = self.soloFrame.width1()
self.soloFrameWFactor = 640.000/soloImgwidth
#soloImgheight = self.soloFrame.height1()
#soloHeightYFactor = (640.000*self.hFull)/self.wFull
#self.soloFrameHFactor = soloHeightYFactor/soloImgheight
else:
self.soloFrame = None
self.soloFrameWFactor = None
#self.soloFrameHFactor = None
self.partImage = True
self.part = [None for i in self.playerList]
self.partLoad = None
if self.counting or self.coOpType:
for i in range(self.numOfPlayers):
if not self.partImage:
break
if self.instruments[i].isDrum:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"drum.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("drum.png")):
self.counting = False
self.partImage = False
elif self.instruments[i].isBassGuitar:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"bass.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("bass.png")):
self.counting = False
self.partImage = False
elif self.instruments[i].isVocal:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"mic.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("mic.png")):
self.counting = False
self.partImage = False
else:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"guitar.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("guitar.png")):
self.counting = False
self.partImage = False
if self.partLoad:
self.part[i] = self.partLoad
self.partLoad = None
if self.soloFrameMode == 0:
self.soloFrame = None
#self.soloFrameHFactor = None
self.soloFrameWFactor = None
#Pause Screen
self.engine.loadImgDrawing(self, "pauseScreen", os.path.join("themes",themename,"pause.png"))
if not self.engine.loadImgDrawing(self, "failScreen", os.path.join("themes",themename,"fail.png")):
self.engine.loadImgDrawing(self, "failScreen", os.path.join("themes",themename,"pause.png"))
#failMessage
self.engine.loadImgDrawing(self, "failMsg", os.path.join("themes",themename,"youfailed.png"))
#myfingershurt: youRockMessage
self.engine.loadImgDrawing(self, "rockMsg", os.path.join("themes",themename,"yourock.png"))
self.counterY = -0.1
self.coOpPhrase = 0
self.scaleText = [0.0 for i in self.playerList]
self.displayText = [None for i in self.playerList]
self.displayTextScale = [0.0 for i in self.playerList]
#self.streakFlag = None #QQstarS:Set the flag,to show which one has reach the 50 note
self.textTimer = [0.0 for i in self.playerList]
#self.textChanged = False
self.textY = [.3 for i in self.playerList]
self.scaleText2 = [0.0 for i in self.playerList]
self.goingUP = [False for i in self.playerList]
if self.battleGH:
self.battleJustUsed = [0 for i in self.playerList]
self.battleText = [None for i in self.playerList]
self.battleTextTimer = [0.0 for i in self.playerList]
self.lastStreak = [0 for i in self.playerList]
if self.coOpType:
self.coOpPhrase = len(self.scaleText)
self.scaleText.append(0.0)
self.displayText.append(None)
self.displayTextScale.append(0.0)
self.textTimer.append(0.0)
self.textY.append(.3)
self.scaleText2.append(0.0)
self.goingUP.append(False)
self.lastStreak.append(0)
self.killswitchEngaged = [None for i in self.playerList]
#MFH - retrieve theme.ini pause background & text positions
self.pause_bkg = [float(i) for i in self.engine.theme.pause_bkg_pos]
self.pause_text_x = self.engine.theme.pause_text_xPos
self.pause_text_y = self.engine.theme.pause_text_yPos
    if self.pause_text_x is None:
      self.pause_text_x = .3
    if self.pause_text_y is None:
      self.pause_text_y = .31
#MFH - new theme.ini color options:
self.pause_text_color = self.engine.theme.hexToColor(self.engine.theme.pause_text_colorVar)
self.pause_selected_color = self.engine.theme.hexToColor(self.engine.theme.pause_selected_colorVar)
self.fail_text_color = self.engine.theme.hexToColor(self.engine.theme.fail_text_colorVar)
self.fail_selected_color = self.engine.theme.hexToColor(self.engine.theme.fail_selected_colorVar)
self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.fail_completed_colorVar)
settingsMenu = Settings.GameSettingsMenu(self.engine, self.pause_text_color, self.pause_selected_color, players = self.playerList)
careerSettingsMenu = Settings.GameCareerSettingsMenu(self.engine, self.pause_text_color, self.pause_selected_color, players = self.playerList)
settingsMenu.fadeScreen = False
careerSettingsMenu.fadeScreen = False
# evilynux - More themeable options
self.rockmeter_score_color = self.engine.theme.rockmeter_score_colorVar
#self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.song_name_selected_colorVar) # text same color as selected song
#self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.fail_text_colorVar) #No, now same as fail_text color.
self.ingame_stats_color = self.engine.theme.ingame_stats_colorVar
    if self.pause_text_color is None:
      self.pause_text_color = (1,1,1)
    if self.pause_selected_color is None:
      self.pause_selected_color = (1,0.75,0)
    if self.fail_text_color is None:
      self.fail_text_color = (1,1,1)
    if self.fail_selected_color is None:
      self.fail_selected_color = (1,0.75,0)
    if self.fail_completed_color is None:
      self.fail_completed_color = self.fail_text_color
Log.debug("Pause text / selected colors: " + str(self.pause_text_color) + " / " + str(self.pause_selected_color))
#racer: theme.ini fail positions
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
self.fail_bkg = [float(i) for i in self.engine.theme.fail_bkg_pos]
self.fail_text_x = self.engine.theme.fail_text_xPos
self.fail_text_y = self.engine.theme.fail_text_yPos
self.failSongPos=(self.engine.theme.fail_songname_xPos,self.engine.theme.fail_songname_yPos)
    if self.fail_text_x is None:
      self.fail_text_x = .5-size[0]/2.0
    if self.fail_text_y is None:
      self.fail_text_y = .47
if self.theme == 1: #GH3-like theme
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong), #Worldrave adjusted proper spacing.
(_(" RESTART"), self.restartSong),
#(_(" GIVE UP"), self.changeSong), *Worldrave-commented out just to match GH3. Since this is a GH3 specific instruction.
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" OPTIONS"), careerSettingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color, append_submenu_char = False)
else:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
# (_(" GIVE UP"), self.changeSong),
(_(" END SONG"), self.endSong),
(_(" OPTIONS"), settingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color, append_submenu_char = False)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_("RETRY SONG"), self.restartAfterFail),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Failed menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_("RETRY SONG"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
#FirstTime = True
#self.restartSong(FirstTime)
elif self.theme == 0: #GH2-like theme
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" Resume"), self.resumeSong),
(_(" Start Over"), self.restartSong),
(_(" Change Song"), self.changeSong),
(_(" Practice"), self.practiceSong), #evilynux
(_(" Settings"), careerSettingsMenu),
(_(" Quit to Main Menu"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
else:
self.menu = Menu(self.engine, [
(_(" Resume"), self.resumeSong),
(_(" Start Over"), self.restartSong),
(_(" Change Song"), self.changeSong),
(_(" End Song"), self.endSong),
(_(" Settings"), settingsMenu),
(_(" Quit to Main Menu"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_(" Try Again?"), self.restartAfterFail),
(_(" Give Up?"), self.changeAfterFail),
(_(" Practice?"), self.practiceSong), #evilynux
(_("Quit to Main"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Fail menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_(" Try Again?"), self.restartAfterFail),
(_(" Give Up?"), self.changeAfterFail),
(_("Quit to Main"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
#FirstTime = True
#self.restartSong(FirstTime)
elif self.theme == 2: #RB-like theme
size = self.engine.data.pauseFont.getStringSize("Quit to Main Menu")
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
(_(" CHANGE SONG"), self.changeSong),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" SETTINGS"), careerSettingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
else:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
(_(" CHANGE SONG"), self.changeSong),
(_(" END SONG"), self.endSong),
(_(" SETTINGS"), settingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_(" RETRY"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Fail menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_(" RETRY"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
self.restartSong(firstTime = True)
# hide the splash screen
Dialogs.hideLoadingSplashScreen(self.engine, splash)
splash = None
self.engine.createdGuitarScene = False
  #MFH - end of GuitarScene client initialization routine
def pauseGame(self):
if self.song and self.song.readyToGo:
self.song.pause()
self.pausePos = self.getSongPosition()
self.pause = True
for instrument in self.instruments:
instrument.paused = True
if instrument.isVocal:
instrument.stopMic()
else:
instrument.neck.paused = True
def failGame(self):
self.engine.view.pushLayer(self.failMenu)
if self.song and self.song.readyToGo and self.pause: #akedrou - don't let the pause menu overlap the fail menu.
self.engine.view.popLayer(self.menu)
self.pause = False
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.stopMic()
else:
instrument.neck.paused = False
self.failEnd = True
def resumeGame(self):
self.loadSettings()
self.setCamera()
if self.resumeCountdownEnabled and not self.failed and not self.countdown:
self.resumeCountdownSeconds = 3
self.resumeCountdown = float(self.resumeCountdownSeconds) * self.songBPS
self.pause = False
else:
if self.song and self.song.readyToGo:
if not self.failed: #akedrou - don't resume the song if you have already failed.
self.song.unpause()
self.pause = False
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.startMic()
else:
instrument.neck.paused = False
def resumeSong(self):
self.engine.view.popLayer(self.menu)
self.resumeGame()
def lostFocus(self): #akedrou - catch to pause on lostFocus
if self.song and self.song.readyToGo:
if not self.failed and not self.pause and self.lostFocusPause == True:
self.engine.view.pushLayer(self.menu)
self.pauseGame()
def setCamera(self):
#x=0 middle
#x=1 rotate left
#x=-1 rotate right
#y=3 middle
#y=4 rotate back
#y=2 rotate front
#z=-3
if self.rmtype == 3:
self.camera.target = (0.0, 1.4, 1.8)
self.camera.origin = (0.0, 2.8, -3.6)
elif self.customPOV:
self.camera.target = (self.targetX, self.targetY, self.targetZ)
self.camera.origin = (self.originX, self.originY*self.boardY, self.originZ)
else:
if self.pov == 1: #GH3
self.camera.target = (0.0, 0.6, 4.4)
self.camera.origin = (0.0, 3.5*self.boardY, -3.8)
elif self.pov == 2: #RB
self.camera.target = (0.0, 0.0, 3.7)
self.camera.origin = (0.0, 2.9*self.boardY, -2.9)
elif self.pov == 3: #GH2
self.camera.target = (0.0, 1.6, 2.0)
self.camera.origin = (0.0, 2.6*self.boardY, -3.6)
elif self.pov == 4: #Rock Rev
self.camera.target = (0.0, -6.0, 2.6666666666)
self.camera.origin = (0.0, 6.0, 2.6666666665)
elif self.pov == 5: #Theme
if self.rmtype == 0:
self.camera.target = (0.0, 1.6, 2.0)
self.camera.origin = (0.0, 2.6*self.boardY, -3.6)
elif self.rmtype == 1:
self.camera.target = (0.0, 0.6, 4.4) #Worldrave - Perfected the proper GH3 POV
self.camera.origin = (0.0, 3.5*self.boardY, -3.8)
elif self.rmtype == 2:
self.camera.target = (0.0, 0.0, 3.7)
self.camera.origin = (0.0, 2.9*self.boardY, -2.9)
else: # FoF
self.camera.target = (0.0, 0.0, 4.0)
self.camera.origin = (0.0, 3.0*self.boardY, -3.0)
def freeResources(self):
self.engine.view.setViewport(1,0)
self.counter = None
self.failScreen = None
self.failMsg = None
self.menu = None
self.mult = None
self.pauseScreen = None
self.rockTop = None
self.rockMsg = None
for instrument in self.instruments:
if instrument.isVocal:
instrument.stopMic()
#MFH - Ensure all event tracks are destroyed before removing Song object!
if self.song:
self.song.tracks = None
self.song.eventTracks = None
self.song.midiEventTracks = None
if self.whammyEffect == 1:
self.song.resetInstrumentPitch(-1)
self.song = None
#MFH - additional cleanup!
self.lyricSheet = None
self.starWhite = None
self.starGrey = None
self.starPerfect = None
self.starGrey1 = None
self.starGrey2 = None
self.starGrey3 = None
self.starGrey4 = None
self.starGrey5 = None
self.starGrey6 = None
self.starGrey7 = None
self.part = [None for i in self.playerList]
for scoreCard in self.scoring:
scoreCard.lastNoteEvent = None
if self.coOpType:
self.coOpScoreCard.lastNoteEvent = None
if self.stage.mode == 3 and Stage.videoAvailable:
self.engine.view.popLayer(self.stage.vidPlayer)
def getHandicap(self):
hopoFreq = self.engine.config.get("coffee", "hopo_frequency")
try:
songHopo = int(self.song.info.hopofreq)
except Exception, e:
songHopo = 1
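    # scoreCard.handicap is a bitmask; every block below sets its bit at most once
    # via the "(handicap>>n)&1 != 1" guard (so "handicap += 0x8" sets bit 3).
    # Bit layout, read directly off the checks that follow:
    #   0x1 GH2 sloppy, 0x2 scalable (song speed / early hit window), 0x4 no-fail,
    #   0x8 two-chord max, 0x10/0x20/0x40/0x80 hit window 0.70/1.0/1.9/2.3,
    #   0x100 no taps, 0x200-0x2000 HO/PO frequency settings, 0x4000 all taps,
    #   0x8000 whammy saves SP, 0x10000 Jurgen autoplay, 0x20000-0x80000 assist modes.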
for i, scoreCard in enumerate(self.scoring):
if self.instruments[i].isVocal:
if self.engine.audioSpeedFactor != 1 or scoreCard.earlyHitWindowSizeHandicap != 1.0: #scalable handicaps
if (scoreCard.handicap>>1)&1 != 1:
scoreCard.handicap += 0x2
if self.coOpType:
if (self.coOpScoreCard.handicap>>1)&1 != 1:
self.coOpScoreCard.handicap += 0x2
if not self.failingEnabled:
if (scoreCard.handicap>>2)&1 != 1:
scoreCard.handicap += 0x4
if self.coOpType:
if (self.coOpScoreCard.handicap>>2)&1 != 1:
self.coOpScoreCard.handicap += 0x4
continue
if self.gh2sloppy == 1 and not self.instruments[i].isDrum: # or self.rb2sloppy == 1:
if (scoreCard.handicap)&1 != 1:
scoreCard.handicap += 1
if self.coOpType:
if self.coOpScoreCard.handicap&1 != 1:
self.coOpScoreCard.handicap += 1
if self.engine.audioSpeedFactor != 1 or scoreCard.earlyHitWindowSizeHandicap != 1.0: #scalable handicaps
if (scoreCard.handicap>>1)&1 != 1:
scoreCard.handicap += 0x2
if self.coOpType:
if (self.coOpScoreCard.handicap>>1)&1 != 1:
self.coOpScoreCard.handicap += 0x2
if not self.failingEnabled:
if (scoreCard.handicap>>2)&1 != 1:
scoreCard.handicap += 0x4
if self.coOpType:
if (self.coOpScoreCard.handicap>>2)&1 != 1:
self.coOpScoreCard.handicap += 0x4
if self.instruments[i].twoChordApply:
if (scoreCard.handicap>>3)&1 != 1:
scoreCard.handicap += 0x8
if self.coOpType:
if (self.coOpScoreCard.handicap>>3)&1 != 1:
self.coOpScoreCard.handicap += 0x8
if self.instruments[i].hitw == 0.70:
if (scoreCard.handicap>>4)&1 != 1:
scoreCard.handicap += 0x10
if self.coOpType:
if (self.coOpScoreCard.handicap>>4)&1 != 1:
self.coOpScoreCard.handicap += 0x10
elif self.instruments[i].hitw == 1.0:
if (scoreCard.handicap>>5)&1 != 1:
scoreCard.handicap += 0x20
if self.coOpType:
if (self.coOpScoreCard.handicap>>5)&1 != 1:
self.coOpScoreCard.handicap += 0x20
elif self.instruments[i].hitw == 1.9:
if (scoreCard.handicap>>6)&1 != 1:
scoreCard.handicap += 0x40
if self.coOpType:
if (self.coOpScoreCard.handicap>>6)&1 != 1:
self.coOpScoreCard.handicap += 0x40
elif self.instruments[i].hitw == 2.3:
if (scoreCard.handicap>>7)&1 != 1:
scoreCard.handicap += 0x80
if self.coOpType:
if (self.coOpScoreCard.handicap>>7)&1 != 1:
self.coOpScoreCard.handicap += 0x80
if self.hopoStyle == 0 and not self.instruments[i].isDrum: #no taps
if (scoreCard.handicap>>8)&1 != 1:
scoreCard.handicap += 0x100
if self.coOpType:
if (self.coOpScoreCard.handicap>>8)&1 != 1:
self.coOpScoreCard.handicap += 0x100
elif hopoFreq == 0 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>9)&1 != 1:
scoreCard.handicap += 0x200
if self.coOpType:
if (self.coOpScoreCard.handicap>>9)&1 != 1:
self.coOpScoreCard.handicap += 0x200
elif hopoFreq == 1 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>10)&1 != 1:
scoreCard.handicap += 0x400
if self.coOpType:
if (self.coOpScoreCard.handicap>>10)&1 != 1:
self.coOpScoreCard.handicap += 0x400
elif hopoFreq == 3 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>11)&1 != 1:
scoreCard.handicap += 0x800
if self.coOpType:
if (self.coOpScoreCard.handicap>>11)&1 != 1:
self.coOpScoreCard.handicap += 0x800
elif hopoFreq == 4 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>12)&1 != 1:
scoreCard.handicap += 0x1000
if self.coOpType:
if (self.coOpScoreCard.handicap>>12)&1 != 1:
self.coOpScoreCard.handicap += 0x1000
elif hopoFreq == 5 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>13)&1 != 1:
scoreCard.handicap += 0x2000
if self.coOpType:
if (self.coOpScoreCard.handicap>>13)&1 != 1:
self.coOpScoreCard.handicap += 0x2000
elif self.allTaps == 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>14)&1 != 1:
scoreCard.handicap += 0x4000
if self.coOpType:
if (self.coOpScoreCard.handicap>>14)&1 != 1:
self.coOpScoreCard.handicap += 0x4000
if self.whammySavesSP and not self.instruments[i].isDrum:
if (scoreCard.handicap>>15)&1 != 1:
scoreCard.handicap += 0x8000
if self.coOpType:
if (self.coOpScoreCard.handicap>>15)&1 != 1:
self.coOpScoreCard.handicap += 0x8000
if self.autoPlay and self.jurg[i]:
if (scoreCard.handicap>>16)&1 != 1:
scoreCard.handicap += 0x10000
if self.coOpType:
if (self.coOpScoreCard.handicap>>16)&1 != 1:
self.coOpScoreCard.handicap += 0x10000
if self.playerAssist[i] == 1:
if (scoreCard.handicap>>17)&1 != 1:
scoreCard.handicap += 0x20000
if self.coOpType:
if (self.coOpScoreCard.handicap>>17)&1 != 1:
self.coOpScoreCard.handicap += 0x20000
if self.playerAssist[i] == 2:
if (scoreCard.handicap>>18)&1 != 1:
scoreCard.handicap += 0x40000
if self.coOpType:
if (self.coOpScoreCard.handicap>>18)&1 != 1:
self.coOpScoreCard.handicap += 0x40000
if self.playerAssist[i] == 3:
if (scoreCard.handicap>>19)&1 != 1:
scoreCard.handicap += 0x80000
if self.coOpType:
if (self.coOpScoreCard.handicap>>19)&1 != 1:
self.coOpScoreCard.handicap += 0x80000
scoreCard.updateHandicapValue()
if self.coOpType:
self.coOpScoreCard.updateHandicapValue()
def loadSettings(self):
self.stage.updateDelays()
self.activeVolume = self.engine.config.get("audio", "guitarvol")
self.screwUpVolume = self.engine.config.get("audio", "screwupvol")
self.killVolume = self.engine.config.get("audio", "kill_volume")
#self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
self.crowdVolume = self.engine.config.get("audio", "crowd_volume") #akedrou
self.crowdsEnabled = self.engine.config.get("audio", "enable_crowd_tracks")
#self.engine.data.sfxVolume = self.sfxVolume #MFH - keep Data updated
self.engine.data.crowdVolume = self.crowdVolume
#MFH - now update volume of all screwup sounds and other SFX:
self.engine.data.SetAllScrewUpSoundFxObjectVolumes(self.screwUpVolume)
#self.engine.data.SetAllSoundFxObjectVolumes(self.sfxVolume)
#Re-apply Jurgen Settings -- Spikehead777
self.autoPlay = False
self.jurg = [False for i in self.playerList]
self.jurgenLogic = [0 for i in self.playerList]
self.aiSkill = [0 for i in self.playerList]
for i, player in enumerate(self.playerList):
jurgen = self.engine.config.get("game", "jurg_p%d" % i)
if jurgen == True:
self.jurg[i] = True
self.autoPlay = True
self.aiSkill[i] = self.engine.config.get("game", "jurg_skill_p%d" % i)
if player.part.id == Song.VOCAL_PART:
self.instruments[i].jurgenEnabled = jurgen
self.instruments[i].jurgenSkill = self.aiSkill[i]
self.jurgenLogic[i] = self.engine.config.get("game", "jurg_logic_p%d" % i)
#MFH - no Jurgen in Career mode.
if self.careerMode:
self.autoPlay = False
if self.bossBattle:
self.autoPlay = True
self.jurg = [False for i in self.playerList]
self.jurg[1] = True
self.hopoStyle = self.engine.config.get("game", "hopo_system")
self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
self.allTaps = 0
self.autoKickBass = [0 for i in self.playerList]
if self.gh2sloppy == 1:
self.hopoStyle = 4
self.hopoAfterChord = self.engine.config.get("game", "hopo_after_chord")
self.pov = self.engine.config.get("fretboard", "point_of_view")
#CoffeeMod
#self.controls = self.engine.input.controls
self.activeGameControls = self.engine.input.activeGameControls
for i,player in enumerate(self.playerList):
if player.part.id == Song.VOCAL_PART:
continue
self.instruments[i].leftyMode = False
self.instruments[i].twoChordMax = False
self.instruments[i].drumFlip = False
if player.lefty > 0:
self.instruments[i].leftyMode = True
if player.drumflip > 0:
self.instruments[i].drumFlip = True
if player.twoChordMax > 0:
self.instruments[i].twoChordMax = True
self.keysList = []
for i, player in enumerate(self.playerList):
if self.instruments[i].isDrum:
self.keysList.append(player.drums)
elif self.instruments[i].isVocal:
self.keysList.append([])
continue
else:
self.keysList.append(player.keys)
if not self.instruments[i].twoChordMax:
if self.controls.twoChord[self.activeGameControls[i]] > 0:
self.instruments[i].twoChordMax = True
if self.song and self.song.readyToGo:
self.getHandicap() #akedrou - to be sure scoring objects are created.
#myfingershurt: ensure that after a pause or restart, the a/v sync delay is refreshed:
self.song.refreshAudioDelay()
#myfingershurt: ensuring the miss volume gets refreshed:
self.song.refreshVolumes()
self.song.setAllTrackVolumes(1)
if self.crowdsCheering == True:
self.song.setCrowdVolume(1)
else:
self.song.setCrowdVolume(0.0)
def songLoaded(self, song):
for i, player in enumerate(self.playerList):
if self.instruments[i].isVocal:
song.difficulty[i] = Song.difficulties[Song.EXP_DIF] #for track-finding purposes! Don't change this, ok?
continue
song.difficulty[i] = player.difficulty
if self.bossBattle == True:
song.difficulty[1] = song.difficulty[0]
self.song.readyToGo = False
def endSong(self):
self.engine.view.popLayer(self.menu)
validScoreFound = False
for scoreCard in self.scoring: #MFH - what if 2p (human) battles 1p (Jurgen / CPU)? He needs a valid score too!
if scoreCard.score > 0:
validScoreFound = True
break
if self.coOpType:
if self.coOpScoreCard.score > 0:
validScoreFound = True
if validScoreFound:
#if self.player.score > 0:
self.goToResults()
else:
self.changeSong()
def quit(self):
if self.song:
self.song.stop()
self.resetVariablesToDefaults()
self.done = True
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
self.engine.world.finishGame()
# evilynux - Switch to Practice
def practiceSong(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
self.engine.world.gameMode = 1
self.engine.world.createScene("SongChoosingScene")
def changeSong(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
# self.session.world.deleteScene(self)
self.engine.world.createScene("SongChoosingScene")
def changeAfterFail(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
# self.session.world.deleteScene(self)
self.engine.world.createScene("SongChoosingScene")
def initBeatAndSpClaps(self):
###Capo###
if self.song:
self.beatTime = []
if (self.starClaps or self.beatClaps):
for time, event in self.song.track[0].getAllEvents():
if isinstance(event, Bars):
if (event.barType == 1 or event.barType == 2):
self.beatTime.append(time)
###endCapo###
def resetVariablesToDefaults(self):
if self.song:
self.song.readyToGo = False
#self.countdown = 4.0 * self.songBPS
self.countdownSeconds = 3 #MFH - This needs to be reset for song restarts, too!
self.countdown = float(self.countdownSeconds) * self.songBPS
self.scaleText = [0.0 for i in self.playerList]
self.displayText = [None for i in self.playerList]
self.displayTextScale = [0.0 for i in self.playerList]
self.textTimer = [0.0 for i in self.playerList]
self.textY = [.3 for i in self.playerList]
self.scaleText2 = [0.0 for i in self.playerList]
self.goingUP = [False for i in self.playerList]
self.lastStreak = [0 for i in self.playerList]
if self.coOpType:
self.coOpPhrase = len(self.scaleText)
self.scaleText.append(0.0)
self.displayText.append(None)
self.displayTextScale.append(0.0)
self.textTimer.append(0.0)
self.textY.append(.3)
self.scaleText2.append(0.0)
self.goingUP.append(False)
self.lastStreak.append(0)
self.midiLyricLineIndex = 0
self.drumStart = False #faaa's drum sound mod restart
self.dispAccuracy = [False for i in self.playerList]
for instrument in self.instruments:
instrument.spEnabled = True
instrument.bigRockEndingMarkerSeen = False
#self.partialStar = [0 for i in self.playerList]
#self.starRatio = [0.0 for i in self.playerList]
for scoreCard in self.scoring:
scoreCard.reset()
self.crowdsCheering = False #akedrou
if self.coOpType:
self.coOpScoreCard.reset()
self.coOpStarPower = 0
self.coOpStarPowerTimer = 0
self.coOpStarPowerActive = [0 for i in self.playerList]
self.mutedLastSecondYet = False
self.dispSoloReview = [False for i in self.playerList]
self.soloReviewCountdown = [0 for i in self.playerList]
self.guitarSoloAccuracy = [0.0 for i in self.playerList]
self.guitarSoloActive = [False for i in self.playerList]
self.currentGuitarSolo = [0 for i in self.playerList]
self.guitarSoloBroken = [False for i in self.playerList]
self.inUnison = [False for i in self.playerList]
self.haveUnison = [False for i in self.playerList]
self.firstUnison = False
self.firstUnisonDone = False
self.unisonNum = 0
self.unisonIndex = 0
self.unisonActive = False
self.unisonEarn = [False for i in self.playerList]
self.resumeCountdown = 0
self.resumeCountdownSeconds = 0
self.pausePos = 0
self.failTimer = 0 #myfingershurt
self.rockTimer = 0 #myfingershurt
self.youRock = False #myfingershurt
self.rockFinished = False #myfingershurt
if self.battleGH:
if not self.battleSuddenDeath:
self.rock = [self.rockMax/2 for i in self.playerList]
else:
self.rock = [self.rockMax/2 for i in self.playerList]
self.minusRock = [0.0 for i in self.playerList]
self.plusRock = [0.0 for i in self.playerList]
self.coOpMulti = 1
self.deadPlayerList = []
self.numDeadPlayers = 0
self.coOpFailDone = [False for i in self.playerList]
self.rockFailUp = True
self.rockFailViz = 0.0
self.failViz = [0.0 for i in self.playerList]
if self.coOpRB:
self.rock.append(self.rockMax/2)
self.minusRock.append(0.0)
self.plusRock.append(0.0)
self.timesFailed = [0 for i in self.playerList]
if self.battleGH:
self.battleJustUsed = [0 for i in self.playerList]
for instrument in self.instruments:
if self.battleGH:
if not self.battleSuddenDeath:
instrument.battleObjects = [0] * 3
instrument.battleSuddenDeath = False
instrument.battleStatus = [False] * 9
instrument.battleBeingUsed = [0] * 2
#self.guitars[i].battleDiffUp = False
#self.guitars[i].battleLefty = False
#self.guitars[i].battleWhammy = False
#self.guitars[i].battleAmp = False
instrument.starPower = 0
instrument.coOpFailed = False
#volshebnyi - BRE variables reset
instrument.freestyleStart = 0
instrument.freestyleFirstHit = 0
instrument.freestyleLength = 0
instrument.freestyleBonusFret = 0
if instrument.isDrum:
instrument.drumFillsCount = 0
instrument.drumFillsHits = 0
instrument.freestyleLastFretHitTime = [0 for i in range(5)]
if instrument.isVocal:
instrument.doneLastPhrase = False
instrument.phraseIndex = 0
instrument.currentTapPhrase = -1
instrument.phraseInTune = 0
instrument.phraseNoteTime = 0
instrument.phraseTaps = 0
instrument.phraseTapsHit = 0
#volshebnyi - shaders reset
shaders.reset()
if shaders.turnon:
for i, player in enumerate(self.playerList):
shaders.var["fret"][i]=[-10.0]*5
shaders.var["fretpos"][i]=[-10.0]*5
shaders.var["color"][i]=(.0,)*4
shaders.var["scoreMult"][i]=1
shaders.var["multChangePos"][i]=-10.0
self.failed = False
self.battleSuddenDeath = False
self.finalFailed = False
self.failEnd = False
self.drumScoringEnabled = True #MFH
self.initBeatAndSpClaps()
#MFH - init vars for the next time & lyric line to display
self.midiLyricLineIndex = 0
self.nextMidiLyricStartTime = 0
if ( self.numMidiLyricLines > 0 ):
self.nextMidiLyricStartTime, self.nextMidiLyricLine = self.midiLyricLines[self.midiLyricLineIndex]
#MFH - initialize word-by-word 2-line MIDI lyric display / highlighting system:
self.activeMidiLyricLine_GreyWords = ""
self.activeMidiLyricLine_GreenWords = ""
self.activeMidiLyricLine_WhiteWords = ""
self.activeMidiLyricLineIndex = 0
self.activeMidiLyricWordSubIndex = 0
self.numWordsInCurrentMidiLyricLine = 0
self.currentSimpleMidiLyricLine = ""
self.nextLyricWordTime = 0
self.nextLyricEvent = None
self.nextLyricIsOnNewLine = False
#MFH - reset global tempo variables
self.currentBpm = Song.DEFAULT_BPM
self.currentPeriod = 60000.0 / self.currentBpm
self.targetBpm = self.currentBpm
self.lastBpmChange = -1.0
self.baseBeat = 0.0
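    # currentPeriod (above) is the beat length in milliseconds (60000/BPM);
    # e.g. at 120 BPM one beat lasts 500 ms.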
if self.midiLyricMode == 2 and not self.playingVocals:
if self.numMidiLyricLines > self.activeMidiLyricLineIndex:
self.numWordsInCurrentMidiLyricLine = 0
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
self.numWordsInCurrentMidiLyricLine += 1
if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
else:
self.noMoreMidiLineLyrics = True #t'aint no lyrics t'start wit!
#self.activeMidiLyricWordSubIndex += 1
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
if self.numMidiLyricLines > self.activeMidiLyricLineIndex+2: #is there a second line of lyrics?
tempTime, self.currentSimpleMidiLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex+1]
for player in self.playerList:
player.reset()
self.stage.reset()
self.enteredCode = []
self.jurgPlayer = [False for i in self.playerList] #Jurgen hasn't played the restarted song =P
for instrument in self.instruments:
instrument.scoreMultiplier = 1
if instrument.isVocal:
instrument.phraseIndex = 0
instrument.currentTapPhrase = -1
instrument.tapNoteHits = [0 for i in instrument.tapNoteTotals]
instrument.currentPhraseTime = 0
instrument.currentPhraseLength = 0
instrument.activePhrase = None
continue
instrument.twoChord = 0
instrument.hopoActive = 0
instrument.wasLastNoteHopod = False
instrument.sameNoteHopoString = False
instrument.hopoLast = -1
instrument.guitarSolo = False
instrument.neck.guitarSolo = False
instrument.currentGuitarSoloHitNotes = 0
if self.partyMode == True:
self.instruments[0].keys = self.playerList[0].keys
self.instruments[0].actions = self.playerList[0].actions
self.keysList = self.playerList[0].keys
if self.battle == True:
for i in range(self.numOfPlayers):
self.instruments[i].actions = self.playerList[i].actions
self.engine.collectGarbage()
self.boardY = 2
self.setCamera()
if self.song:
self.song.readyToGo = True
def restartSong(self, firstTime = False): #QQstarS: Fix this function
self.resetVariablesToDefaults()
self.engine.data.startSound.play()
self.engine.view.popLayer(self.menu)
if not self.song:
return
# glorandwarf: the countdown is now the number of beats to run
# before the song begins
self.partySwitch = 0
for instrument in self.instruments:
if instrument.isVocal:
instrument.stopMic()
else:
instrument.endPick(0) #akedrou: this is the position of the song, not a player number!
self.song.stop()
self.initBeatAndSpClaps()
if self.stage.mode == 3:
self.stage.restartVideo()
def restartAfterFail(self): #QQstarS: Fix this function
self.resetVariablesToDefaults()
self.engine.data.startSound.play()
self.engine.view.popLayer(self.failMenu)
if not self.song:
return
self.partySwitch = 0
for i,instrument in enumerate(self.instruments):
if instrument.isVocal:
instrument.stopMic()
else:
instrument.endPick(0)
self.song.stop()
def startSolo(self, playerNum): #MFH - more modular and general handling of solos
i = playerNum
#Guitar Solo Start
self.currentGuitarSoloTotalNotes[i] = self.guitarSolos[i][self.currentGuitarSolo[i]]
self.guitarSoloBroken[i] = False
self.instruments[i].guitarSolo = True
if not self.instruments[i].isVocal:
self.instruments[i].neck.guitarSolo = True
#self.displayText[i] = _("Guitar Solo!")
instrumentSoloString = "%s %s" % (self.playerList[i].part.text, self.tsSolo)
if self.phrases > 1:
self.newScalingText(self.playerList[i].number, instrumentSoloString )
#self.sfxChannel.setVolume(self.sfxVolume)
self.engine.data.crowdSound.play()
def endSolo(self, playerNum): #MFH - more modular and general handling of solos
i = playerNum
#Guitar Solo End
self.instruments[i].guitarSolo = False
if not self.instruments[i].isVocal:
self.instruments[i].neck.guitarSolo = False
#self.sfxChannel.setVolume(self.sfxVolume) #liquid
self.guitarSoloAccuracy[i] = (float(self.instruments[i].currentGuitarSoloHitNotes) / float(self.currentGuitarSoloTotalNotes[i]) ) * 100.0
if not self.guitarSoloBroken[i]: #backup perfect solo detection
if self.instruments[i].currentGuitarSoloHitNotes > 0: #MFH - need to make sure someone didn't just not play a guitar solo at all - and still wind up with 100%
self.guitarSoloAccuracy[i] = 100.0
if self.guitarSoloAccuracy[i] > 100.0:
self.guitarSoloAccuracy[i] = 100.0
if self.guitarSoloBroken[i] and self.guitarSoloAccuracy[i] == 100.0: #streak was broken, not perfect solo, force 99%
self.guitarSoloAccuracy[i] = 99.0
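    # Grading tiers, as encoded below: 100% Perfect (100 pts per hit note),
    # >=95% Awesome (50), >=90% Great (30), >=80% Good (20), >=70% Solid (10),
    # >=60% Okay (5), under 60% Messy (0); the bonus is tier multiplier * solo notes hit.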
if self.guitarSoloAccuracy[i] == 100.0: #fablaculp: soloDescs changed
soloDesc = self.tsPerfectSolo
soloScoreMult = 100
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 95.0:
soloDesc = self.tsAwesomeSolo
soloScoreMult = 50
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 90.0:
soloDesc = self.tsGreatSolo
soloScoreMult = 30
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 80.0:
soloDesc = self.tsGoodSolo
soloScoreMult = 20
elif self.guitarSoloAccuracy[i] >= 70.0:
soloDesc = self.tsSolidSolo
soloScoreMult = 10
elif self.guitarSoloAccuracy[i] >= 60.0:
soloDesc = self.tsOkaySolo
soloScoreMult = 5
else: #0% - 59.9%
soloDesc = self.tsMessySolo
soloScoreMult = 0
self.engine.data.failSound.play() #liquid
soloBonusScore = soloScoreMult * self.instruments[i].currentGuitarSoloHitNotes
self.scoring[i].score += soloBonusScore
if self.coOpType:
self.coOpScoreCard.score += soloBonusScore
trimmedSoloNoteAcc = self.roundDecimalForDisplay(self.guitarSoloAccuracy[i])
#self.soloReviewText[i] = [soloDesc,str(trimmedSoloNoteAcc) + "% = " + str(soloBonusScore) + _(" pts")]
#ptsText = _("pts")
self.soloReviewText[i] = [soloDesc,
"%(soloNoteAcc)s%% = %(soloBonus)d %(pts)s" % \
{'soloNoteAcc': str(trimmedSoloNoteAcc), 'soloBonus': soloBonusScore, 'pts': self.tsPtsLabel} ]
self.dispSoloReview[i] = True
self.soloReviewCountdown[i] = 0
#reset for next solo
self.instruments[i].currentGuitarSoloHitNotes = 0
self.currentGuitarSolo[i] += 1
def updateGuitarSolo(self, playerNum):
i = playerNum
#if self.guitars[i].canGuitarSolo:
if self.instruments[i].guitarSolo:
#update guitar solo for player i
#if we hit more notes in the solo than were counted, update the solo count (for the slop)
if self.instruments[i].currentGuitarSoloHitNotes > self.currentGuitarSoloTotalNotes[i]:
self.currentGuitarSoloTotalNotes[i] = self.instruments[i].currentGuitarSoloHitNotes
if self.instruments[i].currentGuitarSoloHitNotes != self.currentGuitarSoloLastHitNotes[i]: #changed!
self.currentGuitarSoloLastHitNotes[i] = self.instruments[i].currentGuitarSoloHitNotes #update.
if self.guitarSoloAccuracyDisplayMode > 0: #if not off:
tempSoloAccuracy = (float(self.instruments[i].currentGuitarSoloHitNotes)/float(self.currentGuitarSoloTotalNotes[i]) * 100.0)
trimmedIntSoloNoteAcc = self.roundDecimalForDisplay(tempSoloAccuracy)
if self.guitarSoloAccuracyDisplayMode == 1: #percentage only
#soloText = str(trimmedIntSoloNoteAcc) + "%"
self.solo_soloText[i] = "%s%%" % str(trimmedIntSoloNoteAcc)
elif self.guitarSoloAccuracyDisplayMode == 2: #detailed
#soloText = str(self.guitars[i].currentGuitarSoloHitNotes) + "/" + str(self.currentGuitarSoloTotalNotes[i]) + ": " + str(trimmedIntSoloNoteAcc) + "%"
self.solo_soloText[i] = "%(hitSoloNotes)d/ %(totalSoloNotes)d: %(soloAcc)s%%" % \
{'hitSoloNotes': self.instruments[i].currentGuitarSoloHitNotes, 'totalSoloNotes': self.currentGuitarSoloTotalNotes[i], 'soloAcc': str(trimmedIntSoloNoteAcc)}
self.solo_soloText[i] = self.solo_soloText[i].replace("0","O")
#if self.fontMode==0: #0 = oGL Hack, 1=LaminaScreen, 2=LaminaFrames
self.solo_Tw[i], self.solo_Th[i] = self.solo_soloFont.getStringSize(self.solo_soloText[i],self.solo_txtSize)
self.solo_boxXOffset[i] = self.solo_xOffset[i]
if self.guitarSoloAccuracyDisplayPos == 0: #right
self.solo_xOffset[i] -= self.solo_Tw[i]
self.solo_boxXOffset[i] -= self.solo_Tw[i]/2
#soloFont.render(soloText, (xOffset - Tw, yOffset),(1, 0, 0),txtSize) #right-justified
elif self.guitarSoloAccuracyDisplayPos == 1: #centered
self.solo_xOffset[i] = 0.5 - self.solo_Tw[i]/2
self.solo_boxXOffset[i] = 0.5
#soloFont.render(soloText, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #centered
elif self.guitarSoloAccuracyDisplayPos == 3: #racer: rock band
if self.hitAccuracyPos == 0: #Center - need to move solo text above this!
self.solo_yOffset[i] = 0.100 #above Jurgen Is Here
elif self.jurgPlayer[i] and self.autoPlay:
self.solo_yOffset[i] = 0.140 #above Jurgen Is Here
else: #no jurgens here:
self.solo_yOffset[i] = 0.175 #was 0.210, occluded notes
self.solo_xOffset[i] = 0.5 - self.solo_Tw[i]/2
self.solo_boxXOffset[i] = 0.5
#soloFont.render(soloText, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #rock band
else: #left
self.solo_boxXOffset[i] += self.solo_Tw[i]/2
self.guitarSoloShown[i] = True
else: #not currently a guitar solo - clear Lamina solo accuracy surface (but only once!)
if self.guitarSoloShown[i]:
self.guitarSoloShown[i] = False
self.currentGuitarSoloLastHitNotes[i] = 1
  #MFH - a single, global BPM is tracked here in GuitarScene instead of in the instrument objects.
def handleTempo(self, song, pos):
if not song:
return
if self.lastBpmChange > 0 and self.disableVBPM == True: #MFH - only handle tempo once if the VBPM feature is off.
return
#tempo = song.tempoEventTrack.getCurrentTempo(pos)
#if tempo != self.targetBpm: #MFH - get latest tempo target
# self.targetBpm = tempo
tempEventHolder = song.tempoEventTrack.getNextTempoChange(pos)
if tempEventHolder:
time, event = tempEventHolder
#if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
      if (time < pos or pos - time < self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
#self.targetBpm = song.tempoEventTrack.getCurrentTempo(pos)
self.targetBpm = event.bpm
song.tempoEventTrack.currentIndex += 1 #MFH = manually increase current event
self.lastBpmChange = time
#adjust tempo gradually to meet new target:
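    # Each call closes 3% of the remaining BPM gap (rounded to 4 decimals) - an
    # exponential ease; e.g. stepping from 120 toward 180 BPM adds 1.8 BPM on the
    # first call, ~1.75 on the next, and snaps to the target once the step rounds to 0.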
if self.targetBpm != self.currentBpm:
diff = self.targetBpm - self.currentBpm
tempDiff = round( (diff * .03), 4) #MFH - better to calculate this once and reuse the variable instead of recalculating every use
if tempDiff != 0:
self.currentBpm = self.currentBpm + tempDiff
else:
self.currentBpm = self.targetBpm
      #recalculate all variables dependent on the tempo and apply them to the instrument objects - only while currentBpm is changing:
self.currentPeriod = 60000.0 / self.currentBpm
for instrument in self.instruments:
instrument.setBPM(self.currentBpm)
instrument.lastBpmChange = self.lastBpmChange
instrument.baseBeat = self.baseBeat
def handleWhammy(self, playerNum):
i = playerNum
if self.resumeCountdown > 0: #MFH - conditions to completely ignore whammy
return
    try: #the analog axis can be configured while its joystick is absent, which would otherwise crash
#MFH - adding another nest of logic filtration; don't even want to run these checks unless there are playedNotes present!
if self.battleGH:
if self.isKillAnalog[i]:
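          # Axis normalization used here and again further down: (axis+1.0)/2.0 maps
          # the raw -1..1 reading onto 0..1, and round(10*x)/10 quantizes it to 0.1
          # steps; XBOX mode subtracts from 1.0 because that axis rests at +1.0, while
          # PS2 mode takes abs() since its axis idles at 0 and swings both directions.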
if self.analogKillMode[i] == 2: #XBOX mode: (1.0 at rest, -1.0 fully depressed)
self.whammyVol[i] = 1.0 - (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
elif self.analogKillMode[i] == 3: #XBOX Inverted mode: (-1.0 at rest, 1.0 fully depressed)
self.whammyVol[i] = (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
else: #PS2 mode: (0.0 at rest, fluctuates between 1.0 and -1.0 when pressed)
self.whammyVol[i] = (round(10*(abs(self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i]))))/10.0)
if self.whammyVol[i] > 0.0 and self.whammyVol[i] < 0.1:
self.whammyVol[i] = 0.1
#MFH - simple whammy tail determination:
if self.whammyVol[i] > 0.1:
self.instruments[i].battleWhammyDown = True
else:
if self.instruments[i].battleWhammyDown:
self.instruments[i].battleWhammyDown = False
if self.instruments[i].battleStatus[4]:
self.instruments[i].battleWhammyNow -= 1
if self.instruments[i].battleWhammyNow == 0:
self.instruments[i].battleStatus[4] = False
for k, nowUsed in enumerate(self.instruments[i].battleBeingUsed):
if self.instruments[i].battleBeingUsed[k] == 4:
self.instruments[i].battleBeingUsed[k] = 0
else:
self.battleTarget[i] += 1
if self.battleTarget[i] == self.numOfPlayers:
self.battleTarget[i] = 0
if self.battleTarget[i] == i:
self.battleTarget[i] += 1
else:
if self.killswitchEngaged[i] == True: #QQstarS:new Fix the killswitch
self.killswitchEngaged[i] = True
if self.instruments[i].battleStatus[4]:
self.instruments[i].battleWhammyDown = True
else:
if self.instruments[i].battleStatus[4] and self.instruments[i].battleWhammyDown:
self.instruments[i].battleWhammyNow -= 1
self.instruments[i].battleWhammyDown = False
if self.instruments[i].battleWhammyNow == 0:
self.instruments[i].battleStatus[4] = False
for k, nowUsed in enumerate(self.instruments[i].battleBeingUsed):
if self.instruments[i].battleBeingUsed[k] == 4:
self.instruments[i].battleBeingUsed[k] = 0
if self.instruments[i].playedNotes:
#Player i kill / whammy check:
if self.isKillAnalog[i]:
if self.CheckForValidKillswitchNote(i): #if a note has length and is being held enough to get score
#rounding to integers, setting volumes 0-10 and only when changed from last time:
#want a whammy reading of 0.0 to = full volume, as that's what it reads at idle
if self.analogKillMode[i] == 2: #XBOX mode: (1.0 at rest, -1.0 fully depressed)
self.whammyVol[i] = 1.0 - (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
elif self.analogKillMode[i] == 3: #XBOX Inverted mode: (-1.0 at rest, 1.0 fully depressed)
self.whammyVol[i] = (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
else: #PS2 mode: (0.0 at rest, fluctuates between 1.0 and -1.0 when pressed)
self.whammyVol[i] = (round(10*(abs(self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i]))))/10.0)
if self.whammyVol[i] > 0.0 and self.whammyVol[i] < 0.1:
self.whammyVol[i] = 0.1
#MFH - simple whammy tail determination:
if self.whammyVol[i] > 0.1:
self.killswitchEngaged[i] = True
else:
self.killswitchEngaged[i] = False
if self.whammyVol[i] != self.lastWhammyVol[i] and self.whammyVol[i] > 0.1:
if self.instruments[i].killPoints:
self.instruments[i].starPower += self.analogKillswitchStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
elif (self.instruments[i].starPowerActive and self.whammySavesSP):
self.instruments[i].starPower += self.analogKillswitchActiveStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
self.lastWhammyVol[i] = self.whammyVol[i]
#here, scale whammyVol to match kill volume setting:
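              # actualWhammyVol ramps toward targetWhammyVol by whammyVolAdjStep each
              # call, so the kill/whammy volume fades rather than jumping in one step.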
self.targetWhammyVol[i] = self.whammyVol[i] * (self.activeVolume - self.killVolume)
if self.actualWhammyVol[i] < self.targetWhammyVol[i]:
self.actualWhammyVol[i] += self.whammyVolAdjStep
whammyVolSet = self.activeVolume - self.actualWhammyVol[i]
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(whammyVolSet, self.players[i].part)
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*(1.0-self.whammyVol[i])), self.players[i].part)
elif self.actualWhammyVol[i] > self.targetWhammyVol[i]:
self.actualWhammyVol[i] -= self.whammyVolAdjStep
whammyVolSet = 1.0 - self.actualWhammyVol[i]
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(whammyVolSet, self.players[i].part)
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*(1.0-self.whammyVol[i])), self.players[i].part)
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.actualWhammyVol[i] = self.defaultWhammyVol[i]
else: #digital killswitch:
if self.CheckForValidKillswitchNote(i): #if a note has length and is being held enough to get score
if self.killswitchEngaged[i] == True: #QQstarS:new Fix the killswitch
if self.instruments[i].isKillswitchPossible() == True:
self.killswitchEngaged[i] = True
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(self.killVolume, self.players[i].part) #MFH
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*self.whammyVol[i]), self.players[i].part)
if self.instruments[i].killPoints:
self.instruments[i].starPower += self.digitalKillswitchStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
elif (self.instruments[i].starPowerActive and self.whammySavesSP and not self.instruments[i].isVocal):
self.instruments[i].starPower += self.digitalKillswitchActiveStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
else:
self.killswitchEngaged[i] = None
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.killswitchEngaged[i] = False
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.killswitchEngaged[i] = False
else:
self.killswitchEngaged[i] = False
except Exception, e:
self.whammyVol[i] = self.defaultWhammyVol[i]
def handleAnalogSP(self, playerNum, ticks):
i = playerNum
if self.resumeCountdown > 0:
return
if self.isSPAnalog[i]:
self.starAxisVal[i] = abs(self.engine.input.joysticks[self.whichJoyStar[i]].get_axis(self.whichAxisStar[i]))
if self.starAxisVal[i] > (self.analogSPThresh[i]/100.0):
if self.starDelay[i] == 0 and not self.starActive[i]:
self.starDelay[i] = (10-self.analogSPSense[i])*25
else:
self.starDelay[i] -= ticks
if self.starDelay[i] <= 0 and not self.starActive[i]:
self.activateSP(i)
self.starActive[i] = True
else:
self.starActive[i] = False
self.starDelay[i] = 0
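#Analog slider handling (e.g. World Tour-style touchpads): the axis reading is
# banded into fret numbers 0-4, with narrow transition bands flagged as slides
# via markSlide() and a mid-range dead zone (-1) meaning no fret is held.
# Near-0 and near-1 readings both map to fret 4 - presumably the pad's resting
# values.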
def handleAnalogSlider(self, playerNum): #akedrou
i = playerNum
if self.resumeCountdown > 0:
return
if self.isSlideAnalog[i]:
oldSlide = self.slideValue[i]
if self.analogSlideMode[i] == 1: #Inverted mode
slideVal = -(self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])+1.0)/2.0
else: #Default
slideVal = (self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])+1.0)/2.0
if slideVal > 0.9 or slideVal < 0.01:
self.slideValue[i] = 4
elif slideVal > 0.77:
self.slideValue[i] = 4
self.markSlide(i)
elif slideVal > 0.68:
self.slideValue[i] = 3
elif slideVal > 0.60:
self.slideValue[i] = 3
self.markSlide(i)
elif slideVal > 0.54:
self.slideValue[i] = 2
elif slideVal > 0.43:
self.slideValue[i] = -1 #mark that sliding is not happening.
elif slideVal > 0.34:
self.slideValue[i] = 2
self.markSlide(i)
elif slideVal > 0.28:
self.slideValue[i] = 1
elif slideVal > 0.16:
self.slideValue[i] = 1
self.markSlide(i)
else:
self.slideValue[i] = 0
if self.slideValue[i] != oldSlide:
for n, k in enumerate(self.keysList[i]):
if n == self.slideValue[i] and not self.controls.getState(k):
self.controls.toggle(k, True)
self.keyPressed3(None, 0, k) #mfh
elif self.controls.getState(k):
self.controls.toggle(k, False)
self.keyReleased3(k)
if self.slideValue[i] > -1:
self.handlePick(i)
def markSlide(self, playerNum):
pass #akedrou - this will eventually handle flagging that the player is, in fact, sliding up the analog fret bar.
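#Streak pop-up text: vocal parts announce every 5 consecutive hits; other
# instruments announce at 50 notes and then at every 100-note milestone, using
# the scaling-text animation below.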
def handlePhrases(self, playerNum, playerStreak):
if self.phrases > 0:
i = playerNum
vocalPart = False
if not (self.coOpType and i == self.coOpPhrase):
if self.instruments[i].isVocal:
vocalPart = True
if (self.coOpType and i == self.coOpPhrase) or not self.coOpType:
if self.lastStreak[i] < playerStreak:
textChanged = True
else:
textChanged = False
self.lastStreak[i] = playerStreak
if vocalPart:
streakModulo = playerStreak % 5
if ( (streakModulo == 0) or (self.lastStreak[i] % 5 > streakModulo) ) and playerStreak > 4 and textChanged:
self.newScalingText(i, self.tsPhraseStreak % (playerStreak - streakModulo) )
elif (playerStreak == 50 or (self.lastStreak[i] < 50 and playerStreak > 50) ) and textChanged:
#self.displayText[i] = _("50 Note Streak!!!") #kk69: more GH3-like
#self.newScalingText(i, _("50 Note Streak!!!") )
self.newScalingText(i, self.tsNoteStreak % 50)
#self.streakFlag = "%d" % (i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
#MFH - I think a simple integer modulo would be more efficient here:
else:
streakModulo = playerStreak % 100
if ( (streakModulo == 0) or (self.lastStreak[i] % 100 > streakModulo) ) and playerStreak > 50 and textChanged:
#self.displayText[i] = _("%d Note Streak!!!") % playerStreak #kk69: more GH3-like
#self.newScalingText(i, _("%d Note Streak!!!") % playerStreak )
#self.newScalingText(i, _("%d Note Streak!!!") % (playerStreak - streakModulo) )
self.newScalingText(i, self.tsNoteStreak % (playerStreak - streakModulo) )
#self.streakFlag = "%d" % (i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
if self.scaleText[i] >= self.maxDisplayTextScale:
self.displayTextScale[i] = self.scaleText[i] + self.scaleText2[i]
if self.scaleText2[i] <= -0.0005:
self.goingUP[i] = True
elif self.scaleText2[i] >= 0.0005:
self.goingUP[i] = False
if self.goingUP[i]:
self.scaleText2[i] += self.displayTextScaleStep2
else:
self.scaleText2[i] -= self.displayTextScaleStep2
else:
self.displayTextScale[i] = self.scaleText[i]
if self.displayText[i] is not None and self.scaleText[i] < self.maxDisplayTextScale:
self.scaleText[i] += self.displayTextScaleStep1
if self.scaleText[i] > self.maxDisplayTextScale:
self.scaleText[i] = self.maxDisplayTextScale
if self.displayText[i] is not None:
self.textTimer[i] += 1
if self.battleGH:
if self.battleText[i] is not None:
self.battleTextTimer[i] += 1
if self.battleTextTimer[i] > 500:
self.battleText[i] = None
self.battleTextTimer[i] = 0
if self.textTimer[i] > self.textTimeToDisplay:
self.textY[i] -= 0.02
if self.textY[i] < 0:
self.scaleText[i] = 0
self.textTimer[i] = 0
self.displayText[i] = None
#textChanged = False
self.textY[i] = .3
self.scaleText2[i] = 0.0
self.goingUP[i] = False
def newScalingText(self, playerNum, text):
i = playerNum
self.scaleText[i] = 0
self.textTimer[i] = 0
self.textY[i] = .3
self.scaleText2[i] = 0.0
self.goingUP[i] = False
self.displayText[i] = text
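#Central pick handler: during a Big Rock Ending or drum fill, picks are scored
# as freestyle hits (banked until the ending is confirmed unbroken); otherwise
# the pick is routed to the configured strum/HO-PO logic (doPick / doPick3RF /
# doPick3GH2).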
def handlePick(self, playerNum, hopo = False, pullOff = False):
i = playerNum
num = playerNum
guitar = self.instruments[num]
if self.resumeCountdown > 0: #MFH - conditions to completely ignore picks
return
#MFH - only actually pick if the player has not failed already!
if self.rock[i] > 0 and guitar.battleStatus[4] == False:
# Volshebnyi - new BRE and drum fills scoring
if guitar.freestyleActive or (guitar.isDrum and guitar.drumFillsActive):
if guitar.freestyleActive: #MFH - only for BREs, not drum fills. Will depend on BRE sound option when implemented.
self.song.setInstrumentVolume(1.0, self.players[i].part) #MFH - ensure that every freestyle pick, the volume for that track is set to 1.0
pos = self.getSongPosition()
score = 0
numFreestyleHits = guitar.freestylePick(self.song, pos, self.controls)
if numFreestyleHits>0 or guitar.isDrum:
if guitar.freestyleFirstHit + guitar.freestyleLength < pos :
guitar.freestyleFirstHit = pos
guitar.freestylePeriod = 1500
guitar.freestyleBaseScore = 150
score = 600 * numFreestyleHits
if guitar.isDrum:
guitar.drumFillsHits = 0
guitar.freestyleLastHit = pos - guitar.freestylePeriod
for fret in range (0,5):
guitar.freestyleLastFretHitTime[fret] = pos - guitar.freestylePeriod
if guitar.isDrum:
guitar.drumFillsHits += 1
#if guitar.freestyleSP: #MFH - this logic should be in the run() function, not conditional here...
# self.activateSP(num)
# guitar.freestyleSP = False
for fret in range (5):
if self.controls.getState(guitar.keys[fret]) or (self.playerList[i].controlType == 0 and self.controls.getState(guitar.keys[fret+5])):
hitspeed = min((pos - guitar.freestyleLastFretHitTime[fret]) / guitar.freestylePeriod, 1.0)
score += guitar.freestyleBaseScore * hitspeed
if numFreestyleHits > 0: #MFH - to prevent a float division!
score = int ( score / numFreestyleHits )
for fret in range (5):
if self.controls.getState(guitar.keys[fret]) or (self.playerList[i].controlType == 0 and self.controls.getState(guitar.keys[fret+5])):
guitar.freestyleLastFretHitTime[fret] = pos
#MFH - Add all BRE score to a temporary score accumulator with a separate display box
# and only reward if all notes after the BRE are hit without breaking streak!
if guitar.freestyleActive: #MFH - only want to add the score if this is a BRE - drum fills get no scoring...
if self.coOpType:
self.scoring[num].endingScore += score
self.scoring[num].endingStreakBroken = False
self.scoring[num].freestyleWasJustActive = True
self.coOpScoreCard.endingScore += score
self.coOpScoreCard.endingStreakBroken = False
self.coOpScoreCard.freestyleWasJustActive = True
else:
#self.playerList[num].addScore( score )
self.scoring[num].endingScore += score
#also, when this happens, want to set a flag indicating that all of the remaining notes in the song must be hit without
# breaking streak, or this score will not be kept!
self.scoring[num].endingStreakBroken = False
self.scoring[num].freestyleWasJustActive = True
#MFH - also must ensure notes that pass during this time are marked as skipped without resetting the streak
#missedNotes = self.guitars[num].getMissedNotesMFH(self.song, pos, catchup = True)
missedNotes = guitar.getMissedNotesMFH(self.song, pos + guitar.earlyMargin, catchup = True) #MFh - check slightly ahead here.
for tym, theNote in missedNotes: #MFH - also want to mark these notes as Played so they don't count against the note total!
#theNote.played = True
theNote.skipped = True
if guitar.isDrum:
if self.coOpType:
self.coOpScoreCard.totalStreakNotes -= 1
else:
self.scoring[num].totalStreakNotes -= 1
else:
if guitar.isDrum:
self.doPick(i)
else:
if self.hopoStyle == 1: #1 = rf-mod
self.doPick3RF(i, hopo)
elif self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO
self.doPick3GH2(i, hopo, pullOff)
else: #0 = no HOPOs
self.doPick(i)
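#Jurgen (autoplay) and player-assist input generation. jurgenLogic styles:
# 0 = original FoF/RF-Mod (one strum per note window), 1 = MFH-Early (strum
# each chord as soon as it can be retrieved), 2 = MFH-OnTime1 (ignore early
# notes), 3 = MFH-OnTime2 (retrieve early, but wait to strum until the notes
# reach the current position). playerAssist presses only a subset of frets so
# a human can play the rest.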
def handleJurgen(self, pos):
chordFudge = 1 #MFH - was 10; myfingershurt - needed to detect chords
if self.firstGuitar is not None:
chordFudge = self.song.track[self.firstGuitar].chordFudge
if self.autoPlay or self.assisting:
for i,instrument in enumerate(self.instruments):
#Allow Jurgen per player...Spikehead777
if self.jurg[i] == True: #if it is this player
self.jurgPlayer[i] = True
else: #and if not
if self.playerAssist[i] == 0: #and no assist
continue
if instrument.isVocal:
continue
guitar = instrument
if self.battleGH:
self.aiUseSP[i] = 0
if self.aiSkill[i] == 4 or self.aiSkill[i] == 5:
self.aiUseSP[i] += 25 * self.battleItemsHolding[i] #Number of Items in Holding
if self.instruments[self.battleTarget[i]].isStarPhrase:
self.aiUseSP[i] += 100 #always use when target is in starphrase
self.aiUseSP[i] += max((100 - (300*self.rock[self.battleTarget[i]])/self.rockMax), 0) #use when they're almost dead
self.aiUseSP[i] += max((100 - (500*self.rock[i])/self.rockMax), 0) #use when they're almost dead
else:
self.aiUseSP[i] = 100
if self.battleGH: #PRELIM LOGIC until algorithm goes in
if guitar.battleObjects[0] != 0:
if self.aiUseSP[i] > 50 and pos > guitar.battleGetTime + self.jurgBattleUseTime[i]:
self.activateSP(i)
if guitar.battleStatus[4]:
if guitar.battleWhammyNow == 0:
guitar.battleStatus[4] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 4:
guitar.battleBeingUsed[k] = 0
if guitar.battleWhammyNow != 0:
if pos - guitar.battleStartTimes[4] > self.jurgBattleWhammyTime[i]:
guitar.battleStartTimes[4] = pos
guitar.battleWhammyNow -= 1
if self.jurgenLogic[i] == 0: #original FoF / RF-Mod style Jurgen Logic (cannot handle fast notes / can only handle 1 strum per note window)
notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
notes = [note.number for time, note in notes]
changed = False
held = 0
for n, k in enumerate(self.keysList[i]):
if n > 4: break
if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (self.playerAssist[i] == 3 and k == guitar.keys[0]):
if n in notes and not self.controls.getState(k):
changed = True
self.controls.toggle(k, True)
self.keyPressed3(None, 0, k) #mfh
elif not n in notes and self.controls.getState(k):
changed = True
self.controls.toggle(k, False)
self.keyReleased3(k) #mfh
if self.controls.getState(k):
held += 1
#if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
if changed and held and not guitar.isDrum: #don't need the extra pick for drums
#myfingershurt:
self.handlePick(i)
elif self.jurgenLogic[i] == 1: #Jurgen logic style MFH-Early -- will separate notes out by time index, with chord slop detection, and strum every note
#MFH - Jurgen needs some logic that can handle notes that may be coming too fast to retrieve one set at a time
notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
#now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
if notes:
jurgStrumTime = notes[0][0]
jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
if self.battleJurgMissTime[i] != jurgStrumTime:
self.battleJurgMissTime[i] = jurgStrumTime
if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
else:
if random.randint(0,100) > self.aiHitPercentage[i]:
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
else:
jurgStrumNotes = []
changed = False
held = 0
if self.aiPlayNote[i]:
for n, k in enumerate(self.keysList[i]):
if n > 4: break
if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
if n in jurgStrumNotes and not self.controls.getState(k):
changed = True
self.controls.toggle(k, True)
self.keyPressed(None, 0, k) #mfh
elif not n in jurgStrumNotes and self.controls.getState(k):
changed = True
self.controls.toggle(k, False)
self.keyReleased(k) #mfh
if self.controls.getState(k):
held += 1
#if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
if changed and held and not guitar.isDrum: #don't need the extra pick for drums
#myfingershurt:
self.handlePick(i)
elif self.jurgenLogic[i] == 2: #Jurgen logic style MFH-OnTime1 -- Have Jurgen attempt to strum on time instead of as early as possible
#This method simply shrinks the note retrieval window to only notes that are on time or late. No early notes are even considered.
#MFH - Jurgen needs some logic that can handle notes that may be coming too fast to retrieve one set at a time
notes = guitar.getRequiredNotesForJurgenOnTime(self.song, pos) #mfh - needed updatin'
#now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
if notes:
jurgStrumTime = notes[0][0]
jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
if self.battleJurgMissTime[i] != jurgStrumTime:
self.battleJurgMissTime[i] = jurgStrumTime
if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
else:
if random.randint(0,100) > self.aiHitPercentage[i]:
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
else:
jurgStrumNotes = []
self.aiPlayNote[i] = True
changed = False
held = 0
if self.aiPlayNote[i]:
for n, k in enumerate(self.keysList[i]):
if n > 4: break
if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
if n in jurgStrumNotes and not self.controls.getState(k):
changed = True
self.controls.toggle(k, True)
self.keyPressed(None, 0, k) #mfh
elif not n in jurgStrumNotes and self.controls.getState(k):
changed = True
self.controls.toggle(k, False)
self.keyReleased(k) #mfh
if self.controls.getState(k):
held += 1
#if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
if changed and held and not guitar.isDrum: #don't need the extra pick for drums
#myfingershurt:
self.handlePick(i)
elif self.jurgenLogic[i] == 3: #Jurgen logic style MFH-OnTime2 -- Have Jurgen attempt to strum on time instead of as early as possible
#This method retrieves all notes in the window and only attempts to play them as they pass the current position, like a real player
notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
#now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
if notes:
jurgStrumTime = notes[0][0]
jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
else:
jurgStrumTime = 0
jurgStrumNotes = []
changed = False
held = 0
if self.battleJurgMissTime[i] != jurgStrumTime:
self.battleJurgMissTime[i] = jurgStrumTime
if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
else:
if random.randint(1,100) > self.aiHitPercentage[i]:
self.aiPlayNote[i] = False
else:
self.aiPlayNote[i] = True
#MFH - check if jurgStrumTime is close enough to the current position (or behind it) before actually playing the notes:
if (not notes or jurgStrumTime <= (pos + 30)) and self.aiPlayNote[i]:
for n, k in enumerate(self.keysList[i]):
if n > 4: break
if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
if n in jurgStrumNotes and not self.controls.getState(k):
changed = True
self.controls.toggle(k, True)
self.keyPressed(None, 0, k) #mfh
elif not n in jurgStrumNotes and self.controls.getState(k):
changed = True
self.controls.toggle(k, False)
self.keyReleased(k) #mfh
if self.controls.getState(k):
held += 1
#if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
if changed and held and not guitar.isDrum: #don't need the extra pick for drums
#myfingershurt:
self.handlePick(i)
#MFH - release all frets - who cares about held notes, I want a test player (actually if no keyReleased call, will hold notes fine)
for n, k in enumerate(self.keysList[i]):
if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
if self.controls.getState(k):
self.controls.toggle(k, False)
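#Rockmeter penalties for misses and overstrums. vScore is the vocal phrase
# rating; instrument paths differ for battle, FoF co-op (shared meter),
# RB co-op (per-player meters plus a band meter) and normal play.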
def rockmeterDecrease(self, playerNum, vScore = 0):
i = playerNum
if self.instruments[i].isVocal:
rockMinusAmount = 500 * (3 - vScore)
self.rock[i] -= rockMinusAmount
if (not self.coOpRB) and (self.rock[i]/self.rockMax <= 0.667) and ((self.rock[i]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
return
rockMinusAmount = 0 #akedrou - simplify the various incarnations of minusRock.
if self.instruments[i].isDrum:
self.drumStart = True
if not self.drumScoringEnabled: #MFH - ignore when drum scoring is disabled
return
if self.starNotesMissed[i] or self.instruments[i].isStarPhrase:
self.instruments[i].isStarPhrase = True
self.instruments[i].spEnabled = False
#self.instruments[i].spNote = False
if not self.failingEnabled or self.practiceMode:
return
if self.battle and self.numOfPlayers > 1: #battle mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.multi[i]
#self.rock[i] -= self.minusRock[i]/self.multi[i]
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.multi[i]
if self.lessMissed[i]: #QQstarS:Set [i] to [i]
self.minusRock[i] += self.minGain/5.0/self.multi[i]
#self.rock[i] -= self.minusRock[i]/5.0/self.multi[i]
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.multi[i]
elif (self.coOp or self.coOpGH) and self.numOfPlayers > 1: #co-op mode
if self.notesMissed[i]:
self.minusRock[self.coOpPlayerMeter] += self.minGain/self.multi[i]
rockMinusAmount = self.minusRock[self.coOpPlayerMeter]/self.multi[i]
self.rock[self.coOpPlayerMeter] -= rockMinusAmount
if self.plusRock[self.coOpPlayerMeter] > self.pluBase:
self.plusRock[self.coOpPlayerMeter] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[self.coOpPlayerMeter] <= self.pluBase:
self.plusRock[self.coOpPlayerMeter] = self.pluBase/self.multi[i]
if self.lessMissed[i]:
self.minusRock[self.coOpPlayerMeter] += self.minGain/5.0/self.multi[i]
rockMinusAmount = self.minusRock[self.coOpPlayerMeter]/5.0/self.multi[i]
self.rock[self.coOpPlayerMeter] -= rockMinusAmount
if self.plusRock[self.coOpPlayerMeter] > self.pluBase:
self.plusRock[self.coOpPlayerMeter] -= self.pluGain/2.5/self.multi[i]
if (self.rock[self.coOpPlayerMeter]/self.rockMax <= 0.667) and ((self.rock[self.coOpPlayerMeter]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
elif self.coOpRB and self.numOfPlayers > 1: #RB co-op mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.coOpMulti
if self.numDeadPlayers > 0:
self.minusRock[self.coOpPlayerMeter] += self.minGain/self.coOpMulti
rockMinusAmount = self.minusRock[self.coOpPlayerMeter]/self.coOpMulti
self.rock[self.coOpPlayerMeter] -= rockMinusAmount/self.numOfPlayers
self.rock[i] -= self.minusRock[i]/self.coOpMulti
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.coOpMulti
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.coOpMulti
if self.lessMissed[i]:
self.minusRock[i] += self.minGain/5.0/self.coOpMulti
if self.numDeadPlayers > 0:
self.minusRock[self.coOpPlayerMeter] += self.minGain/5.0/self.coOpMulti
rockMinusAmount = self.minusRock[i]/5.0/self.coOpMulti
self.rock[self.coOpPlayerMeter] -= rockMinusAmount/(self.numOfPlayers - self.numDeadPlayers)
self.rock[i] -= self.minusRock[i]/5.0/self.coOpMulti
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.coOpMulti
else: #normal mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.multi[i]
rockMinusAmount = self.minusRock[i]/self.multi[i]
self.rock[i] -= rockMinusAmount
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.multi[i]
if self.lessMissed[i]:
self.minusRock[i] += self.minGain/5.0/self.multi[i]
rockMinusAmount = self.minusRock[i]/5.0/self.multi[i]
self.rock[i] -= rockMinusAmount
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.multi[i]
if (self.rock[i]/self.rockMax <= 0.667) and ((self.rock[i]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
if self.minusRock[i] <= self.minBase:
self.minusRock[i] = self.minBase
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase
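#Rockmeter rewards for hits. In battle mode the gain is also subtracted from
# the targeted opponent's meter; crossing the 2/3 "green" mark can trigger a
# crowd cheer.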
def rockmeterIncrease(self, playerNum, vScore = 0):
i = playerNum
if self.instruments[i].isVocal:
rockPlusAmt = 500 + (500 * (vScore-2))
self.rock[i] += rockPlusAmt
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if not self.coOpRB:
if (self.rock[i]/self.rockMax > 0.667) and ((self.rock[i]-rockPlusAmt)/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
return
if self.instruments[i].isDrum:
self.drumStart = True
if not self.failingEnabled or self.practiceMode:
return
if not self.notesHit[i]: return
if self.battle and self.numOfPlayers > 1: #battle mode
if self.notesHit[i]:
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.multi[i]
if self.plusRock[i] > self.battleMax:
self.plusRock[i] = self.battleMax
self.rock[i] += self.plusRock[i]*self.multi[i]
self.rock[self.battleTarget[i]] -= self.plusRock[i]*self.multi[i]
if self.rock[self.battleTarget[i]] < 0:
self.rock[self.battleTarget[i]] = 0
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.multi[i]
#MFH TODO maintain separate rock status for each player
elif (self.coOp or self.coOpGH) and self.numOfPlayers > 1:
if self.rock[self.coOpPlayerMeter] < self.rockMax:
self.plusRock[self.coOpPlayerMeter] += self.pluGain*self.multi[i]
self.rock[self.coOpPlayerMeter] += self.plusRock[self.coOpPlayerMeter]*self.multi[i]
if self.rock[self.coOpPlayerMeter] >= self.rockMax:
self.rock[self.coOpPlayerMeter] = self.rockMax
if self.minusRock[self.coOpPlayerMeter] > self.minBase:
self.minusRock[self.coOpPlayerMeter] -= self.minGain/2.0*self.multi[i]
if (self.rock[self.coOpPlayerMeter]/self.rockMax > 0.667) and ((self.rock[self.coOpPlayerMeter]-(self.plusRock[self.coOpPlayerMeter]*self.multi[i]))/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
elif self.coOpRB and self.numOfPlayers > 1:
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.coOpMulti
self.rock[i] += (self.plusRock[i]*self.coOpMulti)
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.coOpMulti
else: #normal mode
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.multi[i]
self.rock[i] += self.plusRock[i]*self.multi[i]
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.multi[i]
#Log.debug(str((self.rock[i]-(self.plusRock[i]*self.multi[i]))/self.rockMax) % "AND" % str(self.rock[i]/self.rockMax))
if (self.rock[i]/self.rockMax > 0.667) and ((self.rock[i]-(self.plusRock[i]*self.multi[i]))/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
if self.minusRock[i] <= self.minBase:
self.minusRock[i] = self.minBase
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase
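#Flat rockmeter drain: used for the GH battle drain attack (larger hit) and,
# with the smaller value, while a failed RB co-op player awaits rescue.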
def rockmeterDrain(self, playerNum):
if self.battleGH:
self.rock[playerNum] -= 70.0
else:
self.rock[playerNum] -= 15.0
self.minusRock[playerNum] += self.minGain/10/self.coOpMulti
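#Main per-frame update loop: advances the song, runs failure detection, battle
# item timers, BRE bonus awarding, rockmeter and crowd-volume updates, the
# countdown/resume timers, and MIDI lyric line advancement.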
def run(self, ticks): #QQstarS: Fix this function
if self.song and self.song.readyToGo and not self.pause and not self.failed:
Scene.run(self, ticks)
if not self.resumeCountdown and not self.pause:
pos = self.getSongPosition()
self.song.update(ticks)
# update stage
else:
pos = self.pausePos
if self.vbpmLogicType == 1:
self.handleTempo(self.song, pos) #MFH - new global tempo / BPM handling logic
if self.bossBattle and self.rock[1] < 0:
if self.careerMode and not self.song.info.completed:
if self.song.info.count:
count = int(self.song.info.count)
else:
count = 0
count += 1
Log.debug("Song completed")
self.song.info.completed = True
self.song.info.count = "%d" % count
self.song.info.save()
#MFH - new failing detection logic
if self.failingEnabled:
#if self.numOfPlayers > 1:
if self.numOfPlayers > 1 and self.coOpType:
if self.rock[self.coOpPlayerMeter] <= 0:
self.failed = True
else:
if self.coOpRB:
for i, player in enumerate(self.playerList):
if self.rock[i] <= 0 and not self.coOpFailDone[i]:
self.instruments[i].coOpFailed = True
self.instruments[i].starPower = 0.0
self.engine.data.coOpFailSound.play()
self.deadPlayerList.append(i)
self.numDeadPlayers += 1
self.timesFailed[i] += 1
self.crowdsCheering = False
self.song.setInstrumentVolume(0.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.coOpFailDone[i] = True
elif self.numOfPlayers > 1 and self.battleGH:
for i, player in enumerate(self.playerList):
if self.rock[i] <= 0:
self.failed = True
else:
somebodyStillAlive = False
for i, player in enumerate(self.playerList):
if self.rock[i] > 0:
somebodyStillAlive = True
if not somebodyStillAlive: #only if everybody has failed
self.failed = True
if pos > self.lastDrumNoteTime: #MFH - disable drum scoring so that the drummer can get down with his bad self at the end of the song without penalty.
self.drumScoringEnabled = False # ...is that what drummers do?
for i,instrument in enumerate(self.instruments):
if instrument.isVocal:
instrument.requiredNote = instrument.getRequiredNote(pos, self.song)
instrument.run(ticks, pos)
scoreBack = instrument.getScoreChange()
if scoreBack is not None:
points, scoreThresh, taps = scoreBack
self.scoring[i].score += points * instrument.scoreMultiplier * self.multi[i]
self.scoring[i].percNotesHit += taps
scoreThresh = 5-scoreThresh
if scoreThresh > 3:
self.rockmeterIncrease(i, scoreThresh)
self.scoring[i].notesHit += 1
self.scoring[i].streak += 1
elif scoreThresh == 3:
self.scoring[i].streak = 0
elif scoreThresh < 3:
self.rockmeterDecrease(i, scoreThresh)
self.scoring[i].streak = 0
self.scoring[i].updateAvMult()
self.scoring[i].getStarScores()
if instrument.starPowerGained:
if instrument.starPower >= 50 and not instrument.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if self.phrases > 1:
if instrument.starPower >= 50 and not instrument.starPowerActive:
self.newScalingText(i, self.tsStarPowerReady)
instrument.starPowerGained = False
if instrument.starPowerActivate:
self.activateSP(i)
instrument.starPowerActivate = False
continue
self.stage.run(pos, instrument.currentPeriod)
playerNum = i
guitar = instrument
if guitar.battleObjects[0] != 0:
self.battleItemsHolding[i] = 1
else:
self.battleItemsHolding[i] = 0
if guitar.battleObjects[1] != 0:
self.battleItemsHolding[i] = 2
if guitar.battleObjects[2] != 0:
self.battleItemsHolding[i] = 3
if self.battleGH:
if guitar.battleBeingUsed[0] == 0 and guitar.battleBeingUsed[1] != 0:
guitar.battleBeingUsed[0] = guitar.battleBeingUsed[1]
guitar.battleBeingUsed[1] = 0
#Log.debug("Battle Being Used: %s" % str(guitar.battleBeingUsed))
time = self.getSongPosition()
if guitar.battleStatus[1]:
if time - guitar.battleDrainStart > guitar.battleDrainLength:
Log.debug("Drain for Player %d disabled" % i)
guitar.battleStatus[1] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 1:
guitar.battleBeingUsed[k] = 0
else:
self.rockmeterDrain(i)
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 5:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[6]:
if time - guitar.battleStartTimes[6] > guitar.battleLeftyLength:
Log.debug("Lefty Mode for Player %d disabled" % i)
guitar.battleStatus[6] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 6:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[8]:
if time - guitar.battleStartTimes[8] > guitar.battleAmpLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[8] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 8:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[7]:
if time - guitar.battleStartTimes[7] > guitar.battleDoubleLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[7] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 7:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[3]:
if guitar.battleBreakNow <= 0:
guitar.battleStatus[3] = False
guitar.battleBreakString = 0
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 3:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[2]:
if time - guitar.battleStartTimes[2] > guitar.battleDiffUpLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[2] = False
self.song.difficulty[i] = Song.difficulties[guitar.battleDiffUpValue]
guitar.difficulty = guitar.battleDiffUpValue
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 2:
guitar.battleBeingUsed[k] = 0
if guitar.isDrum and guitar.freestyleSP: #MFH - this drum fill starpower activation logic should always be checked.
self.activateSP(i)
guitar.freestyleSP = False
#MFH - check for any unplayed notes and for an unbroken streak since the BRE, then award bonus scores
#akedrou - does not work for co-op.
if self.coOpType:
scoreCard = self.coOpScoreCard
if scoreCard.freestyleWasJustActive and not scoreCard.endingAwarded:
if scoreCard.lastNoteTime < pos and not scoreCard.endingStreakBroken:
Log.debug("Big Rock Ending bonus awarded for co-op players! %d points." % scoreCard.endingScore)
if scoreCard.endingScore > 0:
scoreCard.addEndingScore()
self.engine.data.starActivateSound.play()
scoreCard.endingAwarded = True
else:
scoreCard = self.scoring[playerNum]
if scoreCard.freestyleWasJustActive and not scoreCard.endingAwarded:
if scoreCard.lastNoteEvent and not scoreCard.endingStreakBroken:
if scoreCard.lastNoteEvent.played or scoreCard.lastNoteEvent.hopod:
Log.debug("Big Rock Ending bonus awarded for player %d: %d points" % (playerNum, scoreCard.endingScore) )
if scoreCard.endingScore > 0:
scoreCard.addEndingScore()
self.engine.data.starActivateSound.play()
scoreCard.endingAwarded = True
if guitar.starPowerGained == True:
if self.unisonActive and self.inUnison[i]:
self.unisonEarn[i] = True
if self.coOpGH:
self.coOpStarPower += (25 * self.numOfPlayers) #lets 2 SP phrases give SP
if self.coOpStarPower > (100 * self.numOfPlayers):
self.coOpStarPower = (100 * self.numOfPlayers)
if self.coOpStarPower >= (50 * self.numOfPlayers) and not guitar.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if guitar.isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(playerNum)
else:
#myfingershurt: auto drum starpower activation option:
if guitar.isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(playerNum)
if guitar.starPower >= 50 and not guitar.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if self.phrases > 1:
if self.coOpGH:
if guitar.starPowerGained and self.coOpStarPower >= (50 * self.numOfPlayers) and not guitar.starPowerActive:
self.newScalingText(self.coOpPhrase, self.tsStarPowerReady )
elif self.battleGH:
if guitar.battleObjectGained and guitar.battleObjects[0] != 0:
self.battleText[i] = self.tsBattleIcons[guitar.battleObjects[0]]
guitar.battleObjectGained = False
else:
if guitar.starPower >= 50 and not guitar.starPowerActive: #QQstarS:Set [0] to [i]
self.newScalingText(playerNum, self.tsStarPowerReady )
self.hopFretboard(i, 0.04) #stump
guitar.starPowerGained = False #QQstarS:Set [0] to [i]
# update board
#for i,guitar in enumerate(self.guitars):
if self.coOpGH:
for k, theGuitar in enumerate(self.instruments):
theGuitar.starPower = self.coOpStarPower/self.numOfPlayers
if not guitar.run(ticks, pos, self.controls):
# done playing the current notes
self.endPick(i)
if guitar.drumFillsActive:
if self.muteDrumFill > 0 and not self.jurg[i]:
self.song.setInstrumentVolume(0.0, self.playerList[i].part)
#MFH - ensure this missed notes check doesn't fail you during a freestyle section
if guitar.freestyleActive or guitar.drumFillsActive:
missedNotes = guitar.getMissedNotesMFH(self.song, pos + guitar.lateMargin*2, catchup = True) #MFH - get all notes in the freestyle section.
for tym, theNote in missedNotes: #MFH - also want to mark these notes as Played so they don't count against the note total!
#theNote.played = True
theNote.skipped = True
if guitar.isDrum:
if self.coOpType:
self.coOpScoreCard.totalStreakNotes -= 1
self.scoring[playerNum].totalStreakNotes -= 1
else:
missedNotes = guitar.getMissedNotesMFH(self.song, pos)
if guitar.paused:
missedNotes = []
if missedNotes:
if guitar.isDrum:
self.drumStart = True
self.lessMissed[i] = True #QQstarS:Set [0] to [i]
for tym, theNote in missedNotes: #MFH
self.scoring[playerNum].notesMissed += 1
if self.coOpType:
self.coOpScoreCard.notesMissed += 1
if theNote.star or theNote.finalStar:
if self.logStarpowerMisses == 1:
Log.debug("SP Miss: run(), note: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
self.starNotesMissed[i] = True
if self.unisonActive:
self.inUnison[i] = False
if (self.scoring[i].streak != 0 or not self.processedFirstNoteYet) and not guitar.playedNotes and len(missedNotes) > 0:
if not self.processedFirstNoteYet:
self.stage.triggerMiss(pos)
self.notesMissed[i] = True
self.processedFirstNoteYet = True
self.currentlyAnimating = False
guitar.setMultiplier(1)
guitar.hopoLast = -1
self.song.setInstrumentVolume(0.0, self.playerList[playerNum].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[playerNum].part)
self.guitarSoloBroken[i] = True
if self.coOpType:
self.coOpScoreCard.streak = 0
self.coOpScoreCard.endingStreakBroken = True
self.scoring[playerNum].streak = 0
self.scoring[playerNum].endingStreakBroken = True #MFH
if self.hopoDebugDisp == 1:
missedNoteNums = [noat.number for time, noat in missedNotes]
#Log.debug("Miss: run(), found missed note(s)... %s" % str(missedNoteNums) + ", Time left=" + str(self.timeLeft))
Log.debug("Miss: run(), found missed note(s)... %(missedNotes)s, Song time=%(songTime)s" % \
{'missedNotes': str(missedNoteNums), 'songTime': str(self.timeLeft)})
guitar.hopoActive = 0
guitar.wasLastNoteHopod = False
guitar.sameNoteHopoString = False
guitar.hopoProblemNoteNum = -1
#self.problemNotesP1 = []
#self.problemNotesP2 = []
#notes = self.guitars[i].getRequiredNotesMFH(self.song, pos) #MFH - wtf was this doing here? I must have left it by accident o.o
#if not self.pause and not self.failed:
#myfingershurt: Capo's starpower claps on a user setting:
#if self.starClaps and self.song and len(self.beatTime) > 0 or (self.beatClaps and self.song and len(self.beatTime) > 0):
if (self.starClaps or self.beatClaps) and len(self.beatTime) > 0:
###Capo###
#Play a sound on each beat on starpower
clap = False
if self.playerList[0].practiceMode and self.beatClaps:
clap = True
else:
for i,player in enumerate(self.playerList):
if self.instruments[i].starPowerActive == True:
clap = True
break
#pos = self.getSongPosition()
if pos >= (self.beatTime[0] - 100):
self.beatTime.pop(0)
if clap == True:
if self.firstClap == False:
#self.sfxChannel.setVolume(self.sfxVolume)
#self.sfxChannel.play(self.engine.data.clapSound)
self.engine.data.clapSound.play()
else:
self.firstClap = False
else:
self.firstClap = True
###endCapo###
#MFH - new refugees from the render() function:
if self.theme == 2:
if self.rbOverdriveBarGlowFadeOut == False:
self.rbOverdriveBarGlowVisibility = self.rbOverdriveBarGlowVisibility + self.rbOverdriveBarGlowFadeInChunk
elif self.rbOverdriveBarGlowFadeOut == True:
self.rbOverdriveBarGlowVisibility = self.rbOverdriveBarGlowVisibility - self.rbOverdriveBarGlowFadeOutChunk
if self.rbOverdriveBarGlowVisibility >= 1 and self.rbOverdriveBarGlowFadeOut == False:
self.rbOverdriveBarGlowFadeOut = True
elif self.rbOverdriveBarGlowVisibility <= 0 and self.rbOverdriveBarGlowFadeOut == True:
self.rbOverdriveBarGlowFadeOut = False
for playerNum in range(self.numOfPlayers):
self.handlePhrases(playerNum, self.scoring[playerNum].streak) #MFH - streak #1 for player #1...
self.handleAnalogSP(playerNum, ticks)
self.handleWhammy(playerNum)
if self.playerList[playerNum].controlType == 4:
self.handleAnalogSlider(playerNum)
self.updateGuitarSolo(playerNum)
if self.coOpType:
self.handlePhrases(self.coOpPhrase, self.coOpScoreCard.streak)
self.handleJurgen(pos)
#stage rotation
#MFH - logic to prevent advancing rotation frames if you have screwed up, until you resume a streak
if (self.currentlyAnimating and self.missPausesAnim == 1) or self.missPausesAnim == 0:
self.stage.rotate()
self.starPowersActive = 0
self.coOpStarPower = 0
#MFH - new logic to update the starpower pre-multiplier
#akedrou - broken up to support RB Co-op properly.
for i in range(self.numOfPlayers):
if self.instruments[i].starPowerActive:
self.multi[i] = 2
self.starPowersActive += 1
else:
self.multi[i] = 1
sp = self.instruments[i].starPower
if self.coOpGH:
self.coOpStarPower += sp
if self.coOpRB:
if self.unisonIndex < len(self.unisonConfirm) and not self.unisonActive: #akedrou - unison bonuses
while self.unisonConfirm[self.unisonIndex][0] < pos:
self.unisonIndex += 1
if len(self.unisonConfirm) == self.unisonIndex:
break
if len(self.unisonConfirm) > self.unisonIndex:
if self.unisonConfirm[self.unisonIndex][0] - pos < self.song.period * 2:
self.unisonActive = True
self.firstUnison = True
self.unisonNum = len(self.unisonPlayers[self.unisonIndex])
if self.starPowersActive > 0:
self.coOpMulti = 2 * self.starPowersActive
else:
self.coOpMulti = 1
#MFH - rewritten rockmeter / starpower miss logic, and Faaa's drum sounds:
#the old logic was ridiculously complicated
# For each existing player
if self.coOpRB:
oldCoOpRock = self.rock[self.coOpPlayerMeter]
coOpRock = 0.0
for i in range(self.numOfPlayers):
if (self.coOpRB and not self.instruments[i].coOpFailed) or not self.coOpRB:
if self.notesMissed[i] or self.lessMissed[i]: #(detects missed note or overstrum)
if self.instruments[i].isDrum:
if self.drumMisses == 0: #mode: always
self.rockmeterDecrease(i)
#elif self.drumMisses == 1 and self.countdownSeconds < 1: #mode: song start
elif self.drumMisses == 1 and self.countdown < 1: #mode: song start
self.rockmeterDecrease(i)
elif self.drumMisses == 2 and self.drumStart: #mode: song start
self.rockmeterDecrease(i)
else: #not drums
self.rockmeterDecrease(i)
if self.notesHit[i]:
self.rockmeterIncrease(i)
if self.coOpRB:
coOpRock += self.rock[i]
else:
if not self.instruments[i].coOpRestart:
self.rockmeterDrain(self.coOpPlayerMeter)
else:
oldCoOpRock = 0.0
coOpRock += self.rock[i]
self.notesMissed[i] = False
self.starNotesMissed[i] = False
self.notesHit[i] = False
self.lessMissed[i] = False
if self.unisonActive:
if self.firstUnison and i in self.unisonPlayers[self.unisonIndex]:
self.inUnison[i] = True
self.haveUnison[i] = True
#battle failing
if self.battle and self.numOfPlayers>1:
if self.rock[i] <= 0:
#self.displayText[i] = "You Failed!!!!"
#self.newScalingText(i, _("You Failed!!!!") )
self.newScalingText(i, self.tsYouFailedBattle )
#self.streakFlag = str(i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
self.instruments[i].actions = [0,0,0]
if self.coOpRB: #RB co-op meter is just an average until someone dies.
if self.numDeadPlayers == 0:
self.rock[self.coOpPlayerMeter] = coOpRock/self.numOfPlayers
if (self.rock[self.coOpPlayerMeter]/self.rockMax > 0.667) and (oldCoOpRock/self.rockMax <= 0.667):
self.playersInGreen = 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
if (self.rock[self.coOpPlayerMeter]/self.rockMax <= 0.667) and (oldCoOpRock/self.rockMax > 0.667):
self.playersInGreen = 0
if self.unisonActive: #akedrou unison bonuses
if self.firstUnison:
self.firstUnison = False
self.firstUnisonDone = True
if pos - self.unisonConfirm[self.unisonIndex][1] > 0 and self.firstUnisonDone:
for i in range(len(self.inUnison)):
if self.inUnison[i] != self.haveUnison[i]:
break
else:
if self.engine.data.cheerSoundFound > 0:
self.engine.data.crowdSound.play()
for i,guitar in enumerate(self.instruments):
if self.inUnison[i]:
guitar.starPower += 25
if guitar.starPower > 100:
guitar.starPower = 100
self.firstUnisonDone = False
if pos - self.unisonConfirm[self.unisonIndex][1] > self.song.period * 2:
self.unisonIndex+=1
self.unisonActive = False
self.unisonEarn = [False for i in self.playerList]
self.haveUnison = [False for i in self.playerList]
self.inUnison = [False for i in self.playerList]
#akedrou Song/Crowd logic
if self.numDeadPlayers == 0:
if self.crowdsEnabled == 3 and self.crowdsCheering == False and not self.countdown: #prevents cheer-cut-cheer
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
elif self.crowdsEnabled == 0 and self.crowdsCheering == True: #setting change
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
elif self.crowdsEnabled == 1:
if self.starPowersActive > 0:
if self.crowdsCheering == False:
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
else:
if self.crowdsCheering == True:
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
elif self.crowdsEnabled == 2:
if self.starPowersActive > 0 or self.playersInGreen > 0:
if self.crowdsCheering == False:
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
else:
if self.crowdsCheering == True:
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
#Crowd fade-in/out
if self.crowdsCheering == True and self.crowdFaderVolume < self.crowdVolume:
self.crowdFaderVolume += self.crowdCheerFadeInChunk
if self.crowdFaderVolume > self.crowdVolume:
self.crowdFaderVolume = self.crowdVolume
self.song.setCrowdVolume(self.crowdFaderVolume)
if self.crowdsCheering == False and self.crowdFaderVolume > 0.0:
self.crowdFaderVolume -= self.crowdCheerFadeOutChunk
if self.crowdFaderVolume < 0.0:
self.crowdFaderVolume = 0.0
self.song.setCrowdVolume(self.crowdFaderVolume)
if self.countdown > 0 and self.countdownOK: #MFH won't start song playing if you failed or pause
self.countdown = max(self.countdown - ticks / self.song.period, 0)
self.countdownSeconds = self.countdown / self.songBPS + 1
if not self.countdown: #MFH - when countdown reaches zero, will only be executed once
#RF-mod should we collect garbage when we start?
self.engine.collectGarbage()
self.getHandicap()
self.song.setAllTrackVolumes(1)
self.song.setCrowdVolume(0.0)
self.song.clearPause()
self.crowdsCheering = False #catches crowdsEnabled != 3, pause before countdown, set to 3
self.starPowersActive = 0
self.playersInGreen = 0
for instrument in self.instruments:
if instrument.isVocal:
instrument.mic.start()
if self.playerList[0].practiceMode and self.engine.audioSpeedFactor == 1:
self.playerList[0].startPos -= self.song.period*4
if self.playerList[0].startPos < 0.0:
self.playerList[0].startPos = 0.0
self.song.play(start = self.playerList[0].startPos)
else:
self.song.play()
if self.resumeCountdown > 0: #unpause delay
self.resumeCountdown = max(self.resumeCountdown - ticks / self.song.period, 0)
self.resumeCountdownSeconds = self.resumeCountdown / self.songBPS + 1
if not self.resumeCountdown:
self.song.unpause()
self.pause = False
missedNotes = []
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.startMic()
if self.timeLeft == "0:01" and not self.mutedLastSecondYet and self.muteLastSecond == 1:
self.song.setAllTrackVolumes(0.0)
self.mutedLastSecondYet = True
#myfingershurt: this detects the end of the song and displays "you rock"
if self.countdown <= 0 and not self.song.isPlaying() and not self.done:
#must render fail message in render function, set and check flag here
self.youRock = True
#myfingershurt: This ends the song after 100 ticks of displaying "you rock" - if the user hasn't paused the game.
if self.rockFinished and not self.pause:
if self.battleGH:
self.restartSong()
else:
self.goToResults()
return
#MFH
if self.midiLyricMode == 1 and self.numMidiLyricLines > 0 and (not self.noMoreMidiLineLyrics) and not self.playingVocals: #line-by-line lyrics mode:
if pos >= (self.nextMidiLyricStartTime-self.lineByLineStartSlopMs):
self.currentSimpleMidiLyricLine = self.nextMidiLyricLine
if ( self.numMidiLyricLines > self.midiLyricLineIndex+1 ):
self.midiLyricLineIndex += 1
self.nextMidiLyricStartTime, self.nextMidiLyricLine = self.midiLyricLines[self.midiLyricLineIndex]
else:
self.noMoreMidiLineLyrics = True
elif self.midiLyricMode == 2 and self.numMidiLyricLines > 0 and (not self.noMoreMidiLineLyrics) and not self.playingVocals: #MFH - handle 2-line lyric mode with current-word highlighting advancement
#MFH - first, prepare / handle the active / top line (which will have highlighted words / syllables):
if pos >= self.nextLyricWordTime: #time to switch to this word
if self.nextLyricIsOnNewLine:
self.activeMidiLyricLineIndex += 1
self.activeMidiLyricWordSubIndex = 0
self.nextLyricIsOnNewLine = False
self.activeMidiLyricLine_GreyWords = ""
self.activeMidiLyricLine_GreenWords = "%s " % self.nextLyricEvent.text
self.numWordsInCurrentMidiLyricLine = 0
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
self.numWordsInCurrentMidiLyricLine += 1
if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
self.activeMidiLyricWordSubIndex += 1
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
self.activeMidiLyricLine_WhiteWords = ""
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]:
if nextLyricTime > pos:
self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
else: #next lyric is on the same line
if self.activeMidiLyricWordSubIndex > 0: #set previous word as grey
lastLyricTime, lastLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex-1]
self.activeMidiLyricLine_GreyWords = "%s%s " % (self.activeMidiLyricLine_GreyWords, lastLyricEvent.text)
self.activeMidiLyricLine_GreenWords = "%s " % self.nextLyricEvent.text
if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
self.activeMidiLyricWordSubIndex += 1
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
self.activeMidiLyricLine_WhiteWords = ""
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]:
if nextLyricTime > pos:
self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
else: #no more words in this line
if self.numMidiLyricLines > self.activeMidiLyricLineIndex+1: #there is another line
self.nextLyricIsOnNewLine = True
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex+1][0]
self.activeMidiLyricLine_WhiteWords = ""
else: #no more lines
self.noMoreMidiLineLyrics = True
self.activeMidiLyricLine_WhiteWords = ""
self.currentSimpleMidiLyricLine = ""
#Log.notice("No more MIDI lyric lines to handle!")
#MFH - then, prepare / handle the next / bottom line (which will just be a simple line with all white text):
if self.numMidiLyricLines > self.activeMidiLyricLineIndex+1:
tempTime, self.currentSimpleMidiLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex+1]
else:
self.currentSimpleMidiLyricLine = ""
def endPick(self, num):
score = self.getExtraScoreForCurrentlyPlayedNotes(num)
if not self.instruments[num].endPick(self.song.getPosition()):
#if self.hopoDebugDisp == 1:
# Log.debug("MFH: An early sustain release was detected, and it was deemed too early, and muting was attempted.")
if self.muteSustainReleases > 0:
self.song.setInstrumentVolume(0.0, self.players[num].part)
#elif self.hopoDebugDisp == 1:
# Log.debug("MFH: An early sustain release was detected, and it was not deemed too early, so muting was not attempted.")
if score != 0:
scoreTemp = score*self.multi[num]
if self.coOpType:
if not self.coOpGH:
self.coOpScoreCard.score += (scoreTemp*self.scoring[num].getScoreMultiplier())
else: #shared mult
self.coOpScoreCard.addScore(scoreTemp)
else:
self.scoring[num].addScore(scoreTemp)
def render3D(self):
if self.stage.mode == 3 and Stage.videoAvailable:
if self.countdown <= 0:
if self.pause == True or self.failed == True:
self.stage.vidPlayer.paused = True
else:
self.stage.vidPlayer.paused = False
else:
self.stage.vidPlayer.paused = True
self.stage.render(self.visibility)
def renderVocals(self):
for i, vocalist in enumerate(self.instruments):
if vocalist.isVocal:
vocalist.render(self.visibility, self.song, self.getSongPosition(), self.numOfPlayers)
def renderGuitar(self):
for i, guitar in enumerate(self.instruments):
if guitar.isVocal:
continue
self.engine.view.setViewport(self.numberOfGuitars,self.playerList[i].guitarNum)
if self.theme not in (0, 1, 2) or (not self.pause and not self.failed):
glPushMatrix()
if guitar.fretboardHop > 0.0:
glTranslatef(0.0, guitar.fretboardHop, 0.0) #stump: fretboard hop
guitar.fretboardHop -= 0.005
if guitar.fretboardHop < 0.0:
guitar.fretboardHop = 0.0
self.neckrender[i].render(self.visibility, self.song, self.getSongPosition())
guitar.render(self.visibility, self.song, self.getSongPosition(), self.controls, self.killswitchEngaged[i]) #QQstarS: new
glPopMatrix()
if self.coOp or self.coOpGH:
guitar.rockLevel = self.rock[self.coOpPlayerMeter] / self.rockMax
if self.rock[self.coOpPlayerMeter] < self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
elif self.coOpRB:
guitar.rockLevel = self.rock[i] / self.rockMax
if self.rock[i] < self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
elif self.numDeadPlayers > 0 and self.rock[self.coOpPlayerMeter] < self.rockMax/6.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
else:
guitar.rockLevel = self.rock[i] / self.rockMax
if self.rock[i] < self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
self.engine.view.setViewport(1,0)
def getSongPosition(self):
if self.song and self.song.readyToGo:
if not self.done:
self.lastSongPos = self.song.getPosition()
return self.lastSongPos - self.countdown * self.song.period
else:
# Nice speeding up animation at the end of the song
return self.lastSongPos + 4.0 * (1 - self.visibility) * self.song.period
return 0.0
def screwUp(self, num, controls):
if self.screwUpVolume > 0.0:
#self.sfxChannel.setVolume(self.screwUpVolume)
#if `self.playerList[num].part` == "Bass Guitar":
if self.instruments[num].isBassGuitar:
#self.sfxChannel.play(self.engine.data.screwUpSoundBass)
self.engine.data.screwUpSoundBass.play()
elif self.instruments[num].isDrum:
if self.drumMisses > 0: #MFH's cleaned-up - Faaa Drum sound
self.instruments[num].playDrumSounds(controls)
else:
self.engine.data.screwUpSoundDrums.play() #plays random drum sounds
else: #guitar
self.engine.data.screwUpSound.play()
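#doPick: plain strum scoring (no HO/PO). A successful startPick restores track
# volume and awards baseScore per played note times the current multiplier;
# a miss mutes the track, breaks the streak and plays a screw-up sound (unless
# a drum fill was just active, which forgives the hit).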
def doPick(self, num):
if not self.song:
return
pos = self.getSongPosition()
if self.instruments[num].playedNotes:
# If all the played notes are tappable, there are no required notes and
# the last note was played recently enough, ignore this pick
if self.instruments[num].areNotesTappable(self.instruments[num].playedNotes) and \
not self.instruments[num].getRequiredNotes(self.song, pos) and \
pos - self.lastPickPos[num] <= self.song.period / 2:
return
self.endPick(num)
self.lastPickPos[num] = pos
if self.coOpType:
scoreCard = self.coOpScoreCard
else:
scoreCard = self.scoring[num]
self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
#volshebnyi - disable failing if BRE is active
if self.instruments[num].startPick(self.song, pos, self.controls):
if self.instruments[num].isDrum:
self.drumStart = True
self.song.setInstrumentVolume(1.0, self.playerList[num].part)
self.currentlyAnimating = True
self.notesHit[num] = True #QQstarS:Set [0] to [i]
tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
if self.coOpType and not self.coOpGH:
scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
else:
self.scoring[num].addScore(tempScoreValue)
scoreCard.notesHit += 1
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if scoreCard.notesHit > scoreCard.totalStreakNotes:
scoreCard.totalStreakNotes = scoreCard.notesHit
scoreCard.streak += 1
if self.coOpType:
self.scoring[num].notesHit += 1
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
self.scoring[num].streak += 1
scoreCard.updateAvMult()
star = scoreCard.stars
a = scoreCard.getStarScores()
if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
self.engine.data.starDingSound.play()
self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
if self.coOpGH:
if scoreCard.streak%10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
elif not self.battleGH:
if self.scoring[num].streak % 10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
#myfingershurt
if self.showAccuracy:
self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
self.dispAccuracy[num] = True
isFirst = True
noteList = self.instruments[num].playedNotes
for tym, noat in noteList:
if noat.star and isFirst:
self.instruments[num].isStarPhrase = True
isFirst = False
else:
ApplyPenalty = True
if self.instruments[num].isDrum:
if self.instruments[num].drumFillWasJustActive:
ApplyPenalty = False
self.instruments[num].freestylePick(self.song, pos, self.controls) #MFH - to allow late drum fill SP activation
self.instruments[num].drumFillWasJustActive = False
if ApplyPenalty:
self.song.setInstrumentVolume(0.0, self.playerList[num].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[num].part)
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.instruments[num].setMultiplier(1)
self.currentlyAnimating = False
self.stage.triggerMiss(pos)
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
isFirst = True
noteList = self.instruments[num].matchingNotes
for tym, noat in noteList:
if (noat.star or noat.finalStar) and isFirst:
self.starNotesMissed[num] = True
isFirst = False
self.screwUp(num, self.controls) #MFH - call screw-up sound handling function
#myfingershurt: ensure accuracy display off when miss
self.dispAccuracy[num] = False
#myfingershurt: bass drum sound play
if self.instruments[num].isDrum and self.bassKickSoundEnabled:
self.instruments[num].playDrumSounds(self.controls, playBassDrumOnly = True)
def doPick2(self, num, hopo = False):
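"""Handle a strum or HOPO for player num in RF-mod style. Missed notes prior
to this pick are caught up first (breaking the streak); a strum arriving just
after a valid HOPO is ignored within the late margin ("hopo fudge")."""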
if not self.song:
return
pos = self.getSongPosition()
#clear out any missed notes before this pick since they are already missed by virtue of the pick
missedNotes = self.instruments[num].getMissedNotes(self.song, pos, catchup = True)
if self.coOpType:
scoreCard = self.coOpScoreCard
else:
scoreCard = self.scoring[num]
if len(missedNotes) > 0:
self.processedFirstNoteYet = True
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.instruments[num].setMultiplier(1)
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].hopoLast = -1
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
for tym, theNote in missedNotes: #MFH
if theNote.star or theNote.finalStar:
self.starNotesMissed[num] = True
if hopo == True:
return
#hopo fudge
hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
if len(activeList) == 1 and (self.instruments[num].keys[self.instruments[num].hopoLast] == activeList[0] or self.instruments[num].keys[self.instruments[num].hopoLast+5] == activeList[0]):
if self.instruments[num].wasLastNoteHopod and hopoFudge > 0 and hopoFudge < self.instruments[num].lateMargin:
return
self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
if self.instruments[num].startPick2(self.song, pos, self.controls, hopo):
self.song.setInstrumentVolume(1.0, self.playerList[num].part)
if self.instruments[num].playedNotes:
scoreCard.streak += 1
self.currentlyAnimating = True
if self.coOpType:
self.scoring[num].streak += 1
self.scoring[num].notesHit += 1
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
self.notesHit[num] = True #QQstarS:Set [0] to [i]
scoreCard.notesHit += 1 # glorandwarf: was len(self.guitars[num].playedNotes)
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if scoreCard.notesHit > scoreCard.totalStreakNotes:
scoreCard.totalStreakNotes = scoreCard.notesHit
tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
if self.coOpType and not self.coOpGH:
scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
else:
scoreCard.addScore(tempScoreValue)
scoreCard.updateAvMult()
star = scoreCard.stars
a = scoreCard.getStarScores()
if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
self.engine.data.starDingSound.play()
self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
if self.coOpGH:
if scoreCard.streak%10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
elif not self.battleGH:
if self.scoring[num].streak % 10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
isFirst = True
noteList = self.instruments[num].playedNotes
for tym, noat in noteList:
if noat.star and isFirst:
self.instruments[num].isStarPhrase = True
isFirst = False
else:
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.currentlyAnimating = False
self.instruments[num].hopoLast = -1
self.song.setInstrumentVolume(0.0, self.playerList[num].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[num].part)
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.instruments[num].setMultiplier(1)
self.stage.triggerMiss(pos)
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
isFirst = True
noteList = self.instruments[num].matchingNotes
for tym, noat in noteList:
if (noat.star or noat.finalStar) and isFirst:
self.starNotesMissed[num] = True
isFirst = False
self.screwUp(num, self.controls)
#-----------------------
def doPick3RF(self, num, hopo = False):
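"""Handle a strum or HOPO for player num in RF-mod style via startPick3.
Like doPick2, but also re-checks for notes skipped within the pick itself
and breaks the streak for them before scoring the new hits."""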
if not self.song:
return
pos = self.getSongPosition()
#clear out any past the window missed notes before this pick since they are already missed by virtue of the pick
missedNotes = self.instruments[num].getMissedNotes(self.song, pos, catchup = True)
if self.coOpType:
scoreCard = self.coOpScoreCard
else:
scoreCard = self.scoring[num]
if len(missedNotes) > 0:
self.processedFirstNoteYet = True
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.instruments[num].setMultiplier(1)
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].hopoLast = -1
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #qqstars
for tym, theNote in missedNotes: #MFH
if theNote.star or theNote.finalStar:
self.starNotesMissed[num] = True
if hopo == True:
return
#hopo fudge
hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
if len(activeList) == 1 and (self.instruments[num].keys[self.instruments[num].hopoLast] == activeList[0] or self.instruments[num].keys[self.instruments[num].hopoLast+5] == activeList[0]):
if self.instruments[num].wasLastNoteHopod and hopoFudge > 0 and hopoFudge < self.instruments[num].lateMargin:
return
self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
if self.instruments[num].startPick3(self.song, pos, self.controls, hopo):
self.processedFirstNoteYet = True
self.song.setInstrumentVolume(1.0, self.playerList[num].part)
#Any previous notes missed, but new ones hit, reset streak counter
if len(self.instruments[num].missedNotes) != 0:
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #qqstars
for chord in self.instruments[num].missedNotes:
for tym, theNote in chord: #MFH
if not theNote.played and (theNote.star or theNote.finalStar):
self.starNotesMissed[num] = True
isFirst = True
noteList = self.instruments[num].playedNotes
for tym, noat in noteList:
if noat.star and isFirst:
self.instruments[num].isStarPhrase = True
isFirst = False
scoreCard.streak += 1
self.notesHit[num] = True #qqstars
self.currentlyAnimating = True
scoreCard.notesHit += 1 # glorandwarf: was len(self.instruments[num].playedNotes)
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if scoreCard.notesHit > scoreCard.totalStreakNotes:
scoreCard.totalStreakNotes = scoreCard.notesHit
tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
if self.coOpType:
self.scoring[num].streak += 1
self.scoring[num].notesHit += 1
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
if self.coOpGH:
scoreCard.addScore(tempScoreValue)
else:
scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
else:
scoreCard.addScore(tempScoreValue)
scoreCard.updateAvMult()
star = scoreCard.stars
a = scoreCard.getStarScores()
if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
self.engine.data.starDingSound.play()
self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
if self.coOpGH:
if scoreCard.streak%10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
else:
if self.scoring[num].streak % 10 == 0:
self.lastMultTime[num] = pos
self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
#myfingershurt
if self.showAccuracy:
self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
self.dispAccuracy[num] = True
else:
self.currentlyAnimating = False
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].hopoLast = -1
self.song.setInstrumentVolume(0.0, self.playerList[num].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[num].part)
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.instruments[num].setMultiplier(1)
self.stage.triggerMiss(pos)
self.notesMissed[num] = True #qqstars
isFirst = True
noteList = self.instruments[num].matchingNotes
for tym, noat in noteList:
if (noat.star or noat.finalStar) and isFirst:
self.starNotesMissed[num] = True
isFirst = False
self.screwUp(num, self.controls)
#myfingershurt: ensure accuracy display off when miss
self.dispAccuracy[num] = False
#-----------------------
def doPick3GH2(self, num, hopo = False, pullOff = False): #MFH - so DoPick knows when a pull-off was performed
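"""Handle a strum, HOPO, or pull-off for player num in GH2 style. The
pullOff flag suppresses the miss penalty for failed pull-offs; the same-note
HOPO "problem note" tracking allows one acceptable overstrum, and the
hopoStyle setting (GH2 / GH2 Strict / GH2 Sloppy) decides when a failed
strum is forgiven."""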
if not self.song:
return
pos = self.getSongPosition()
chordFudge = 1 #MFH - was 10 #myfingershurt - needed to detect chords
if self.coOpType:
scoreCard = self.coOpScoreCard
else:
scoreCard = self.scoring[num]
missedNotes = self.instruments[num].getMissedNotesMFH(self.song, pos, catchup = True)
if len(missedNotes) > 0:
self.processedFirstNoteYet = True
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.instruments[num].setMultiplier(1)
self.instruments[num].hopoActive = 0
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].hopoLast = -1
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
for tym, theNote in missedNotes: #MFH
if theNote.star or theNote.finalStar:
if self.logStarpowerMisses == 1:
Log.debug("SP Miss: doPick3GH2(), foundMissedCatchupNote: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
self.starNotesMissed[num] = True
if self.unisonActive:
self.inUnison[num] = False
if self.hopoDebugDisp == 1:
missedNoteNums = [noat.number for time, noat in missedNotes]
#Log.debug("Miss: dopick3gh2(), found missed note(s).... %s" % str(missedNoteNums) + ", Time left=" + str(self.timeLeft))
Log.debug("Miss: dopick3gh2(), found missed note(s)... %(missedNotes)s, Song time=%(songTime)s" % \
{'missedNotes': str(missedNoteNums), 'songTime': str(self.timeLeft)})
if hopo == True:
return
#hopo fudge
hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
#myfingershurt - same-note HOPO ("tappable = 3") handling:
#Treat every tappable = 3 note as a potential problem note and accept one overstrum for it.
#Rather than checking against a known list of chord notes that might be associated, just track
# whether the original problem note (tappable = 3) is still held. While it is held, whether or
# not it matches the notes, it can still be involved in the problematic pattern - so continue
# to monitor for an acceptable overstrum. A lone tappable = 3 note with no other notes in the
# hit window behaves the same way: it is marked as a problem, and a strum on it counts as the
# acceptable overstrum. MUCH simpler logic!
activeKeyList = []
#myfingershurt: the following checks should be performed every time so GH2 Strict pull-offs can be detected properly.
LastHopoFretStillHeld = False
HigherFretsHeld = False
problemNoteStillHeld = False
for n, k in enumerate(self.keysList[num]):
if self.controls.getState(k):
activeKeyList.append(k)
if self.instruments[num].hopoLast == n or self.instruments[num].hopoLast == n - 5:
LastHopoFretStillHeld = True
elif (n > self.instruments[num].hopoLast and n < 5) or (n - 5 > self.instruments[num].hopoLast and n > 4):
HigherFretsHeld = True
if self.instruments[num].hopoProblemNoteNum == n or self.instruments[num].hopoProblemNoteNum == n - 5:
problemNoteStillHeld = True
if not hopo and self.instruments[num].wasLastNoteHopod and not self.instruments[num].LastStrumWasChord and not self.instruments[num].sameNoteHopoString:
if LastHopoFretStillHeld == True and HigherFretsHeld == False:
if self.instruments[num].wasLastNoteHopod and hopoFudge >= 0 and hopoFudge < self.instruments[num].lateMargin:
if self.instruments[num].hopoActive < 0:
self.instruments[num].wasLastNoteHopod = False
#if self.hopoDebugDisp == 1:
# Log.debug("HOPO Strum ignored: Standard HOPO strum (hopoActive < 0). Time left=" + str(self.timeLeft))
return
elif self.instruments[num].hopoActive > 0: #make sure it's hopoActive!
self.instruments[num].wasLastNoteHopod = False
#if self.hopoDebugDisp == 1:
# Log.debug("HOPO Strum ignored: Standard HOPO strum (hopoActive not < 0). Time left=" + str(self.timeLeft))
return
#MFH - here, just check to see if we can release the expectation for an acceptable overstrum:
if self.instruments[num].sameNoteHopoString and not problemNoteStillHeld:
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
if self.instruments[num].startPick3(self.song, pos, self.controls, hopo):
self.processedFirstNoteYet = True
self.song.setInstrumentVolume(1.0, self.playerList[num].part)
#Any previous notes missed, but new ones hit, reset streak counter
if len(self.instruments[num].missedNotes) > 0:
if self.hopoDebugDisp == 1 and not self.instruments[num].isDrum:
#Log.debug("Skipped note(s) detected in startpick3: " + str(self.instruments[num].missedNoteNums))
problemNoteMatchingList = [(int(tym), noat.number, noat.played) for tym, noat in self.instruments[num].matchingNotes]
#Log.debug("Skipped note(s) detected in startpick3: " + str(self.instruments[num].missedNoteNums) + ", problemMatchingNotes: " + str(problemNoteMatchingList) + ", activeKeys= " + str(activeKeyList) + ", Time left=" + str(self.timeLeft))
Log.debug("Skipped note(s) detected in startpick3: %(missedNotes)s, notesToMatch: %(matchNotes)s, activeFrets: %(activeFrets)s, Song time=%(songTime)s" % \
{'missedNotes': str(self.instruments[num].missedNoteNums), 'matchNotes': str(problemNoteMatchingList), 'activeFrets': str(activeKeyList), 'songTime': str(self.timeLeft)})
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
for chord in self.instruments[num].missedNotes:
for tym, theNote in chord: #MFH
if not theNote.played and (theNote.star or theNote.finalStar):
if self.logStarpowerMisses == 1:
Log.debug("SP Miss: doPick3GH2(), afterStartPick3Ok-foundMissedCatchupNote: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
self.starNotesMissed[num] = True
if self.unisonActive:
self.inUnison[num] = False
isFirst = True
noteList = self.instruments[num].playedNotes
for tym, noat in noteList:
if noat.star and isFirst:
self.instruments[num].isStarPhrase = True
isFirst = False
scoreCard.streak += 1
self.notesHit[num] = True #QQstarS:Set [0] to [i]
self.currentlyAnimating = True
scoreCard.notesHit += 1 # glorandwarf: was len(self.guitars[num].playedNotes)
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if scoreCard.notesHit > scoreCard.totalStreakNotes:
scoreCard.totalStreakNotes = scoreCard.notesHit
tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
if self.coOpType:
self.scoring[num].streak += 1 #needed in co-op GH for RF HO/PO
self.scoring[num].notesHit += 1
#MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
if self.coOpGH:
scoreCard.addScore(tempScoreValue)
else:
scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
else:
scoreCard.addScore(tempScoreValue)
scoreCard.updateAvMult()
star = scoreCard.stars
a = scoreCard.getStarScores()
if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
self.engine.data.starDingSound.play()
self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
if self.scoring[num].streak % 10 == 0:
self.lastMultTime[num] = self.getSongPosition()
self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
if self.showAccuracy:
self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
self.dispAccuracy[num] = True
else:
ApplyPenalty = True
if self.hopoDebugDisp == 1:
sameNoteHopoFlagWas = self.instruments[num].sameNoteHopoString #MFH - need to store this for debug info
lastStrumWasChordWas = self.instruments[num].LastStrumWasChord #MFH - for debug info
if pullOff: #always ignore bad pull-offs
ApplyPenalty = False
if (self.hopoStyle == 2 and hopo == True): #GH2 Strict
if (self.instruments[num].LastStrumWasChord or (self.instruments[num].wasLastNoteHopod and LastHopoFretStillHeld)):
ApplyPenalty = False
if (self.hopoStyle == 4 and hopo == True): #GH2 Sloppy
ApplyPenalty = False
if (self.hopoStyle == 3 and hopo == True): #GH2
ApplyPenalty = False
if not (self.instruments[num].LastStrumWasChord or (self.instruments[num].wasLastNoteHopod and LastHopoFretStillHeld)):
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].LastStrumWasChord = False
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
self.instruments[num].hopoLast = -1
if self.instruments[num].sameNoteHopoString:
if LastHopoFretStillHeld:
ApplyPenalty = False
self.instruments[num].playedNotes = self.instruments[num].lastPlayedNotes #restore played notes status
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
elif HigherFretsHeld:
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
if ApplyPenalty == True:
self.currentlyAnimating = False
self.instruments[num].hopoActive = 0
self.instruments[num].wasLastNoteHopod = False
self.instruments[num].sameNoteHopoString = False
self.instruments[num].hopoProblemNoteNum = -1
self.instruments[num].hopoLast = -1
self.song.setInstrumentVolume(0.0, self.playerList[num].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[num].part)
scoreCard.streak = 0
if self.coOpType:
self.scoring[num].streak = 0
self.scoring[num].endingStreakBroken = True
self.guitarSoloBroken[num] = True
scoreCard.endingStreakBroken = True #MFH
self.instruments[num].setMultiplier(1)
self.stage.triggerMiss(pos)
if self.hopoDebugDisp == 1 and not self.instruments[num].isDrum:
problemNoteMatchingList = [(int(tym), noat.number, noat.played) for tym, noat in self.instruments[num].matchingNotes]
#Log.debug("Miss: dopick3gh2(), fail-startpick3()...HigherFretsHeld: " + str(HigherFretsHeld) + ", LastHopoFretHeld: " + str(LastHopoFretStillHeld) + ", lastStrumWasChord: " + str(lastStrumWasChordWas) + ", sameNoteHopoStringFlag: " + str(sameNoteHopoFlagWas) + ", problemNoteMatchingList: " + str(problemNoteMatchingList) + ", activeKeys= " + str(activeKeyList) + ", Time left=" + str(self.timeLeft))
Log.debug("Miss: dopick3gh2(), fail-startpick3()...HigherFretsHeld: %(higherFrets)s, LastHopoFretHeld: %(lastHopoFret)s, lastStrumWasChord: %(lastStrumChord)s, sameNoteHopoStringFlag: %(sameNoteHopoFlag)s, notesToMatch: %(matchNotes)s, activeFrets: %(activeFrets)s, Song time=%(songTime)s" % \
{'higherFrets': str(HigherFretsHeld), 'lastHopoFret': str(LastHopoFretStillHeld), 'lastStrumChord': str(lastStrumWasChordWas), 'sameNoteHopoFlag': str(sameNoteHopoFlagWas), 'matchNotes': str(problemNoteMatchingList), 'activeFrets': str(activeKeyList), 'songTime': str(self.timeLeft)})
self.notesMissed[num] = True #QQstarS:Set [0] to [i]
isFirst = True
noteList = self.instruments[num].matchingNotes
for tym, noat in noteList:
if (noat.star or noat.finalStar) and isFirst:
if self.logStarpowerMisses == 1:
Log.debug("SP Miss: doPick3GH2(), afterStartPick3Fail, matchingNote: %d, gameTime: %s" % (noat.number, self.timeLeft) )
self.starNotesMissed[num] = True
if self.unisonActive:
self.inUnison[num] = False
isFirst = False
self.screwUp(num, self.controls)
self.dispAccuracy[num] = False
#myfingershurt: bass drum sound play
if self.instruments[num].isDrum and self.bassKickSoundEnabled:
self.instruments[num].playDrumSounds(self.controls, playBassDrumOnly = True)
#stump: hop a fretboard
def hopFretboard(self, num, height):
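"""Raise player num's fretboard hop to at least the given height (visual
feedback, e.g. on starpower activation)."""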
if self.instruments[num].fretboardHop < height:
self.instruments[num].fretboardHop = height
def activateSP(self, num): #QQstarS: fixed this function; added the num parameter
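"""Activate starpower (or a battle object) for player num. In GH battle
mode this fires the player's queued battle object at their target; in GH
co-op the shared SP pool activates every player's board at once; otherwise
50% SP activates normally, or - in RB co-op - revives a failed bandmate
instead."""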
if self.battleGH: #from akedrou: this will die horribly if you allow vocal players in. Just sayin'. ... sorry?
time = self.getSongPosition()
if time - self.battleJustUsed[num] > 1500: #must wait 1.5sec before next object use
if self.instruments[num].battleObjects[0] != 0:
self.engine.data.battleUsedSound.play()
self.instruments[self.battleTarget[num]].battleStatus[self.instruments[num].battleObjects[0]] = True
#start object use on other player
self.instruments[self.battleTarget[num]].battleStartTimes[self.instruments[num].battleObjects[0]] = time
if self.instruments[num].battleObjects[0] == 1:
self.instruments[self.battleTarget[num]].battleDrainStart = time
elif self.instruments[num].battleObjects[0] == 3:
#Log.debug("String Cut")
self.instruments[self.battleTarget[num]].battleBreakNow = self.instruments[self.battleTarget[num]].battleBreakLimit
self.instruments[self.battleTarget[num]].battleBreakString = random.randint(0,4)
self.endPick(self.battleTarget[num])
elif self.instruments[num].battleObjects[0] == 4:
#Log.debug("Wammy")
self.instruments[self.battleTarget[num]].battleWhammyNow = self.instruments[self.battleTarget[num]].battleWhammyLimit
self.endPick(self.battleTarget[num])
elif self.instruments[num].battleObjects[0] == 5:
#Log.debug("Take Object")
if self.instruments[self.battleTarget[num]].battleObjects[0] != 0:
self.instruments[num].battleObjects[0] = self.instruments[self.battleTarget[num]].battleObjects[0]
self.instruments[self.battleTarget[num]].battleObjects[0] = self.instruments[self.battleTarget[num]].battleObjects[1]
self.instruments[self.battleTarget[num]].battleObjects[1] = self.instruments[self.battleTarget[num]].battleObjects[2]
self.instruments[self.battleTarget[num]].battleObjects[2] = 0
self.instruments[self.battleTarget[num]].battleStatus[5] = False
self.battleText[num] = None
self.battleTextTimer[num] = 0
self.instruments[num].battleObjectGained = self.instruments[num].battleObjects[0]
self.battleJustUsed[num] = time
return
#tells us which objects are currently running
if self.instruments[self.battleTarget[num]].battleBeingUsed[1] != 0:
self.instruments[self.battleTarget[num]].battleStatus[self.instruments[self.battleTarget[num]].battleBeingUsed[1]] = False
if self.instruments[self.battleTarget[num]].battleBeingUsed[0] != 0:
if self.instruments[self.battleTarget[num]].battleBeingUsed[0] != self.instruments[num].battleObjects[0]:
self.instruments[self.battleTarget[num]].battleBeingUsed[1] = self.instruments[self.battleTarget[num]].battleBeingUsed[0]
self.instruments[self.battleTarget[num]].battleBeingUsed[0] = self.instruments[num].battleObjects[0]
#bring up other objects in players queue
self.instruments[num].battleObjects[0] = self.instruments[num].battleObjects[1]
self.instruments[num].battleObjects[1] = self.instruments[num].battleObjects[2]
self.instruments[num].battleObjects[2] = 0
self.battleText[num] = None
self.battleTextTimer[num] = 0
self.battleJustUsed[num] = time
#Log.debug("Battle Object used, Objects left %s" % str(self.instruments[num].battleObjects))
elif self.coOpGH: #akedrou also says don't let vocal players in GH Co-Op.
if self.coOpStarPower >= (50 * self.numOfPlayers) and self.instruments[num].starPowerActive == False:
time = self.getSongPosition()
Log.debug("Star Power Activated at: " + str(time))
self.coOpStarPowerActive[num] = time
if time - min(self.coOpStarPowerActive) < 300.0 and not self.instruments[num].starPowerActive:
self.engine.data.starActivateSound.play()
for i in range(self.numOfPlayers):
self.hopFretboard(i, 0.07) #stump
self.instruments[i].starPowerActive = True
self.instruments[i].neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.instruments[i].neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
else:
if time - self.coOpStarPowerTimer > 1000.0:
for i in range(self.numOfPlayers):
Log.debug(str(time - self.coOpStarPowerActive[i]))
if time - self.coOpStarPowerActive[i] < 300.0:
continue
if self.instruments[i].isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(i)
break
if self.phrases > 1:
self.newScalingText(i, self.tsCoOpStarPower )
self.coOpStarPowerTimer = time
else:
guitar = self.instruments[num]
if guitar.starPower >= 50: #QQstarS:Set [0] to [i]
if self.coOpRB:
while len(self.deadPlayerList) > 0:
i = self.deadPlayerList.pop(0) #keeps order intact (with >2 players)
if self.instruments[i].coOpFailed and self.timesFailed[i]<3:
self.instruments[i].coOpRescue(self.getSongPosition())
self.rock[i] = self.rockMax * 0.667
guitar.starPower -= 50
self.engine.data.rescueSound.play()
self.coOpFailDone[i] = False
self.numDeadPlayers -= 1
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
break
else:
if not guitar.starPowerActive:
self.engine.data.starActivateSound.play()
guitar.starPowerActive = True #QQstarS:Set [0] to [i]
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
else:
if not guitar.starPowerActive:
self.engine.data.starActivateSound.play()
guitar.starPowerActive = True #QQstarS:Set [0] to [i]
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
def goToResults(self):
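"""Stop the song, finalize all scorecards, and hand off to
GameResultsScene - or return to song selection if no score was earned."""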
self.ending = True
if self.song:
self.song.stop()
self.done = True
noScore = False
for i, player in enumerate(self.playerList):
player.twoChord = self.instruments[i].twoChord
if self.playerList[0].practiceMode:
self.scoring[i].score = 0
if self.scoring[i].score > 0:
noScore = False
break
else:
if not (self.coOpType and self.coOpScoreCard.score > 0):
noScore = True
#Reset Score if Jurgen played -- Spikehead777 - handled by GameResults now. You can watch your score evaporate!
if not noScore:
#MFH/akedrou - force one stat update before gameresults just in case:
self.getHandicap()
for scoreCard in self.scoring:
scoreCard.updateAvMult()
scoreCard.getStarScores()
if self.coOpType:
self.coOpScoreCard.updateAvMult()
self.coOpScoreCard.getStarScores()
#akedrou - begin the implementation of the ScoreCard
if self.coOpType:
scoreList = self.scoring
scoreList.append(self.coOpScoreCard)
if self.coOp:
coOpType = 1
elif self.coOpRB:
coOpType = 2
elif self.coOpGH:
coOpType = 3
else:
coOpType = 1
else:
scoreList = self.scoring
coOpType = 0
self.engine.view.setViewport(1,0)
self.freeResources()
self.engine.world.createScene("GameResultsScene", libraryName = self.libraryName, songName = self.songName, scores = scoreList, coOpType = coOpType, careerMode = self.careerMode)
else:
self.changeSong()
def keyPressed(self, key, unicode, control = None):
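"""Main key-press handler. Drum hits are dispatched immediately; when HOPOs
are enabled this delegates to keyPressed3. Otherwise it handles strums,
taps, pause/start, cheat codes, starpower activation, and the digital
killswitch."""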
#RF style HOPO playing
#myfingershurt: drums :)
for i in range(self.numOfPlayers):
if self.instruments[i].isDrum and control in (self.instruments[i].keys):
if control in Player.bassdrums:
self.instruments[i].drumsHeldDown[0] = 100
elif control in Player.drum1s:
self.instruments[i].drumsHeldDown[1] = 100
elif control in Player.drum2s:
self.instruments[i].drumsHeldDown[2] = 100
elif control in Player.drum3s:
self.instruments[i].drumsHeldDown[3] = 100
elif control in Player.drum5s:
self.instruments[i].drumsHeldDown[4] = 100
self.handlePick(i)
return True
if self.hopoStyle > 0: #HOPOs enabled
res = self.keyPressed3(key, unicode, control)
return res
actual = False
if not control:
actual = True
control = self.controls.keyPressed(key)
num = self.getPlayerNum(control)
if num is None:
return True
if self.instruments[num].isDrum and control in self.instruments[num].keys:
if actual:
if control in Player.bassdrums:
self.instruments[num].drumsHeldDown[0] = 100
self.instruments[num].playedSound[0] = False
elif control in Player.drum1s:
self.instruments[num].drumsHeldDown[1] = 100
self.instruments[num].playedSound[1] = False
elif control in Player.drum2s:
self.instruments[num].drumsHeldDown[2] = 100
self.instruments[num].playedSound[2] = False
elif control in Player.drum3s:
self.instruments[num].drumsHeldDown[3] = 100
self.instruments[num].playedSound[3] = False
elif control in Player.drum5s:
self.instruments[num].drumsHeldDown[4] = 100
self.instruments[num].playedSound[4] = False
if self.battleGH:
if self.instruments[num].battleStatus[3]:
if control == self.instruments[num].keys[self.instruments[num].battleBreakString]:
self.instruments[num].battleBreakNow -= 1
self.controls.toggle(control, False)
if control in (self.instruments[num].actions):
for k in self.keysList[num]:
if self.controls.getState(k):
self.keyBurstTimeout[num] = None
break
else:
#self.keyBurstTimeout[num] = self.engine.timer.time + self.keyBurstPeriod
return True
if control in (self.instruments[num].actions) and self.song:
self.doPick(num)
elif control in self.keysList[num] and self.song:
# Check whether we can tap the currently required notes
pos = self.getSongPosition()
notes = self.instruments[num].getRequiredNotes(self.song, pos)
if ((self.scoring[num].streak > 0 and self.instruments[num].areNotesTappable(notes)) or \
(self.instruments[num].guitarSolo and control in self.soloKeysList[num])) and \
self.instruments[num].controlsMatchNotes(self.controls, notes):
self.doPick(num)
elif control in Player.starts:
if self.ending == True:
return True
self.pauseGame()
self.engine.view.pushLayer(self.menu)
return True
elif key >= ord('a') and key <= ord('z'):
# cheat codes
n = len(self.enteredCode)
for code, func in self.cheatCodes:
if n < len(code):
if key == code[n]:
self.enteredCode.append(key)
if self.enteredCode == code:
self.enteredCode = []
self.player.cheating = True
func()
break
else:
self.enteredCode = []
#myfingershurt: Adding starpower and killswitch for "no HOPOs" mode
for i, player in enumerate(self.playerList):
if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
self.activateSP(i)
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = True
def keyPressed2(self, key, unicode, control = None):
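"""RF-mod HOPO key-press handler. When called with an explicit control
(rather than a raw key) the press is treated as a hammer-on/pull-off."""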
hopo = False
if not control:
control = self.controls.keyPressed(key)
else:
hopo = True
if self.battleGH:
if self.instruments[0].battleStatus[3]:
if control == self.instruments[0].keys[self.instruments[0].battleBreakString]:
self.instruments[0].battleBreakNow -=1
self.controls.toggle(control, False)
if self.instruments[1].battleStatus[3]:
if control == self.instruments[1].keys[self.instruments[1].battleBreakString]:
self.instruments[1].battleBreakNow -=1
self.controls.toggle(control, False)
if len(self.instruments) > 2:
if self.instruments[2].battleStatus[3]:
if control == self.instruments[2].keys[self.instruments[2].battleBreakString]:
self.instruments[2].battleBreakNow -= 1
self.controls.toggle(control, False)
pressed = -1
for i in range(self.numOfPlayers):
if control in (self.instruments[i].actions):
hopo = False
pressed = i
numpressed = [len([1 for k in guitar.keys if self.controls.getState(k)]) for guitar in self.instruments]
activeList = [k for k in self.keysList[pressed] if self.controls.getState(k)]
for i in range(self.numOfPlayers):
if control in (self.instruments[i].keys) and self.song and numpressed[i] >= 1:
if self.instruments[i].wasLastNoteHopod and self.instruments[i].hopoActive >= 0:
hopo = True
pressed = i
if pressed >= 0:
for k in self.keysList[pressed]:
if self.controls.getState(k):
self.keyBurstTimeout[pressed] = None
break
else:
self.keyBurstTimeout[pressed] = self.engine.timer.time + self.keyBurstPeriod
return True
if pressed >= 0 and self.song:
self.doPick2(pressed, hopo)
if control in Player.starts:
if self.ending == True:
return True
self.pauseGame()
self.engine.view.pushLayer(self.menu)
return True
elif key >= ord('a') and key <= ord('z'):
# cheat codes
n = len(self.enteredCode)
for code, func in self.cheatCodes:
if n < len(code):
if key == code[n]:
self.enteredCode.append(key)
if self.enteredCode == code:
self.enteredCode = []
for player in self.playerList:
player.cheating = True
func()
break
else:
self.enteredCode = []
for i, player in enumerate(self.playerList):
if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
self.activateSP(i)
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = True
def keyPressed3(self, key, unicode, control = None, pullOff = False): #MFH - gonna pass whether this was called from a pull-off or not
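"""GH2-style key-press handler; pullOff marks presses generated by a fret
release so doPick3GH2 can skip the miss penalty. Also handles drum pads,
open-strum filtering, pause/start, cheat codes, starpower, and the digital
killswitch."""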
hopo = False
actual = False
if not control:
actual = True
control = self.controls.keyPressed(key)
else:
hopo = True
num = self.getPlayerNum(control)
if self.battleGH and num is not None:
if self.instruments[num].battleStatus[3]:
if control == self.instruments[num].keys[self.instruments[num].battleBreakString]:
self.instruments[num].battleBreakNow -=1
self.controls.toggle(control, False)
pressed = -1
for i in range(self.numOfPlayers):
if self.instruments[i].isDrum and control in self.instruments[i].keys and actual:
if control in Player.bassdrums:
self.instruments[num].drumsHeldDown[0] = 100
self.instruments[num].playedSound[0] = False
elif control in Player.drum1s:
self.instruments[num].drumsHeldDown[1] = 100
self.instruments[num].playedSound[1] = False
elif control in Player.drum2s:
self.instruments[num].drumsHeldDown[2] = 100
self.instruments[num].playedSound[2] = False
elif control in Player.drum3s:
self.instruments[num].drumsHeldDown[3] = 100
self.instruments[num].playedSound[3] = False
elif control in Player.drum5s:
self.instruments[num].drumsHeldDown[4] = 100
self.instruments[num].playedSound[4] = False
if control in (self.instruments[i].actions):
hopo = False
pressed = i
numpressed = [len([1 for k in guitar.keys if self.controls.getState(k)]) for guitar in self.instruments]
activeList = [k for k in self.keysList[pressed] if self.controls.getState(k)]
if self.ignoreOpenStrums and len(activeList) < 1: #MFH - filter out strums without frets
pressed = -1
for i in range(self.numOfPlayers): #akedrou- probably loopable...
if control in self.instruments[i].keys and numpressed[i] >= 1:
if self.instruments[i].hopoActive > 0 or (self.instruments[i].wasLastNoteHopod and self.instruments[i].hopoActive == 0):
if not pullOff and (self.hopoStyle == 2 or self.hopoStyle == 3): #GH2 or GH2 Strict, don't allow lower-fret tapping while holding a higher fret
activeKeyList = []
LastHopoFretStillHeld = False
HigherFretsHeld = False
for p, k in enumerate(self.keysList[i]):
if self.controls.getState(k):
activeKeyList.append(k)
if self.instruments[i].hopoLast == p or self.instruments[i].hopoLast == p - 5:
LastHopoFretStillHeld = True
elif (p > self.instruments[i].hopoLast and p < 5) or (p - 5 > self.instruments[i].hopoLast and p > 4):
HigherFretsHeld = True
if not(LastHopoFretStillHeld and not HigherFretsHeld): #tapping a lower note should do nothing.
hopo = True
pressed = i
else: #GH2 Sloppy or RF-Mod
hopo = True
pressed = i
break
#MFH - this is where the marked little block above used to be - possibly causing false "late pick" detections from HOPOs...
if pressed >= 0:
#myfingershurt:
self.handlePick(pressed, hopo = hopo, pullOff = pullOff)
if control in Player.starts:
if self.ending == True:
return True
self.pauseGame()
self.engine.view.pushLayer(self.menu)
return True
elif key >= ord('a') and key <= ord('z'):
# cheat codes
n = len(self.enteredCode)
for code, func in self.cheatCodes:
if n < len(code):
if key == code[n]:
self.enteredCode.append(key)
if self.enteredCode == code:
self.enteredCode = []
for player in self.playerList:
player.cheating = True
func()
break
else:
self.enteredCode = []
for i, player in enumerate(self.playerList):
if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
self.activateSP(i)
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = True
def CheckForValidKillswitchNote(self, num):
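"""Return True if player num is holding sustain notes long enough for the
killswitch to apply (pick length over half of a 16th note)."""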
if not self.song:
return False
noteCount = len(self.instruments[num].playedNotes)
if noteCount > 0:
pickLength = self.instruments[num].getPickLength(self.getSongPosition())
if pickLength > 0.5 * (self.song.period / 4):
return True
else:
return False
else:
return False
def getExtraScoreForCurrentlyPlayedNotes(self, num):
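"""Original FoF sustain scoring: once a sustain has been held just past a
16th note (1.1 * period / 4), return baseSustainScore * pickLength *
noteCount as the bonus for the currently played notes (0 for drums and
vocals)."""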
if not self.song or self.instruments[num].isDrum or self.instruments[num].isVocal:
return 0
if self.coOpType:
scoreCard = self.coOpScoreCard
else:
scoreCard = self.scoring[num]
noteCount = len(self.instruments[num].playedNotes)
pickLength = self.instruments[num].getPickLength(self.getSongPosition())
if pickLength > 1.1 * self.song.period / 4:
tempExtraScore = self.baseSustainScore * pickLength * noteCount
if self.starScoreUpdates == 1:
scoreCard.updateAvMult()
star = scoreCard.stars
a = scoreCard.getStarScores(tempExtraScore = tempExtraScore)
if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
self.engine.data.starDingSound.play()
return int(tempExtraScore) #original FoF sustain scoring
return 0
def keyReleased(self, key):
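"""Key-release handler when HOPOs are disabled: a release can still tap the
required notes, otherwise the pick is ended once the notes have been held
for more than half a period. Also disengages the digital killswitch."""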
#RF style HOPO playing
control = self.controls.keyReleased(key)
num = self.getPlayerNum(control)
if num is None:
return
if self.instruments[num].isDrum:
return True
#myfingershurt:
if self.hopoStyle > 0: #hopos enabled
res = self.keyReleased3(key)
return res
if control in self.keysList[num] and self.song:
# Check whether we can tap the currently required notes
pos = self.getSongPosition()
notes = self.instruments[num].getRequiredNotes(self.song, pos)
if ((self.scoring[num].streak > 0 and self.instruments[num].areNotesTappable(notes)) or \
(self.instruments[num].guitarSolo and control in self.soloKeysList[num])) and \
self.instruments[num].controlsMatchNotes(self.controls, notes):
self.doPick(num)
# Otherwise we end the pick if the notes have been playing long enough
elif self.lastPickPos[num] is not None and pos - self.lastPickPos[num] > self.song.period / 2:
self.endPick(num)
#Digital killswitch disengage:
for i, player in enumerate(self.playerList):
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = False
def keyReleased2(self, key):
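"""RF-mod key-release handler: ends the pick on fret release unless a HOPO
is still active, and re-fires keyPressed2 for pull-offs to a lower held
fret."""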
control = self.controls.keyReleased(key)
for i, keys in enumerate(self.keysList):
if control in keys and self.song:
for time, note in self.instruments[i].playedNotes:
if not self.instruments[i].wasLastNoteHopod or (self.instruments[i].hopoActive < 0 and (control == self.keysList[i][note.number] or control == self.keysList[i][note.number+5])):
self.endPick(i)
#Digital killswitch disengage:
for i, player in enumerate(self.playerList):
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = False
for i in range(self.numOfPlayers):
activeList = [k for k in self.keysList[i] if self.controls.getState(k) and k != control]
if len(activeList) != 0 and self.instruments[i].wasLastNoteHopod and activeList[0] != self.keysList[i][self.instruments[i].hopoLast] and activeList[0] != self.keysList[i][self.instruments[i].hopoLast+5] and control in self.keysList[i]:
self.keyPressed2(None, 0, activeList[0])
def keyReleased3(self, key):
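"""GH2-style key-release handler: releasing a fret belonging to a currently
hit note ends the pick, and releasing a higher fret generates a pull-off
press (keyPressed3 with pullOff = True) for the remaining held fret."""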
control = self.controls.keyReleased(key)
#myfingershurt: this is where the lower-fret-release causes a held note to break:
for i, keys in enumerate(self.keysList):
if keys is None:
continue
if control in keys and self.song: #myfingershurt: if the released control was a fret:
for time, note in self.instruments[i].playedNotes:
#myfingershurt: only end the pick if no notes are being held.
if (self.instruments[i].hit[note.number] == True and (control == self.keysList[i][note.number] or control == self.keysList[i][note.number+5])):
self.endPick(i)
#Digital killswitch disengage:
for i, player in enumerate(self.playerList):
if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
self.killswitchEngaged[i] = False
for i in range(self.numOfPlayers):
if self.keysList[i] is None:
continue
activeList = [k for k in self.keysList[i] if self.controls.getState(k) and k != control]
#myfingershurt: removing check for hopolast for GH2 system after-chord HOPOs
#myfingershurt: also added self.hopoAfterChord conditional to ensure this logic doesn't apply without HOPOs after chord
if self.hopoAfterChord and (self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4): #for GH2 systems: so user can release lower fret from chord to "tap" held HOPO
if len(activeList) != 0 and self.instruments[i].hopoActive > 0 and control in self.keysList[i]:
self.keyPressed3(None, 0, activeList[0], pullOff = True)
else:
if len(activeList) != 0 and self.instruments[i].hopoActive > 0 and activeList[0] != self.keysList[i][self.instruments[i].hopoLast] and activeList[0] != self.keysList[i][self.instruments[i].hopoLast+5] and control in self.keysList[i]:
self.keyPressed3(None, 0, activeList[0], pullOff = True)
def getPlayerNum(self, control):
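"""Return the index of the player whose key list contains the given
control, or None if it matches nobody."""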
for i, player in enumerate(self.playerList):
if control and control in player.keyList:
return i
return None
def render(self, visibility, topMost): #QQstarS: mostly fixed this function; many changes here, only the main ones are noted
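"""Render one frame of the scene: background and note sheet, sections /
lyrics / unused text events, then the per-player viewports with the fail
and pause overlays, the HOPO indicator, and the in-game stats."""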
#MFH render function reorganization notes:
#Want to render all background / single-viewport graphics first
#myfingershurt: Alarian's auto-stage scaling update
w = self.wFull
h = self.hFull
wBak = w
hBak = h
if self.fontShadowing:
font = self.engine.data.shadowfont
else:
font = self.engine.data.font
lyricFont = self.engine.data.font
bigFont = self.engine.data.bigFont
sphraseFont = self.engine.data.streakFont2
scoreFont = self.engine.data.scoreFont
streakFont = self.engine.data.streakFont
if self.song and self.song.readyToGo:
pos = self.getSongPosition()
if self.boardY <= 1:
self.setCamera()
if self.countdown > 0:
self.countdownOK = True
self.boardY = 1
elif self.boardY > 1:
self.boardY -= 0.01
self.setCamera()
Scene.render(self, visibility, topMost) #MFH - I believe this eventually calls the renderGuitar function, which also involves two viewports... may not be easy to move this one...
self.visibility = v = 1.0 - ((1 - visibility) ** 2)
self.engine.view.setOrthogonalProjection(normalize = True)
self.renderVocals()
#MFH: render the note sheet just on top of the background:
if self.lyricSheet != None and not self.playingVocals:
self.engine.drawImage(self.lyricSheet, scale = (self.lyricSheetScaleFactor,-self.lyricSheetScaleFactor), coord = (w/2, h*0.935))
#the timing line on this lyric sheet image is approx. 1/4 over from the left
#MFH - also render the scrolling lyrics & sections before changing viewports:
for instrument in self.instruments:
if instrument.isVocal == True:
minInst = instrument.currentPeriod * 2
maxInst = instrument.currentPeriod * 7
slopPer = instrument.currentPeriod
break
else:
if len(self.instruments) > 0:
minInst = (self.instruments[0].currentPeriod * self.instruments[0].beatsPerBoard) / 2
maxInst = (self.instruments[0].currentPeriod * self.instruments[0].beatsPerBoard) * 1.5
slopPer = self.instruments[0].currentPeriod
else: #This should never trigger...
minInst = 1000
maxInst = 3000
slopPer = 2000
minPos = pos - minInst
maxPos = pos + maxInst
eventWindow = (maxPos - minPos)
lyricSlop = ( slopPer / ((maxPos - minPos)/2) ) / 2
if not self.pause and not self.failed and not self.ending:
if self.countdown <= 0: #MFH - only attempt to handle sections / lyrics / text events if the countdown is complete!
#handle the sections track
if self.midiSectionsEnabled > 0:
for time, event in self.song.eventTracks[Song.TK_SECTIONS].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
text = event.text
yOffset = 0.00005 #last change -.00035
txtSize = 0.00150
lyricFont.render(text, (xOffset, yOffset),(1, 0, 0),txtSize)
#handle the lyrics track
if self.midiLyricsEnabled > 0 and not self.playingVocals:
if self.midiLyricMode == 0: #scrolling lyrics mode:
for time, event in self.song.eventTracks[Song.TK_LYRICS].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
yOffset = 0.0696 #last change +0.0000
txtSize = 0.00160
text = event.text
if text.find("+") >= 0: #shift the pitch adjustment markers down one line
text = text.replace("+","~")
txtSize = 0.00145 #last change +.0000
yOffset -= 0.0115 #last change -.0005
lyricFont.render(text, (xOffset, yOffset),(1, 0, 0),txtSize)
#MFH - TODO - handle line-by-line lyric display and coloring here:
elif self.midiLyricMode == 1: #line-by-line lyrics mode:
if self.theme == 2:
txtSize = 0.00170
else:
#gh3 or other standard mod
txtSize = 0.00175
yOffset = 0.0696
xOffset = 0.5 - (lyricFont.getStringSize(self.currentSimpleMidiLyricLine, scale = txtSize)[0] / 2.0)
glColor3f(1, 1, 1)
lyricFont.render(self.currentSimpleMidiLyricLine, (xOffset, yOffset),(1, 0, 0),txtSize)
elif self.midiLyricMode == 2 and (self.numMidiLyricLines > self.activeMidiLyricLineIndex): #line-by-line lyrics mode:
if self.theme == 2:
txtSize = 0.00170
else:
#gh3 or other standard mod
txtSize = 0.00175
yOffset = 0.0696
tempTime, tempLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex]
xOffset = 0.5 - (lyricFont.getStringSize(tempLyricLine, scale = txtSize)[0] / 2.0)
glColor3f(0.75, 0.75, 0.75)
lyricFont.render(self.activeMidiLyricLine_GreyWords, (xOffset, yOffset),(1, 0, 0),txtSize)
xOffset += lyricFont.getStringSize(self.activeMidiLyricLine_GreyWords, scale = txtSize)[0]
glColor3f(0, 1, 0)
lyricFont.render(self.activeMidiLyricLine_GreenWords, (xOffset, yOffset),(1, 0, 0),txtSize)
xOffset += lyricFont.getStringSize(self.activeMidiLyricLine_GreenWords, scale = txtSize)[0]
glColor3f(1, 1, 1)
lyricFont.render(self.activeMidiLyricLine_WhiteWords, (xOffset, yOffset),(1, 0, 0),txtSize)
yOffset += self.lyricHeight
xOffset = 0.25
glColor3f(1, 1, 1)
lyricFont.render(self.currentSimpleMidiLyricLine, (xOffset, yOffset),(1, 0, 0),txtSize)
#finally, handle the unused text events track
if self.showUnusedTextEvents:
for time, event in self.song.eventTracks[Song.TK_UNUSED_TEXT].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
yOffset = 0.0190 #last change -.0020
txtSize = 0.00124
lyricFont.render(event.text, (xOffset, yOffset),(1, 0, 0),txtSize)
try:
now = self.getSongPosition()
countdownPos = self.lastEvent - now
for i,player in enumerate(self.playerList): #QQstarS: heavily reworked so each player's viewport is set correctly
p = player.guitarNum
if p is not None:
self.engine.view.setViewportHalf(self.numberOfGuitars,p)
else:
self.engine.view.setViewportHalf(1,0)
streakFlag = 0 #set the flag to 0
if self.coOpGH and self.rmtype != 2:
self.engine.view.setViewport(1,0)
self.engine.theme.setBaseColor()
if i is not None:
if self.song:
if self.youRock == True:
if self.rockTimer == 1:
self.engine.data.rockSound.play()
if self.rockTimer < self.rockCountdown:
self.rockTimer += 1
self.engine.drawImage(self.rockMsg, scale = (0.5, -0.5), coord = (w/2,h/2))
if self.rockTimer >= self.rockCountdown:
self.rockFinished = True
if self.failed:
if self.failTimer == 0:
self.song.pause()
if self.failTimer == 1:
self.engine.data.failSound.play()
if self.failTimer < 100:
self.failTimer += 1
self.engine.drawImage(self.failMsg, scale = (0.5, -0.5), coord = (w/2,h/2))
else:
self.finalFailed = True
if self.pause:
self.engine.view.setViewport(1,0)
if self.engine.graphicMenuShown == False:
self.engine.drawImage(self.pauseScreen, scale = (self.pause_bkg[2], -self.pause_bkg[3]), coord = (w*self.pause_bkg[0],h*self.pause_bkg[1]), stretched = 3)
if self.finalFailed and self.song:
self.engine.view.setViewport(1,0)
if self.engine.graphicMenuShown == False:
self.engine.drawImage(self.failScreen, scale = (self.fail_bkg[2], -self.fail_bkg[3]), coord = (w*self.fail_bkg[0],h*self.fail_bkg[1]), stretched = 3)
# evilynux - Closer to actual GH3
font = self.engine.data.pauseFont
text = Song.removeSongOrderPrefixFromName(self.song.info.name).upper()
scale = font.scaleText(text, maxwidth = 0.398, scale = 0.0038)
size = font.getStringSize(text, scale = scale)
font.render(text, (.5-size[0]/2.0,.37-size[1]), scale = scale)
diff = str(self.playerList[0].difficulty)
# compute initial position
pctComplete = min(100, int(now/self.lastEvent*100))
curxpos = font.getStringSize(_("COMPLETED")+" ", scale = 0.0015)[0]
curxpos += font.getStringSize(str(pctComplete), scale = 0.003)[0]
curxpos += font.getStringSize( _(" % ON "), scale = 0.0015)[0]
curxpos += font.getStringSize(diff, scale = 0.003)[0]
curxpos = .5-curxpos/2.0
c1,c2,c3 = self.fail_completed_color
glColor3f(c1,c2,c3)
# now render
text = _("COMPLETED") + " "
size = font.getStringSize(text, scale = 0.0015)
# evilynux - Again, for this very font, the "real" height value is 75% of returned value
font.render(text, (curxpos, .37+(font.getStringSize(text, scale = 0.003)[1]-size[1])*.75), scale = 0.0015)
text = str(pctComplete)
curxpos += size[0]
size = font.getStringSize(text, scale = 0.003)
font.render(text, (curxpos, .37), scale = 0.003)
text = _(" % ON ")
curxpos += size[0]
size = font.getStringSize(text, scale = 0.0015)
font.render(text, (curxpos, .37+(font.getStringSize(text, scale = 0.003)[1]-size[1])*.75), scale = 0.0015)
text = diff
curxpos += size[0]
font.render(text, (curxpos, .37), scale = 0.003)
if not self.failEnd:
self.failGame()
if self.hopoIndicatorEnabled and not self.instruments[i].isDrum and not self.pause and not self.failed: #MFH - HOPO indicator (grey = strums required, white = strums not required)
text = _("HOPO")
if self.instruments[i].hopoActive > 0:
glColor3f(1.0, 1.0, 1.0) #white
else:
glColor3f(0.4, 0.4, 0.4) #grey
w, h = font.getStringSize(text,0.00150)
font.render(text, (.950 - w / 2, .710),(1, 0, 0),0.00150) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #cracker white
#MFH - new location for star system support - outside theme-specific logic:
#if (self.coOp and i == 0) or not self.coOp: #MFH only render for player 0 if co-op mode
if (self.coOp and i == self.coOpPlayerMeter) or ((self.coOpRB or self.coOpGH) and i == 0) or not self.coOpType: #MFH only render for player 1 if co-op mode
if self.coOpType:
stars=self.coOpScoreCard.stars
partialStars=self.coOpScoreCard.partialStars
self.engine.view.setViewport(1,0)
ratio=self.coOpScoreCard.starRatio
else:
stars=self.scoring[i].stars
partialStars=self.scoring[i].partialStars
ratio=self.scoring[i].starRatio
w = wBak
h = hBak
vocaloffset = 0
if self.numOfSingers > 0 and self.numOfPlayers > 1:
vocaloffset = .05
if self.song and self.song.readyToGo:
if not self.coOpRB and not self.coOpGH:
if self.playerList[i].guitarNum is not None:
self.engine.view.setViewportHalf(self.numberOfGuitars,self.playerList[i].guitarNum)
else:
self.engine.view.setViewportHalf(1,0)
#MFH: Realtime hit accuracy display:
#if ((self.inGameStats == 2 or (self.inGameStats == 1 and self.theme == 2)) and (not self.pause and not self.failed)) and ( (not self.pause and not self.failed) or self.hopoDebugDisp == 1 ):
if ((self.inGameStats == 2 or (self.inGameStats == 1 and self.theme == 2) or self.hopoDebugDisp == 1 ) and (not self.pause and not self.failed) and not (self.coOpType and not i==0 and not self.coOp) and not self.battleGH):
#will not show on pause screen, unless HOPO debug is on (for debugging)
if self.coOpRB or self.coOpGH:
sNotesHit = self.coOpScoreCard.notesHit
sTotalNotes = self.coOpScoreCard.totalStreakNotes
sHitAcc = self.coOpScoreCard.hitAccuracy
sAvMult = self.coOpScoreCard.avMult
sEfHand = self.coOpScoreCard.handicapValue
else:
sNotesHit = self.scoring[i].notesHit
sTotalNotes = self.scoring[i].totalStreakNotes
sHitAcc = self.scoring[i].hitAccuracy
sAvMult = self.scoring[i].avMult
sEfHand = self.scoring[i].handicapValue
trimmedTotalNoteAcc = self.roundDecimalForDisplay(sHitAcc)
#text = str(self.playerList[i].notesHit) + "/" + str(self.playerList[i].totalStreakNotes) + ": " + str(trimmedTotalNoteAcc) + "%"
text = "%(notesHit)s/%(totalNotes)s: %(hitAcc)s%%" % \
{'notesHit': str(sNotesHit), 'totalNotes': str(sTotalNotes), 'hitAcc': str(trimmedTotalNoteAcc)}
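              # e.g. 45 of 50 streak notes hit at 90.0% accuracy renders as
              # "45/50: 90.0%" (numbers here are illustrative only)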
c1,c2,c3 = self.ingame_stats_color
glColor3f(c1, c2, c3) #wht
w, h = font.getStringSize(text,0.00160)
if self.theme == 2:
if self.numDecimalPlaces < 2:
accDispX = 0.755
else:
accDispX = 0.740 #last change -0.015
accDispYac = 0.147
accDispYam = 0.170
else:
accDispX = 0.890 #last change -0.010
accDispYac = 0.140
accDispYam = 0.164
if self.battleGH:
if i == 0:
accDispX = 0.890
else:
accDispX = 0.110
font.render(text, (accDispX - w/2, accDispYac),(1, 0, 0),0.00140) #top-centered by streak under score
trimmedAvMult = self.roundDecimalForDisplay(sAvMult)
#text = _("Avg: ") + str(trimmedAvMult) + "x"
#avgLabel = _("Avg")
text = "%(avLab)s: %(avMult)sx" % \
{'avLab': self.tsAvgLabel, 'avMult': str(trimmedAvMult)}
glColor3f(c1, c2, c3)
w, h = font.getStringSize(text,0.00160)
font.render(text, (accDispX - w/2, accDispYam),(1, 0, 0),0.00140) #top-centered by streak under score
if sEfHand != 100.0:
text = "%s: %.1f%%" % (self.tsHandicapLabel, sEfHand)
w, h = font.getStringSize(text, .00160)
font.render(text, (.98 - w, .246), (1, 0, 0),0.00140)
if self.coOpRB or self.coOpGH:
if not self.instruments[i].isVocal:
self.engine.view.setViewportHalf(self.numberOfGuitars,self.playerList[i].guitarNum)
if not self.instruments[i].isVocal:
if self.dispSoloReview[i] and not self.pause and not self.failed:
if self.soloReviewCountdown[i] < self.soloReviewDispDelay:
self.soloReviewCountdown[i] += 1
if not (self.instruments[i].freestyleActive or self.scoring[i].freestyleWasJustActive):
#glColor3f(0, 0.85, 1) #grn-blu
glColor3f(1, 1, 1) #cracker white
text1 = self.soloReviewText[i][0]
text2 = self.soloReviewText[i][1]
xOffset = 0.950
if self.hitAccuracyPos == 0: #Center - need to move solo review above this!
yOffset = 0.080
elif self.jurgPlayer[i]: # and self.autoPlay: #akedrou - jurgPlayer checks if jurg was ever in town. This would block his notice if he came and left.
yOffset = 0.115 #above Jurgen Is Here
else: #no jurgens here:
yOffset = 0.155 #was 0.180, occluded notes
txtSize = 0.00185
Tw, Th = self.solo_soloFont.getStringSize(text1,txtSize)
Tw2, Th2 = self.solo_soloFont.getStringSize(text2,txtSize)
#MFH - scale and display self.soloFrame behind / around the text
lineSpacing = self.solo_soloFont.getLineSpacing(txtSize)
if self.soloFrame:
frameWidth = (max(Tw,Tw2))*1.15
#frameHeight = (Th+Th2)*1.10
frameHeight = lineSpacing*2.05
boxXOffset = 0.5
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + lineSpacing) / self.fontScreenBottom) )
tempWScale = frameWidth*self.soloFrameWFactor
tempHScale = -(frameHeight)*self.soloFrameWFactor
self.engine.drawImage(self.soloFrame, scale = (tempWScale,tempHScale), coord = (self.wPlayer[i]*boxXOffset,boxYOffset))
self.solo_soloFont.render(text1, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #centered
self.solo_soloFont.render(text2, (0.5 - Tw2/2, yOffset+lineSpacing),(1, 0, 0),txtSize) #centered
else:
self.dispSoloReview[i] = False
if self.hopoDebugDisp == 1 and not self.pause and not self.failed and not self.instruments[i].isDrum:
#MFH: PlayedNote HOPO tappable marking
if self.instruments[i].playedNotes:
if len(self.instruments[i].playedNotes) > 1:
self.lastTapText = "tapp: %d, %d" % (self.instruments[i].playedNotes[0][1].tappable, self.instruments[i].playedNotes[1][1].tappable)
else:
self.lastTapText = "tapp: %d" % (self.instruments[i].playedNotes[0][1].tappable)
#self.lastTapText = "tapp: " + str(self.instruments[i].playedNotes[0][1].tappable)
#if len(self.instruments[i].playedNotes) > 1:
# self.lastTapText += ", " + str(self.instruments[i].playedNotes[1][1].tappable)
w, h = font.getStringSize(self.lastTapText,0.00170)
font.render(self.lastTapText, (.750 - w / 2, .440),(1, 0, 0),0.00170) #off to the right slightly above fretboard
#MFH: HOPO active debug
#text = "HOact: "
if self.instruments[i].hopoActive > 0:
glColor3f(1, 1, 0) #yel
#text += "+"
hoActDisp = "+"
elif self.instruments[i].hopoActive < 0:
glColor3f(0, 1, 1) #blu-grn
#text += "-"
hoActDisp = "-"
else:
glColor3f(0.5, 0.5, 0.5) #gry
#text += "0"
hoActDisp = "0"
text = "HOact: %s" % hoActDisp
w, h = font.getStringSize(text,0.00175)
font.render(text, (.750 - w / 2, .410),(1, 0, 0),0.00170) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey
#MFH: HOPO intention determination flag debug
if self.instruments[i].sameNoteHopoString:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
#text = "HOflag: " + str(self.instruments[i].sameNoteHopoString)
text = "HOflag: %s" % str(self.instruments[i].sameNoteHopoString)
w, h = font.getStringSize(text,0.00175)
font.render(text, (.750 - w / 2, .385),(1, 0, 0),0.00170) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey
##MFH: HOPO intention determination flag problematic note list debug
##glColor3f(1, 1, 1) #whitey
#text = "pNotes: " + str(self.problemNotesP1)
#w, h = font.getStringSize(text,0.00175)
#font.render(text, (.750 - w / 2, .355),(1, 0, 0),0.00170) #off to the right slightly above fretboard
##glColor3f(1, 1, 1) #whitey
#MFH: guitarSoloNoteCount list debug
text = str(self.guitarSolos[i])
glColor3f(0.9, 0.9, 0.9) #offwhite
w, h = font.getStringSize(text,0.00110)
font.render(text, (.900 - w / 2, .540),(1, 0, 0),0.00110) #off to the right slightly above fretboard
if self.killDebugEnabled and not self.pause and not self.failed:
killXpos = 0.760 #last change: +0.010
killYpos = 0.365 #last change: -0.010
killTsize = 0.00160 #last change: -0.00010
#if self.playerList[i].part.text != "Drums":
if not self.instruments[i].isDrum:
if self.isKillAnalog[i]:
if self.analogKillMode[i] == 2: #xbox mode:
if self.actualWhammyVol[i] < 1.0:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
else: #ps2 mode:
if self.actualWhammyVol[i] > 0.0:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
text = str(self.roundDecimalForDisplay(self.actualWhammyVol[i]))
w, h = font.getStringSize(text,killTsize)
font.render(text, (killXpos - w / 2, killYpos),(1, 0, 0),killTsize) #off to the right slightly above fretboard
else:
if self.killswitchEngaged[i]:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
text = str(self.killswitchEngaged[i])
w, h = font.getStringSize(text,killTsize)
font.render(text, (killXpos - w / 2, killYpos),(1, 0, 0),killTsize) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey reset (cracka cracka)
#MFH - freestyle active status debug display
if self.showFreestyleActive == 1 and not self.pause and not self.failed: #MFH - shows when freestyle is active
if self.instruments[i].isDrum: #also show the active status of drum fills
text = "BRE: %s, Fill: %s" % ( str(self.instruments[i].freestyleActive), str(self.instruments[i].drumFillsActive) )
else:
text = "BRE: %s" % str(self.instruments[i].freestyleActive)
freeX = .685
freeY = .510
freeTsize = 0.00150
font.render(text, (freeX, freeY),(1, 0, 0),freeTsize)
#MFH - TODO - show current tempo / BPM and neckspeed if enabled for debugging
if self.showBpm == 1 and i == 0:
if self.vbpmLogicType == 0: #MFH - VBPM (old)
currentBPM = self.instruments[i].currentBpm
targetBPM = self.instruments[i].targetBpm
else:
currentBPM = self.currentBpm
targetBPM = self.targetBpm
text = "BPM/Target:%.2f/%.2f, NS:%.2f" % (currentBPM, targetBPM, instrument.neckSpeed)
bpmX = .35
bpmY = .330
bpmTsize = 0.00120
font.render(text, (bpmX, bpmY),(1, 0, 0),bpmTsize)
#myfingershurt: lyrical display conditional logic:
# show the comments (lyrics)
if not self.instruments[i].isVocal:
#myfingershurt: first display the accuracy readout:
if self.dispAccuracy[i] and not self.pause and not self.failed:
trimmedAccuracy = self.roundDecimalForDisplay(self.accuracy[i])
if self.showAccuracy == 1: #numeric mode
#MFH string concatenation -> modulo formatting
#text = str(trimmedAccuracy) + " ms"
text = "%s %s" % (str(trimmedAccuracy), self.msLabel)
elif self.showAccuracy >= 2: #friendly / descriptive mode
#MFH Precalculated these hit accuracy thresholds instead of every frame
if (self.accuracy[i] >= self.instruments[i].accThresholdWorstLate) and (self.accuracy[i] < self.instruments[i].accThresholdVeryLate):
text = self.tsAccVeryLate
glColor3f(1, 0, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdVeryLate) and (self.accuracy[i] < self.instruments[i].accThresholdLate):
text = self.tsAccLate
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdLate) and (self.accuracy[i] < self.instruments[i].accThresholdSlightlyLate):
text = self.tsAccSlightlyLate
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdSlightlyLate) and (self.accuracy[i] < self.instruments[i].accThresholdExcellentLate):
text = self.tsAccExcellentLate
glColor3f(0, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdExcellentLate) and (self.accuracy[i] < self.instruments[i].accThresholdPerfect):
#give the "perfect" reading some slack, -1.0 to 1.0
text = self.tsAccPerfect
glColor3f(0, 1, 1) #changed color
elif (self.accuracy[i] >= self.instruments[i].accThresholdPerfect) and (self.accuracy[i] < self.instruments[i].accThresholdExcellentEarly):
text = self.tsAccExcellentEarly
glColor3f(0, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdExcellentEarly) and (self.accuracy[i] < self.instruments[i].accThresholdSlightlyEarly):
text = self.tsAccSlightlyEarly
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdSlightlyEarly) and (self.accuracy[i] < self.instruments[i].accThresholdEarly):
text = self.tsAccEarly
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdEarly) and (self.accuracy[i] < self.instruments[i].accThresholdVeryEarly):
text = self.tsAccVeryEarly
glColor3f(1, 0, 0)
else:
#bug catch - show the problematic number:
#text = str(trimmedAccuracy) + _(" ms")
text = "%(acc)s %(ms)s" % \
{'acc': str(trimmedAccuracy), 'ms': self.msLabel}
glColor3f(1, 0, 0)
w, h = font.getStringSize(text,0.00175)
posX = 0.98 - (w / 2)
if self.theme == 2:
posY = 0.284
else:
if self.coOpGH:
posY = 0.25
else:
posY = 0.296
if self.hitAccuracyPos == 0: #Center
posX = .500
posY = .305 + h
if self.showAccuracy == 3: #for displaying numerical below descriptive
posY = .305
#if self.pov != 1: #not GH POV
# posY = y + 4 * h -- MFH: this line causes a bad hang.
elif self.hitAccuracyPos == 2:#Left-bottom
posX = .193
posY = .700 #(.193-size[0]/2, 0.667-size[1]/2+self.hFontOffset[i]))
elif self.hitAccuracyPos == 3: #Center-bottom
posX = .500
posY = .710
font.render(text, (posX - w / 2, posY - h / 2),(1, 0, 0),0.00170)
if self.showAccuracy == 3: #for displaying numerical below descriptive
#text = str(self.accuracy)
#text = str(trimmedAccuracy) + " ms"
#msText = _("ms")
text = "%(acc)s %(ms)s" % \
{'acc': str(trimmedAccuracy), 'ms': self.msLabel}
w, h = font.getStringSize(text,0.00140)
font.render(text, (posX - w / 2, posY - h / 2 + .030),(1, 0, 0),0.00140)
glColor3f(1, 1, 1)
#handle the guitar solo track
#if (self.readTextAndLyricEvents == 2 or (self.readTextAndLyricEvents == 1 and self.theme == 2)) and (not self.pause and not self.failed and not self.ending):
if (not self.pause and not self.failed and not self.ending):
#MFH - only use the TK_GUITAR_SOLOS track if at least one player has no MIDI solos marked:
if self.instruments[i].useMidiSoloMarkers: #mark using the new MIDI solo marking system
for time, event in self.song.midiEventTrack[i].getEvents(minPos, maxPos):
if isinstance(event, Song.MarkerNote):
if (event.number == Song.starPowerMarkingNote) and (self.song.midiStyle == Song.MIDI_TYPE_RB): #solo marker note.
soloChangeNow = False
xOffset = (time - pos) / eventWindow
if xOffset < lyricSlop / 16.0: #present
soloChangeNow = True
if soloChangeNow:
if event.endMarker: #solo ending
if self.instruments[i].guitarSolo and not event.happened:
self.endSolo(i)
event.happened = True
else: #solo beginning
if not self.instruments[i].guitarSolo and not event.happened:
self.startSolo(i)
event.happened = True
elif self.markSolos == 1: #fall back on old guitar solo marking system
for time, event in self.song.eventTracks[Song.TK_GUITAR_SOLOS].getEvents(minPos, maxPos):
#is event happening now?
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
EventHappeningNow = False
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
if EventHappeningNow: #process the guitar solo event
if event.text.find("ON") >= 0:
if self.instruments[i].canGuitarSolo:
if not self.instruments[i].guitarSolo:
self.startSolo(i)
else:
#if self.instruments[i].canGuitarSolo:
if self.instruments[i].guitarSolo:
self.endSolo(i)
#MFH - render guitar solo in progress - stats
#try:
#if self.instruments[i].canGuitarSolo:
if self.instruments[i].guitarSolo:
#MFH - scale and display self.soloFrame behind / around the solo accuracy text display
#if self.fontMode==0: #0 = oGL Hack, 1=LaminaScreen, 2=LaminaFrames
if self.soloFrame:
frameWidth = self.solo_Tw[i]*1.15
frameHeight = self.solo_Th[i]*1.07
self.solo_boxYOffset[i] = self.hPlayer[i]-(self.hPlayer[i]* ((self.solo_yOffset[i] + self.solo_Th[i]/2.0 ) / self.fontScreenBottom) )
tempWScale = frameWidth*self.soloFrameWFactor
tempHScale = -(frameHeight)*self.soloFrameWFactor
self.engine.drawImage(self.soloFrame, scale = (tempWScale,tempHScale), coord = (self.wPlayer[i]*self.solo_boxXOffset[i],self.solo_boxYOffset[i]))
self.solo_soloFont.render(self.solo_soloText[i], (self.solo_xOffset[i], self.solo_yOffset[i]),(1, 0, 0),self.solo_txtSize)
#self.solo_soloFont.render("test", (0.5,0.0) ) #appears to render text from given position, down / right...
#self.solo_soloFont.render("test", (0.5,0.5) ) #this test confirms that the Y scale is in units relative to the X pixel width - 1280x960 yes but 1280x1024 NO
#this test locates the constant that the font rendering routine always considers the "bottom" of the screen
#self.solo_soloFont.render("test", (0.5,0.75-self.solo_Th[i]), scale=self.solo_txtSize ) #ah-ha! 4:3 AR viewport = 0.75 max!
#self.engine.view.setViewport(1,0)
#except Exception, e:
# Log.warn("Unable to render guitar solo accuracy text: %s" % e)
if self.coOpType: #1 BRE in co-op
scoreCard = self.coOpScoreCard
if i == 0:
self.engine.view.setViewportHalf(1,0)
oneTime = True
else:
oneTime = False
else:
scoreCard = self.scoring[i]
oneTime = True
#MFH - show BRE temp score frame
if (self.instruments[i].freestyleActive or (scoreCard.freestyleWasJustActive and not scoreCard.endingStreakBroken and not scoreCard.endingAwarded)) and oneTime == True:
#to render BEFORE the bonus is awarded.
text = "End Bonus"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = (1.0-((yOffset + tH/2.0 ) / self.fontScreenBottom))*self.hFull
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58), color = (.8, .8, .8, 1))
text = "%s" % scoreCard.endingScore
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
elif scoreCard.freestyleWasJustActive and not scoreCard.endingStreakBroken and scoreCard.endingAwarded and oneTime == True:
#MFH - TODO - ending bonus was awarded - scale up obtained score & box to signify rockage
text = "Success!"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58))
text = "%s" % scoreCard.endingScore
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
elif scoreCard.freestyleWasJustActive and scoreCard.endingStreakBroken and oneTime == True:
#akedrou - ending bonus was not awarded - scale up to signify failure
text = "Failed!"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
if self.scoring[j].endingStreakBroken:
partcolor = (.4, .4, .4, 1)
else:
partcolor = (.8, .8, .8, 1)
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58), color = partcolor)
text = "%s" % 0
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
self.engine.view.setViewportHalf(1,0)
# evilynux - Display framerate
if self.engine.show_fps: #probably only need to once through.
c1,c2,c3 = self.ingame_stats_color
glColor3f(c1, c2, c3)
text = _("FPS: %.2f" % self.engine.fpsEstimate)
w, h = font.getStringSize(text, scale = 0.00140)
font.render(text, (.85, .055 - h/2), (1,0,0), 0.00140)
pos = self.getSongPosition()
if self.showScriptLyrics and not self.pause and not self.failed:
#for time, event in self.song.track[i].getEvents(pos - self.song.period * 2, pos + self.song.period * 4):
for time, event in self.song.eventTracks[Song.TK_SCRIPT].getEvents(pos - self.song.period * 2, pos + self.song.period * 4): #MFH - script track
if isinstance(event, PictureEvent):
if pos < time or pos > time + event.length:
continue
try:
picture = event.picture
          except AttributeError: # picture not loaded yet; load it once and cache it on the event
self.engine.loadImgDrawing(event, "picture", os.path.join(self.libraryName, self.songName, event.fileName))
picture = event.picture
w = self.wFull
h = self.hFull
if self.theme == 2:
yOffset = 0.715
else:
#gh3 or other standard mod
yOffset = 0.69
fadePeriod = 500.0
f = (1.0 - min(1.0, abs(pos - time) / fadePeriod) * min(1.0, abs(pos - time - event.length) / fadePeriod)) ** 2
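            # sketch of the slide factor above with hypothetical numbers
            # (fadePeriod = 500.0, event.length = 2000 ms):
            #   pos == time (event start)   -> f = (1.0 - 0*1)**2 = 1.0
            #   pos == time + 1000 (middle) -> f = (1.0 - 1*1)**2 = 0.0
            # so f eases from 1 to 0 and back, and the (f * -2 + 1) term below
            # slides the picture onto the screen and back off again.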
self.engine.drawImage(picture, scale = (1, -1), coord = (w / 2, (f * -2 + 1) * h/2+yOffset))
elif isinstance(event, TextEvent):
if pos >= time and pos <= time + event.length and not self.ending: #myfingershurt: to not display events after ending!
xOffset = 0.5
if self.scriptLyricPos == 0:
if self.theme == 2:
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
yOffset = 0.69
txtSize = 0.00175
else: #display in lyric bar position
yOffset = 0.0696 #last change +0.0000
txtSize = 0.00160
#MFH TODO - pre-retrieve and translate all current tutorial script.txt events, if applicable.
if self.song.info.tutorial:
text = _(event.text)
w, h = lyricFont.getStringSize(text,txtSize)
lyricFont.render(text, (xOffset - w / 2, yOffset),(1, 0, 0),txtSize)
#elif event.text.find("TXT:") < 0 and event.text.find("LYR:") < 0 and event.text.find("SEC:") < 0 and event.text.find("GSOLO") < 0: #filter out MIDI text events, only show from script here.
else:
text = event.text
w, h = lyricFont.getStringSize(text,txtSize)
lyricFont.render(text, (xOffset - w / 2, yOffset),(1, 0, 0),txtSize)
#-------------after "if showlyrics"
#self.engine.view.setViewport(1,0)
#scrolling lyrics & sections: moved to before player viewport split
#Show Jurgen played Spikehead777
self.engine.view.setViewport(1,0)
gN = 0
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
continue
if self.jurgPlayer[i] == True:
if self.jurg[i]:
if self.customBot[i]:
text = self.tsJurgenIsHere % self.customBot[i]
else:
text = self.tsJurgenIsHere % self.tsBotNames[self.aiSkill[i]]
else:
if self.customBot[i]:
text = self.tsJurgenWasHere % self.customBot[i]
else:
text = self.tsJurgenWasHere % self.tsBotNames[self.aiSkill[i]]
#jurgScale = .001/self.numOfPlayers
jurgScale = float(self.jurgenText[2])
w, h = bigFont.getStringSize(text, scale = jurgScale)
self.engine.theme.setBaseColor()
if jurgScale > .2 or jurgScale < .0001:
jurgScale = .001
jurgX = float(self.jurgenText[0])
if jurgX < 0:
jurgX = 0
jurgX = (jurgX+gN)/self.numberOfGuitars
if jurgX > ((gN+1)/self.numberOfGuitars) - w:
jurgX = ((gN+1)/self.numberOfGuitars) - w
jurgY = float(self.jurgenText[1])
if jurgY > .75 - h:
jurgY = .75 - h
if not self.failed:
bigFont.render(text, (jurgX, jurgY), scale = jurgScale)#MFH - y was 0.4 - more positioning weirdness.
gN += 1
#End Jurgen Code
#MFH - Get Ready to Rock & countdown, song info during countdown, and song time left display on top of everything else
if (not self.pause and not self.failed and not self.ending):
if self.coOpType: #render co-op phrases (full screen width) above the rest.
if self.displayText[self.coOpPhrase] != None:
glColor3f(.8,.75,.01)
size = sphraseFont.getStringSize(self.displayText[self.coOpPhrase], scale = self.displayTextScale[self.coOpPhrase])
sphraseFont.render(self.displayText[self.coOpPhrase], (.5-size[0]/2,self.textY[self.coOpPhrase]-size[1]), scale = self.displayTextScale[self.coOpPhrase])
# show countdown
# glorandwarf: fixed the countdown timer
if self.countdownSeconds > 1:
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = self.tsGetReady
w, h = font.getStringSize(text)
font.render(text, (.5 - w / 2, .3))
if self.countdownSeconds < 6:
if self.counting:
for i,player in enumerate(self.playerList):
if not self.instruments[i].isVocal:
w = self.wPlayer[i]
h = self.hPlayer[i]
partImgwidth = self.part[i].width1()
partwFactor = 250.000/partImgwidth
partX = ((i*2)+1) / (self.numOfPlayers*2.0)
self.engine.drawImage(self.part[i], scale = (partwFactor*0.25,partwFactor*-0.25), coord = (w*partX,h*.4), color = (1,1,1, 3.0 - abs(4.0 - self.countdownSeconds)))
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = player.name
w, h = font.getStringSize(text)
font.render(text, (partX - w*.5, .5))
else:
w = self.wFull
h = self.hFull
partImgWidth = self.part[i].width1()
partwFactor = 250.000/partImgWidth
self.engine.drawImage(self.part[i], scale = (partwFactor*0.25, partwFactor*-0.25), coord = (w*.5,h*.75), color = (1,1,1, 3.0 - abs(4.0 - self.countdownSeconds)))
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = player.name
w, h = font.getStringSize(text)
font.render(text, (.5 - w*.5, .25))
else:
scale = 0.002 + 0.0005 * (self.countdownSeconds % 1) ** 3
text = "%d" % (self.countdownSeconds)
w, h = bigFont.getStringSize(text, scale = scale)
self.engine.theme.setBaseColor()
bigFont.render(text, (.5 - w / 2, .45 - h / 2), scale = scale)
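        # the scale curve above is a per-second pulse: cubing the fractional
        # part eases the digit's growth, e.g. a fractional part of 0.5 gives
        # scale = 0.002 + 0.0005 * 0.125 = 0.0020625 (illustrative values);
        # the same curve is reused for the resume countdown below.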
if self.resumeCountdownSeconds > 1:
scale = 0.002 + 0.0005 * (self.resumeCountdownSeconds % 1) ** 3
text = "%d" % (self.resumeCountdownSeconds)
w, h = bigFont.getStringSize(text, scale = scale)
self.engine.theme.setBaseColor()
bigFont.render(text, (.5 - w / 2, .45 - h / 2), scale = scale)
w, h = font.getStringSize(" ")
y = .05 - h / 2 - (1.0 - v) * .2
songFont = self.engine.data.songFont
# show song name
if self.countdown and self.song:
cover = ""
if self.song.info.findTag("cover") == True: #kk69: misc changes to make it more GH/RB
cover = "%s \n " % self.tsAsMadeFamousBy #kk69: no more ugly colon! ^_^
else:
if self.theme == 2:
cover = "" #kk69: for RB
else:
cover = self.tsBy #kk69: for GH
self.engine.theme.setBaseColor(min(1.0, 4.0 - abs(4.0 - self.countdown)))
comma = ""
extra = ""
if self.song.info.year: #add comma between year and artist
comma = ", "
if self.song.info.frets:
extra = "%s \n %s%s" % (extra, self.tsFrettedBy, self.song.info.frets)
if self.song.info.version:
extra = "%s \n v%s" % (extra, self.song.info.version)
        if self.theme != 1: #shift this stuff down so it doesn't look so bad over top of the lyricsheet:
Dialogs.wrapText(songFont, (self.songInfoDisplayX, self.songInfoDisplayX - h / 2), "%s \n %s%s%s%s%s" % (Song.removeSongOrderPrefixFromName(self.song.info.name), cover, self.song.info.artist, comma, self.song.info.year, extra), rightMargin = .6, scale = self.songInfoDisplayScale)#kk69: incorporates song.ttf
else:
Dialogs.wrapText(songFont, (self.songInfoDisplayX, self.songInfoDisplayY - h / 2), "%s \n %s%s%s%s%s" % (Song.removeSongOrderPrefixFromName(self.song.info.name), cover, self.song.info.artist, comma, self.song.info.year, extra), rightMargin = .6, scale = self.songInfoDisplayScale)
else:
#mfh: this is where the song countdown display is generated:
if pos < 0:
pos = 0
if countdownPos < 0:
countdownPos = 0
self.engine.theme.setBaseColor()
#Party mode
if self.partyMode == True:
timeleft = (now - self.partySwitch) / 1000
if timeleft > self.partyTime:
self.partySwitch = now
if self.partyPlayer == 0:
self.instruments[0].keys = PLAYER2KEYS
self.instruments[0].actions = PLAYER2ACTIONS
self.keysList = [PLAYER2KEYS]
self.partyPlayer = 1
else:
self.instruments[0].keys = PLAYER1KEYS
self.instruments[0].actions = PLAYER1ACTIONS
self.keysList = [PLAYER1KEYS]
self.partyPlayer = 0
t = "%d" % (self.partyTime - timeleft + 1)
if self.partyTime - timeleft < 5:
glColor3f(1, 0, 0)
w, h = font.getStringSize(t)#QQstarS:party
font.render(t, (.5 - w / 2, 0.4)) #QQstarS:party
elif self.partySwitch != 0 and timeleft < 1:
t = "Switch"
glColor3f(0, 1, 0)
w, h = font.getStringSize(t)#QQstarS:party
font.render(t, (.5 - w / 2, 0.4))#QQstarS:party
else:#QQstarS:party
w, h = font.getStringSize(t)
font.render(t, (.5 - w / 2, y + h))
finally:
self.engine.view.resetProjection()
| gpl-2.0 | 3,266,319,482,678,174,000 | 46.720395 | 411 | 0.613254 | false |
michaelhidalgo/7WCSQ | Tools/SQLMap/sqlmap/lib/techniques/brute/use.py | 1 | 10676 | #!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import time
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import filterListValue
from lib.core.common import getFileItems
from lib.core.common import Backend
from lib.core.common import getPageWordSet
from lib.core.common import hashDBWrite
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import safeStringFormat
from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.enums import HASHDB_KEYS
from lib.core.enums import PAYLOAD
from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.settings import BRUTE_COLUMN_EXISTS_TEMPLATE
from lib.core.settings import BRUTE_TABLE_EXISTS_TEMPLATE
from lib.core.settings import METADB_SUFFIX
from lib.core.threads import getCurrentThreadData
from lib.core.threads import runThreads
from lib.request import inject
def _addPageTextWords():
wordsList = []
infoMsg = "adding words used on web page to the check list"
logger.info(infoMsg)
pageWords = getPageWordSet(kb.originalPage)
for word in pageWords:
word = word.lower()
if len(word) > 2 and not word[0].isdigit() and word not in wordsList:
wordsList.append(word)
return wordsList
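# Illustrative sketch (hypothetical page, not part of the original flow): for
# a cached page like "<html>Welcome to Acme Store</html>", getPageWordSet()
# would yield roughly {"welcome", "to", "acme", "store"}, and the filter above
# keeps lowercased words longer than two characters that don't start with a
# digit, so wordsList would end up as ["welcome", "acme", "store"] (order may
# vary).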
def tableExists(tableFile, regex=None):
if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
warnMsg += "for common table existence check"
logger.warn(warnMsg)
message = "are you sure you want to continue? [y/N] "
test = readInput(message, default="N")
kb.tableExistsChoice = test[0] in ("y", "Y")
if not kb.tableExistsChoice:
return None
result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr())))
if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
conf.db = conf.db.upper()
if result:
errMsg = "can't use table existence check because of detected invalid results "
errMsg += "(most likely caused by inability of the used injection "
errMsg += "to distinguish erroneous results)"
raise SqlmapDataException(errMsg)
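    # The probe above asks about a table name that almost certainly does not
    # exist (randomStr()); if the boolean oracle still reports it as present,
    # it cannot distinguish true from false results and brute forcing would
    # only produce noise. Assuming BRUTE_TABLE_EXISTS_TEMPLATE (imported from
    # lib.core.settings above) renders to something like
    # "EXISTS(SELECT 1 FROM <random name>)", a sane target must answer False.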
tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True)
infoMsg = "checking table existence using items from '%s'" % tableFile
logger.info(infoMsg)
tables.extend(_addPageTextWords())
tables = filterListValue(tables, regex)
threadData = getCurrentThreadData()
threadData.shared.count = 0
threadData.shared.limit = len(tables)
threadData.shared.value = []
threadData.shared.unique = set()
def tableExistsThread():
threadData = getCurrentThreadData()
while kb.threadContinue:
kb.locks.count.acquire()
if threadData.shared.count < threadData.shared.limit:
table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True)
threadData.shared.count += 1
kb.locks.count.release()
else:
kb.locks.count.release()
break
if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
fullTableName = "%s.%s" % (conf.db, table)
else:
fullTableName = table
result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName)))
kb.locks.io.acquire()
if result and table.lower() not in threadData.shared.unique:
threadData.shared.value.append(table)
threadData.shared.unique.add(table.lower())
if conf.verbose in (1, 2) and not hasattr(conf, "api"):
clearConsoleLine(True)
infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table))
dataToStdout(infoMsg, True)
if conf.verbose in (1, 2):
status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)
kb.locks.io.release()
try:
runThreads(conf.threads, tableExistsThread, threadChoice=True)
except KeyboardInterrupt:
warnMsg = "user aborted during table existence "
warnMsg += "check. sqlmap will display partial output"
logger.warn(warnMsg)
clearConsoleLine(True)
dataToStdout("\n")
if not threadData.shared.value:
warnMsg = "no table(s) found"
logger.warn(warnMsg)
else:
for item in threadData.shared.value:
if conf.db not in kb.data.cachedTables:
kb.data.cachedTables[conf.db] = [item]
else:
kb.data.cachedTables[conf.db].append(item)
for _ in ((conf.db, item) for item in threadData.shared.value):
if _ not in kb.brute.tables:
kb.brute.tables.append(_)
hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True)
return kb.data.cachedTables
def columnExists(columnFile, regex=None):
if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
warnMsg += "for common column existence check"
logger.warn(warnMsg)
message = "are you sure you want to continue? [y/N] "
test = readInput(message, default="N")
kb.columnExistsChoice = test[0] in ("y", "Y")
if not kb.columnExistsChoice:
return None
if not conf.tbl:
errMsg = "missing table parameter"
raise SqlmapMissingMandatoryOptionException(errMsg)
if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
conf.db = conf.db.upper()
result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr())))
if result:
errMsg = "can't use column existence check because of detected invalid results "
errMsg += "(most likely caused by inability of the used injection "
errMsg += "to distinguish erroneous results)"
raise SqlmapDataException(errMsg)
infoMsg = "checking column existence using items from '%s'" % columnFile
logger.info(infoMsg)
columns = getFileItems(columnFile, unique=True)
columns.extend(_addPageTextWords())
columns = filterListValue(columns, regex)
table = safeSQLIdentificatorNaming(conf.tbl, True)
if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table)
kb.threadContinue = True
kb.bruteMode = True
threadData = getCurrentThreadData()
threadData.shared.count = 0
threadData.shared.limit = len(columns)
threadData.shared.value = []
def columnExistsThread():
threadData = getCurrentThreadData()
while kb.threadContinue:
kb.locks.count.acquire()
if threadData.shared.count < threadData.shared.limit:
column = safeSQLIdentificatorNaming(columns[threadData.shared.count])
threadData.shared.count += 1
kb.locks.count.release()
else:
kb.locks.count.release()
break
result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table)))
kb.locks.io.acquire()
if result:
threadData.shared.value.append(column)
if conf.verbose in (1, 2) and not hasattr(conf, "api"):
clearConsoleLine(True)
infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
dataToStdout(infoMsg, True)
if conf.verbose in (1, 2):
status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)
kb.locks.io.release()
try:
runThreads(conf.threads, columnExistsThread, threadChoice=True)
except KeyboardInterrupt:
warnMsg = "user aborted during column existence "
warnMsg += "check. sqlmap will display partial output"
logger.warn(warnMsg)
clearConsoleLine(True)
dataToStdout("\n")
if not threadData.shared.value:
warnMsg = "no column(s) found"
logger.warn(warnMsg)
else:
columns = {}
for column in threadData.shared.value:
if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column)))
else:
result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))
if result:
columns[column] = "numeric"
else:
columns[column] = "non-numeric"
kb.data.cachedColumns[conf.db] = {conf.tbl: columns}
for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()):
if _ not in kb.brute.columns:
kb.brute.columns.append(_)
hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True)
return kb.data.cachedColumns
| apache-2.0 | -1,944,711,308,653,261,800 | 38.25 | 167 | 0.65474 | false |
balint256/gr-rds | src/python/__init__.py | 1 | 1094 | #
# Copyright 2008,2009 Free Software Foundation, Inc.
#
# This application is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The presence of this file turns this directory into a Python package
'''
This is the GNU Radio RDS module, the Python interface to the RDS
(Radio Data System) blocks (python/__init__.py).
'''
# import swig generated symbols into the howto namespace
#from rds_swig import *
from rds import *
# import any pure python here
#
from rdspanel import *
| gpl-2.0 | -437,571,094,281,650,370 | 33.1875 | 74 | 0.761426 | false |
avikivity/scylla | test/alternator/test_table.py | 1 | 15449 | # Copyright 2019 ScyllaDB
#
# This file is part of Scylla.
#
# Scylla is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scylla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Scylla. If not, see <http://www.gnu.org/licenses/>.
# Tests for basic table operations: CreateTable, DeleteTable, ListTables.
import pytest
from botocore.exceptions import ClientError
from util import list_tables, test_table_name, create_test_table, random_string
# Utility function to create a table with a given name and a valid
# schema. This function initiates the table's creation, but doesn't
# wait for the table to actually become ready.
def create_table(dynamodb, name, BillingMode='PAY_PER_REQUEST', **kwargs):
return dynamodb.create_table(
TableName=name,
BillingMode=BillingMode,
KeySchema=[
{
'AttributeName': 'p',
'KeyType': 'HASH'
},
{
'AttributeName': 'c',
'KeyType': 'RANGE'
}
],
AttributeDefinitions=[
{
'AttributeName': 'p',
'AttributeType': 'S'
},
{
'AttributeName': 'c',
'AttributeType': 'S'
},
],
**kwargs
)
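# A minimal usage sketch (illustrative; the table name is arbitrary):
#   table = create_table(dynamodb, 'alternator_test_example')
#   table.meta.client.get_waiter('table_exists').wait(TableName='alternator_test_example')
# create_table() only *initiates* creation, hence the explicit waiter.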
# Utility function for creating a table with a given name, and then deleting
# it immediately, waiting for these operations to complete. Since the wait
# uses DescribeTable, this function requires all of CreateTable, DescribeTable
# and DeleteTable to work correctly.
# Note that in DynamoDB, table deletion takes a very long time, so tests
# successfully using this function are very slow.
def create_and_delete_table(dynamodb, name, **kwargs):
table = create_table(dynamodb, name, **kwargs)
table.meta.client.get_waiter('table_exists').wait(TableName=name)
table.delete()
table.meta.client.get_waiter('table_not_exists').wait(TableName=name)
##############################################################################
# Test creating a table, and then deleting it, waiting for each operation
# to have completed before proceeding. Since the wait uses DescribeTable,
# this tests requires all of CreateTable, DescribeTable and DeleteTable to
# function properly in their basic use cases.
# Unfortunately, this test is extremely slow with DynamoDB because deleting
# a table is extremely slow until it really happens.
def test_create_and_delete_table(dynamodb):
create_and_delete_table(dynamodb, 'alternator_test')
# Test that recreating a table right after deleting it works without issues
def test_recreate_table(dynamodb):
create_and_delete_table(dynamodb, 'alternator_recr_test')
create_and_delete_table(dynamodb, 'alternator_recr_test')
# DynamoDB documentation specifies that table names must be 3-255 characters,
# and match the regex [a-zA-Z0-9._-]+. Names not matching these rules should
# be rejected, and no table be created.
def test_create_table_unsupported_names(dynamodb):
from botocore.exceptions import ParamValidationError, ClientError
    # Interestingly, the boto library tests for names shorter than the
    # minimum length (3 characters) immediately, and failure results in
    # ParamValidationError. But the other invalid names are passed to
    # DynamoDB, which returns an HTTP response code, which results in a
    # ClientError exception.
with pytest.raises(ParamValidationError):
create_table(dynamodb, 'n')
with pytest.raises(ParamValidationError):
create_table(dynamodb, 'nn')
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, 'n' * 256)
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, 'nyh@test')
# On the other hand, names following the above rules should be accepted. Even
# names which the Scylla rules forbid, such as a name starting with .
def test_create_and_delete_table_non_scylla_name(dynamodb):
create_and_delete_table(dynamodb, '.alternator_test')
# names with 255 characters are allowed in Dynamo, but they are not currently
# supported in Scylla because we create a directory whose name is the table's
# name followed by 33 bytes (underscore and UUID). So currently, we only
# correctly support names with length up to 222.
def test_create_and_delete_table_very_long_name(dynamodb):
# In the future, this should work:
#create_and_delete_table(dynamodb, 'n' * 255)
# But for now, only 222 works:
create_and_delete_table(dynamodb, 'n' * 222)
# We cannot test the following on DynamoDB because it will succeed
# (DynamoDB allows up to 255 bytes)
#with pytest.raises(ClientError, match='ValidationException'):
# create_table(dynamodb, 'n' * 223)
# Tests creating a table with an invalid schema should return a
# ValidationException error.
def test_create_table_invalid_schema(dynamodb):
# The name of the table "created" by this test shouldn't matter, the
# creation should not succeed anyway.
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'p', 'KeyType': 'HASH' },
{ 'AttributeName': 'c', 'KeyType': 'HASH' }
],
AttributeDefinitions=[
{ 'AttributeName': 'p', 'AttributeType': 'S' },
{ 'AttributeName': 'c', 'AttributeType': 'S' },
],
)
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'p', 'KeyType': 'RANGE' },
{ 'AttributeName': 'c', 'KeyType': 'RANGE' }
],
AttributeDefinitions=[
{ 'AttributeName': 'p', 'AttributeType': 'S' },
{ 'AttributeName': 'c', 'AttributeType': 'S' },
],
)
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'c', 'KeyType': 'RANGE' }
],
AttributeDefinitions=[
{ 'AttributeName': 'c', 'AttributeType': 'S' },
],
)
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'c', 'KeyType': 'HASH' },
{ 'AttributeName': 'p', 'KeyType': 'RANGE' },
{ 'AttributeName': 'z', 'KeyType': 'RANGE' }
],
AttributeDefinitions=[
{ 'AttributeName': 'c', 'AttributeType': 'S' },
{ 'AttributeName': 'p', 'AttributeType': 'S' },
{ 'AttributeName': 'z', 'AttributeType': 'S' }
],
)
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'c', 'KeyType': 'HASH' },
],
AttributeDefinitions=[
{ 'AttributeName': 'z', 'AttributeType': 'S' }
],
)
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(
TableName='name_doesnt_matter',
BillingMode='PAY_PER_REQUEST',
KeySchema=[
{ 'AttributeName': 'k', 'KeyType': 'HASH' },
],
AttributeDefinitions=[
{ 'AttributeName': 'k', 'AttributeType': 'Q' }
],
)
# Test that trying to create a table that already exists fails in the
# appropriate way (ResourceInUseException)
def test_create_table_already_exists(dynamodb, test_table):
with pytest.raises(ClientError, match='ResourceInUseException.*Table.*already exists'):
create_table(dynamodb, test_table.name)
# Test that BillingMode error path works as expected - only the values
# PROVISIONED or PAY_PER_REQUEST are allowed. The former requires
# ProvisionedThroughput to be set, the latter forbids it.
# If BillingMode is outright missing, it defaults (as original
# DynamoDB did) to PROVISIONED, so ProvisionedThroughput is required.
def test_create_table_billing_mode_errors(dynamodb, test_table):
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, test_table_name(), BillingMode='unknown')
# billing mode is case-sensitive
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, test_table_name(), BillingMode='pay_per_request')
# PAY_PER_REQUEST cannot come with a ProvisionedThroughput:
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, test_table_name(),
BillingMode='PAY_PER_REQUEST', ProvisionedThroughput={'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10})
# On the other hand, PROVISIONED requires ProvisionedThroughput:
# By the way, ProvisionedThroughput not only needs to appear, it must
# have both ReadCapacityUnits and WriteCapacityUnits - but we can't test
# this with boto3, because boto3 has its own verification that if
# ProvisionedThroughput is given, it must have the correct form.
with pytest.raises(ClientError, match='ValidationException'):
create_table(dynamodb, test_table_name(), BillingMode='PROVISIONED')
# If BillingMode is completely missing, it defaults to PROVISIONED, so
# ProvisionedThroughput is required
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.create_table(TableName=test_table_name(),
KeySchema=[{ 'AttributeName': 'p', 'KeyType': 'HASH' }],
AttributeDefinitions=[{ 'AttributeName': 'p', 'AttributeType': 'S' }])
# Even before Alternator gains full support for the DynamoDB stream API
# and CreateTable's StreamSpecification option, we should support the
# options which mean it is turned *off*.
def test_table_streams_off(dynamodb):
# If StreamSpecification is given, but has StreamEnabled=false, it's as
# if StreamSpecification was missing. StreamViewType isn't needed.
table = create_test_table(dynamodb, StreamSpecification={'StreamEnabled': False},
KeySchema=[{ 'AttributeName': 'p', 'KeyType': 'HASH' }],
AttributeDefinitions=[{ 'AttributeName': 'p', 'AttributeType': 'S' }]);
table.delete();
    # DynamoDB doesn't allow StreamSpecification to be an empty map - if it
    # exists, it must have a StreamEnabled.
    # Unfortunately, new versions of boto3 don't let us pass this...
#with pytest.raises(ClientError, match='ValidationException'):
# table = create_test_table(dynamodb, StreamSpecification={},
# KeySchema=[{ 'AttributeName': 'p', 'KeyType': 'HASH' }],
# AttributeDefinitions=[{ 'AttributeName': 'p', 'AttributeType': 'S' }]);
# table.delete();
# Unfortunately, boto3 doesn't allow us to pass StreamSpecification=None.
# This is what we had in issue #5796.
def test_table_streams_on(dynamodb):
for type in [ 'OLD_IMAGE', 'NEW_IMAGE', 'KEYS_ONLY', 'NEW_AND_OLD_IMAGES']:
table = create_test_table(dynamodb,
StreamSpecification={'StreamEnabled': True, 'StreamViewType': type},
KeySchema=[{ 'AttributeName': 'p', 'KeyType': 'HASH' }],
AttributeDefinitions=[{ 'AttributeName': 'p', 'AttributeType': 'S' }]);
table.delete();
# Our first implementation had a special column name called "attrs" where
# we stored a map for all non-key columns. If the user tried to name one
# of the key columns with this same name, the result was a disaster - Scylla
# goes into a bad state after trying to write data with two updates to same-
# named columns.
special_column_name1 = 'attrs'
special_column_name2 = ':attrs'
@pytest.fixture(scope="session")
def test_table_special_column_name(dynamodb):
table = create_test_table(dynamodb,
KeySchema=[
{ 'AttributeName': special_column_name1, 'KeyType': 'HASH' },
{ 'AttributeName': special_column_name2, 'KeyType': 'RANGE' }
],
AttributeDefinitions=[
{ 'AttributeName': special_column_name1, 'AttributeType': 'S' },
{ 'AttributeName': special_column_name2, 'AttributeType': 'S' },
],
)
yield table
table.delete()
@pytest.mark.xfail(reason="special attrs column not yet hidden correctly")
def test_create_table_special_column_name(test_table_special_column_name):
s = random_string()
c = random_string()
h = random_string()
expected = {special_column_name1: s, special_column_name2: c, 'hello': h}
test_table_special_column_name.put_item(Item=expected)
got = test_table_special_column_name.get_item(Key={special_column_name1: s, special_column_name2: c}, ConsistentRead=True)['Item']
assert got == expected
# Test that all tables we create are listed, and pagination works properly.
# Note that the DynamoDB setup we run this against may have hundreds of
# other tables, for all we know. We just need to check that the tables we
# created are indeed listed.
def test_list_tables_paginated(dynamodb, test_table, test_table_s, test_table_b):
my_tables_set = {table.name for table in [test_table, test_table_s, test_table_b]}
for limit in [1, 2, 3, 4, 50, 100]:
print("testing limit={}".format(limit))
list_tables_set = set(list_tables(dynamodb, limit))
assert my_tables_set.issubset(list_tables_set)
# Test that pagination limit is validated
def test_list_tables_wrong_limit(dynamodb):
# lower limit (min. 1) is imposed by boto3 library checks
with pytest.raises(ClientError, match='ValidationException'):
dynamodb.meta.client.list_tables(Limit=101)
# Even before Alternator gains support for configuring server-side encryption
# ("encryption at rest") with CreateTable's SSESpecification option, we should
# support the option "Enabled=false" which is the default, and means the server
# takes care of whatever server-side encryption is done, on its own.
# Reproduces issue #7031.
def test_table_sse_off(dynamodb):
    # If SSESpecification is given, but has Enabled=false, it's as if
    # SSESpecification was missing, and fine. No other attributes are
    # necessary.
    table = create_test_table(dynamodb, SSESpecification={'Enabled': False},
        KeySchema=[{ 'AttributeName': 'p', 'KeyType': 'HASH' }],
        AttributeDefinitions=[{ 'AttributeName': 'p', 'AttributeType': 'S' }])
    table.delete()
| agpl-3.0 | 5,202,553,839,563,282,000 | 46.682099 | 134 | 0.661208 | false |
umitproject/openmonitor-desktop-agent | umit/icm/agent/Upgrade.py | 1 | 5079 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
#
# Author: Zhongjie Wang <[email protected]>
# Tianwei Liu <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import shutil
import sys
import time
import zlib
import subprocess
from umit.icm.agent.logger import g_logger
from umit.icm.agent.Global import *
from umit.icm.agent.BasePaths import *
from umit.icm.agent.Application import theApp
def onerror(func, path, exc_info):
    """
    shutil.rmtree error callback.
    (Note: on Windows, rmtree cannot remove read-only files, so clear the
    read-only bit and retry.)
    """
    import stat
    if not os.access(path, os.W_OK):
        os.chmod(path, stat.S_IWUSR)
        func(path)
    else:
        g_logger.debug("rm: path %s" % path)  # ignore errors
def update_agent(result, *args, **kw):
"""
    Update the desktop agent after the new package has been downloaded.
"""
g_logger.info("Close task looping...")
theApp.task_assgin_lc.stop()
theApp.task_run_lc.stop()
theApp.report_proc_lc.stop()
    theApp.test_sets_fetch_lc.stop()
g_logger.info("Updating Desktop Agent...")
# args = ((version, check_code=0), {})
version = args[0]
if len(args) == 2:
check_code = args[1]
else:
check_code = 0
filename = 'icm-agent_' + version + '.tar.gz'
path = os.path.join(TMP_DIR, filename)
if not os.path.exists(path):
g_logger.error("Package %s can't be found under '/tmp' folder." %
filename)
return
if check_code != 0:
# Verify if the file is the correct one
        content = open(path, 'rb').read()
crc = zlib.crc32(content) & 0xffffffff
if crc != check_code:
g_logger.error("Package %s is corrupt. Try to download it again." %
filename)
return
# Stop current agent
open(os.path.join(
ROOT_DIR, 'umit', 'icm', 'agent', 'agent_restart_mark'), 'w').close()
from twisted.internet import reactor
reactor.callInThread(restart_agent, path)
reactor.stop()
g_logger.debug("reactor stopped.")
def restart_agent(path):
while os.path.exists(
os.path.join(ROOT_DIR, 'umit', 'icm', 'agent', 'running')):
time.sleep(0.1)
    # Remove the old installation folders
remove_files = ["umit","bin","conf","deps","docs","install_scripts",
"share","tools"]
for folder in remove_files:
if os.name == 'nt': #rmtree cannot remove the readonly files
shutil.rmtree(os.path.join(ROOT_DIR, folder),onerror=onerror)
else:
shutil.rmtree(os.path.join(ROOT_DIR, folder))
# Extract tarfile
import tarfile
t = tarfile.open(path)
t.extractall(ROOT_DIR)
#t.extractall(TMP_DIR)
restart_function()
def restart_function():
# Restart
g_logger.info("Restarting Desktop Agent.")
bin_path = os.path.join(ROOT_DIR, 'bin', 'icm-agent')
subprocess.Popen([sys.executable, bin_path] + sys.argv[1:])
g_logger.info("Desktop Agent Updated.")
def update_test_mod(result, *args, **kw):
g_logger.info("Updating Test Module...")
# args = ((version, check_code=0), {})
version = args[0]
if len(args) == 2:
check_code = args[1]
else:
check_code = 0
filename = 'test_' + version + '.py'
path = os.path.join(TMP_DIR, filename)
if not os.path.exists(path):
g_logger.error("Test mod %s can't be found under '/tmp' folder." %
filename)
return
if check_code != 0:
# Verify if the file is the correct one
        content = open(path, 'rb').read()
crc = zlib.crc32(content) & 0xffffffff
if crc != check_code:
g_logger.error("Test mod %s is corrupt. Try to download it again." %
filename)
return
    # Back up the original test.py as test.py.bak,
    # and replace test.py with the new one
origin = os.path.join(ROOT_DIR, 'umit', 'icm', 'agent', 'test.py')
shutil.copy(origin, origin + '.bak')
shutil.copy(path, origin)
if 'umit.icm.agent.test' in sys.modules:
reload(sys.modules['umit.icm.agent.test'])
g_logger.info("Test Module updated.")
if __name__ == "__main__":
pass
| gpl-2.0 | -915,598,988,228,271,500 | 32.317568 | 92 | 0.603859 | false |
theCatWisel/ThreatExchange | pytx/pytx/threat_descriptor.py | 1 | 1324 | from .common import Common
from .vocabulary import ThreatDescriptor as td
from .vocabulary import ThreatExchange as t
class ThreatDescriptor(Common):
_URL = t.URL + t.VERSION + t.THREAT_DESCRIPTORS
_DETAILS = t.URL + t.VERSION
_RELATED = t.URL + t.VERSION
_fields = [
td.ADDED_ON,
td.ATTACK_TYPE,
td.CONFIDENCE,
td.DESCRIPTION,
td.EXPIRED_ON,
td.ID,
td.INDICATOR,
td.LAST_UPDATED,
td.METADATA,
td.OWNER,
td.PRECISION,
td.PRIVACY_MEMBERS,
td.PRIVACY_TYPE,
td.RAW_INDICATOR,
td.REVIEW_STATUS,
td.SEVERITY,
td.SHARE_LEVEL,
td.SOURCE_URI,
td.STATUS,
td.TAGS,
td.THREAT_TYPE,
td.TYPE,
]
_default_fields = [
td.ADDED_ON,
td.ATTACK_TYPE,
td.CONFIDENCE,
td.DESCRIPTION,
td.EXPIRED_ON,
td.ID,
td.INDICATOR,
td.LAST_UPDATED,
td.METADATA,
td.OWNER,
td.PRECISION,
td.PRIVACY_MEMBERS,
td.PRIVACY_TYPE,
td.RAW_INDICATOR,
td.REVIEW_STATUS,
td.SEVERITY,
td.SHARE_LEVEL,
td.SOURCE_URI,
td.STATUS,
td.TAGS,
td.THREAT_TYPE,
td.TYPE,
]
_unique = [
]
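# Typical usage, as an illustrative sketch: the objects() query interface
# and get() accessor are inherited from Common rather than defined here,
# and the access-token setup is an assumption about the caller:
#    from pytx.access_token import access_token
#    access_token('<app-id>', '<app-secret>')
#    for result in ThreatDescriptor.objects(text='malware'):
#        print(result.get(td.RAW_INDICATOR))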
| bsd-3-clause | -7,972,141,957,186,459,000 | 20.015873 | 51 | 0.522659 | false |
Yawning/py-uniformdh | known-value-test.py | 1 | 4273 | #
# known-value-test: Test against test vectors
# Yawning Angel (yawning at schwanenlied dot me)
#
# Depends on obfsproxy for some utility stuff.
#
import obfsproxy.transports.obfs3_dh as obfs3_dh
#
# Test keypair x/X:
#
# The test vector specifies "... 756e" for x but this forces the UniformDH
# code to return p - X as the public key, and more importantly that's what
# the original material I used ends with.
#
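#
# Background, as stated by the obfs3/UniformDH spec (recorded here for
# context, not verified by this script): the group is the 1536-bit MODP
# group of RFC 3526 with g = 2, private keys are made even, and a party
# publishes either X = g^x mod p or p - X (selected by a bit of the
# private key) so the transmitted value looks like a uniform random
# 1536-bit string.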
x = int(
"""6f59 2d67 6f53 6874 746f 2068 6e6b 776f
2073 6874 2065 6167 6574 202e 6f59 2d67
6f53 6874 746f 2068 7369 7420 6568 6720
7461 2e65 5920 676f 532d 746f 6f68 6874
6920 2073 6874 2065 656b 2079 6e61 2064
7567 7261 6964 6e61 6f20 2066 6874 2065
6167 6574 202e 6150 7473 202c 7270 7365
6e65 2c74 6620 7475 7275 2c65 6120 6c6c
6120 6572 6f20 656e 6920 206e 6f59 2d67
6f53 6874 746f 2e68 4820 2065 6e6b 776f
2073 6877 7265 2065 6874 2065 6c4f 2064
6e4f 7365 6220 6f72 656b 7420 7268 756f""".replace(' ','').replace('\n',''), 16)
x_str = obfs3_dh.int_to_bytes(x, 192)
X = int(
"""76a3 d17d 5c55 b03e 865f a3e8 2679 90a7
24ba a24b 0bdd 0cc4 af93 be8d e30b e120
d553 3c91 bf63 ef92 3b02 edcb 84b7 4438
3f7d e232 cca6 eb46 d07c ad83 dcaa 317f
becb c68c a13e 2c40 19e6 a365 3106 7450
04ae cc0b e1df f0a7 8733 fb0e 7d5c b7c4
97ca b77b 1331 bf34 7e5f 3a78 47aa 0bc0
f4bc 6414 6b48 407f ed7b 931d 1697 2d25
fb4d a5e6 dc07 4ce2 a58d aa8d e762 4247
cdf2 ebe4 e4df ec6d 5989 aac7 78c8 7559
d321 3d60 40d4 111c e3a2 acae 19f9 ee15
3250 9e03 7f69 b252 fdc3 0243 cbbc e9d0""".replace(' ','').replace('\n',''), 16)
X_str = obfs3_dh.int_to_bytes(X, 192)
#
# Test keypair y/Y
#
y = int(
"""7365 6220 6f72 656b 7420 7268 756f 6867
6f20 2066 6c6f 2c64 6120 646e 7720 6568
6572 5420 6568 2079 6873 6c61 206c 7262
6165 206b 6874 6f72 6775 2068 6761 6961
2e6e 4820 2065 6e6b 776f 2073 6877 7265
2065 6854 7965 6820 7661 2065 7274 646f
6520 7261 6874 7327 6620 6569 646c 2c73
6120 646e 7720 6568 6572 5420 6568 2079
7473 6c69 206c 7274 6165 2064 6874 6d65
202c 6e61 2064 6877 2079 6f6e 6f20 656e
6320 6e61 6220 6865 6c6f 2064 6854 6d65
6120 2073 6854 7965 7420 6572 6461 0a2e""".replace(' ','').replace('\n',''), 16)
y_str = obfs3_dh.int_to_bytes(y, 192)
Y = int(
"""d04e 156e 554c 37ff d7ab a749 df66 2350
1e4f f446 6cb1 2be0 5561 7c1a 3687 2237
36d2 c3fd ce9e e0f9 b277 7435 0849 112a
a5ae b1f1 2681 1c9c 2f3a 9cb1 3d2f 0c3a
7e6f a2d3 bf71 baf5 0d83 9171 534f 227e
fbb2 ce42 27a3 8c25 abdc 5ba7 fc43 0111
3a2c b206 9c9b 305f aac4 b72b f21f ec71
578a 9c36 9bca c84e 1a7d cf07 54e3 42f5
bc8f e491 7441 b882 5443 5e2a baf2 97e9
3e1e 5796 8672 d45b d7d4 c8ba 1bc3 d314
889b 5bc3 d3e4 ea33 d4f2 dfdd 34e5 e5a7
2ff2 4ee4 6316 d475 7dad 0936 6a0b 66b3""".replace(' ','').replace('\n',''), 16)
Y_str = obfs3_dh.int_to_bytes(Y, 192)
#
# Shared secret: x + Y/y + X
#
xYyX = int(
"""78af af5f 457f 1fdb 832b ebc3 9764 4a33
038b e9db a10c a2ce 4a07 6f32 7f3a 0ce3
151d 477b 869e e7ac 4677 5529 2ad8 a77d
b9bd 87ff bbc3 9955 bcfb 03b1 5838 88c8
fd03 7834 ff3f 401d 463c 10f8 99aa 6378
4451 40b7 f838 6a7d 509e 7b9d b19b 677f
062a 7a1a 4e15 0960 4d7a 0839 ccd5 da61
73e1 0afd 9eab 6dda 7453 9d60 493c a37f
a5c9 8cd9 640b 409c d8bb 3be2 bc51 36fd
42e7 64fc 3f3c 0ddb 8db3 d87a bcf2 e659
8d2b 101b ef7a 56f5 0ebc 658f 9df1 287d
a813 5954 3e77 e4a4 cfa7 598a 4152 e4c0""".replace(' ','').replace('\n',''), 16)
xYyX_str = obfs3_dh.int_to_bytes(xYyX, 192)
#
# Test my shit
#
import uniformdh as ssl_dh
ssl_x = ssl_dh.UniformDH(x_str)
if ssl_x.get_public() == X_str:
print("X - Public key - MATCH")
else:
print("X - Public key - MISMATCH")
ssl_y = ssl_dh.UniformDH(y_str)
if ssl_y.get_public() == Y_str:
print("Y - Public key - MATCH")
else:
print("Y - Public key - MISMATCH")
ssl_x_Y = ssl_x.get_secret(ssl_y.get_public())
ssl_y_X = ssl_y.get_secret(ssl_x.get_public())
if ssl_x_Y == ssl_y_X:
print(" Shared secret - CONSISTENT")
else:
print(" Shared secret - CONSISTENT")
if ssl_x_Y == xYyX_str:
print(" Shared secret - MATCH")
else:
print(" Shared secret - MISMATCH")
| bsd-2-clause | -1,549,477,454,173,569,300 | 33.184 | 85 | 0.67868 | false |
cayetanobv/QGIS-Plugin-MBTiles2img | mbtiles2img.py | 1 | 9872 | """
/***************************************************************************
MBTiles2img
A QGIS plugin
 This plugin takes an mbtiles file and splits it apart into a folder hierarchy
of individual image tile files.
-------------------
begin : 2014-12-09
copyright : (C) 2014 by Cayetano Benavent
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os.path
from PyQt5.QtCore import QSettings, QTranslator, qVersion, QCoreApplication
from PyQt5.QtWidgets import QAction, QFileDialog, QMessageBox
from PyQt5.QtGui import QIcon
from qgis.core import Qgis
from MBTiles2img import resources_rc
from MBTiles2img.mbtilesextractor import MBTilesExtractor
from MBTiles2img.mbtiles2img_dialog import MBTiles2imgDialog
class MBTiles2img:
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value('locale/userLocale')[0:2]
locale_path = os.path.join(
self.plugin_dir,
'i18n',
'MBTiles2img_{}.qm'.format(locale))
if os.path.exists(locale_path):
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
# Create the dialog (after translation) and keep reference
self.dlg = MBTiles2imgDialog(parent=self.iface.mainWindow())
# Declare instance attributes
self.actions = []
self.menu = self.tr(u'&MBTiles images extract')
# TODO: We are going to let the user set this up in a future iteration
self.toolbar = self.iface.addToolBar(u'MBTiles2img')
self.toolbar.setObjectName(u'MBTiles2img')
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QCoreApplication.translate('MBTiles2img', message)
def add_action(
self,
icon_path,
text,
callback,
enabled_flag=True,
add_to_menu=True,
add_to_toolbar=True,
status_tip=None,
whats_this=None,
parent=None):
"""Add a toolbar icon to the toolbar.
:param icon_path: Path to the icon for this action. Can be a resource
path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
:type icon_path: str
:param text: Text that should be shown in menu items for this action.
:type text: str
:param callback: Function to be called when the action is triggered.
:type callback: function
:param enabled_flag: A flag indicating if the action should be enabled
by default. Defaults to True.
:type enabled_flag: bool
:param add_to_menu: Flag indicating whether the action should also
be added to the menu. Defaults to True.
:type add_to_menu: bool
:param add_to_toolbar: Flag indicating whether the action should also
be added to the toolbar. Defaults to True.
:type add_to_toolbar: bool
:param status_tip: Optional text to show in a popup when mouse pointer
hovers over the action.
:type status_tip: str
:param parent: Parent widget for the new action. Defaults None.
:type parent: QWidget
:param whats_this: Optional text to show in the status bar when the
mouse pointer hovers over the action.
:returns: The action that was created. Note that the action is also
added to self.actions list.
:rtype: QAction
"""
icon = QIcon(icon_path)
action = QAction(icon, text, parent)
action.triggered.connect(callback)
action.setEnabled(enabled_flag)
if status_tip is not None:
action.setStatusTip(status_tip)
if whats_this is not None:
action.setWhatsThis(whats_this)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_menu:
self.iface.addPluginToMenu(
self.menu,
action)
self.actions.append(action)
return action
def initGui(self):
"""Create the menu entries and toolbar icons inside the QGIS GUI."""
icon_path = ':/plugins/MBTiles2img/icon.png'
self.add_action(
icon_path,
text=self.tr(u'MBTiles images extract'),
callback=self.run,
parent=self.iface.mainWindow())
self.dlg.loadFileButton.clicked.connect(self.loadMBTilesFile)
self.dlg.selectDestFolderButton.clicked.connect(self.setDestFolder)
self.dlg.runExtractionButton.clicked.connect(self.runTileExtraction)
self.dlg.helpButton.clicked.connect(self.getHelp)
def unload(self):
"""Removes the plugin menu item and icon from QGIS GUI."""
for action in self.actions:
self.iface.removePluginMenu(
self.tr(u'&MBTiles images extract'),
action)
self.iface.removeToolBarIcon(action)
def run(self):
#show the dialog
self.dlg.show()
def runTileExtraction(self):
"""
Run tiles extraction from MBTiles file
"""
input_file = self.dlg.getPathMBTiles()
dest_folder = self.dlg.getPathDestFolder()
res = self.tileExtractor(input_file, dest_folder)
if res == 1:
self.dlg.clearLabelPathMBTiles()
self.dlg.clearLabelPathDestFolder()
self.dlg.setLabelPathDestFolder("Destination folder...")
self.dlg.setLabelPathMBTiles("MBTiles to extract...")
def loadMBTilesFile(self):
"""
Load MBTiles file
"""
self.dlg.progressBar.setValue(0)
# open file dialog to load MBTiles file
start_dir = '/home'
fl_types = "MBTiles files (*.mbtiles)"
file_path, _ = QFileDialog.getOpenFileName(self.iface.mainWindow(),
'Open MBTiles file',
start_dir, fl_types)
if file_path:
self.dlg.setLabelPathMBTiles(file_path)
else:
self.dlg.setLabelPathMBTiles("MBTiles to extract...")
def setDestFolder(self):
"""
Set Destination folder to save exported images
"""
self.dlg.progressBar.setValue(0)
# open file dialog to select folder
start_dir = '/home'
folder_path = QFileDialog.getExistingDirectory(self.iface.mainWindow(),
'Select destination folder to save exported images',
start_dir)
if folder_path:
self.dlg.setLabelPathDestFolder(folder_path)
else:
self.dlg.setLabelPathDestFolder("Destination folder...")
def tileExtractor(self, input_file, dest_folder):
"""
MBTiles images extraction method
This method uses MBTilesextractor library to do the work
"""
try:
ex_mbt = MBTilesExtractor(input_file, dirname=dest_folder, overwrite=True)
self.dlg.progressBar.setValue(10)
ex_mbt.extractTiles()
msg_type= "Info"
level = Qgis.Info
progress_value = 100
outfolder = os.path.join(dest_folder,os.path.basename(input_file).split('.')[0])
result = 'Tile extraction done! Output folder: {}'.format(outfolder)
self.iface.messageBar().pushMessage(msg_type, result, level=level, duration=10)
self.dlg.progressBar.setValue(progress_value)
return 1
except Exception as err:
result = 'Error: {0}'.format(err)
self.iface.messageBar().pushMessage("Error", result, level=Qgis.Critical, duration=10)
self.dlg.progressBar.setValue(0)
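    # Standalone use of the extractor library, for illustration; this
    # mirrors the call above and assumes nothing beyond the API already
    # used there (paths are placeholders):
    #    from MBTiles2img.mbtilesextractor import MBTilesExtractor
    #    ex = MBTilesExtractor('/tmp/demo.mbtiles', dirname='/tmp/out', overwrite=True)
    #    ex.extractTiles()  # tile tree ends up under /tmp/out/demo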
def getHelp(self):
"""
Show help to users
"""
QMessageBox.information(self.iface.mainWindow(),"Help",
"""
1) Select MBTiles to extract.
2) Select destination folder to
save exported images.
3) Push button "Run tile extraction".
Developed by Cayetano Benavent 2014-2018.
""")
| gpl-2.0 | 6,449,369,633,904,403,000 | 33.15917 | 105 | 0.571212 | false |
chjdev/euler | python/problem28.py | 1 | 1182 | # Number spiral diagonals
#
# Problem 28
#
# Starting with the number 1 and moving to the right in a clockwise direction a 5 by 5 spiral is formed as follows:
#
# 21 22 23 24 25
# 20 7 8 9 10
# 19 6 1 2 11
# 18 5 4 3 12
# 17 16 15 14 13
#
# It can be verified that the sum of the numbers on the diagonals is 101.
#
# What is the sum of the numbers on the diagonals in a 1001 by 1001 spiral formed in the same way?
# 43 44 45 46 47 48 49
# 42 21 22 23 24 25 26
# 41 20 7 8 9 10 27
# 40 19 6 1 2 11 28
# 39 18 5 4 3 12 29
# 38 17 16 15 14 13 30
# 37 36 35 34 33 32 31
# 3x3 -> 1
# 5x5 -> +2 -> 3
# 7x7 -> +2 -> 5
# 1 (2) 3 (4) 5 (6) 7 (8) 9
# (10) (11) (12) 13 (14) (15) (16) 17 (18) (19) (20) 21 (22) (23) (24) 25
# (26) (27) (28) (29) (30) 31 and so on
# In English: for each ring, take four steps of the same size, with the step sizes following the sequence above
import itertools
def spiral_numbers(max_dim=None):
dim = 1
num = 1
yield num
for offset in itertools.count(2, 2):
dim += 2
for i in range(0, 4):
num += offset
yield num
        if max_dim is not None and dim >= max_dim:
break
print(sum(spiral_numbers(1001)))
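# Cross-check with a closed form (added for illustration; the generator
# above is the original solution). A ring with odd side n has corners
# n^2, n^2 - (n-1), n^2 - 2(n-1) and n^2 - 3(n-1), which sum to
# 4n^2 - 6n + 6, so the diagonal total is 1 plus that sum over odd n.
def diagonal_sum_closed_form(max_dim):
    return 1 + sum(4 * n * n - 6 * n + 6 for n in range(3, max_dim + 1, 2))
assert diagonal_sum_closed_form(5) == 101  # the 5x5 example above
assert diagonal_sum_closed_form(1001) == sum(spiral_numbers(1001))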
| bsd-2-clause | 7,553,869,055,711,668,000 | 22.176471 | 115 | 0.584602 | false |
liushuaikobe/yixin | lib/yixin.py | 1 | 10990 | # -*- coding: utf-8 -*-
import hashlib
import time
import simplejson
from xml.etree import ElementTree as etree
from xml.etree.ElementTree import Element, SubElement, ElementTree
import utils
import log
import constant
import messagebuilder
class YiXin(object):
'''
Main class of this lib.
'''
def __init__(self, token, appId, appSecret):
self.token = token
self.appId = appId
self.appSecret = appSecret
self.accessToken = None
self.accessTokenExpiresIn = None
self.accessTokenGetTimeStamp = None
self.reply = Reply()
self.textMsgBuilder = None
self.picMsgBuilder = None
self.locationMsgBuilder = None
self.eventMsgBuilder = None
self.onTextMsgReceivedCallback = None
self.onPicMsgReceivedCallback = None
self.onLocationMsgReceivedCallback = None
self.onEventMsgReceivedCallback = None
self.onButtonClickCallback = None
self.onUserSubscribeCallback = None
self.onUserUnsbscribeCallback = None
def checkSignature(self, signature, timestamp, nonce, echostr):
'''
        Check the signature posted by the YiXin server.
'''
if not utils.checkType(type(''), signature, timestamp, nonce, echostr):
log.log(log.ERROR, 'Your args for signature checking must be ' + str(type('')))
return None
tmpLst = [self.token, timestamp, nonce]
tmpLst.sort()
tmpStr = ''.join(tuple(tmpLst))
tmpStr = hashlib.sha1(tmpStr).hexdigest()
if tmpStr == signature:
log.log(log.INFO, 'Signature checking successfully.')
return echostr
else:
log.log(log.ERROR, 'Signature checking failed.')
return None
## ## ### ## ## ######## ## ######## ## ## ###### ######
## ## ## ## ### ## ## ## ## ## ### ### ## ## ## ##
## ## ## ## #### ## ## ## ## ## #### #### ## ##
######### ## ## ## ## ## ## ## ## ###### ## ### ## ###### ## ####
## ## ######### ## #### ## ## ## ## ## ## ## ## ##
## ## ## ## ## ### ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ######## ######## ######## ####### ## ## ###### ######
def handleMessage(self, rawMsg, callback=None):
'''
Handle the message posted from YiXin Server.
'''
msgType = self.getMsgType(rawMsg)
msg = None
# we received a text message
if msgType == constant.TEXT_TYPE:
if not self.textMsgBuilder:
self.textMsgBuilder = messagebuilder.TextMsgBuilder(rawMsg)
else:
self.textMsgBuilder.setXmlStr(rawMsg)
msg = self.textMsgBuilder.build()
if callable(self.onTextMsgReceivedCallback):
self.onTextMsgReceivedCallback(msgType, msg)
        # we received an image message
elif msgType == constant.PIC_TYPE:
if not self.picMsgBuilder:
self.picMsgBuilder = messagebuilder.PicMsgBuilder(rawMsg)
else:
self.picMsgBuilder.setXmlStr(rawMsg)
msg = self.picMsgBuilder.build()
if callable(self.onPicMsgReceivedCallback):
self.onPicMsgReceivedCallback(msgType, msg)
        # we received a location message
elif msgType == constant.LOCATION_TYPE:
if not self.locationMsgBuilder:
self.locationMsgBuilder = messagebuilder.LocationMsgBuilder(rawMsg)
else:
self.locationMsgBuilder.setXmlStr(rawMsg)
msg = self.locationMsgBuilder.build()
if callable(self.onLocationMsgReceivedCallback):
self.onLocationMsgReceivedCallback(msgType, msg)
        # we received an event push
elif msgType == constant.EVENT_TYPE:
if not self.eventMsgBuilder:
self.eventMsgBuilder = messagebuilder.EventMsgBuilder(rawMsg)
else:
self.eventMsgBuilder.setXmlStr(rawMsg)
msg = self.eventMsgBuilder.build()
if callable(self.onEventMsgReceivedCallback):
self.onEventMsgReceivedCallback(msgType, msg)
# dispatch the specific event
event = msg.getEvent().lower()
# new subscribe
if event == constant.SUBSCRIBE_EVENT:
if callable(self.onUserSubscribeCallback):
self.onUserSubscribeCallback(msgType, msg)
# new unsubscribe
elif event == constant.UNSUBSCRIBE_EVENT:
if callable(self.onUserUnsbscribeCallback):
self.onUserUnsbscribeCallback(msgType, msg)
# button clicked
elif event == constant.CLICK_EVETN:
if callable(self.onButtonClickCallback):
self.onButtonClickCallback(msgType, msg)
if callable(callback):
callback(msgType, msg)
return msg
######## ######## ######## ## ## ##
## ## ## ## ## ## ## ##
## ## ## ## ## ## ####
######## ###### ######## ## ##
## ## ## ## ## ##
## ## ## ## ## ##
## ## ######## ## ######## ##
def replyText(self, toUser, fromUser, content=''):
'''
Wrpper for replying text message.
'''
return self.reply.replyText(toUser, fromUser, content)
def replyMusic(self, toUser, fromUser, title, description, musicUrl, HQMusicUrl):
'''
Wrapper for replying music message.
'''
return self.reply.replyMusic(toUser, fromUser, title, description, musicUrl, HQMusicUrl)
def replyNews(self, toUser, fromUser, articleCount, articles):
'''
Wrapper for replying news message.
'''
return self.reply.replyNews(toUser, fromUser, articleCount, articles)
def getMsgType(self, rawMsg):
root = etree.fromstring(rawMsg)
return root.find(constant.MSG_TYPE_NODE_NAME).text
###### ### ## ## ######## ### ###### ## ##
## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ######## ## ## ## #####
## ######### ## ## ## ## ######### ## ## ##
## ## ## ## ## ## ## ## ## ## ## ## ## ##
###### ## ## ######## ######## ######## ## ## ###### ## ##
def setOnTextMsgReceivedCallback(self, callback):
assert callable(callback)
self.onTextMsgReceivedCallback = callback
def setOnPicMsgReceivedCallback(self, callback):
assert callable(callback)
self.onPicMsgReceivedCallback = callback
def setOnLocationMsgReceivedCallback(self, callback):
assert callable(callback)
self.onLocationMsgReceivedCallback = callback
def setOnEventMsgReceivedCallback(self, callback):
assert callable(callback)
self.onEventMsgReceivedCallback = callback
def setOnButtonClickCallback(self, callback):
assert callable(callback)
self.onButtonClickCallback = callback
def setOnUserSubscribeCallback(self, callback):
assert callable(callback)
self.onUserSubscribeCallback = callback
def setOnUserUnsbscribeCallback(self, callback):
assert callable(callback)
self.onUserUnsbscribeCallback = callback
def getAccessToken(self):
if self.accessToken and self.accessTokenExpiresIn and self.accessTokenGetTimeStamp: # We have got the access token.
if time.time() - self.accessTokenGetTimeStamp < self.accessTokenExpiresIn: # The access token is valid until now.
log.log(log.DEBUG, self.accessToken + ' old')
return self.accessToken
url = constant.GET_TOKEN_URL
params = {
'grant_type' : 'client_credential',
'appid' : self.appId,
'secret' : self.appSecret
}
result = simplejson.loads(utils.doGet(url, params))
self.accessToken = result['access_token']
self.accessTokenExpiresIn = float(result['expires_in'])
self.accessTokenGetTimeStamp = time.time()
log.log(log.DEBUG, self.accessToken + ' new')
return self.accessToken
## ## ######## ## ## ## ##
### ### ## ### ## ## ##
#### #### ## #### ## ## ##
## ### ## ###### ## ## ## ## ##
## ## ## ## #### ## ##
## ## ## ## ### ## ##
## ## ######## ## ## #######
def addMenu(self, buttonGroup):
log.log(log.DEBUG, simplejson.dumps(buttonGroup.meta))
utils.doPostWithoutParamsEncoding(''.join((constant.ADD_MENU_URL, self.getAccessToken())), \
simplejson.dumps(buttonGroup.meta))
def deleteMenu(self):
'''
Delete the menu.
'''
log.log(log.DEBUG, 'Delete menu.')
params = {
'access_token' : self.getAccessToken()
}
result = utils.doGet(constant.DELETE_MENU_URL, params)
log.log(log.DEBUG, result)
def queryCurrentMenu(self):
'''
Get the current structure of the menu.
'''
pass
class Reply(object):
'''
Get the reply message.
'''
def __init__(self):
pass
def replyText(self, toUser, fromUser, content=''):
return self.render(constant.REPLY_TEXT_TEMPLATE, (toUser, fromUser, self.getCurrentTime(), content))
def replyMusic(self, toUser, fromUser, title, description, musicUrl, HQMusicUrl):
return self.render(constant.REPLY_MUSIC_TEMPLATE, (toUser, fromUser, self.getCurrentTime(), title, description, musicUrl, HQMusicUrl))
def replyNews(self, toUser, fromUser, articleCount, articles):
root = Element(Article.ROOT_TAG_NAME)
for artile in articles:
item = SubElement(root, Article.ITEM_TAG_NAME)
for tag in artile.meta:
subElement = SubElement(item, tag)
subElement.text = str(artile.meta[tag])
return self.render(constant.REPLY_NEWS_TEMPLATE, (toUser, fromUser, self.getCurrentTime(), str(articleCount), etree.tostring(root)))
def getCurrentTime(self):
return str(int(time.time()))
def render(self, template, args):
return template % tuple(args)
class Article(object):
'''
Sub nodes of News type message that reply to the user.
NOTICE : the object of this class is used for replying to the user rather than being built from received message.
'''
ROOT_TAG_NAME = 'Articles'
ITEM_TAG_NAME = 'item'
def __init__(self):
self.meta = {
'Title' : '',
'Description' : '',
'PicUrl' : '',
'Url' : ''
}
def setTitle(self, title):
self.meta['Title'] = title
def setDescription(self, description):
self.meta['Description'] = description
def setPicUrl(self, picUrl):
self.meta['PicUrl'] = picUrl
def setUrl(self, url):
self.meta['Url'] = url
class Button(object):
'''
Base class of the Menu Button.
'''
CLICK_TYPE = 'click'
def __init__(self):
self.meta = {
'name' : '',
}
def setName(self, name):
self.meta['name'] = name
class CommonClickButton(Button):
'''
A common click-type Button including name and type and key.
'''
def __init__(self):
Button.__init__(self)
self.meta.update({
'type' : Button.CLICK_TYPE,
'key' : ''
})
def setKey(self, key):
self.meta['key'] = key
class TopLevelButton(Button):
'''
A top level button than contains some sub-buttons.
'''
def __init__(self):
Button.__init__(self)
self.meta.update({
'sub_button' : []
})
def addSubButton(self, commonButton):
self.meta['sub_button'].append(commonButton.meta)
class ButtonGroup(object):
'''
A group of buttons.
'''
def __init__(self):
self.meta = {
'button' : []
}
def addButton(self, button):
self.meta['button'].append(button.meta)
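# Illustrative menu construction (credentials and the key value below are
# placeholders; addMenu() performs a live HTTP call, hence commented out):
#    yx = YiXin('<token>', '<app-id>', '<app-secret>')
#    sub = CommonClickButton()
#    sub.setName('About')
#    sub.setKey('ABOUT_KEY')
#    top = TopLevelButton()
#    top.setName('More')
#    top.addSubButton(sub)
#    group = ButtonGroup()
#    group.addButton(top)
#    yx.addMenu(group)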
| mit | -6,823,000,264,583,889,000 | 30.489971 | 136 | 0.602821 | false |
atupal/ccrawler | request/baseRequestHandler.py | 1 | 5371 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
baseRequestHandler.py
~~~~~~~~~~~~~~~~~~~~~
Base request handler
"""
import gevent
from gevent import monkey
monkey.patch_all()
import requests
import logging
import os
import socket
import time
import json
from requests import ConnectionError
from random import choice
from response import Response
import proxy
import cookie
class BaseRequestHandler(object):
def __init__(self, use_proxy=False, proxy_module=proxy, cookie_module=cookie):
self.use_proxy = use_proxy
self.proxy_module = proxy
self.cookie_module = cookie_module
self._proxy_pool = []
self._proxy_pool_size = 0
self.proxy_list_cache_file = '/tmp/ccrawler_proxy_list.cache'
if os.path.exists(self.proxy_list_cache_file):
with open(self.proxy_list_cache_file) as f:
self._proxy_pool = json.load(f)
self._proxy_pool_size = len(self._proxy_pool)
self._redis_proxy_pool_connetion = None
self._proxy_lock = gevent.lock.Semaphore()
self._cookie_pool = {}
def handle(self, task, **kwargs):
return self.request(task, **kwargs)
def request(self, task, **kwargs):
url = task.get('url')
if not url:
            logging.error('invalid url: empty url!')
return task
_kwargs = {
'params': {}, # dict or bytes
'data': {}, # dict, bytes or file object
'headers': {'user-agent': 'googleBot'},
'cookies': {}, # dict or cookiejar object
'files': {},
'auth': None,
'timeout': 5,
'allow_redirects': True,
'proxies': {},
'verify': False,
'stream': False,
'cert': None,
}
_kwargs.update(kwargs)
if self.use_proxy or task.get('proxy'):
proxy = task.get('proxy') or self._pop_proxy()
_kwargs['proxies'].update(proxy)
        if (not task.get('method')
            or task.get('method', '').upper() == 'GET'):
            method = 'get'
        elif task.get('method', '').upper() == 'POST':
            method = 'post'
        else:
            raise ValueError('Invalid or unsupported method!')
proxy_retry_cnt = 0
while 1:
try:
resp = requests.request(method, url, **_kwargs)
break
except (requests.exceptions.ProxyError,
requests.exceptions.Timeout,
ConnectionError,
socket.timeout) as e:
proxy_retry_cnt += 1
if self.use_proxy:
proxy = self._pop_proxy()
_kwargs['proxies'].update(proxy)
if proxy_retry_cnt >= 10:
raise e
if self.use_proxy and proxy:
self._add_proxy(proxy)
response = {
'content': resp.content,
'origin_url': task['url'],
'url': resp.url,
'cookies': dict(resp.cookies),
'status_code': resp.status_code,
'headers': dict(resp.headers),
}
#response['content'] = resp.content
task['response'] = response
if resp.status_code != 200:
#logging.error('not 200 http response')
#logging.error(url)
#logging.error(_kwargs)
raise Exception('not 200 http response')
if 'url_depth' in task:
task['url_depth'] += 1
else:
task['url_depth'] = 1
return task
def _pop_proxy(self):
fetch_cnt = 0
with self._proxy_lock:
while self._proxy_pool_size <= 0:
try:
self._fetch_new_proxy_list()
except:
raise
fetch_cnt += 1
if fetch_cnt == 3:
raise Exception('Can not fetch proxy list!')
proxy = self._proxy_pool.pop(0)
self._proxy_pool_size -= 1
return proxy
def _get_fastest_proxy(self):
pass
def _add_proxy(self, proxy):
self._proxy_pool.append(proxy)
self._proxy_pool_size += 1
def _fetch_new_proxy_list(self):
proxy_list = self.proxy_module.get_proxy_list()
try:
with open(self.proxy_list_cache_file, 'w') as f:
json.dump(proxy_list, f, indent=2)
except IOError:
pass
#while self._proxy_checking:
#gevent.sleep(0.1)
self._proxy_pool += proxy_list
self._proxy_pool_size += len(proxy_list)
def _check_proxy_pool_health(self):
self._proxy_checking = True
jobs = []
self._proxy_checking = False
@property
def proxy_pool_size(self):
return self._proxy_pool_size
def test():
requestHandler = BaseRequestHandler(use_proxy=True)
jobs = []
st = time.time()
for i in xrange(100):
jobs.append( gevent.spawn( requestHandler.handle, {'url': 'http://baidu.com'} ) )
for job in jobs:
try:
print job.get()
except:
pass
gevent.joinall(jobs)
print time.time() - st
if __name__ == '__main__':
test()
| mit | 4,501,589,908,298,685,400 | 29.174157 | 89 | 0.508658 | false |
minlexx/pyevemon | esi_client/apis/status_api.py | 1 | 5536 | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class StatusApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_status(self, **kwargs):
"""
Retrieve the uptime and player counts
EVE Server status --- This route is cached for up to 30 seconds
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_status(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str datasource: The server name you would like data from
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: GetStatusOk
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_status_with_http_info(**kwargs)
else:
(data) = self.get_status_with_http_info(**kwargs)
return data
def get_status_with_http_info(self, **kwargs):
"""
Retrieve the uptime and player counts
EVE Server status --- This route is cached for up to 30 seconds
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_status_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str datasource: The server name you would like data from
:param str user_agent: Client identifier, takes precedence over headers
:param str x_user_agent: Client identifier, takes precedence over User-Agent
:return: GetStatusOk
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['datasource', 'user_agent', 'x_user_agent']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_status" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/v1/status/'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'datasource' in params:
query_params['datasource'] = params['datasource']
if 'user_agent' in params:
query_params['user_agent'] = params['user_agent']
header_params = {}
if 'x_user_agent' in params:
header_params['X-User-Agent'] = params['x_user_agent']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetStatusOk',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| gpl-3.0 | 7,186,159,517,841,950,000 | 36.154362 | 100 | 0.561416 | false |
ContinuumIO/dask | dask/callbacks.py | 2 | 3971 | from contextlib import contextmanager
__all__ = ["Callback", "add_callbacks"]
class Callback(object):
""" Base class for using the callback mechanism
Create a callback with functions of the following signatures:
>>> def start(dsk):
... pass
>>> def start_state(dsk, state):
... pass
>>> def pretask(key, dsk, state):
... pass
>>> def posttask(key, result, dsk, state, worker_id):
... pass
>>> def finish(dsk, state, failed):
... pass
You may then construct a callback object with any number of them
>>> cb = Callback(pretask=pretask, finish=finish) # doctest: +SKIP
And use it either as a context manager over a compute/get call
>>> with cb: # doctest: +SKIP
... x.compute() # doctest: +SKIP
Or globally with the ``register`` method
>>> cb.register() # doctest: +SKIP
>>> cb.unregister() # doctest: +SKIP
Alternatively subclass the ``Callback`` class with your own methods.
>>> class PrintKeys(Callback):
... def _pretask(self, key, dask, state):
... print("Computing: {0}!".format(repr(key)))
>>> with PrintKeys(): # doctest: +SKIP
... x.compute() # doctest: +SKIP
"""
active = set()
def __init__(
self, start=None, start_state=None, pretask=None, posttask=None, finish=None
):
if start:
self._start = start
if start_state:
self._start_state = start_state
if pretask:
self._pretask = pretask
if posttask:
self._posttask = posttask
if finish:
self._finish = finish
@property
def _callback(self):
fields = ["_start", "_start_state", "_pretask", "_posttask", "_finish"]
return tuple(getattr(self, i, None) for i in fields)
def __enter__(self):
self._cm = add_callbacks(self)
self._cm.__enter__()
return self
def __exit__(self, *args):
self._cm.__exit__(*args)
def register(self):
Callback.active.add(self._callback)
def unregister(self):
Callback.active.remove(self._callback)
def unpack_callbacks(cbs):
"""Take an iterable of callbacks, return a list of each callback."""
if cbs:
return [[i for i in f if i] for f in zip(*cbs)]
else:
return [(), (), (), (), ()]
@contextmanager
def local_callbacks(callbacks=None):
"""Allows callbacks to work with nested schedulers.
Callbacks will only be used by the first started scheduler they encounter.
This means that only the outermost scheduler will use global callbacks."""
global_callbacks = callbacks is None
if global_callbacks:
callbacks, Callback.active = Callback.active, set()
try:
yield callbacks or ()
finally:
if global_callbacks:
Callback.active = callbacks
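# For context: how a scheduler might consume the two helpers above -- an
# illustrative sketch, not the actual dask scheduler implementation:
#    with local_callbacks() as cbs:
#        starts, start_states, pretasks, posttasks, finishes = unpack_callbacks(cbs)
#        for f in starts:
#            f(dsk)
#        ...  # build state, then wrap each task:
#        for f in pretasks:
#            f(key, dsk, state)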
def normalize_callback(cb):
"""Normalizes a callback to a tuple"""
if isinstance(cb, Callback):
return cb._callback
elif isinstance(cb, tuple):
return cb
else:
raise TypeError("Callbacks must be either `Callback` or `tuple`")
class add_callbacks(object):
"""Context manager for callbacks.
Takes several callbacks and applies them only in the enclosed context.
Callbacks can either be represented as a ``Callback`` object, or as a tuple
of length 4.
Examples
--------
>>> def pretask(key, dsk, state):
... print("Now running {0}").format(key)
>>> callbacks = (None, pretask, None, None)
>>> with add_callbacks(callbacks): # doctest: +SKIP
... res.compute()
"""
def __init__(self, *callbacks):
self.callbacks = [normalize_callback(c) for c in callbacks]
Callback.active.update(self.callbacks)
def __enter__(self):
return
def __exit__(self, type, value, traceback):
for c in self.callbacks:
Callback.active.discard(c)
| bsd-3-clause | 6,475,224,179,857,707,000 | 27.163121 | 84 | 0.594812 | false |
dshulyak/solar | solar/solar/core/handlers/ansible_template.py | 1 | 1892 | # -*- coding: utf-8 -*-
from fabric import api as fabric_api
from fabric.state import env
import os
from solar.core.log import log
from solar.core.handlers.base import TempFileHandler
from solar import errors
# otherwise fabric will sys.exit(1) in case of errors
env.warn_only = True
class AnsibleTemplate(TempFileHandler):
def action(self, resource, action_name):
inventory_file = self._create_inventory(resource)
playbook_file = self._create_playbook(resource, action_name)
log.debug('inventory_file: %s', inventory_file)
log.debug('playbook_file: %s', playbook_file)
call_args = ['ansible-playbook', '--module-path', '/vagrant/library', '-i', inventory_file, playbook_file]
log.debug('EXECUTING: %s', ' '.join(call_args))
with fabric_api.shell_env(ANSIBLE_HOST_KEY_CHECKING='False'):
out = fabric_api.local(' '.join(call_args), capture=True)
if out.failed:
raise errors.SolarError(out)
def _create_inventory(self, r):
directory = self.dirs[r.name]
inventory_path = os.path.join(directory, 'inventory')
with open(inventory_path, 'w') as inv:
inv.write(self._render_inventory(r))
return inventory_path
def _render_inventory(self, r):
inventory = '{0} ansible_ssh_host={1} ansible_connection=ssh ansible_ssh_user={2} ansible_ssh_private_key_file={3} {4}'
host, user, ssh_key = r.args['ip'].value, r.args['ssh_user'].value, r.args['ssh_key'].value
args = []
for arg in r.args:
args.append('{0}="{1}"'.format(arg, r.args[arg].value))
args = ' '.join(args)
inventory = inventory.format(host, host, user, ssh_key, args)
log.debug(inventory)
return inventory
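    # Rendered result, for illustration (values are hypothetical and the
    # real output is a single line, wrapped here for readability):
    #    10.0.0.3 ansible_ssh_host=10.0.0.3 ansible_connection=ssh
    #    ansible_ssh_user=vagrant ansible_ssh_private_key_file=/vagrant/insecure_key
    #    ip="10.0.0.3" ssh_user="vagrant" ...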
def _create_playbook(self, resource, action):
return self._compile_action_file(resource, action)
| apache-2.0 | -6,704,512,065,824,343,000 | 38.416667 | 127 | 0.641121 | false |
psykzz/flask-admin | flask_admin/contrib/sqla/view.py | 1 | 29504 | import logging
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.expression import desc
from sqlalchemy import Column, Boolean, func, or_
from sqlalchemy.exc import IntegrityError
from flask import flash
from flask.ext.admin._compat import string_types
from flask.ext.admin.babel import gettext, ngettext, lazy_gettext
from flask.ext.admin.model import BaseModelView
from flask.ext.admin.actions import action
from flask.ext.admin._backwards import ObsoleteAttr
from flask.ext.admin.contrib.sqla import form, filters, tools
from .typefmt import DEFAULT_FORMATTERS
from .tools import is_inherited_primary_key, get_column_for_current_model, get_query_for_ids
from .ajax import create_ajax_loader
# Set up logger
log = logging.getLogger("flask-admin.sqla")
class ModelView(BaseModelView):
"""
SQLAlchemy model view
Usage sample::
admin = Admin()
admin.add_view(ModelView(User, db.session))
"""
column_auto_select_related = ObsoleteAttr('column_auto_select_related',
'auto_select_related',
True)
"""
Enable automatic detection of displayed foreign keys in this view
and perform automatic joined loading for related models to improve
query performance.
    Please note that detection is not recursive: if the `__unicode__` method
    of a related model uses another model to generate its string representation,
    it will still make a separate database call.
"""
column_select_related_list = ObsoleteAttr('column_select_related',
'list_select_related',
None)
"""
List of parameters for SQLAlchemy `subqueryload`. Overrides `column_auto_select_related`
property.
For example::
class PostAdmin(ModelView):
column_select_related_list = ('user', 'city')
You can also use properties::
class PostAdmin(ModelView):
column_select_related_list = (Post.user, Post.city)
Please refer to the `subqueryload` on list of possible values.
"""
column_display_all_relations = ObsoleteAttr('column_display_all_relations',
'list_display_all_relations',
False)
"""
Controls if list view should display all relations, not only many-to-one.
"""
column_searchable_list = ObsoleteAttr('column_searchable_list',
'searchable_columns',
None)
"""
Collection of the searchable columns. Only text-based columns
are searchable (`String`, `Unicode`, `Text`, `UnicodeText`).
Example::
class MyModelView(ModelView):
column_searchable_list = ('name', 'email')
You can also pass columns::
class MyModelView(ModelView):
column_searchable_list = (User.name, User.email)
The following search rules apply:
        - If you enter *ZZZ* in the UI search field, it will generate an
          *ILIKE '%ZZZ%'* statement against searchable columns.
- If you enter multiple words, each word will be searched separately, but
only rows that contain all words will be displayed. For example, searching
for 'abc def' will find all rows that contain 'abc' and 'def' in one or
more columns.
- If you prefix your search term with ^, it will find all rows
          that start with the term. So, if you entered *^ZZZ*, *ILIKE 'ZZZ%'* will be used.
- If you prefix your search term with =, it will perform an exact match.
For example, if you entered *=ZZZ*, the statement *ILIKE 'ZZZ'* will be used.
"""
column_filters = None
"""
Collection of the column filters.
Can contain either field names or instances of :class:`flask.ext.admin.contrib.sqla.filters.BaseFilter` classes.
For example::
class MyModelView(BaseModelView):
column_filters = ('user', 'email')
or::
class MyModelView(BaseModelView):
column_filters = (BooleanEqualFilter(User.name, 'Name'))
"""
model_form_converter = form.AdminModelConverter
"""
Model form conversion class. Use this to implement custom field conversion logic.
For example::
class MyModelConverter(AdminModelConverter):
pass
class MyAdminView(ModelView):
model_form_converter = MyModelConverter
"""
inline_model_form_converter = form.InlineModelConverter
"""
Inline model conversion class. If you need some kind of post-processing for inline
forms, you can customize behavior by doing something like this::
class MyInlineModelConverter(AdminModelConverter):
def post_process(self, form_class, info):
form_class.value = wtf.TextField('value')
return form_class
class MyAdminView(ModelView):
inline_model_form_converter = MyInlineModelConverter
"""
filter_converter = filters.FilterConverter()
"""
Field to filter converter.
Override this attribute to use non-default converter.
"""
fast_mass_delete = False
"""
If set to `False` and user deletes more than one model using built in action,
all models will be read from the database and then deleted one by one
giving SQLAlchemy a chance to manually cleanup any dependencies (many-to-many
relationships, etc).
If set to `True`, will run a `DELETE` statement which is somewhat faster,
but may leave corrupted data if you forget to configure `DELETE
CASCADE` for your model.
"""
inline_models = None
"""
Inline related-model editing for models with parent-child relations.
Accepts enumerable with one of the following possible values:
1. Child model class::
class MyModelView(ModelView):
inline_models = (Post,)
2. Child model class and additional options::
class MyModelView(ModelView):
inline_models = [(Post, dict(form_columns=['title']))]
3. Django-like ``InlineFormAdmin`` class instance::
class MyInlineModelForm(InlineFormAdmin):
form_columns = ('title', 'date')
class MyModelView(ModelView):
inline_models = (MyInlineModelForm(MyInlineModel),)
You can customize the generated field name by:
1. Using the `form_name` property as a key to the options dictionary:
class MyModelView(ModelView):
inline_models = ((Post, dict(form_label='Hello')))
2. Using forward relation name and `column_labels` property:
class Model1(Base):
pass
class Model2(Base):
# ...
model1 = relation(Model1, backref='models')
class MyModel1View(Base):
inline_models = (Model2,)
column_labels = {'models': 'Hello'}
"""
column_type_formatters = DEFAULT_FORMATTERS
form_choices = None
"""
Map choices to form fields
Example::
class MyModelView(BaseModelView):
form_choices = {'my_form_field': [
('db_value', 'display_value'),
            ]}
"""
form_optional_types = (Boolean,)
"""
List of field types that should be optional if column is not nullable.
Example::
class MyModelView(BaseModelView):
form_optional_types = (Boolean, Unicode)
"""
def __init__(self, model, session,
name=None, category=None, endpoint=None, url=None):
"""
Constructor.
:param model:
Model class
:param session:
SQLAlchemy session
:param name:
View name. If not set, defaults to the model name
:param category:
Category name
:param endpoint:
Endpoint name. If not set, defaults to the model name
:param url:
Base URL. If not set, defaults to '/admin/' + endpoint
"""
self.session = session
self._search_fields = None
self._search_joins = dict()
self._filter_joins = dict()
if self.form_choices is None:
self.form_choices = {}
super(ModelView, self).__init__(model, name, category, endpoint, url)
# Primary key
self._primary_key = self.scaffold_pk()
if self._primary_key is None:
raise Exception('Model %s does not have primary key.' % self.model.__name__)
# Configuration
if not self.column_select_related_list:
self._auto_joins = self.scaffold_auto_joins()
else:
self._auto_joins = self.column_select_related_list
# Internal API
def _get_model_iterator(self, model=None):
"""
Return property iterator for the model
"""
if model is None:
model = self.model
return model._sa_class_manager.mapper.iterate_properties
# Scaffolding
def scaffold_pk(self):
"""
Return the primary key name from a model
PK can be a single value or a tuple if multiple PKs exist
"""
return tools.get_primary_key(self.model)
def get_pk_value(self, model):
"""
Return the PK value from a model object.
PK can be a single value or a tuple if multiple PKs exist
"""
try:
return getattr(model, self._primary_key)
except TypeError:
v = []
for attr in self._primary_key:
v.append(getattr(model, attr))
return tuple(v)
def scaffold_list_columns(self):
"""
Return a list of columns from the model.
"""
columns = []
for p in self._get_model_iterator():
# Verify type
if hasattr(p, 'direction'):
if self.column_display_all_relations or p.direction.name == 'MANYTOONE':
columns.append(p.key)
elif hasattr(p, 'columns'):
column_inherited_primary_key = False
if len(p.columns) != 1:
if is_inherited_primary_key(p):
column = get_column_for_current_model(p)
else:
raise TypeError('Can not convert multiple-column properties (%s.%s)' % (self.model, p.key))
else:
# Grab column
column = p.columns[0]
# An inherited primary key has a foreign key as well
if column.foreign_keys and not is_inherited_primary_key(p):
continue
if not self.column_display_pk and column.primary_key:
continue
columns.append(p.key)
return columns
def scaffold_sortable_columns(self):
"""
Return a dictionary of sortable columns.
Key is column name, value is sort column/field.
"""
columns = dict()
for p in self._get_model_iterator():
if hasattr(p, 'columns'):
# Sanity check
if len(p.columns) > 1:
# Multi-column properties are not supported
continue
column = p.columns[0]
# Can't sort on primary or foreign keys by default
if column.foreign_keys:
continue
if not self.column_display_pk and column.primary_key:
continue
columns[p.key] = column
return columns
def _get_columns_for_field(self, field):
if isinstance(field, string_types):
attr = getattr(self.model, field, None)
            if attr is None:
raise Exception('Field %s was not found.' % field)
else:
attr = field
if (not attr or
not hasattr(attr, 'property') or
not hasattr(attr.property, 'columns') or
not attr.property.columns):
            raise Exception('Invalid field %s: does not contain any columns.' % field)
return attr.property.columns
def _need_join(self, table):
return table not in self.model._sa_class_manager.mapper.tables
def init_search(self):
"""
Initialize search. Returns `True` if search is supported for this
view.
For SQLAlchemy, this will initialize internal fields: list of
column objects used for filtering, etc.
"""
if self.column_searchable_list:
self._search_fields = []
self._search_joins = dict()
for p in self.column_searchable_list:
for column in self._get_columns_for_field(p):
column_type = type(column.type).__name__
if not self.is_text_column_type(column_type):
                    raise Exception('Can only search on text columns. ' +
                                    'Failed to set up search for "%s"' % p)
self._search_fields.append(column)
# If it belongs to different table - add a join
if self._need_join(column.table):
self._search_joins[column.table.name] = column.table
return bool(self.column_searchable_list)
def is_text_column_type(self, name):
"""
Verify if the provided column type is text-based.
:returns:
``True`` for ``String``, ``Unicode``, ``Text``, ``UnicodeText``
"""
if name:
name = name.lower()
return name in ('string', 'unicode', 'text', 'unicodetext')
def scaffold_filters(self, name):
"""
Return list of enabled filters
"""
join_tables = []
if isinstance(name, string_types):
model = self.model
for attribute in name.split('.'):
value = getattr(model, attribute)
if (hasattr(value, 'property') and
hasattr(value.property, 'direction')):
model = value.property.mapper.class_
table = model.__table__
if self._need_join(table):
join_tables.append(table)
attr = value
else:
attr = name
if attr is None:
raise Exception('Failed to find field for filter: %s' % name)
# Figure out filters for related column
if hasattr(attr, 'property') and hasattr(attr.property, 'direction'):
filters = []
for p in self._get_model_iterator(attr.property.mapper.class_):
if hasattr(p, 'columns'):
# TODO: Check for multiple columns
column = p.columns[0]
if column.foreign_keys or column.primary_key:
continue
visible_name = '%s / %s' % (self.get_column_name(attr.prop.table.name),
self.get_column_name(p.key))
type_name = type(column.type).__name__
flt = self.filter_converter.convert(type_name,
column,
visible_name)
if flt:
table = column.table
if join_tables:
self._filter_joins[table.name] = join_tables
elif self._need_join(table.name):
self._filter_joins[table.name] = [table.name]
filters.extend(flt)
return filters
else:
columns = self._get_columns_for_field(attr)
if len(columns) > 1:
                raise Exception('Cannot filter on more than one column for %s' % name)
column = columns[0]
if self._need_join(column.table) and name not in self.column_labels:
visible_name = '%s / %s' % (
self.get_column_name(column.table.name),
self.get_column_name(column.name)
)
else:
if not isinstance(name, string_types):
visible_name = self.get_column_name(name.property.key)
else:
visible_name = self.get_column_name(name)
type_name = type(column.type).__name__
if join_tables:
self._filter_joins[column.table.name] = join_tables
flt = self.filter_converter.convert(
type_name,
column,
visible_name,
options=self.column_choices.get(name),
)
if flt and not join_tables and self._need_join(column.table):
self._filter_joins[column.table.name] = [column.table]
return flt
def is_valid_filter(self, filter):
"""
Verify that the provided filter object is derived from the
SQLAlchemy-compatible filter class.
:param filter:
Filter object to verify.
"""
return isinstance(filter, filters.BaseSQLAFilter)
def scaffold_form(self):
"""
Create form from the model.
"""
converter = self.model_form_converter(self.session, self)
form_class = form.get_form(self.model, converter,
base_class=self.form_base_class,
only=self.form_columns,
exclude=self.form_excluded_columns,
field_args=self.form_args,
extra_fields=self.form_extra_fields)
if self.inline_models:
form_class = self.scaffold_inline_form_models(form_class)
return form_class
def scaffold_inline_form_models(self, form_class):
"""
Contribute inline models to the form
:param form_class:
Form class
"""
inline_converter = self.inline_model_form_converter(self.session,
self,
self.model_form_converter)
for m in self.inline_models:
form_class = inline_converter.contribute(self.model, form_class, m)
return form_class
def scaffold_auto_joins(self):
"""
Return a list of joined tables by going through the
displayed columns.
"""
if not self.column_auto_select_related:
return []
relations = set()
for p in self._get_model_iterator():
if hasattr(p, 'direction'):
# Check if it is pointing to same model
if p.mapper.class_ == self.model:
continue
if p.direction.name in ['MANYTOONE', 'MANYTOMANY']:
relations.add(p.key)
joined = []
for prop, name in self._list_columns:
if prop in relations:
joined.append(getattr(self.model, prop))
return joined
# AJAX foreignkey support
def _create_ajax_loader(self, name, options):
return create_ajax_loader(self.model, self.session, name, name, options)
# Database-related API
def get_query(self):
"""
Return a query for the model type.
If you override this method, don't forget to override `get_count_query` as well.
"""
return self.session.query(self.model)
def get_count_query(self):
"""
        Return the count query for the model type.
"""
return self.session.query(func.count('*')).select_from(self.model)
def _order_by(self, query, joins, sort_field, sort_desc):
"""
Apply order_by to the query
:param query:
Query
:param joins:
Joins set
:param sort_field:
Sort field
:param sort_desc:
            Set to True to sort in descending order
"""
# TODO: Preprocessing for joins
# Try to handle it as a string
if isinstance(sort_field, string_types):
# Create automatic join against a table if column name
            # contains a dot.
if '.' in sort_field:
parts = sort_field.split('.', 1)
if parts[0] not in joins:
query = query.join(parts[0])
joins.add(parts[0])
elif isinstance(sort_field, InstrumentedAttribute):
# SQLAlchemy 0.8+ uses 'parent' as a name
mapper = getattr(sort_field, 'parent', None)
if mapper is None:
# SQLAlchemy 0.7.x uses parententity
mapper = getattr(sort_field, 'parententity', None)
if mapper is not None:
table = mapper.tables[0]
if self._need_join(table) and table.name not in joins:
query = query.outerjoin(table)
joins.add(table.name)
elif isinstance(sort_field, Column):
pass
else:
raise TypeError('Wrong argument type')
if sort_field is not None:
if sort_desc:
query = query.order_by(desc(sort_field))
else:
query = query.order_by(sort_field)
return query, joins
def _get_default_order(self):
order = super(ModelView, self)._get_default_order()
if order is not None:
field, direction = order
if isinstance(field, string_types):
field = getattr(self.model, field)
return field, direction
return None
def get_list(self, page, sort_column, sort_desc, search, filters, execute=True):
"""
Return models from the database.
:param page:
Page number
:param sort_column:
Sort column name
:param sort_desc:
Descending or ascending sort
:param search:
Search query
:param execute:
Execute query immediately? Default is `True`
:param filters:
List of filter tuples
"""
# Will contain names of joined tables to avoid duplicate joins
joins = set()
query = self.get_query()
count_query = self.get_count_query()
# Apply search criteria
if self._search_supported and search:
# Apply search-related joins
if self._search_joins:
for jn in self._search_joins.values():
query = query.join(jn)
count_query = count_query.join(jn)
joins = set(self._search_joins.keys())
# Apply terms
terms = search.split(' ')
for term in terms:
if not term:
continue
stmt = tools.parse_like_term(term)
filter_stmt = [c.ilike(stmt) for c in self._search_fields]
query = query.filter(or_(*filter_stmt))
count_query = count_query.filter(or_(*filter_stmt))
# Apply filters
if filters and self._filters:
for idx, value in filters:
flt = self._filters[idx]
# Figure out joins
tbl = flt.column.table.name
join_tables = self._filter_joins.get(tbl, [])
for table in join_tables:
if table.name not in joins:
query = query.join(table)
count_query = count_query.join(table)
joins.add(table.name)
# Apply filter
query = flt.apply(query, value)
count_query = flt.apply(count_query, value)
# Calculate number of rows
count = count_query.scalar()
# Auto join
for j in self._auto_joins:
query = query.options(joinedload(j))
# Sorting
if sort_column is not None:
if sort_column in self._sortable_columns:
sort_field = self._sortable_columns[sort_column]
query, joins = self._order_by(query, joins, sort_field, sort_desc)
else:
order = self._get_default_order()
if order:
query, joins = self._order_by(query, joins, order[0], order[1])
# Pagination
if page is not None:
query = query.offset(page * self.page_size)
query = query.limit(self.page_size)
# Execute if needed
if execute:
query = query.all()
return count, query
def get_one(self, id):
"""
Return a single model by its id.
:param id:
Model id
"""
return self.session.query(self.model).get(id)
# Error handler
def handle_view_exception(self, exc):
if isinstance(exc, IntegrityError):
flash(gettext('Integrity error. %(message)s', message=exc.message), 'error')
return True
return super(BaseModelView, self).handle_view_exception(exc)
# Model handlers
def create_model(self, form):
"""
Create model from form.
:param form:
Form instance
"""
try:
model = self.model()
form.populate_obj(model)
self.session.add(model)
self._on_model_change(form, model, True)
self.session.commit()
except Exception as ex:
if not self.handle_view_exception(ex):
raise
flash(gettext('Failed to create model. %(error)s', error=str(ex)), 'error')
log.exception('Failed to create model')
self.session.rollback()
return False
else:
self.after_model_change(form, model, True)
return True
def update_model(self, form, model):
"""
Update model from form.
:param form:
Form instance
:param model:
Model instance
"""
try:
form.populate_obj(model)
self._on_model_change(form, model, False)
self.session.commit()
except Exception as ex:
if not self.handle_view_exception(ex):
raise
flash(gettext('Failed to update model. %(error)s', error=str(ex)), 'error')
log.exception('Failed to update model')
self.session.rollback()
return False
else:
self.after_model_change(form, model, False)
return True
def delete_model(self, model):
"""
Delete model.
:param model:
Model to delete
"""
try:
self.on_model_delete(model)
self.session.flush()
self.session.delete(model)
self.session.commit()
return True
except Exception as ex:
if not self.handle_view_exception(ex):
raise
flash(gettext('Failed to delete model. %(error)s', error=str(ex)), 'error')
log.exception('Failed to delete model')
self.session.rollback()
return False
# Default model actions
def is_action_allowed(self, name):
# Check delete action permission
if name == 'delete' and not self.can_delete:
return False
return super(ModelView, self).is_action_allowed(name)
@action('delete',
lazy_gettext('Delete'),
lazy_gettext('Are you sure you want to delete selected models?'))
def action_delete(self, ids):
try:
query = get_query_for_ids(self.get_query(), self.model, ids)
if self.fast_mass_delete:
count = query.delete(synchronize_session=False)
else:
count = 0
for m in query.all():
self.session.delete(m)
count += 1
self.session.commit()
flash(ngettext('Model was successfully deleted.',
'%(count)s models were successfully deleted.',
count,
count=count))
except Exception as ex:
if not self.handle_view_exception(ex):
raise
flash(gettext('Failed to delete models. %(error)s', error=str(ex)), 'error')
| bsd-3-clause | 28,792,571,841,860,910 | 31.529217 | 120 | 0.532436 | false |
twitter/pants | src/python/pants/backend/native/subsystems/native_build_settings.py | 1 | 1424 | # coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants.build_graph.mirrored_target_option_mixin import MirroredTargetOptionMixin
from pants.subsystem.subsystem import Subsystem
class NativeBuildSettings(Subsystem, MirroredTargetOptionMixin):
"""Settings which affect both the compile and link phases."""
options_scope = 'native-build-settings'
mirrored_target_option_actions = {
'strict_deps': lambda tgt: tgt.strict_deps,
}
@classmethod
def register_options(cls, register):
super(NativeBuildSettings, cls).register_options(register)
# TODO: rename this so it's clear it is not the same option as JVM strict deps!
register('--strict-deps', type=bool, default=True, fingerprint=True, advanced=True,
help="Whether to include only dependencies directly declared in the BUILD file "
"for C and C++ targets by default. If this is False, all transitive dependencies "
"are used when compiling and linking native code. C and C++ targets may override "
"this behavior with the strict_deps keyword argument as well.")
def get_strict_deps_value_for_target(self, target):
return self.get_scalar_mirrored_target_option('strict_deps', target)
| apache-2.0 | 2,196,205,430,313,482,000 | 44.935484 | 100 | 0.728933 | false |
602p/mps-punctuality | app/oauth.py | 1 | 2807 | from flask import Flask, redirect, url_for, session, request, jsonify, flash
from flask_oauthlib.client import OAuth
from flask_login import login_user
import json
from . import util
from . import app, db
from . import models
oauth = OAuth(app)
google = oauth.remote_app(
'google',
consumer_key=app.config.get('GOOGLE_ID'),
consumer_secret=app.config.get('GOOGLE_SECRET'),
request_token_params={
'scope': 'email'
},
base_url='https://www.googleapis.com/oauth2/v1/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://accounts.google.com/o/oauth2/token',
authorize_url='https://accounts.google.com/o/oauth2/auth',
)
@app.route('/oauth_login')
def oauth_login():
return google.authorize(callback=url_for('oauth_authorized', _external=True))
@app.route('/login/authorized')
def oauth_authorized():
resp = google.authorized_response()
if resp is None: #OAuth authorization failed
flash("OAuth login failed: %s -> %s" %(request.args['error_reason'], request.args['error_description']))
return redirect(url_for("home"))
    session['google_token'] = (resp['access_token'], '') #Stick it in the session; if we later decide to use
                                                          #more of Google's API features (e.g. mailing), we'll
                                                          #need this token for the OAuth scope in those API calls
me = google.get('userinfo').data #Snarf out the user's free data
user = models.User.query.filter_by(username=me["email"], auth_provider="OAUTH").first() #Is there a user with this
#email using OAuth already?
if user: #If so...
return util.try_login_user(user) #Proceed to try to log them in
else: #Otherwise
user=models.User( #Create a (disabled) account for them for the admin to enable later
            marss_id=-1, #Can't find this w/o some kind of DB dump, if even applicable
            username=me["email"], #Google's return guaranteed to have email, this is the username for OAuth accounts
name=me["name"], #Google's return sometimes has name, otherwise empty string
email=me["email"], #Store it here too
auth_provider="OAUTH", #Use OAUTH provider, duh!
enabled=False #And leave them disabled
) #Default permission='view'
db.session.add(user)
db.session.commit()
flash("Please wait for an Administrator to enable your account")
return redirect(url_for("login_user_page"))
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token') | gpl-3.0 | 1,188,039,750,926,177,800 | 45.8 | 119 | 0.615604 | false |
tomchristie/django-rest-framework | rest_framework/compat.py | 4 | 4387 | """
The `compat` module provides support for backwards compatibility with older
versions of Django/Python, and compatibility wrappers around optional packages.
"""
from django.conf import settings
from django.views.generic import View
def unicode_http_header(value):
# Coerce HTTP header value to unicode.
if isinstance(value, bytes):
return value.decode('iso-8859-1')
return value
def distinct(queryset, base):
if settings.DATABASES[queryset.db]["ENGINE"] == "django.db.backends.oracle":
# distinct analogue for Oracle users
return base.filter(pk__in=set(queryset.values_list('pk', flat=True)))
return queryset.distinct()
# django.contrib.postgres requires psycopg2
try:
from django.contrib.postgres import fields as postgres_fields
except ImportError:
postgres_fields = None
# coreapi is required for CoreAPI schema generation
try:
import coreapi
except ImportError:
coreapi = None
# uritemplate is required for OpenAPI and CoreAPI schema generation
try:
import uritemplate
except ImportError:
uritemplate = None
# coreschema is optional
try:
import coreschema
except ImportError:
coreschema = None
# pyyaml is optional
try:
import yaml
except ImportError:
yaml = None
# requests is optional
try:
import requests
except ImportError:
requests = None
# PATCH method is not implemented by Django
if 'patch' not in View.http_method_names:
View.http_method_names = View.http_method_names + ['patch']
# Markdown is optional (version 3.0+ required)
try:
import markdown
HEADERID_EXT_PATH = 'markdown.extensions.toc'
LEVEL_PARAM = 'baselevel'
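    # Illustrative effect (example output, not from the original source): with
    # the toc extension's baselevel set to '2', apply_markdown("# Title")
    # renders roughly '<h2 id="title">Title</h2>'.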
def apply_markdown(text):
"""
Simple wrapper around :func:`markdown.markdown` to set the base level
of '#' style headers to <h2>.
"""
extensions = [HEADERID_EXT_PATH]
extension_configs = {
HEADERID_EXT_PATH: {
LEVEL_PARAM: '2'
}
}
md = markdown.Markdown(
extensions=extensions, extension_configs=extension_configs
)
md_filter_add_syntax_highlight(md)
return md.convert(text)
except ImportError:
apply_markdown = None
markdown = None
try:
import pygments
from pygments.formatters import HtmlFormatter
from pygments.lexers import TextLexer, get_lexer_by_name
def pygments_highlight(text, lang, style):
lexer = get_lexer_by_name(lang, stripall=False)
formatter = HtmlFormatter(nowrap=True, style=style)
return pygments.highlight(text, lexer, formatter)
def pygments_css(style):
formatter = HtmlFormatter(style=style)
return formatter.get_style_defs('.highlight')
except ImportError:
pygments = None
def pygments_highlight(text, lang, style):
return text
def pygments_css(style):
return None
if markdown is not None and pygments is not None:
# starting from this blogpost and modified to support current markdown extensions API
# https://zerokspot.com/weblog/2008/06/18/syntax-highlighting-in-markdown-with-pygments/
import re
from markdown.preprocessors import Preprocessor
class CodeBlockPreprocessor(Preprocessor):
pattern = re.compile(
r'^\s*``` *([^\n]+)\n(.+?)^\s*```', re.M | re.S)
formatter = HtmlFormatter()
def run(self, lines):
def repl(m):
try:
lexer = get_lexer_by_name(m.group(1))
except (ValueError, NameError):
lexer = TextLexer()
code = m.group(2).replace('\t', ' ')
code = pygments.highlight(code, lexer, self.formatter)
code = code.replace('\n\n', '\n \n').replace('\n', '<br />').replace('\\@', '@')
return '\n\n%s\n\n' % code
ret = self.pattern.sub(repl, "\n".join(lines))
return ret.split("\n")
def md_filter_add_syntax_highlight(md):
md.preprocessors.register(CodeBlockPreprocessor(), 'highlight', 40)
return True
else:
def md_filter_add_syntax_highlight(md):
return False
# `separators` argument to `json.dumps()` differs between 2.x and 3.x
# See: https://bugs.python.org/issue22767
SHORT_SEPARATORS = (',', ':')
LONG_SEPARATORS = (', ', ': ')
INDENT_SEPARATORS = (',', ': ')
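# Illustrative (not part of the original module), with the json stdlib module:
#   json.dumps({'a': 1}, separators=SHORT_SEPARATORS)  -> '{"a":1}'
#   json.dumps({'a': 1}, separators=LONG_SEPARATORS)   -> '{"a": 1}'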
| bsd-2-clause | -7,791,166,050,184,486,000 | 26.591195 | 101 | 0.644404 | false |
mwalli/spark-cloudant | sql-cloudant/examples/python/CloudantDFOption.py | 2 | 2848 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyspark.sql import SparkSession
spark = SparkSession\
.builder\
.appName("Cloudant Spark SQL Example in Python using dataframes with options")\
.getOrCreate()
cloudant_host = "ACCOUNT.cloudant.com"
cloudant_username = "USERNAME"
cloudant_password = "PASSWORD"
# ***1. Loading dataframe from Cloudant db
df = spark.read.format("org.apache.bahir.cloudant") \
.option("cloudant.host", cloudant_host) \
.option("cloudant.username", cloudant_username) \
.option("cloudant.password", cloudant_password) \
.load("n_airportcodemapping")
df.cache() # persisting in memory
df.printSchema()
df.filter(df._id >= 'CAA').select("_id",'airportName').show()
# ***2.Saving dataframe to Cloudant db
df.filter(df._id >= 'CAA').select("_id",'airportName') \
.write.format("org.apache.bahir.cloudant") \
.option("cloudant.host", cloudant_host) \
.option("cloudant.username", cloudant_username) \
.option("cloudant.password",cloudant_password) \
.option("bulkSize","100") \
.option("createDBOnSave", "true") \
.save("airportcodemapping_df")
df = spark.read.format("org.apache.bahir.cloudant") \
.option("cloudant.host", cloudant_host) \
.option("cloudant.username", cloudant_username) \
.option("cloudant.password", cloudant_password) \
.load("n_flight")
df.printSchema()
total = df.filter(df.flightSegmentId >'AA9') \
.select("flightSegmentId", "scheduledDepartureTime") \
.orderBy(df.flightSegmentId).count()
print ("Total", total, "flights from table")
# ***3. Loading dataframe from Cloudant search index
df = spark.read.format("org.apache.bahir.cloudant") \
.option("cloudant.host",cloudant_host) \
.option("cloudant.username",cloudant_username) \
.option("cloudant.password",cloudant_password) \
.option("index","_design/view/_search/n_flights").load("n_flight")
df.printSchema()
total = df.filter(df.flightSegmentId >'AA9') \
.select("flightSegmentId", "scheduledDepartureTime") \
.orderBy(df.flightSegmentId).count()
print ("Total", total, "flights from index")
| apache-2.0 | 5,622,174,105,918,317,000 | 39.112676 | 83 | 0.716994 | false |
fhoring/autorest | src/generator/AutoRest.Python.Tests/AcceptanceTests/complex_tests.py | 1 | 14903 | # --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import unittest
import subprocess
import sys
import isodate
import tempfile
import json
from datetime import date, datetime, timedelta, tzinfo
import os
from os.path import dirname, pardir, join, realpath
cwd = dirname(realpath(__file__))
log_level = int(os.environ.get('PythonLogLevel', 30))
tests = realpath(join(cwd, pardir, "Expected", "AcceptanceTests"))
sys.path.append(join(tests, "BodyComplex"))
from msrest.serialization import Deserializer
from msrest.exceptions import DeserializationError, SerializationError, ValidationError
from autorestcomplextestservice import AutoRestComplexTestService
from autorestcomplextestservice.models import *
class UTC(tzinfo):
def utcoffset(self,dt):
return timedelta(hours=0,minutes=0)
def tzname(self,dt):
return "Z"
def dst(self,dt):
return timedelta(0)
class ComplexTests(unittest.TestCase):
def test_complex(self):
client = AutoRestComplexTestService(base_url="http://localhost:3000")
# GET basic/valid
basic_result = client.basic_operations.get_valid()
self.assertEqual(2, basic_result.id)
        self.assertEqual("abc", basic_result.name)
        self.assertEqual(CMYKColors.yellow.value, basic_result.color)
# PUT basic/valid
basic_result = Basic(id=2, name='abc', color="Magenta")
client.basic_operations.put_valid(basic_result)
basic_result = Basic(id=2, name='abc', color=CMYKColors.magenta)
client.basic_operations.put_valid(basic_result)
# GET basic/empty
basic_result = client.basic_operations.get_empty()
self.assertIsNone(basic_result.id)
self.assertIsNone(basic_result.name)
# GET basic/null
basic_result = client.basic_operations.get_null()
self.assertIsNone(basic_result.id)
self.assertIsNone(basic_result.name)
# GET basic/notprovided
basic_result = client.basic_operations.get_not_provided()
self.assertIsNone(basic_result)
# GET basic/invalid
with self.assertRaises(DeserializationError):
client.basic_operations.get_invalid()
"""
COMPLEX TYPE WITH PRIMITIVE PROPERTIES
"""
# GET primitive/integer
        intResult = client.primitive.get_int()
self.assertEqual(-1, intResult.field1)
self.assertEqual(2, intResult.field2)
# PUT primitive/integer
intRequest = {'field1':-1, 'field2':2}
client.primitive.put_int(intRequest)
# GET primitive/long
        longResult = client.primitive.get_long()
self.assertEqual(1099511627775, longResult.field1)
self.assertEqual(-999511627788, longResult.field2)
# PUT primitive/long
longRequest = {'field1':1099511627775, 'field2':-999511627788}
client.primitive.put_long(longRequest)
# GET primitive/float
floatResult = client.primitive.get_float()
self.assertEqual(1.05, floatResult.field1)
self.assertEqual(-0.003, floatResult.field2)
# PUT primitive/float
floatRequest = FloatWrapper(field1=1.05, field2=-0.003)
client.primitive.put_float(floatRequest)
# GET primitive/double
doubleResult = client.primitive.get_double()
self.assertEqual(3e-100, doubleResult.field1)
self.assertEqual(-5e-57, doubleResult.field_56_zeros_after_the_dot_and_negative_zero_before_dot_and_this_is_a_long_field_name_on_purpose)
# PUT primitive/double
doubleRequest = {'field1':3e-100}
doubleRequest['field_56_zeros_after_the_dot_and_negative_zero_before_dot_and_this_is_a_long_field_name_on_purpose'] = -5e-57
        client.primitive.put_double(doubleRequest)
# GET primitive/bool
boolResult = client.primitive.get_bool()
self.assertTrue(boolResult.field_true)
self.assertFalse(boolResult.field_false)
# PUT primitive/bool
boolRequest = BooleanWrapper(field_true=True, field_false=False)
        client.primitive.put_bool(boolRequest)
# GET primitive/string
        stringResult = client.primitive.get_string()
self.assertEqual("goodrequest", stringResult.field)
self.assertEqual("", stringResult.empty)
self.assertIsNone(stringResult.null)
# PUT primitive/string
stringRequest = StringWrapper(null=None, empty="", field="goodrequest")
        client.primitive.put_string(stringRequest)
# GET primitive/date
dateResult = client.primitive.get_date()
self.assertEqual(isodate.parse_date("0001-01-01"), dateResult.field)
self.assertEqual(isodate.parse_date("2016-02-29"), dateResult.leap)
dateRequest = DateWrapper(
field=isodate.parse_date('0001-01-01'),
leap=isodate.parse_date('2016-02-29'))
client.primitive.put_date(dateRequest)
# GET primitive/datetime
datetimeResult = client.primitive.get_date_time()
min_date = datetime.min
min_date = min_date.replace(tzinfo=UTC())
self.assertEqual(min_date, datetimeResult.field)
datetime_request = DatetimeWrapper(
field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
now=isodate.parse_datetime("2015-05-18T18:38:00Z"))
client.primitive.put_date_time(datetime_request)
# GET primitive/datetimerfc1123
datetimeRfc1123Result = client.primitive.get_date_time_rfc1123()
self.assertEqual(min_date, datetimeRfc1123Result.field)
datetime_request = Datetimerfc1123Wrapper(
field=isodate.parse_datetime("0001-01-01T00:00:00Z"),
now=isodate.parse_datetime("2015-05-18T11:38:00Z"))
client.primitive.put_date_time_rfc1123(datetime_request)
# GET primitive/duration
expected = timedelta(days=123, hours=22, minutes=14, seconds=12, milliseconds=11)
self.assertEqual(expected, client.primitive.get_duration().field)
client.primitive.put_duration(expected)
# GET primitive/byte
byteResult = client.primitive.get_byte()
valid_bytes = bytearray([0x0FF, 0x0FE, 0x0FD, 0x0FC, 0x000, 0x0FA, 0x0F9, 0x0F8, 0x0F7, 0x0F6])
self.assertEqual(valid_bytes, byteResult.field)
# PUT primitive/byte
client.primitive.put_byte(valid_bytes)
"""
COMPLEX TYPE WITH READ ONLY PROPERTIES
"""
# GET readonly/valid
valid_obj = ReadonlyObj(size=2)
valid_obj.id = '1234'
readonly_result = client.readonlyproperty.get_valid()
self.assertEqual(readonly_result, valid_obj)
# PUT readonly/valid
readonly_result = client.readonlyproperty.put_valid(valid_obj)
self.assertIsNone(readonly_result)
"""
COMPLEX TYPE WITH ARRAY PROPERTIES
"""
# GET array/valid
array_result = client.array.get_valid()
self.assertEqual(5, len(array_result.array))
array_value = ["1, 2, 3, 4", "", None, "&S#$(*Y",
"The quick brown fox jumps over the lazy dog"]
self.assertEqual(array_result.array, array_value)
# PUT array/valid
client.array.put_valid(array_value)
# GET array/empty
array_result = client.array.get_empty()
self.assertEqual(0, len(array_result.array))
# PUT array/empty
client.array.put_empty([])
# Get array/notprovided
self.assertIsNone(client.array.get_not_provided().array)
"""
COMPLEX TYPE WITH DICTIONARY PROPERTIES
"""
# GET dictionary/valid
dict_result = client.dictionary.get_valid()
self.assertEqual(5, len(dict_result.default_program))
dict_val = {'txt':'notepad', 'bmp':'mspaint', 'xls':'excel', 'exe':'', '':None}
self.assertEqual(dict_val, dict_result.default_program)
# PUT dictionary/valid
client.dictionary.put_valid(dict_val)
# GET dictionary/empty
dict_result = client.dictionary.get_empty()
self.assertEqual(0, len(dict_result.default_program))
# PUT dictionary/empty
client.dictionary.put_empty(default_program={})
# GET dictionary/null
self.assertIsNone(client.dictionary.get_null().default_program)
# GET dictionary/notprovided
self.assertIsNone(client.dictionary.get_not_provided().default_program)
"""
COMPLEX TYPES THAT INVOLVE INHERITANCE
"""
# GET inheritance/valid
inheritanceResult = client.inheritance.get_valid()
self.assertEqual(2, inheritanceResult.id)
self.assertEqual("Siameeee", inheritanceResult.name)
self.assertEqual(-1, inheritanceResult.hates[1].id)
self.assertEqual("Tomato", inheritanceResult.hates[1].name)
# PUT inheritance/valid
request = {
'id': 2,
'name': "Siameeee",
'color': "green",
'breed': "persian",
'hates': [Dog(id=1, name="Potato", food="tomato"),
Dog(id=-1, name="Tomato", food="french fries")]
}
client.inheritance.put_valid(request)
"""
COMPLEX TYPES THAT INVOLVE POLYMORPHISM
"""
# GET polymorphism/valid
result = client.polymorphism.get_valid()
self.assertIsNotNone(result)
self.assertEqual(result.location, "alaska")
self.assertEqual(len(result.siblings), 3)
self.assertIsInstance(result.siblings[0], Shark)
self.assertIsInstance(result.siblings[1], Sawshark)
self.assertIsInstance(result.siblings[2], Goblinshark)
self.assertEqual(result.siblings[0].age, 6)
self.assertEqual(result.siblings[1].age, 105)
self.assertEqual(result.siblings[2].age, 1)
# PUT polymorphism/valid
request = Salmon(1,
iswild = True,
location = "alaska",
species = "king",
siblings = [Shark(20, isodate.parse_datetime("2012-01-05T01:00:00Z"),
age=6, species="predator"),
Sawshark(10, isodate.parse_datetime("1900-01-05T01:00:00Z"),
age=105, species="dangerous",
picture=bytearray([255, 255, 255, 255, 254])),
Goblinshark(30, isodate.parse_datetime("2015-08-08T00:00:00Z"),
age=1, species="scary", jawsize=5)]
)
client.polymorphism.put_valid(request)
bad_request = Salmon(1,
iswild=True,
location="alaska",
species="king",
siblings = [
Shark(20, isodate.parse_datetime("2012-01-05T01:00:00Z"),
age=6, species="predator"),
Sawshark(10, None, age=105, species="dangerous",
picture=bytearray([255, 255, 255, 255, 254]))]
)
with self.assertRaises(ValidationError):
client.polymorphism.put_valid_missing_required(bad_request)
"""
COMPLEX TYPES THAT INVOLVE RECURSIVE REFERENCE
"""
# GET polymorphicrecursive/valid
result = client.polymorphicrecursive.get_valid()
self.assertIsInstance(result, Salmon)
self.assertIsInstance(result.siblings[0], Shark)
self.assertIsInstance(result.siblings[0].siblings[0], Salmon)
self.assertEqual(result.siblings[0].siblings[0].location, "atlantic")
request = Salmon(
iswild=True,
length=1,
species="king",
location="alaska",
siblings=[
Shark(
age=6,
length=20,
species="predator",
siblings=[
Salmon(
iswild=True,
length=2,
location="atlantic",
species="coho",
siblings=[
Shark(
age=6,
length=20,
species="predator",
birthday=isodate.parse_datetime("2012-01-05T01:00:00Z")),
Sawshark(
age=105,
length=10,
species="dangerous",
birthday=isodate.parse_datetime("1900-01-05T01:00:00Z"),
picture=bytearray([255, 255, 255, 255, 254]))]),
Sawshark(
age=105,
length=10,
species="dangerous",
siblings=[],
birthday=isodate.parse_datetime("1900-01-05T01:00:00Z"),
picture=bytearray([255, 255, 255, 255, 254]))],
birthday=isodate.parse_datetime("2012-01-05T01:00:00Z")),
Sawshark(
age=105,
length=10,
species="dangerous",
siblings=[],
birthday=isodate.parse_datetime("1900-01-05T01:00:00Z"),
picture=bytearray([255, 255, 255, 255, 254]))])
# PUT polymorphicrecursive/valid
client.polymorphicrecursive.put_valid(request)
if __name__ == '__main__':
unittest.main()
| mit | -1,278,795,022,670,390,500 | 37.503876 | 145 | 0.596403 | false |
marble/Toolchain_RenderDocumentation | 36-Get-ready-for-publishing/run_01-Treat-pdf-folder.py | 1 | 4291 | #!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
from __future__ import absolute_import
import os
import tct
import sys
params = tct.readjson(sys.argv[1])
binabspath = sys.argv[2]
facts = tct.readjson(params['factsfile'])
milestones = tct.readjson(params['milestonesfile'])
reason = ''
resultfile = params['resultfile']
result = tct.readjson(resultfile)
loglist = result['loglist'] = result.get('loglist', [])
toolname = params['toolname']
toolname_pure = params['toolname_pure']
toolchain_name = facts['toolchain_name']
workdir = params['workdir']
exitcode = CONTINUE = 0
# ==================================================
# Make a copy of milestones for later inspection?
# --------------------------------------------------
if 0 or milestones.get('debug_always_make_milestones_snapshot'):
tct.make_snapshot_of_milestones(params['milestonesfile'], sys.argv[1])
# ==================================================
# Get and check required milestone(s)
# --------------------------------------------------
def milestones_get(name, default=None):
result = milestones.get(name, default)
loglist.append((name, result))
return result
def facts_get(name, default=None):
result = facts.get(name, default)
loglist.append((name, result))
return result
def params_get(name, default=None):
result = params.get(name, default)
loglist.append((name, result))
return result
# ==================================================
# define
# --------------------------------------------------
pdf_dest_folder_htaccess = ''
pdf_url_relpath = ''
xeq_name_cnt = 0
# ==================================================
# Check params
# --------------------------------------------------
if exitcode == CONTINUE:
loglist.append('CHECK PARAMS')
# required milestones
requirements = ['configset']
# just test
for requirement in requirements:
v = milestones_get(requirement)
if not v:
loglist.append("'%s' not found" % requirement)
exitcode = 22
reason = 'Bad PARAMS or nothing to do'
if exitcode == CONTINUE:
configset = milestones_get('configset')
# fetch
webroot_abspath = tct.deepget(facts, 'tctconfig', configset, 'webroot_abspath')
loglist.append(('webroot_abspath', webroot_abspath))
if not webroot_abspath:
exitcode = 22
reason = 'Bad PARAMS or nothing to do'
if exitcode == CONTINUE:
loglist.append('PARAMS are ok')
else:
loglist.append('Bad PARAMS or nothing to do')
# ==================================================
# work
# --------------------------------------------------
if exitcode == CONTINUE:
pdf_dest_file = milestones_get('pdf_dest_file')
pdf_dest_folder = milestones_get('pdf_dest_folder')
publish_dir_pdf_planned = milestones_get('publish_dir_pdf_planned')
if not (pdf_dest_file and pdf_dest_folder and publish_dir_pdf_planned):
CONTINUE = -2
reason = 'Nothing to do'
loglist.append(reason)
if exitcode == CONTINUE:
temp = os.path.join(publish_dir_pdf_planned, os.path.split(pdf_dest_file)[1])
pdf_url_relpath = temp[len(webroot_abspath):]
loglist.append(('pdf_url_relpath', pdf_url_relpath))
htaccess_contents = (
"RewriteEngine On\n"
"RewriteCond %{REQUEST_FILENAME} !-f\n"
"RewriteRule ^(.*)$ " + pdf_url_relpath + " [L,R=301]\n")
pdf_dest_folder_htaccess = os.path.join(pdf_dest_folder, '.htaccess')
with open(pdf_dest_folder_htaccess, 'w') as f2:
f2.write(htaccess_contents)
# ==================================================
# Set MILESTONE
# --------------------------------------------------
if pdf_dest_folder_htaccess:
result['MILESTONES'].append({'pdf_dest_folder_htaccess': pdf_dest_folder_htaccess})
if pdf_url_relpath:
result['MILESTONES'].append({'pdf_url_relpath': pdf_url_relpath})
# ==================================================
# save result
# --------------------------------------------------
tct.save_the_result(result, resultfile, params, facts, milestones, exitcode, CONTINUE, reason)
# ==================================================
# Return with proper exitcode
# --------------------------------------------------
sys.exit(exitcode)
| mit | 8,712,881,131,675,113,000 | 27.798658 | 94 | 0.543696 | false |
keon/algorithms | algorithms/matrix/crout_matrix_decomposition.py | 1 | 1286 | """
Crout matrix decomposition is used to find two matrices that, when multiplied
give our input matrix, so L * U = A.
L stands for lower and L has non-zero elements only on diagonal and below.
U stands for upper and U has non-zero elements only on diagonal and above.
This can for example be used to solve systems of linear equations.
The last if is used to avoid dividing by zero.
Example:
We input the A matrix:
[[1,2,3],
[3,4,5],
[6,7,8]]
We get:
L = [1.0, 0.0, 0.0]
[3.0, -2.0, 0.0]
[6.0, -5.0, 0.0]
U = [1.0, 2.0, 3.0]
[0.0, 1.0, 2.0]
[0.0, 0.0, 1.0]
We can check that L * U = A.
I think the complexity should be O(n^3).
"""
def crout_matrix_decomposition(A):
n = len(A)
L = [[0.0] * n for i in range(n)]
U = [[0.0] * n for i in range(n)]
for j in range(n):
U[j][j] = 1.0
for i in range(j, n):
alpha = float(A[i][j])
for k in range(j):
alpha -= L[i][k]*U[k][j]
L[i][j] = float(alpha)
for i in range(j+1, n):
tempU = float(A[j][i])
for k in range(j):
tempU -= float(L[j][k]*U[k][i])
if int(L[j][j]) == 0:
L[j][j] = float(0.1**40)
U[j][i] = float(tempU/L[j][j])
return (L,U)
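# Minimal self-check (illustrative, not part of the original module): run the
# docstring example and confirm that multiplying L by U recovers A.
if __name__ == '__main__':
    A = [[1, 2, 3],
         [3, 4, 5],
         [6, 7, 8]]
    L, U = crout_matrix_decomposition(A)
    n = len(A)
    # Rebuild A as the matrix product L * U and compare entry by entry.
    product = [[sum(L[i][k] * U[k][j] for k in range(n)) for j in range(n)]
               for i in range(n)]
    assert all(abs(product[i][j] - A[i][j]) < 1e-9
               for i in range(n) for j in range(n))
    print('L =', L)
    print('U =', U)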
| mit | -7,678,984,974,903,344,000 | 26.361702 | 77 | 0.517107 | false |
zetaops/ulakbus | tests/test_ogrenci_sinav_programi_goruntule.py | 1 | 4514 | # -*- coding: utf-8 -*-
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from pyoko.db.adapter.db_riak import BlockSave
from ulakbus.models import Ogrenci, User, Donem, SinavEtkinligi
from zengine.lib.test_utils import BaseTestCase
from ulakbus.lib.ogrenci import aktif_sinav_listesi
from ulakbus.lib.date_time_helper import map_etkinlik_hafta_gunleri
class TestCase(BaseTestCase):
def test_ogrenci_sinav_programi_goruntule(self):
user = User.objects.get(username='ogrenci_3')
unit = user.role_set[0].role.unit
sinav_etkinligi = SinavEtkinligi.objects.filter(bolum=unit, donem=Donem.guncel_donem())
with BlockSave(SinavEtkinligi, query_dict={'published': True}):
for se in sinav_etkinligi:
se.published = True
se.save()
        # The relevant student is found.
ogrenci = Ogrenci.objects.get(user=user)
ogrenci_adi = ogrenci.__unicode__()
sinav_etkinlikleri = aktif_sinav_listesi(ogrenci)
for i in range(2):
            # The workflow to be tested is selected.
self.prepare_client('/ogrenci_sinav_programi_goruntule', user=user)
cond = False if i == 0 else True
            # The first test covers the case where no published exam activity exists,
            # so the published field of the SinavEtkinligi model is set to False.
            # The second test covers the case where a published exam activity exists,
            # so the published field of the SinavEtkinligi model is set to True.
with BlockSave(SinavEtkinligi, query_dict={'published': cond}):
for etkinlik in sinav_etkinlikleri:
etkinlik.published = cond
etkinlik.save()
resp = self.client.post()
            # When no published exam activity exists, a warning is expected.
if i == 0:
assert resp.json['msgbox']['title'] == "Uyarı!"
            # When a published exam activity exists, an exam schedule showing
            # the lecturer's name is expected to be displayed.
else:
                # The student is expected to have 7 active
                # sections in the current term.
assert len(ogrenci.donem_subeleri()) == 7
                # The number of exam activities is expected to be 7.
assert len(sinav_etkinlikleri) == 7
                # It is checked that the exam activities are sorted
                # by date in ascending order.
assert sinav_etkinlikleri[0].tarih <= sinav_etkinlikleri[-1].tarih
etkinlikler = map_etkinlik_hafta_gunleri(sinav_etkinlikleri)
                # It is checked that the exam activities are placed into
                # the etkinlikler dictionary as expected.
for etkinlik in sinav_etkinlikleri:
assert etkinlik.tarih.isoweekday() in etkinlikler
assert etkinlik.__unicode__() in etkinlikler[etkinlik.tarih.isoweekday()]
                # When a published exam activity exists, an exam schedule showing
                # the lecturer's name is expected to be displayed.
assert ogrenci_adi in resp.json['forms']['schema']["title"]
etkinlik_sayisi = 0
for i in range(1, len(resp.json['objects'])):
for k, day in enumerate(resp.json['objects'][i]['fields']):
if resp.json['objects'][i]['fields'][day]:
                            # It is checked that the exam activities shown
                            # on screen are rendered as expected.
assert resp.json['objects'][i]['fields'][day] == etkinlikler[k + 1][i - 1]
etkinlik_sayisi += 1
                # It is checked that the number of exam activities shown on screen
                # equals the number of activities returned from the database.
assert etkinlik_sayisi == len(sinav_etkinlikleri)
sinav_etkinligi = SinavEtkinligi.objects.filter(bolum=unit, donem=Donem.guncel_donem())
with BlockSave(SinavEtkinligi, query_dict={'published': False}):
for se in sinav_etkinligi:
se.published = False
se.save()
| gpl-3.0 | -3,618,474,234,346,714,000 | 44.708333 | 102 | 0.612124 | false |
stephanie-wang/ray | python/ray/tests/conftest.py | 1 | 5199 | """
This file defines the common pytest fixtures used in current directory.
"""
from contextlib import contextmanager
import json
import pytest
import subprocess
import ray
from ray.cluster_utils import Cluster
@pytest.fixture
def shutdown_only():
yield None
# The code after the yield will run as teardown code.
ray.shutdown()
def get_default_fixure_internal_config():
internal_config = json.dumps({
"initial_reconstruction_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
})
return internal_config
def get_default_fixture_ray_kwargs():
internal_config = get_default_fixure_internal_config()
ray_kwargs = {
"num_cpus": 1,
"object_store_memory": 150 * 1024 * 1024,
"_internal_config": internal_config,
}
return ray_kwargs
@contextmanager
def _ray_start(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
init_kwargs.update(kwargs)
# Start the Ray processes.
address_info = ray.init(**init_kwargs)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
# The following fixture will start ray with 0 cpu.
@pytest.fixture
def ray_start_no_cpu(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=0, **param) as res:
yield res
# The following fixture will start ray with 1 cpu.
@pytest.fixture
def ray_start_regular(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture(scope="session")
def ray_start_regular_shared(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture
def ray_start_2_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=2, **param) as res:
yield res
@pytest.fixture
def ray_start_10_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=10, **param) as res:
yield res
@contextmanager
def _ray_start_cluster(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
num_nodes = 0
do_init = False
# num_nodes & do_init are not arguments for ray.init, so delete them.
if "num_nodes" in kwargs:
num_nodes = kwargs["num_nodes"]
del kwargs["num_nodes"]
if "do_init" in kwargs:
do_init = kwargs["do_init"]
del kwargs["do_init"]
elif num_nodes > 0:
do_init = True
init_kwargs.update(kwargs)
cluster = Cluster()
remote_nodes = []
for _ in range(num_nodes):
remote_nodes.append(cluster.add_node(**init_kwargs))
if do_init:
ray.init(address=cluster.address)
yield cluster
# The code after the yield will run as teardown code.
ray.shutdown()
cluster.shutdown()
# This fixture will start a cluster with empty nodes.
@pytest.fixture
def ray_start_cluster(request):
param = getattr(request, "param", {})
with _ray_start_cluster(**param) as res:
yield res
@pytest.fixture
def ray_start_cluster_head(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=1, **param) as res:
yield res
@pytest.fixture
def ray_start_cluster_2_nodes(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=2, **param) as res:
yield res
@pytest.fixture
def ray_start_object_store_memory(request):
# Start the Ray processes.
store_size = request.param
internal_config = get_default_fixure_internal_config()
init_kwargs = {
"num_cpus": 1,
"_internal_config": internal_config,
"object_store_memory": store_size,
}
ray.init(**init_kwargs)
yield store_size
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def call_ray_start(request):
parameter = getattr(request, "param", "ray start --head --num-cpus=1")
command_args = parameter.split(" ")
out = ray.utils.decode(
subprocess.check_output(command_args, stderr=subprocess.STDOUT))
# Get the redis address from the output.
redis_substring_prefix = "--address='"
address_location = (
out.find(redis_substring_prefix) + len(redis_substring_prefix))
address = out[address_location:]
address = address.split("'")[0]
yield address
# Disconnect from the Ray cluster.
ray.shutdown()
# Kill the Ray cluster.
subprocess.check_output(["ray", "stop"])
@pytest.fixture
def call_ray_stop_only():
yield
subprocess.check_output(["ray", "stop"])
@pytest.fixture()
def two_node_cluster():
internal_config = json.dumps({
"initial_reconstruction_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
})
cluster = ray.cluster_utils.Cluster(
head_node_args={"_internal_config": internal_config})
for _ in range(2):
remote_node = cluster.add_node(
num_cpus=1, _internal_config=internal_config)
ray.init(address=cluster.address)
yield cluster, remote_node
# The code after the yield will run as teardown code.
ray.shutdown()
cluster.shutdown()
| apache-2.0 | 3,774,389,219,337,072,600 | 25.661538 | 74 | 0.653203 | false |
nine/webcam | annotate.py | 1 | 1085 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from datetime import datetime
from wand.image import Image
from wand.drawing import Drawing
from wand.color import Color
def readStdinBinary():
return sys.stdin.buffer.read()
def main():
img_binary = readStdinBinary()
with Drawing() as draw:
with Image(blob=img_binary) as img:
draw.font_size = 32
draw.fill_color = Color('#ffffff')
draw.text_under_color = Color('#00000080')
draw.gravity = "north_west"
draw.text(0, 0, u' Landhaus Nindl – Hollersbach im Pinzgau ')
draw.gravity = "south_east"
draw.text(0, 0, u' webcam.hollersbach.eu ')
# exif timestamp example:
# 2014:08:29 17:49:00
img_time = datetime.strptime(img.metadata['exif:DateTimeOriginal'], '%Y:%m:%d %H:%M:%S').strftime('%d.%m.%Y %H:%M')
draw.gravity = "north_west"
draw.font_size = 26
draw.text(0, 38, ' ' + img_time + ' ')
draw(img)
jpeg_bin = img.make_blob('jpeg')
sys.stdout.buffer.write(jpeg_bin)
if __name__ == '__main__':
main()
#eof
| gpl-3.0 | 8,877,753,994,074,388,000 | 24.186047 | 121 | 0.617729 | false |
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_12_01/operations/_virtual_machine_scale_set_rolling_upgrades_operations.py | 1 | 21526 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetRollingUpgradesOperations(object):
"""VirtualMachineScaleSetRollingUpgradesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _cancel_initial(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
# Construct URL
url = self._cancel_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_cancel_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/rollingUpgrades/cancel'} # type: ignore
def begin_cancel(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Cancels the current virtual machine scale set rolling upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._cancel_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/rollingUpgrades/cancel'} # type: ignore
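    # Illustrative call pattern (hypothetical variable names, not from this file):
    #   poller = compute_client.virtual_machine_scale_set_rolling_upgrades.begin_cancel(
    #       resource_group_name="my-rg", vm_scale_set_name="my-vmss")
    #   poller.result()  # block until the cancel operation completes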
def _start_os_upgrade_initial(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
# Construct URL
url = self._start_os_upgrade_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_os_upgrade_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/osRollingUpgrade'} # type: ignore
def begin_start_os_upgrade(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Starts a rolling upgrade to move all virtual machine scale set instances to the latest
available Platform Image OS version. Instances which are already running the latest available
OS version are not affected.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_os_upgrade_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start_os_upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/osRollingUpgrade'} # type: ignore
def _start_extension_upgrade_initial(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
# Construct URL
url = self._start_extension_upgrade_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_extension_upgrade_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/extensionRollingUpgrade'} # type: ignore
def begin_start_extension_upgrade(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Starts a rolling upgrade to move all extensions for all virtual machine scale set instances to
the latest available extension version. Instances which are already running the latest
extension versions are not affected.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_extension_upgrade_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start_extension_upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/extensionRollingUpgrade'} # type: ignore
def get_latest(
self,
resource_group_name, # type: str
vm_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.RollingUpgradeStatusInfo"
"""Gets the status of the latest virtual machine scale set rolling upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RollingUpgradeStatusInfo, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_12_01.models.RollingUpgradeStatusInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RollingUpgradeStatusInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get_latest.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmScaleSetName': self._serialize.url("vm_scale_set_name", vm_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RollingUpgradeStatusInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_latest.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/rollingUpgrades/latest'} # type: ignore
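# Illustrative sketch (hypothetical names, same client assumption as above):
#
#     status = client.virtual_machine_scale_set_rolling_upgrades.get_latest(
#         "my-resource-group", "my-vmss"
#     )
#     # `status` is the deserialized RollingUpgradeStatusInfo model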
| mit | -4,176,951,313,729,006,000 | 48.599078 | 234 | 0.646474 | false |
punchagan/zulip | zerver/lib/markdown/__init__.py | 1 | 105624 | # Zulip's main Markdown implementation. See docs/subsystems/markdown.md for
# detailed documentation on our Markdown syntax.
import datetime
import functools
import html
import logging
import re
import time
import urllib
import urllib.parse
from collections import defaultdict, deque
from dataclasses import dataclass
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Match,
Optional,
Pattern,
Set,
Tuple,
TypeVar,
Union,
)
from urllib.parse import urlencode, urlsplit
from xml.etree import ElementTree as etree
from xml.etree.ElementTree import Element, SubElement
import ahocorasick
import dateutil.parser
import dateutil.tz
import markdown
import markdown.blockprocessors
import markdown.inlinepatterns
import markdown.postprocessors
import markdown.treeprocessors
import markdown.util
import requests
from django.conf import settings
from django.db.models import Q
from markdown.blockparser import BlockParser
from markdown.extensions import codehilite, nl2br, sane_lists, tables
from tlds import tld_set
from typing_extensions import TypedDict
from zerver.lib import mention as mention
from zerver.lib.cache import NotFoundInCache, cache_with_key
from zerver.lib.camo import get_camo_url
from zerver.lib.emoji import (
codepoint_to_name,
emoticon_regex,
name_to_codepoint,
translate_emoticons,
)
from zerver.lib.exceptions import MarkdownRenderingException
from zerver.lib.markdown import fenced_code
from zerver.lib.markdown.fenced_code import FENCE_RE
from zerver.lib.mention import extract_user_group, possible_mentions, possible_user_group_mentions
from zerver.lib.subdomains import is_static_or_current_realm_url
from zerver.lib.tex import render_tex
from zerver.lib.thumbnail import user_uploads_or_external
from zerver.lib.timeout import TimeoutExpired, timeout
from zerver.lib.timezone import common_timezones
from zerver.lib.types import LinkifierDict
from zerver.lib.url_encoding import encode_stream, hash_util_encode
from zerver.lib.url_preview import preview as link_preview
from zerver.models import (
MAX_MESSAGE_LENGTH,
Message,
Realm,
UserGroup,
UserGroupMembership,
UserProfile,
get_active_streams,
linkifiers_for_realm,
)
ReturnT = TypeVar("ReturnT")
def one_time(method: Callable[[], ReturnT]) -> Callable[[], ReturnT]:
"""
Use this decorator with extreme caution.
The function you wrap should have no dependency
on any arguments (no args, no kwargs) nor should
it depend on any global state.
"""
val = None
def cache_wrapper() -> ReturnT:
nonlocal val
if val is None:
val = method()
return val
return cache_wrapper
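# Illustrative sketch of the caching behavior (not part of the module):
#
#     @one_time
#     def expensive() -> int:
#         print("computed")  # printed only on the first call
#         return 42
#
#     expensive()  # prints "computed" and returns 42
#     expensive()  # returns the cached 42 without recomputing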
class FullNameInfo(TypedDict):
id: int
email: str
full_name: str
class LinkInfo(TypedDict):
parent: Element
title: Optional[str]
index: Optional[int]
remove: Optional[Element]
DbData = Dict[str, Any]
# Format version of the Markdown rendering; stored along with rendered
# messages so that we can efficiently determine what needs to be re-rendered
version = 1
_T = TypeVar("_T")
ElementStringNone = Union[Element, Optional[str]]
EMOJI_REGEX = r"(?P<syntax>:[\w\-\+]+:)"
def verbose_compile(pattern: str) -> Pattern[str]:
return re.compile(
f"^(.*?){pattern}(.*?)$",
re.DOTALL | re.UNICODE | re.VERBOSE,
)
def normal_compile(pattern: str) -> Pattern[str]:
return re.compile(
fr"^(.*?){pattern}(.*)$",
re.DOTALL | re.UNICODE,
)
STREAM_LINK_REGEX = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
\#\*\* # and after hash sign followed by double asterisks
(?P<stream_name>[^\*]+) # stream name can contain anything
\*\* # ends by double asterisks
"""
@one_time
def get_compiled_stream_link_regex() -> Pattern[str]:
# Not using verbose_compile as it adds ^(.*?) and
# (.*?)$ which cause extra overhead of matching
# pattern which is not required.
# With new InlineProcessor these extra patterns
# are not required.
return re.compile(
STREAM_LINK_REGEX,
re.DOTALL | re.UNICODE | re.VERBOSE,
)
STREAM_TOPIC_LINK_REGEX = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
\#\*\* # and after hash sign followed by double asterisks
(?P<stream_name>[^\*>]+) # stream name can contain anything except >
> # > acts as separator
(?P<topic_name>[^\*]+) # topic name can contain anything
\*\* # ends by double asterisks
"""
@one_time
def get_compiled_stream_topic_link_regex() -> Pattern[str]:
# Not using verbose_compile as it adds ^(.*?) and
# (.*?)$ which cause extra overhead of matching
# pattern which is not required.
# With new InlineProcessor these extra patterns
# are not required.
return re.compile(
STREAM_TOPIC_LINK_REGEX,
re.DOTALL | re.UNICODE | re.VERBOSE,
)
LINK_REGEX: Optional[Pattern[str]] = None
def get_web_link_regex() -> Pattern[str]:
# We create this one time, but not at startup. So the
# first message rendered in any process will have some
# extra costs. It's roughly 75ms to run this code, so
# caching the value in LINK_REGEX is super important here.
global LINK_REGEX
if LINK_REGEX is not None:
return LINK_REGEX
tlds = "|".join(list_of_tlds())
# A link starts at a word boundary, and ends at space, punctuation, or end-of-input.
#
# We detect a URL either by the `https?://` or by building around the TLD.
# In lieu of having a recursive regex (which Python doesn't support) to match
# arbitrary numbers of nested matching parentheses, we manually build a regexp that
# can match up to six levels of nesting.
# The inner_paren_contents chunk matches the innermost non-parenthesis-holding text,
# and the paren_group matches text with, optionally, a matching set of parens
inner_paren_contents = r"[^\s()\"]*"
paren_group = r"""
[^\s()\"]*? # Containing characters that won't end the URL
(?: \( %s \) # and more characters in matched parens
[^\s()\"]*? # followed by more characters
)* # zero-or-more sets of paired parens
"""
nested_paren_chunk = paren_group
for i in range(6):
nested_paren_chunk = nested_paren_chunk % (paren_group,)
nested_paren_chunk = nested_paren_chunk % (inner_paren_contents,)
file_links = r"| (?:file://(/[^/ ]*)+/?)" if settings.ENABLE_FILE_LINKS else r""
REGEX = fr"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
# (Double-negative lookbehind to allow start-of-string)
(?P<url> # Main group
(?:(?: # Domain part
https?://[\w.:@-]+? # If it has a protocol, anything goes.
|(?: # Or, if not, be more strict to avoid false-positives
(?:[\w-]+\.)+ # One or more domain components, separated by dots
(?:{tlds}) # TLDs
)
)
(?:/ # A path, beginning with /
{nested_paren_chunk} # zero-to-6 sets of paired parens
)?) # Path is optional
| (?:[\w.-]+\@[\w.-]+\.[\w]+) # Email is separate, since it can't have a path
{file_links} # File path start with file:///, enable by setting ENABLE_FILE_LINKS=True
| (?:bitcoin:[13][a-km-zA-HJ-NP-Z1-9]{{25,34}}) # Bitcoin address pattern, see https://mokagio.github.io/tech-journal/2014/11/21/regex-bitcoin.html
)
(?= # URL must be followed by (not included in group)
[!:;\?\),\.\'\"\>]* # Optional punctuation characters
(?:\Z|\s) # followed by whitespace or end of string
)
"""
LINK_REGEX = verbose_compile(REGEX)
return LINK_REGEX
def clear_state_for_testing() -> None:
# The link regex never changes in production, but our tests
# try out both sides of ENABLE_FILE_LINKS, so we need
# a way to clear it.
global LINK_REGEX
LINK_REGEX = None
markdown_logger = logging.getLogger()
def rewrite_local_links_to_relative(db_data: Optional[DbData], link: str) -> str:
"""If the link points to a local destination (e.g. #narrow/...),
generate a relative link that will open it in the current window.
"""
if db_data:
realm_uri_prefix = db_data["realm_uri"] + "/"
if (
link.startswith(realm_uri_prefix)
and urllib.parse.urljoin(realm_uri_prefix, link[len(realm_uri_prefix) :]) == link
):
return link[len(realm_uri_prefix) :]
return link
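# Illustrative examples (assuming db_data carries the realm URI
# "https://chat.example.com"): links into the same realm are relativized,
# anything else is returned untouched:
#
#     rewrite_local_links_to_relative(db_data, "https://chat.example.com/#narrow/stream/1-general")
#     # -> "#narrow/stream/1-general"
#     rewrite_local_links_to_relative(db_data, "https://other.example.com/page")
#     # -> "https://other.example.com/page"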
def url_embed_preview_enabled(
message: Optional[Message] = None, realm: Optional[Realm] = None, no_previews: bool = False
) -> bool:
if not settings.INLINE_URL_EMBED_PREVIEW:
return False
if no_previews:
return False
if realm is None:
if message is not None:
realm = message.get_realm()
if realm is None:
# realm can be None for odd use cases
# like generating documentation or running
# test code
return True
return realm.inline_url_embed_preview
def image_preview_enabled(
message: Optional[Message] = None, realm: Optional[Realm] = None, no_previews: bool = False
) -> bool:
if not settings.INLINE_IMAGE_PREVIEW:
return False
if no_previews:
return False
if realm is None:
if message is not None:
realm = message.get_realm()
if realm is None:
# realm can be None for odd use cases
# like generating documentation or running
# test code
return True
return realm.inline_image_preview
def list_of_tlds() -> List[str]:
# Skip a few overly-common false-positives from file extensions
common_false_positives = set(["java", "md", "mov", "py", "zip"])
tlds = list(tld_set - common_false_positives)
tlds.sort(key=len, reverse=True)
return tlds
def walk_tree(
root: Element, processor: Callable[[Element], Optional[_T]], stop_after_first: bool = False
) -> List[_T]:
results = []
queue = deque([root])
while queue:
currElement = queue.popleft()
for child in currElement:
if child:
queue.append(child)
result = processor(child)
if result is not None:
results.append(result)
if stop_after_first:
return results
return results
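# Illustrative sketch (not from the original source): collect every <img>
# element in a rendered tree, mirroring how the tree processors below use it:
#
#     found_imgs = walk_tree(root, lambda e: e if e.tag == "img" else None)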
@dataclass
class ElementFamily:
grandparent: Optional[Element]
parent: Element
child: Element
in_blockquote: bool
T = TypeVar("T")
class ResultWithFamily(Generic[T]):
family: ElementFamily
result: T
def __init__(self, family: ElementFamily, result: T):
self.family = family
self.result = result
class ElementPair:
parent: Optional["ElementPair"]
value: Element
def __init__(self, parent: Optional["ElementPair"], value: Element):
self.parent = parent
self.value = value
def walk_tree_with_family(
root: Element,
processor: Callable[[Element], Optional[_T]],
) -> List[ResultWithFamily[_T]]:
results = []
queue = deque([ElementPair(parent=None, value=root)])
while queue:
currElementPair = queue.popleft()
for child in currElementPair.value:
if child:
queue.append(ElementPair(parent=currElementPair, value=child))
result = processor(child)
if result is not None:
if currElementPair.parent is not None:
grandparent_element = currElementPair.parent
grandparent: Optional[Element] = grandparent_element.value
else:
grandparent = None
family = ElementFamily(
grandparent=grandparent,
parent=currElementPair.value,
child=child,
in_blockquote=has_blockquote_ancestor(currElementPair),
)
results.append(
ResultWithFamily(
family=family,
result=result,
)
)
return results
def has_blockquote_ancestor(element_pair: Optional[ElementPair]) -> bool:
if element_pair is None:
return False
elif element_pair.value.tag == "blockquote":
return True
else:
return has_blockquote_ancestor(element_pair.parent)
@cache_with_key(lambda tweet_id: tweet_id, cache_name="database", with_statsd_key="tweet_data")
def fetch_tweet_data(tweet_id: str) -> Optional[Dict[str, Any]]:
if settings.TEST_SUITE:
from . import testing_mocks
res = testing_mocks.twitter(tweet_id)
else:
creds = {
"consumer_key": settings.TWITTER_CONSUMER_KEY,
"consumer_secret": settings.TWITTER_CONSUMER_SECRET,
"access_token_key": settings.TWITTER_ACCESS_TOKEN_KEY,
"access_token_secret": settings.TWITTER_ACCESS_TOKEN_SECRET,
}
if not all(creds.values()):
return None
# We lazily import twitter here because its import process is
# surprisingly slow, and doing so has a significant impact on
# the startup performance of `manage.py` commands.
import twitter
api = twitter.Api(tweet_mode="extended", **creds)
try:
# Sometimes Twitter hangs on responses. Timing out here
# will cause the Tweet to go through as-is with no inline
# preview, rather than having the message be rejected
# entirely. This timeout needs to be less than our overall
# formatting timeout.
tweet = timeout(3, lambda: api.GetStatus(tweet_id))
res = tweet.AsDict()
except TimeoutExpired:
# We'd like to try again later and not cache the bad result,
# so we need to re-raise the exception (just as though
# we were being rate-limited)
raise
except twitter.TwitterError as e:
t = e.args[0]
if len(t) == 1 and ("code" in t[0]):
# https://developer.twitter.com/en/docs/basics/response-codes
code = t[0]["code"]
if code in [34, 144, 421, 422]:
# All these "correspond with HTTP 404," and mean
# that the message doesn't exist; return None so
# that we will cache the error.
return None
elif code in [63, 179]:
# 63 is that the account is suspended, 179 is that
# it is now locked; cache the None.
return None
elif code in [88, 130, 131]:
# Code 88 means that we were rate-limited, 130
# means Twitter is having capacity issues, and 131
# is other 400-equivalent; in these cases, raise
# the error so we don't cache None and will try
# again later.
raise
# It's not clear what to do in cases of other errors,
# but for now it seems reasonable to log at error
# level (so that we get notified) and then cache the
# failure so that we can proceed with our usual work
markdown_logger.exception("Unknown error fetching tweet data", stack_info=True)
return None
return res
HEAD_START_RE = re.compile("^head[ >]")
HEAD_END_RE = re.compile("^/head[ >]")
META_START_RE = re.compile("^meta[ >]")
META_END_RE = re.compile("^/meta[ >]")
def fetch_open_graph_image(url: str) -> Optional[Dict[str, Any]]:
in_head = False
# HTML will auto-close meta tags; when we start the next tag, add
# a closing tag if the previous one has not been closed yet.
last_closed = True
head = []
# TODO: What if response content is huge? Should we get headers first?
try:
content = requests.get(url, timeout=1).text
except requests.RequestException:
return None
# Extract the head and meta tags
# All meta tags are self-closing, have no children, or are closed
# automatically.
for part in content.split("<"):
if not in_head and HEAD_START_RE.match(part):
# Started the head node; output it so we have a document root
in_head = True
head.append("<head>")
elif in_head and HEAD_END_RE.match(part):
# Found the end of the head; close any remaining tag, then stop
# processing
in_head = False
if not last_closed:
last_closed = True
head.append("</meta>")
head.append("</head>")
break
elif in_head and META_START_RE.match(part):
# Found a meta node; copy it
if not last_closed:
head.append("</meta>")
last_closed = True
head.append("<")
head.append(part)
if "/>" not in part:
last_closed = False
elif in_head and META_END_RE.match(part):
# End of a meta node; just copy it to close the tag
head.append("<")
head.append(part)
last_closed = True
try:
doc = etree.fromstring("".join(head))
except etree.ParseError:
return None
og_image = doc.find('meta[@property="og:image"]')
og_title = doc.find('meta[@property="og:title"]')
og_desc = doc.find('meta[@property="og:description"]')
title = None
desc = None
if og_image is not None:
image = og_image.get("content")
else:
return None
if og_title is not None:
title = og_title.get("content")
if og_desc is not None:
desc = og_desc.get("content")
return {"image": image, "title": title, "desc": desc}
def get_tweet_id(url: str) -> Optional[str]:
parsed_url = urllib.parse.urlparse(url)
if not (parsed_url.netloc == "twitter.com" or parsed_url.netloc.endswith(".twitter.com")):
return None
to_match = parsed_url.path
# In old-style twitter.com/#!/wdaher/status/1231241234-style URLs,
# we need to look at the fragment instead
if parsed_url.path == "/" and len(parsed_url.fragment) > 5:
to_match = parsed_url.fragment
tweet_id_match = re.match(
r"^!?/.*?/status(es)?/(?P<tweetid>\d{10,30})(/photo/[0-9])?/?$", to_match
)
if not tweet_id_match:
return None
return tweet_id_match.group("tweetid")
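# Illustrative examples (hypothetical inputs) of what the regex above accepts:
#
#     get_tweet_id("https://twitter.com/wdaher/status/1231241234123412345")
#     # -> "1231241234123412345"
#     get_tweet_id("https://twitter.com/#!/wdaher/status/1231241234123412345")
#     # -> "1231241234123412345" (old-style fragment URL)
#     get_tweet_id("https://example.com/status/1231241234")  # -> None (wrong host)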
class InlineImageProcessor(markdown.treeprocessors.Treeprocessor):
"""
Rewrite inline img tags to serve external content via Camo.
This rewrites all images, except ones that are served from the current
realm or global STATIC_URL. This is to ensure that each realm only loads
images that are hosted on that realm or by the global installation,
avoiding information leakage to external domains or between realms. We need
to disable proxying of images hosted on the same realm, because otherwise
we will break images in /user_uploads/, which require authorization to
view.
"""
def run(self, root: Element) -> None:
# Get all URLs from the blob
found_imgs = walk_tree(root, lambda e: e if e.tag == "img" else None)
for img in found_imgs:
url = img.get("src")
assert url is not None
if is_static_or_current_realm_url(url, self.md.zulip_realm):
# Don't rewrite images on our own site (e.g. emoji, user uploads).
continue
img.set("src", get_camo_url(url))
class BacktickInlineProcessor(markdown.inlinepatterns.BacktickInlineProcessor):
""" Return a `<code>` element containing the matching text. """
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
# Let upstream's implementation do its job as it is, we'll
# just replace the text to not strip the group because it
# makes it impossible to put leading/trailing whitespace in
# an inline code block.
el, start, end = ret = super().handleMatch(m, data)
if el is not None and m.group(3):
# upstream's code here is: m.group(3).strip() rather than m.group(3).
el.text = markdown.util.AtomicString(markdown.util.code_escape(m.group(3)))
return ret
class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
TWITTER_MAX_IMAGE_HEIGHT = 400
TWITTER_MAX_TO_PREVIEW = 3
INLINE_PREVIEW_LIMIT_PER_MESSAGE = 5
def __init__(self, md: markdown.Markdown) -> None:
markdown.treeprocessors.Treeprocessor.__init__(self, md)
def add_a(
self,
root: Element,
url: str,
link: str,
title: Optional[str] = None,
desc: Optional[str] = None,
class_attr: str = "message_inline_image",
data_id: Optional[str] = None,
insertion_index: Optional[int] = None,
already_thumbnailed: bool = False,
) -> None:
desc = desc if desc is not None else ""
# Update message.has_image attribute.
if "message_inline_image" in class_attr and self.md.zulip_message:
self.md.zulip_message.has_image = True
if insertion_index is not None:
div = Element("div")
root.insert(insertion_index, div)
else:
div = SubElement(root, "div")
div.set("class", class_attr)
a = SubElement(div, "a")
a.set("href", link)
if title is not None:
a.set("title", title)
if data_id is not None:
a.set("data-id", data_id)
img = SubElement(a, "img")
if (
settings.THUMBNAIL_IMAGES
and (not already_thumbnailed)
and user_uploads_or_external(url)
):
# See docs/thumbnailing.md for some high-level documentation.
#
# We strip leading '/' from relative URLs here to ensure
# consistency in what gets passed to /thumbnail
url = url.lstrip("/")
img.set("src", "/thumbnail?" + urlencode({"url": url, "size": "thumbnail"}))
img.set("data-src-fullsize", "/thumbnail?" + urlencode({"url": url, "size": "full"}))
else:
img.set("src", url)
if class_attr == "message_inline_ref":
summary_div = SubElement(div, "div")
title_div = SubElement(summary_div, "div")
title_div.set("class", "message_inline_image_title")
title_div.text = title
desc_div = SubElement(summary_div, "desc")
desc_div.set("class", "message_inline_image_desc")
def add_oembed_data(self, root: Element, link: str, extracted_data: Dict[str, Any]) -> bool:
oembed_resource_type = extracted_data.get("type", "")
title = extracted_data.get("title")
if oembed_resource_type == "photo":
image = extracted_data.get("image")
if image:
self.add_a(root, image, link, title=title)
return True
elif oembed_resource_type == "video":
html = extracted_data["html"]
image = extracted_data["image"]
title = extracted_data.get("title")
description = extracted_data.get("description")
self.add_a(
root,
image,
link,
title,
description,
"embed-video message_inline_image",
html,
already_thumbnailed=True,
)
return True
return False
def add_embed(self, root: Element, link: str, extracted_data: Dict[str, Any]) -> None:
oembed = extracted_data.get("oembed", False)
if oembed and self.add_oembed_data(root, link, extracted_data):
return
img_link = extracted_data.get("image")
if not img_link:
# Don't add an embed if an image is not found
return
container = SubElement(root, "div")
container.set("class", "message_embed")
parsed_img_link = urllib.parse.urlparse(img_link)
# Append domain where relative img_link url is given
if not parsed_img_link.netloc:
parsed_url = urllib.parse.urlparse(link)
domain = "{url.scheme}://{url.netloc}/".format(url=parsed_url)
img_link = urllib.parse.urljoin(domain, img_link)
img = SubElement(container, "a")
img.set("style", "background-image: url(" + img_link + ")")
img.set("href", link)
img.set("class", "message_embed_image")
data_container = SubElement(container, "div")
data_container.set("class", "data-container")
title = extracted_data.get("title")
if title:
title_elm = SubElement(data_container, "div")
title_elm.set("class", "message_embed_title")
a = SubElement(title_elm, "a")
a.set("href", link)
a.set("title", title)
a.text = title
description = extracted_data.get("description")
if description:
description_elm = SubElement(data_container, "div")
description_elm.set("class", "message_embed_description")
description_elm.text = description
def get_actual_image_url(self, url: str) -> str:
# Add specific per-site cases to convert image-preview URLs to image URLs.
# See https://github.com/zulip/zulip/issues/4658 for more information
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc == "github.com" or parsed_url.netloc.endswith(".github.com"):
# https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png ->
# https://raw.githubusercontent.com/zulip/zulip/master/static/images/logo/zulip-icon-128x128.png
split_path = parsed_url.path.split("/")
if len(split_path) > 3 and split_path[3] == "blob":
return urllib.parse.urljoin(
"https://raw.githubusercontent.com", "/".join(split_path[0:3] + split_path[4:])
)
return url
def is_image(self, url: str) -> bool:
if not self.md.image_preview_enabled:
return False
parsed_url = urllib.parse.urlparse(url)
# Exclude sites that serve HTML pages at URLs ending with image extensions
if parsed_url.netloc == "pasteboard.co":
return False
# List from https://support.google.com/chromeos/bin/answer.py?hl=en&answer=183093
for ext in [".bmp", ".gif", ".jpe", ".jpeg", ".jpg", ".png", ".webp"]:
if parsed_url.path.lower().endswith(ext):
return True
return False
def corrected_image_source(self, url: str) -> Optional[str]:
# This function adjusts any URLs from linx.li and
# wikipedia.org to point to the actual image URL. It's
# structurally very similar to dropbox_image, and possibly
# should be rewritten to use open graph, but has some value.
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc.lower().endswith(".wikipedia.org"):
# Redirecting from "/wiki/File:" to "/wiki/Special:FilePath/File:"
# A possible alternative, that avoids the redirect after hitting "Special:"
# is using the first characters of md5($filename) to generate the URL
domain = parsed_url.scheme + "://" + parsed_url.netloc
correct_url = domain + parsed_url.path[:6] + "Special:FilePath" + parsed_url.path[5:]
return correct_url
if parsed_url.netloc == "linx.li":
return "https://linx.li/s" + parsed_url.path
return None
def dropbox_image(self, url: str) -> Optional[Dict[str, Any]]:
# TODO: The returned Dict could possibly be a TypedDict in future.
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc == "dropbox.com" or parsed_url.netloc.endswith(".dropbox.com"):
is_album = parsed_url.path.startswith("/sc/") or parsed_url.path.startswith("/photos/")
# Only allow preview Dropbox shared links
if not (
parsed_url.path.startswith("/s/") or parsed_url.path.startswith("/sh/") or is_album
):
return None
# Try to retrieve open graph protocol info for a preview
# This might be redundant right now for shared links for images.
# However, we might want to make use of title and description
# in the future. If the actual image is too big, we might also
# want to use the open graph image.
image_info = fetch_open_graph_image(url)
is_image = is_album or self.is_image(url)
# If it is from an album or not an actual image file,
# just use open graph image.
if is_album or not is_image:
# Failed to follow link to find an image preview so
# use placeholder image and guess filename
if image_info is None:
return None
image_info["is_image"] = is_image
return image_info
# Otherwise, try to retrieve the actual image.
# This is because open graph image from Dropbox may have padding
# and gifs do not work.
# TODO: What if image is huge? Should we get headers first?
if image_info is None:
image_info = {}
image_info["is_image"] = True
parsed_url_list = list(parsed_url)
parsed_url_list[4] = "dl=1" # Replaces query
image_info["image"] = urllib.parse.urlunparse(parsed_url_list)
return image_info
return None
def youtube_id(self, url: str) -> Optional[str]:
if not self.md.image_preview_enabled:
return None
# YouTube video id extraction regular expression from https://pastebin.com/KyKAFv1s
# Slightly modified to support URLs of the forms
# - youtu.be/<id>
# - youtube.com/playlist?v=<id>&list=<list-id>
# - youtube.com/watch_videos?video_ids=<id1>,<id2>,<id3>
# If it matches, match.group(2) is the video id.
schema_re = r"(?:https?://)"
host_re = r"(?:youtu\.be/|(?:\w+\.)?youtube(?:-nocookie)?\.com/)"
param_re = (
r"(?:(?:(?:v|embed)/)|"
+ r"(?:(?:(?:watch|playlist)(?:_popup|_videos)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v(?:ideo_ids)?=))"
)
id_re = r"([0-9A-Za-z_-]+)"
youtube_re = r"^({schema_re}?{host_re}{param_re}?)?{id_re}(?(1).+)?$"
youtube_re = youtube_re.format(
schema_re=schema_re, host_re=host_re, id_re=id_re, param_re=param_re
)
match = re.match(youtube_re, url)
# URLs of the form youtube.com/playlist?list=<list-id> are incorrectly matched
if match is None or match.group(2) == "playlist":
return None
return match.group(2)
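# Illustrative examples (hypothetical inputs) of the URL forms matched above:
#
#     self.youtube_id("https://youtu.be/dQw4w9WgXcQ")                 # -> "dQw4w9WgXcQ"
#     self.youtube_id("https://www.youtube.com/watch?v=dQw4w9WgXcQ")  # -> "dQw4w9WgXcQ"
#     self.youtube_id("https://www.youtube.com/playlist?list=PL123")  # -> None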
def youtube_title(self, extracted_data: Dict[str, Any]) -> Optional[str]:
title = extracted_data.get("title")
if title is not None:
return f"YouTube - {title}"
return None
def youtube_image(self, url: str) -> Optional[str]:
yt_id = self.youtube_id(url)
if yt_id is not None:
return f"https://i.ytimg.com/vi/{yt_id}/default.jpg"
return None
def vimeo_id(self, url: str) -> Optional[str]:
if not self.md.image_preview_enabled:
return None
# (http|https)?:\/\/(www\.)?vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/([^\/]*)\/videos\/|)(\d+)(?:|\/\?)
# If it matches, match.group(5) is the video id.
vimeo_re = (
r"^((http|https)?:\/\/(www\.)?vimeo.com\/"
+ r"(?:channels\/(?:\w+\/)?|groups\/"
+ r"([^\/]*)\/videos\/|)(\d+)(?:|\/\?))$"
)
match = re.match(vimeo_re, url)
if match is None:
return None
return match.group(5)
def vimeo_title(self, extracted_data: Dict[str, Any]) -> Optional[str]:
title = extracted_data.get("title")
if title is not None:
return f"Vimeo - {title}"
return None
def twitter_text(
self,
text: str,
urls: List[Dict[str, str]],
user_mentions: List[Dict[str, Any]],
media: List[Dict[str, Any]],
) -> Element:
"""
Use data from the Twitter API to turn links, mentions and media into A
tags. Also convert Unicode emojis to images.
This works by using the URLs, user_mentions and media data from
the twitter API and searching for Unicode emojis in the text using
`unicode_emoji_regex`.
The first step is finding the locations of the URLs, mentions, media and
emoji in the text. For each match we build a dictionary with the type, the
start location, the end location, the URL to link to, and the text (codepoint
and title in the case of emojis) to be used in the link (an image in the case
of emojis).
Next we sort the matches by start location, and for each one we add the
text from the end of the last link to the start of the current link to
the output. The text needs to be added to the text attribute of the first
node (the P tag) or to the tail of the last link created.
Finally we add any remaining text to the last node.
"""
to_process: List[Dict[str, Any]] = []
# Build dicts for URLs
for url_data in urls:
short_url = url_data["url"]
full_url = url_data["expanded_url"]
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
to_process.append(
{
"type": "url",
"start": match.start(),
"end": match.end(),
"url": short_url,
"text": full_url,
}
)
# Build dicts for mentions
for user_mention in user_mentions:
screen_name = user_mention["screen_name"]
mention_string = "@" + screen_name
for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE):
to_process.append(
{
"type": "mention",
"start": match.start(),
"end": match.end(),
"url": "https://twitter.com/" + urllib.parse.quote(screen_name),
"text": mention_string,
}
)
# Build dicts for media
for media_item in media:
short_url = media_item["url"]
expanded_url = media_item["expanded_url"]
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
to_process.append(
{
"type": "media",
"start": match.start(),
"end": match.end(),
"url": short_url,
"text": expanded_url,
}
)
# Build dicts for emojis
for match in re.finditer(unicode_emoji_regex, text, re.IGNORECASE):
orig_syntax = match.group("syntax")
codepoint = unicode_emoji_to_codepoint(orig_syntax)
if codepoint in codepoint_to_name:
display_string = ":" + codepoint_to_name[codepoint] + ":"
to_process.append(
{
"type": "emoji",
"start": match.start(),
"end": match.end(),
"codepoint": codepoint,
"title": display_string,
}
)
to_process.sort(key=lambda x: x["start"])
p = current_node = Element("p")
def set_text(text: str) -> None:
"""
Helper to set the text or the tail of the current_node
"""
if current_node == p:
current_node.text = text
else:
current_node.tail = text
db_data = self.md.zulip_db_data
current_index = 0
for item in to_process:
# The text we want to link starts inside already-linked text; skip it
if item["start"] < current_index:
continue
# Add text from the end of last link to the start of the current
# link
set_text(text[current_index : item["start"]])
current_index = item["end"]
if item["type"] != "emoji":
elem = url_to_a(db_data, item["url"], item["text"])
assert isinstance(elem, Element)
else:
elem = make_emoji(item["codepoint"], item["title"])
current_node = elem
p.append(elem)
# Add any unused text
set_text(text[current_index:])
return p
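# Illustrative sketch (assumed data shapes mirroring the Twitter API): given
# text "Hello @zulip" and user_mentions [{"screen_name": "zulip"}] (with empty
# urls and media), the method returns a <p> whose text is "Hello " and whose
# child is an <a href="https://twitter.com/zulip"> linking the mention.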
def twitter_link(self, url: str) -> Optional[Element]:
tweet_id = get_tweet_id(url)
if tweet_id is None:
return None
try:
res = fetch_tweet_data(tweet_id)
if res is None:
return None
user: Dict[str, Any] = res["user"]
tweet = Element("div")
tweet.set("class", "twitter-tweet")
img_a = SubElement(tweet, "a")
img_a.set("href", url)
profile_img = SubElement(img_a, "img")
profile_img.set("class", "twitter-avatar")
# For some reason, for, e.g. tweet 285072525413724161,
# python-twitter does not give us a
# profile_image_url_https, but instead puts that URL in
# profile_image_url. So use _https if available, but fall
# back gracefully.
image_url = user.get("profile_image_url_https", user["profile_image_url"])
profile_img.set("src", image_url)
text = html.unescape(res["full_text"])
urls = res.get("urls", [])
user_mentions = res.get("user_mentions", [])
media: List[Dict[str, Any]] = res.get("media", [])
p = self.twitter_text(text, urls, user_mentions, media)
tweet.append(p)
span = SubElement(tweet, "span")
span.text = "- {} (@{})".format(user["name"], user["screen_name"])
# Add image previews
for media_item in media:
# Only photos have a preview image
if media_item["type"] != "photo":
continue
# Find the image size that is smaller than
# TWITTER_MAX_IMAGE_HEIGHT px tall or the smallest
size_name_tuples = list(media_item["sizes"].items())
size_name_tuples.sort(reverse=True, key=lambda x: x[1]["h"])
for size_name, size in size_name_tuples:
if size["h"] < self.TWITTER_MAX_IMAGE_HEIGHT:
break
media_url = "{}:{}".format(media_item["media_url_https"], size_name)
img_div = SubElement(tweet, "div")
img_div.set("class", "twitter-image")
img_a = SubElement(img_div, "a")
img_a.set("href", media_item["url"])
img = SubElement(img_a, "img")
img.set("src", media_url)
return tweet
except Exception:
# We put this in its own try-except because it requires external
# connectivity. If Twitter flakes out, we don't want to not-render
# the entire message; we just want to not show the Twitter preview.
markdown_logger.warning("Error building Twitter link", exc_info=True)
return None
def get_url_data(self, e: Element) -> Optional[Tuple[str, Optional[str]]]:
if e.tag == "a":
url = e.get("href")
assert url is not None
return (url, e.text)
return None
def get_inlining_information(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
) -> LinkInfo:
grandparent = found_url.family.grandparent
parent = found_url.family.parent
ahref_element = found_url.family.child
(url, text) = found_url.result
# url != text usually implies a named link, which we opt not to remove
url_eq_text = text is None or url == text
title = None if url_eq_text else text
info: LinkInfo = {
"parent": root,
"title": title,
"index": None,
"remove": None,
}
if parent.tag == "li":
info["parent"] = parent
if not parent.text and not ahref_element.tail and url_eq_text:
info["remove"] = ahref_element
elif parent.tag == "p":
assert grandparent is not None
parent_index = None
for index, uncle in enumerate(grandparent):
if uncle is parent:
parent_index = index
break
# Append to end of list of grandparent's children as normal
info["parent"] = grandparent
if (
len(parent) == 1
and (not parent.text or parent.text == "\n")
and not ahref_element.tail
and url_eq_text
):
info["remove"] = parent
if parent_index is not None:
info["index"] = self.find_proper_insertion_index(grandparent, parent, parent_index)
return info
def handle_image_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
) -> None:
info = self.get_inlining_information(root, found_url)
(url, text) = found_url.result
actual_url = self.get_actual_image_url(url)
self.add_a(
info["parent"], actual_url, url, title=info["title"], insertion_index=info["index"]
)
if info["remove"] is not None:
info["parent"].remove(info["remove"])
def handle_tweet_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
twitter_data: Element,
) -> None:
info = self.get_inlining_information(root, found_url)
if info["index"] is not None:
div = Element("div")
root.insert(info["index"], div)
else:
div = SubElement(root, "div")
div.set("class", "inline-preview-twitter")
div.insert(0, twitter_data)
def handle_youtube_url_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
yt_image: str,
) -> None:
info = self.get_inlining_information(root, found_url)
(url, text) = found_url.result
yt_id = self.youtube_id(url)
self.add_a(
info["parent"],
yt_image,
url,
None,
None,
"youtube-video message_inline_image",
yt_id,
insertion_index=info["index"],
already_thumbnailed=True,
)
def find_proper_insertion_index(
self, grandparent: Element, parent: Element, parent_index_in_grandparent: int
) -> int:
# If there are several inline images from the same paragraph, ensure that
# they are in the correct (and not the opposite) order by inserting after the
# last inline image from paragraph 'parent'
parent_links = [ele.attrib["href"] for ele in parent.iter(tag="a")]
insertion_index = parent_index_in_grandparent
while True:
insertion_index += 1
if insertion_index >= len(grandparent):
return insertion_index
uncle = grandparent[insertion_index]
inline_image_classes = [
"message_inline_image",
"message_inline_ref",
"inline-preview-twitter",
]
if (
uncle.tag != "div"
or "class" not in uncle.keys()
or uncle.attrib["class"] not in inline_image_classes
):
return insertion_index
uncle_link = list(uncle.iter(tag="a"))[0].attrib["href"]
if uncle_link not in parent_links:
return insertion_index
def is_absolute_url(self, url: str) -> bool:
return bool(urllib.parse.urlparse(url).netloc)
def run(self, root: Element) -> None:
# Get all URLs from the blob
found_urls = walk_tree_with_family(root, self.get_url_data)
unique_urls = {found_url.result[0] for found_url in found_urls}
# Collect unique URLs which are not quoted as we don't do
# inline previews for links inside blockquotes.
unique_previewable_urls = {
found_url.result[0] for found_url in found_urls if not found_url.family.in_blockquote
}
# Set has_link and similar flags whenever a message is processed by Markdown
if self.md.zulip_message:
self.md.zulip_message.has_link = len(found_urls) > 0
self.md.zulip_message.has_image = False # This is updated in self.add_a
self.md.zulip_message.potential_attachment_path_ids = []
for url in unique_urls:
# Due to rewrite_local_links_to_relative, we need to
# handle both relative URLs beginning with
# `/user_uploads` and beginning with `user_uploads`.
# This urllib construction converts the latter into
# the former.
parsed_url = urllib.parse.urlsplit(urllib.parse.urljoin("/", url))
host = parsed_url.netloc
if host != "" and host != self.md.zulip_realm.host:
continue
if not parsed_url.path.startswith("/user_uploads/"):
continue
path_id = parsed_url.path[len("/user_uploads/") :]
self.md.zulip_message.potential_attachment_path_ids.append(path_id)
if len(found_urls) == 0:
return
if len(unique_previewable_urls) > self.INLINE_PREVIEW_LIMIT_PER_MESSAGE:
return
processed_urls: Set[str] = set()
rendered_tweet_count = 0
for found_url in found_urls:
(url, text) = found_url.result
if url in unique_previewable_urls and url not in processed_urls:
processed_urls.add(url)
else:
continue
if not self.is_absolute_url(url):
if self.is_image(url):
self.handle_image_inlining(root, found_url)
# We don't have a strong use case for doing URL preview for relative links.
continue
dropbox_image = self.dropbox_image(url)
if dropbox_image is not None:
class_attr = "message_inline_ref"
is_image = dropbox_image["is_image"]
if is_image:
class_attr = "message_inline_image"
# Not making use of title and description of images
self.add_a(
root,
dropbox_image["image"],
url,
title=dropbox_image.get("title"),
desc=dropbox_image.get("desc", ""),
class_attr=class_attr,
already_thumbnailed=True,
)
continue
if self.is_image(url):
image_source = self.corrected_image_source(url)
if image_source is not None:
found_url = ResultWithFamily(
family=found_url.family,
result=(image_source, image_source),
)
self.handle_image_inlining(root, found_url)
continue
if get_tweet_id(url) is not None:
if rendered_tweet_count >= self.TWITTER_MAX_TO_PREVIEW:
# Only render at most one tweet per message
continue
twitter_data = self.twitter_link(url)
if twitter_data is None:
# This link is not actually a tweet known to twitter
continue
rendered_tweet_count += 1
self.handle_tweet_inlining(root, found_url, twitter_data)
continue
youtube = self.youtube_image(url)
if youtube is not None:
self.handle_youtube_url_inlining(root, found_url, youtube)
# NOTE: We don't `continue` here, to allow replacing the URL with
# the title, if INLINE_URL_EMBED_PREVIEW feature is enabled.
# The entire preview would ideally be shown only if the feature
# is enabled, but URL previews are a beta feature and YouTube
# previews are pretty stable.
db_data = self.md.zulip_db_data
if db_data and db_data["sent_by_bot"]:
continue
if not self.md.url_embed_preview_enabled:
continue
try:
extracted_data = link_preview.link_embed_data_from_cache(url)
except NotFoundInCache:
self.md.zulip_message.links_for_preview.add(url)
continue
if extracted_data:
if youtube is not None:
title = self.youtube_title(extracted_data)
if title is not None:
if url == text:
found_url.family.child.text = title
else:
found_url.family.child.text = text
continue
self.add_embed(root, url, extracted_data)
if self.vimeo_id(url):
title = self.vimeo_title(extracted_data)
if title:
if url == text:
found_url.family.child.text = title
else:
found_url.family.child.text = text
class Timestamp(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]:
time_input_string = match.group("time")
timestamp = None
try:
timestamp = dateutil.parser.parse(time_input_string, tzinfos=common_timezones)
except ValueError:
try:
timestamp = datetime.datetime.fromtimestamp(float(time_input_string))
except ValueError:
pass
if not timestamp:
error_element = Element("span")
error_element.set("class", "timestamp-error")
error_element.text = markdown.util.AtomicString(
f"Invalid time format: {time_input_string}"
)
return error_element
# Use HTML5 <time> element for valid timestamps.
time_element = Element("time")
if timestamp.tzinfo:
timestamp = timestamp.astimezone(datetime.timezone.utc)
else:
timestamp = timestamp.replace(tzinfo=datetime.timezone.utc)
time_element.set("datetime", timestamp.isoformat().replace("+00:00", "Z"))
# Set text to initial input, so simple clients translating
# HTML to text will at least display something.
time_element.text = markdown.util.AtomicString(time_input_string)
return time_element
# All of our emojis(non ZWJ sequences) belong to one of these Unicode blocks:
# \U0001f100-\U0001f1ff - Enclosed Alphanumeric Supplement
# \U0001f200-\U0001f2ff - Enclosed Ideographic Supplement
# \U0001f300-\U0001f5ff - Miscellaneous Symbols and Pictographs
# \U0001f600-\U0001f64f - Emoticons (Emoji)
# \U0001f680-\U0001f6ff - Transport and Map Symbols
# \U0001f900-\U0001f9ff - Supplemental Symbols and Pictographs
# \u2000-\u206f - General Punctuation
# \u2300-\u23ff - Miscellaneous Technical
# \u2400-\u243f - Control Pictures
# \u2440-\u245f - Optical Character Recognition
# \u2460-\u24ff - Enclosed Alphanumerics
# \u2500-\u257f - Box Drawing
# \u2580-\u259f - Block Elements
# \u25a0-\u25ff - Geometric Shapes
# \u2600-\u26ff - Miscellaneous Symbols
# \u2700-\u27bf - Dingbats
# \u2900-\u297f - Supplemental Arrows-B
# \u2b00-\u2bff - Miscellaneous Symbols and Arrows
# \u3000-\u303f - CJK Symbols and Punctuation
# \u3200-\u32ff - Enclosed CJK Letters and Months
unicode_emoji_regex = (
"(?P<syntax>["
"\U0001F100-\U0001F64F"
"\U0001F680-\U0001F6FF"
"\U0001F900-\U0001F9FF"
"\u2000-\u206F"
"\u2300-\u27BF"
"\u2900-\u297F"
"\u2B00-\u2BFF"
"\u3000-\u303F"
"\u3200-\u32FF"
"])"
)
# The equivalent JS regex is \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f]|\ud83d[\ude80-\udeff]|
# \ud83e[\udd00-\uddff]|[\u2000-\u206f]|[\u2300-\u27bf]|[\u2b00-\u2bff]|[\u3000-\u303f]|
# [\u3200-\u32ff]. See below comments for explanation. The JS regex is used by marked.js for
# frontend Unicode emoji processing.
# The JS regex \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f] represents U0001f100-\U0001f64f
# The JS regex \ud83d[\ude80-\udeff] represents \U0001f680-\U0001f6ff
# The JS regex \ud83e[\udd00-\uddff] represents \U0001f900-\U0001f9ff
# The JS regex [\u2000-\u206f] represents \u2000-\u206f
# The JS regex [\u2300-\u27bf] represents \u2300-\u27bf
# Similarly other JS regexes can be mapped to the respective Unicode blocks.
# For more information, please refer to the following article:
# http://crocodillon.com/blog/parsing-emoji-unicode-in-javascript
def make_emoji(codepoint: str, display_string: str) -> Element:
# Replace underscore in emoji's title with space
title = display_string[1:-1].replace("_", " ")
span = Element("span")
span.set("class", f"emoji emoji-{codepoint}")
span.set("title", title)
span.set("role", "img")
span.set("aria-label", title)
span.text = markdown.util.AtomicString(display_string)
return span
def make_realm_emoji(src: str, display_string: str) -> Element:
elt = Element("img")
elt.set("src", src)
elt.set("class", "emoji")
elt.set("alt", display_string)
elt.set("title", display_string[1:-1].replace("_", " "))
return elt
def unicode_emoji_to_codepoint(unicode_emoji: str) -> str:
codepoint = hex(ord(unicode_emoji))[2:]
# Unicode codepoints are a minimum of length 4, padded
# with zeroes if the length is less than four.
while len(codepoint) < 4:
codepoint = "0" + codepoint
return codepoint
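# Illustrative examples (not from the original source):
#
#     unicode_emoji_to_codepoint("\u2600")      # -> "2600" (already 4 digits)
#     unicode_emoji_to_codepoint("\U0001f604")  # -> "1f604" (5 hex digits)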
class EmoticonTranslation(markdown.inlinepatterns.Pattern):
""" Translates emoticons like `:)` into emoji like `:smile:`. """
def handleMatch(self, match: Match[str]) -> Optional[Element]:
db_data = self.md.zulip_db_data
if db_data is None or not db_data["translate_emoticons"]:
return None
emoticon = match.group("emoticon")
translated = translate_emoticons(emoticon)
name = translated[1:-1]
return make_emoji(name_to_codepoint[name], translated)
class UnicodeEmoji(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]:
orig_syntax = match.group("syntax")
codepoint = unicode_emoji_to_codepoint(orig_syntax)
if codepoint in codepoint_to_name:
display_string = ":" + codepoint_to_name[codepoint] + ":"
return make_emoji(codepoint, display_string)
else:
return None
class Emoji(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Union[str, Element]]:
orig_syntax = match.group("syntax")
name = orig_syntax[1:-1]
active_realm_emoji: Dict[str, Dict[str, str]] = {}
db_data = self.md.zulip_db_data
if db_data is not None:
active_realm_emoji = db_data["active_realm_emoji"]
if self.md.zulip_message and name in active_realm_emoji:
return make_realm_emoji(active_realm_emoji[name]["source_url"], orig_syntax)
elif name == "zulip":
return make_realm_emoji(
"/static/generated/emoji/images/emoji/unicode/zulip.png", orig_syntax
)
elif name in name_to_codepoint:
return make_emoji(name_to_codepoint[name], orig_syntax)
else:
return orig_syntax
def content_has_emoji_syntax(content: str) -> bool:
return re.search(EMOJI_REGEX, content) is not None
class Tex(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Element:
rendered = render_tex(match.group("body"), is_inline=True)
if rendered is not None:
return self.md.htmlStash.store(rendered)
else: # Something went wrong while rendering
span = Element("span")
span.set("class", "tex-error")
span.text = markdown.util.AtomicString("$$" + match.group("body") + "$$")
return span
def sanitize_url(url: str) -> Optional[str]:
"""
Sanitize a URL against XSS attacks.
See the docstring on markdown.inlinepatterns.LinkPattern.sanitize_url.
"""
try:
parts = urllib.parse.urlparse(url.replace(" ", "%20"))
scheme, netloc, path, params, query, fragment = parts
except ValueError:
# Bad URL - so bad it couldn't be parsed.
return ""
# If there is no scheme or netloc and there is a '@' in the path,
# treat it as a mailto: and set the appropriate scheme
if scheme == "" and netloc == "" and "@" in path:
scheme = "mailto"
elif scheme == "" and netloc == "" and len(path) > 0 and path[0] == "/":
# Allow domain-relative links
return urllib.parse.urlunparse(("", "", path, params, query, fragment))
elif (scheme, netloc, path, params, query) == ("", "", "", "", "") and len(fragment) > 0:
# Allow fragment links
return urllib.parse.urlunparse(("", "", "", "", "", fragment))
# Zulip modification: If scheme is not specified, assume http://
# We re-enter sanitize_url because netloc etc. need to be re-parsed.
if not scheme:
return sanitize_url("http://" + url)
locless_schemes = ["mailto", "news", "file", "bitcoin"]
if netloc == "" and scheme not in locless_schemes:
# This fails regardless of anything else.
# Return immediately to save additional processing
return None
# Upstream code will accept a URL like javascript://foo because it
# appears to have a netloc. Additionally there are plenty of other
# schemes that do weird things like launch external programs. To be
# on the safe side, we whitelist the scheme.
if scheme not in ("http", "https", "ftp", "mailto", "file", "bitcoin"):
return None
# Upstream code scans path, parameters, and query for colon characters
# because
#
# some aliases [for javascript:] will appear to urllib.parse to have
# no scheme. On top of that relative links (i.e.: "foo/bar.html")
# have no scheme.
#
# We already converted an empty scheme to http:// above, so we skip
# the colon check, which would also forbid a lot of legitimate URLs.
# URL passes all tests. Return URL as-is.
return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
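# Illustrative examples (hypothetical inputs) of the rules above:
#
#     sanitize_url("zulip.com/some/path")      # -> "http://zulip.com/some/path"
#     sanitize_url("[email protected]")          # -> "mailto:[email protected]"
#     sanitize_url("javascript:alert(1)")      # -> None (disallowed scheme)
#     sanitize_url("/user_uploads/some/file")  # -> "/user_uploads/some/file"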
def url_to_a(
db_data: Optional[DbData], url: str, text: Optional[str] = None
) -> Union[Element, str]:
a = Element("a")
href = sanitize_url(url)
if href is None:
# Rejected by sanitize_url; render it as plain text.
return url
if text is None:
text = markdown.util.AtomicString(url)
href = rewrite_local_links_to_relative(db_data, href)
a.set("href", href)
a.text = text
return a
class CompiledPattern(markdown.inlinepatterns.Pattern):
def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
# This is similar to the superclass's small __init__ function,
# but we skip the compilation step and let the caller give us
# a compiled regex.
self.compiled_re = compiled_re
self.md = md
class AutoLink(CompiledPattern):
def handleMatch(self, match: Match[str]) -> ElementStringNone:
url = match.group("url")
db_data = self.md.zulip_db_data
return url_to_a(db_data, url)
class OListProcessor(sane_lists.SaneOListProcessor):
def __init__(self, parser: BlockParser) -> None:
parser.md.tab_length = 2
super().__init__(parser)
parser.md.tab_length = 4
class UListProcessor(sane_lists.SaneUListProcessor):
""" Unordered lists, but with 2-space indent """
def __init__(self, parser: BlockParser) -> None:
parser.md.tab_length = 2
super().__init__(parser)
parser.md.tab_length = 4
class ListIndentProcessor(markdown.blockprocessors.ListIndentProcessor):
"""Process unordered list blocks.
Based on markdown.blockprocessors.ListIndentProcessor, but with 2-space indent
"""
def __init__(self, parser: BlockParser) -> None:
# HACK: Set the tab length to 2 just for the initialization of
# this class, so that bulleted lists (and only bulleted lists)
# work off 2-space indentation.
parser.md.tab_length = 2
super().__init__(parser)
parser.md.tab_length = 4
class HashHeaderProcessor(markdown.blockprocessors.HashHeaderProcessor):
"""Process hash headers.
Based on markdown.blockprocessors.HashHeaderProcessor, but requires space for heading.
"""
# Original regex for hashheader is
# RE = re.compile(r'(?:^|\n)(?P<level>#{1,6})(?P<header>(?:\\.|[^\\])*?)#*(?:\n|$)')
RE = re.compile(r"(?:^|\n)(?P<level>#{1,6})\s(?P<header>(?:\\.|[^\\])*?)#*(?:\n|$)")
class BlockQuoteProcessor(markdown.blockprocessors.BlockQuoteProcessor):
"""Process block quotes.
Based on markdown.blockprocessors.BlockQuoteProcessor, but with 2-space indent
"""
# Original regex for blockquote is RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)')
RE = re.compile(r"(^|\n)(?!(?:[ ]{0,3}>\s*(?:$|\n))*(?:$|\n))" r"[ ]{0,3}>[ ]?(.*)")
mention_re = re.compile(mention.find_mentions)
# run() is very slightly forked from the base class; see notes below.
def run(self, parent: Element, blocks: List[str]) -> None:
block = blocks.pop(0)
m = self.RE.search(block)
if m:
before = block[: m.start()] # Lines before blockquote
# Pass lines before blockquote in recursively for parsing first.
self.parser.parseBlocks(parent, [before])
# Remove ``> `` from beginning of each line.
block = "\n".join([self.clean(line) for line in block[m.start() :].split("\n")])
# Zulip modification: The next line is patched to match
# CommonMark rather than original Markdown. In original
# Markdown, blockquotes with a blank line between them were
# merged, which makes it impossible to break a blockquote with
# a blank line intentionally.
#
# This is a new blockquote. Create a new parent element.
quote = etree.SubElement(parent, "blockquote")
# Recursively parse block with blockquote as parent.
# change parser state so blockquotes embedded in lists use p tags
self.parser.state.set("blockquote")
self.parser.parseChunk(quote, block)
self.parser.state.reset()
def clean(self, line: str) -> str:
# Silence all the mentions inside blockquotes
line = re.sub(self.mention_re, lambda m: "@_{}".format(m.group("match")), line)
# And then run the upstream processor's code for removing the '>'
return super().clean(line)
@dataclass
class Fence:
fence_str: str
is_code: bool
class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
"""Allows list blocks that come directly after another block
to be rendered as a list.
Detects paragraphs that have a matching list item that comes
    directly after a line of text, and inserts a newline between
    them to satisfy Markdown."""
LI_RE = re.compile(r"^[ ]*([*+-]|\d\.)[ ]+(.*)", re.MULTILINE)
def run(self, lines: List[str]) -> List[str]:
""" Insert a newline between a paragraph and ulist if missing """
inserts = 0
in_code_fence: bool = False
open_fences: List[Fence] = []
copy = lines[:]
for i in range(len(lines) - 1):
# Ignore anything that is inside a fenced code block but not quoted.
# We ignore all lines where some parent is a non quote code block.
m = FENCE_RE.match(lines[i])
if m:
fence_str = m.group("fence")
                is_code = m.group("lang") not in ("quote", "quoted")
                has_open_fences = len(open_fences) > 0
matches_last_fence = (
fence_str == open_fences[-1].fence_str if has_open_fences else False
)
closes_last_fence = not m.group("lang") and matches_last_fence
if closes_last_fence:
open_fences.pop()
else:
open_fences.append(Fence(fence_str, is_code))
in_code_fence = any(fence.is_code for fence in open_fences)
# If we're not in a fenced block and we detect an upcoming list
# hanging off any block (including a list of another type), add
# a newline.
li1 = self.LI_RE.match(lines[i])
li2 = self.LI_RE.match(lines[i + 1])
if not in_code_fence and lines[i]:
if (li2 and not li1) or (
li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1)
):
copy.insert(i + inserts + 1, "")
inserts += 1
return copy
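# Illustrative behavior (assumes any configured engine `md`): a list that
# hangs directly off a paragraph gets a blank line inserted before it.
#
#     >>> MarkdownListPreprocessor(md).run(["Shopping list:", "* milk", "* eggs"])
#     ['Shopping list:', '', '* milk', '* eggs']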
# Name for the outer capture group we use to separate whitespace and
# other delimiters from the actual content. This value won't be an
# option in user-entered capture groups.
OUTER_CAPTURE_GROUP = "linkifier_actual_match"
def prepare_linkifier_pattern(source: str) -> str:
"""Augment a linkifier so it only matches after start-of-string,
whitespace, or opening delimiters, won't match if there are word
characters directly after, and saves what was matched as
OUTER_CAPTURE_GROUP."""
return fr"""(?<![^\s'"\(,:<])(?P<{OUTER_CAPTURE_GROUP}>{source})(?!\w)"""
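# For example (illustrative, with a hypothetical issue-number linkifier):
#
#     >>> pattern = prepare_linkifier_pattern(r"#(?P<id>[0-9]+)")
#     >>> re.search(pattern, "see #123 for details").group(OUTER_CAPTURE_GROUP)
#     '#123'
#     >>> re.search(pattern, "color#123") is None   # word char directly before
#     True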
# Given a regular expression pattern, linkifies groups that match it
# using the provided format string to construct the URL.
class LinkifierPattern(markdown.inlinepatterns.Pattern):
""" Applied a given linkifier to the input """
def __init__(
self,
source_pattern: str,
format_string: str,
markdown_instance: Optional[markdown.Markdown] = None,
) -> None:
self.pattern = prepare_linkifier_pattern(source_pattern)
self.format_string = format_string
markdown.inlinepatterns.Pattern.__init__(self, self.pattern, markdown_instance)
def handleMatch(self, m: Match[str]) -> Union[Element, str]:
db_data = self.md.zulip_db_data
return url_to_a(
db_data,
self.format_string % m.groupdict(),
markdown.util.AtomicString(m.group(OUTER_CAPTURE_GROUP)),
)
class UserMentionPattern(markdown.inlinepatterns.InlineProcessor):
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
match = m.group("match")
silent = m.group("silent") == "_"
db_data = self.md.zulip_db_data
if self.md.zulip_message and db_data is not None:
if match.startswith("**") and match.endswith("**"):
name = match[2:-2]
else:
return None, None, None
wildcard = mention.user_mention_matches_wildcard(name)
# For @**|id** and @**name|id** mention syntaxes.
id_syntax_match = re.match(r"(?P<full_name>.+)?\|(?P<user_id>\d+)$", name)
if id_syntax_match:
full_name = id_syntax_match.group("full_name")
id = int(id_syntax_match.group("user_id"))
user = db_data["mention_data"].get_user_by_id(id)
# For @**name|id**, we need to specifically check that
# name matches the full_name of user in mention_data.
# This enforces our decision that
# @**user_1_name|id_for_user_2** should be invalid syntax.
if full_name:
if user and user["full_name"] != full_name:
return None, None, None
else:
# For @**name** syntax.
user = db_data["mention_data"].get_user_by_name(name)
if wildcard:
self.md.zulip_message.mentions_wildcard = True
user_id = "*"
elif user:
if not silent:
self.md.zulip_message.mentions_user_ids.add(user["id"])
name = user["full_name"]
user_id = str(user["id"])
else:
# Don't highlight @mentions that don't refer to a valid user
return None, None, None
el = Element("span")
el.set("data-user-id", user_id)
text = f"{name}"
if silent:
el.set("class", "user-mention silent")
else:
el.set("class", "user-mention")
text = f"@{text}"
el.text = markdown.util.AtomicString(text)
return el, m.start(), m.end()
return None, None, None
class UserGroupMentionPattern(markdown.inlinepatterns.InlineProcessor):
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
match = m.group(1)
db_data = self.md.zulip_db_data
if self.md.zulip_message and db_data is not None:
name = extract_user_group(match)
user_group = db_data["mention_data"].get_user_group(name)
if user_group:
self.md.zulip_message.mentions_user_group_ids.add(user_group.id)
name = user_group.name
user_group_id = str(user_group.id)
else:
# Don't highlight @-mentions that don't refer to a valid user
# group.
return None, None, None
el = Element("span")
el.set("class", "user-group-mention")
el.set("data-user-group-id", user_group_id)
text = f"@{name}"
el.text = markdown.util.AtomicString(text)
return el, m.start(), m.end()
return None, None, None
class StreamPattern(markdown.inlinepatterns.InlineProcessor):
def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
# This is similar to the superclass's small __init__ function,
# but we skip the compilation step and let the caller give us
# a compiled regex.
self.compiled_re = compiled_re
self.md = md
def find_stream_by_name(self, name: str) -> Optional[Dict[str, Any]]:
db_data = self.md.zulip_db_data
if db_data is None:
return None
stream = db_data["stream_names"].get(name)
return stream
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
name = m.group("stream_name")
if self.md.zulip_message:
stream = self.find_stream_by_name(name)
if stream is None:
return None, None, None
el = Element("a")
el.set("class", "stream")
el.set("data-stream-id", str(stream["id"]))
# TODO: We should quite possibly not be specifying the
# href here and instead having the browser auto-add the
# href when it processes a message with one of these, to
# provide more clarity to API clients.
# Also do the same for StreamTopicPattern.
stream_url = encode_stream(stream["id"], name)
el.set("href", f"/#narrow/stream/{stream_url}")
text = f"#{name}"
el.text = markdown.util.AtomicString(text)
return el, m.start(), m.end()
return None, None, None
class StreamTopicPattern(markdown.inlinepatterns.InlineProcessor):
def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
# This is similar to the superclass's small __init__ function,
# but we skip the compilation step and let the caller give us
# a compiled regex.
self.compiled_re = compiled_re
self.md = md
def find_stream_by_name(self, name: str) -> Optional[Dict[str, Any]]:
db_data = self.md.zulip_db_data
if db_data is None:
return None
stream = db_data["stream_names"].get(name)
return stream
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
stream_name = m.group("stream_name")
topic_name = m.group("topic_name")
if self.md.zulip_message:
stream = self.find_stream_by_name(stream_name)
if stream is None or topic_name is None:
return None, None, None
el = Element("a")
el.set("class", "stream-topic")
el.set("data-stream-id", str(stream["id"]))
stream_url = encode_stream(stream["id"], stream_name)
topic_url = hash_util_encode(topic_name)
link = f"/#narrow/stream/{stream_url}/topic/{topic_url}"
el.set("href", link)
text = f"#{stream_name} > {topic_name}"
el.text = markdown.util.AtomicString(text)
return el, m.start(), m.end()
return None, None, None
def possible_linked_stream_names(content: str) -> Set[str]:
matches = re.findall(STREAM_LINK_REGEX, content, re.VERBOSE)
for match in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE):
matches.append(match.group("stream_name"))
return set(matches)
class AlertWordNotificationProcessor(markdown.preprocessors.Preprocessor):
allowed_before_punctuation = {" ", "\n", "(", '"', ".", ",", "'", ";", "[", "*", "`", ">"}
allowed_after_punctuation = {
" ",
"\n",
")",
'",',
"?",
":",
".",
",",
"'",
";",
"]",
"!",
"*",
"`",
}
    def check_valid_start_position(self, content: str, index: int) -> bool:
        return index <= 0 or content[index] in self.allowed_before_punctuation

    def check_valid_end_position(self, content: str, index: int) -> bool:
        return index >= len(content) or content[index] in self.allowed_after_punctuation
def run(self, lines: List[str]) -> List[str]:
db_data = self.md.zulip_db_data
if self.md.zulip_message and db_data is not None:
# We check for alert words here, the set of which are
# dependent on which users may see this message.
#
# Our caller passes in the list of possible_words. We
# don't do any special rendering; we just append the alert words
# we find to the set self.md.zulip_message.alert_words.
realm_alert_words_automaton = db_data["realm_alert_words_automaton"]
if realm_alert_words_automaton is not None:
content = "\n".join(lines).lower()
for end_index, (original_value, user_ids) in realm_alert_words_automaton.iter(
content
):
if self.check_valid_start_position(
content, end_index - len(original_value)
) and self.check_valid_end_position(content, end_index + 1):
self.md.zulip_message.user_ids_with_alert_words.update(user_ids)
return lines
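# Boundary checks, illustrated: in "Going to the zoo!" the alert word "zoo"
# spans indices 13-15, so the processor tests index 12 (before) and 16 (after).
# Both ' ' and '!' are allowed punctuation, so the word counts as a match:
#
#     proc.check_valid_start_position("Going to the zoo!", 12)  # -> True
#     proc.check_valid_end_position("Going to the zoo!", 16)    # -> True
#
# where `proc` is an AlertWordNotificationProcessor bound to an engine.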
class LinkInlineProcessor(markdown.inlinepatterns.LinkInlineProcessor):
def zulip_specific_link_changes(self, el: Element) -> Union[None, Element]:
href = el.get("href")
assert href is not None
# Sanitize URL or don't parse link. See linkify_tests in markdown_test_cases for banned syntax.
href = sanitize_url(self.unescape(href.strip()))
if href is None:
return None # no-op; the link is not processed.
# Rewrite local links to be relative
db_data = self.md.zulip_db_data
href = rewrite_local_links_to_relative(db_data, href)
# Make changes to <a> tag attributes
el.set("href", href)
# Show link href if title is empty
if not el.text or not el.text.strip():
el.text = href
# Prevent linkifiers from running on the content of a Markdown link, breaking up the link.
# This is a monkey-patch, but it might be worth sending a version of this change upstream.
el.text = markdown.util.AtomicString(el.text)
return el
def handleMatch( # type: ignore[override] # supertype incompatible with supersupertype
self, m: Match[str], data: str
) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
ret = super().handleMatch(m, data)
if ret[0] is not None:
el: Optional[Element]
el, match_start, index = ret
el = self.zulip_specific_link_changes(el)
if el is not None:
return el, match_start, index
return None, None, None
def get_sub_registry(r: markdown.util.Registry, keys: List[str]) -> markdown.util.Registry:
# Registry is a new class added by Python-Markdown to replace OrderedDict.
# Since Registry doesn't support .keys(), it is easier to make a new
# object instead of removing keys from the existing object.
new_r = markdown.util.Registry()
for k in keys:
new_r.register(r[k], k, r.get_index_for_name(k))
return new_r
# These are used as keys ("linkifiers_keys") to md_engines and the respective
# linkifier caches
DEFAULT_MARKDOWN_KEY = -1
ZEPHYR_MIRROR_MARKDOWN_KEY = -2
class Markdown(markdown.Markdown):
zulip_message: Optional[Message]
zulip_realm: Optional[Realm]
zulip_db_data: Optional[DbData]
image_preview_enabled: bool
url_embed_preview_enabled: bool
def __init__(
self,
linkifiers: List[LinkifierDict],
linkifiers_key: int,
email_gateway: bool,
) -> None:
self.linkifiers = linkifiers
self.linkifiers_key = linkifiers_key
self.email_gateway = email_gateway
super().__init__(
extensions=[
nl2br.makeExtension(),
tables.makeExtension(),
codehilite.makeExtension(
linenums=False,
guess_lang=False,
),
],
)
self.set_output_format("html")
def build_parser(self) -> markdown.Markdown:
# Build the parser using selected default features from Python-Markdown.
# The complete list of all available processors can be found in the
# super().build_parser() function.
#
# Note: for any Python-Markdown updates, manually check if we want any
# of the new features added upstream or not; they wouldn't get
# included by default.
self.preprocessors = self.build_preprocessors()
self.parser = self.build_block_parser()
self.inlinePatterns = self.build_inlinepatterns()
self.treeprocessors = self.build_treeprocessors()
self.postprocessors = self.build_postprocessors()
self.handle_zephyr_mirror()
return self
def build_preprocessors(self) -> markdown.util.Registry:
# We disable the following preprocessors from upstream:
#
# html_block - insecure
# reference - references don't make sense in a chat context.
preprocessors = markdown.util.Registry()
preprocessors.register(MarkdownListPreprocessor(self), "hanging_lists", 35)
preprocessors.register(
markdown.preprocessors.NormalizeWhitespace(self), "normalize_whitespace", 30
)
preprocessors.register(fenced_code.FencedBlockPreprocessor(self), "fenced_code_block", 25)
preprocessors.register(
AlertWordNotificationProcessor(self), "custom_text_notifications", 20
)
return preprocessors
def build_block_parser(self) -> BlockParser:
# We disable the following blockparsers from upstream:
#
# indent - replaced by ours
# setextheader - disabled; we only support hashheaders for headings
# olist - replaced by ours
# ulist - replaced by ours
# quote - replaced by ours
parser = BlockParser(self)
parser.blockprocessors.register(
markdown.blockprocessors.EmptyBlockProcessor(parser), "empty", 95
)
parser.blockprocessors.register(ListIndentProcessor(parser), "indent", 90)
if not self.email_gateway:
parser.blockprocessors.register(
markdown.blockprocessors.CodeBlockProcessor(parser), "code", 85
)
parser.blockprocessors.register(HashHeaderProcessor(parser), "hashheader", 80)
# We get priority 75 from 'table' extension
parser.blockprocessors.register(markdown.blockprocessors.HRProcessor(parser), "hr", 70)
parser.blockprocessors.register(OListProcessor(parser), "olist", 65)
parser.blockprocessors.register(UListProcessor(parser), "ulist", 60)
parser.blockprocessors.register(BlockQuoteProcessor(parser), "quote", 55)
parser.blockprocessors.register(
markdown.blockprocessors.ParagraphProcessor(parser), "paragraph", 50
)
return parser
def build_inlinepatterns(self) -> markdown.util.Registry:
# We disable the following upstream inline patterns:
#
# backtick - replaced by ours
# escape - probably will re-add at some point.
# link - replaced by ours
# image_link - replaced by ours
# autolink - replaced by ours
# automail - replaced by ours
# linebreak - we use nl2br and consider that good enough
# html - insecure
# reference - references not useful
# image_reference - references not useful
# short_reference - references not useful
# ---------------------------------------------------
# strong_em - for these three patterns,
# strong2 - we have our own versions where
# emphasis2 - we disable _ for bold and emphasis
# Declare regexes for clean single line calls to .register().
NOT_STRONG_RE = markdown.inlinepatterns.NOT_STRONG_RE
# Custom strikethrough syntax: ~~foo~~
DEL_RE = r"(?<!~)(\~\~)([^~\n]+?)(\~\~)(?!~)"
# Custom bold syntax: **foo** but not __foo__
        # The string inside ** must start and end with a word character;
        # this is needed for things like "const char *x = (char *)y"
EMPHASIS_RE = r"(\*)(?!\s+)([^\*^\n]+)(?<!\s)\*"
ENTITY_RE = markdown.inlinepatterns.ENTITY_RE
STRONG_EM_RE = r"(\*\*\*)(?!\s+)([^\*^\n]+)(?<!\s)\*\*\*"
# Add inline patterns. We use a custom numbering of the
# rules, that preserves the order from upstream but leaves
# space for us to add our own.
reg = markdown.util.Registry()
reg.register(BacktickInlineProcessor(markdown.inlinepatterns.BACKTICK_RE), "backtick", 105)
reg.register(
markdown.inlinepatterns.DoubleTagPattern(STRONG_EM_RE, "strong,em"), "strong_em", 100
)
reg.register(UserMentionPattern(mention.find_mentions, self), "usermention", 95)
reg.register(
Tex(r"\B(?<!\$)\$\$(?P<body>[^\n_$](\\\$|[^$\n])*)\$\$(?!\$)\B", self), "tex", 90
)
reg.register(StreamTopicPattern(get_compiled_stream_topic_link_regex(), self), "topic", 87)
reg.register(StreamPattern(get_compiled_stream_link_regex(), self), "stream", 85)
reg.register(Timestamp(r"<time:(?P<time>[^>]*?)>"), "timestamp", 75)
reg.register(
UserGroupMentionPattern(mention.user_group_mentions, self), "usergroupmention", 65
)
reg.register(LinkInlineProcessor(markdown.inlinepatterns.LINK_RE, self), "link", 60)
reg.register(AutoLink(get_web_link_regex(), self), "autolink", 55)
# Reserve priority 45-54 for linkifiers
reg = self.register_linkifiers(reg)
reg.register(markdown.inlinepatterns.HtmlInlineProcessor(ENTITY_RE, self), "entity", 40)
reg.register(
markdown.inlinepatterns.SimpleTagPattern(r"(\*\*)([^\n]+?)\2", "strong"), "strong", 35
)
reg.register(markdown.inlinepatterns.SimpleTagPattern(EMPHASIS_RE, "em"), "emphasis", 30)
reg.register(markdown.inlinepatterns.SimpleTagPattern(DEL_RE, "del"), "del", 25)
reg.register(
markdown.inlinepatterns.SimpleTextInlineProcessor(NOT_STRONG_RE), "not_strong", 20
)
reg.register(Emoji(EMOJI_REGEX, self), "emoji", 15)
reg.register(EmoticonTranslation(emoticon_regex, self), "translate_emoticons", 10)
# We get priority 5 from 'nl2br' extension
reg.register(UnicodeEmoji(unicode_emoji_regex), "unicodeemoji", 0)
return reg
def register_linkifiers(self, inlinePatterns: markdown.util.Registry) -> markdown.util.Registry:
for linkifier in self.linkifiers:
pattern = linkifier["pattern"]
inlinePatterns.register(
LinkifierPattern(pattern, linkifier["url_format"], self),
f"linkifiers/{pattern}",
45,
)
return inlinePatterns
def build_treeprocessors(self) -> markdown.util.Registry:
# Here we build all the processors from upstream, plus a few of our own.
treeprocessors = markdown.util.Registry()
# We get priority 30 from 'hilite' extension
treeprocessors.register(markdown.treeprocessors.InlineProcessor(self), "inline", 25)
treeprocessors.register(markdown.treeprocessors.PrettifyTreeprocessor(self), "prettify", 20)
treeprocessors.register(
InlineInterestingLinkProcessor(self), "inline_interesting_links", 15
)
if settings.CAMO_URI:
treeprocessors.register(InlineImageProcessor(self), "rewrite_images_proxy", 10)
return treeprocessors
def build_postprocessors(self) -> markdown.util.Registry:
# These are the default Python-Markdown processors, unmodified.
postprocessors = markdown.util.Registry()
postprocessors.register(markdown.postprocessors.RawHtmlPostprocessor(self), "raw_html", 20)
postprocessors.register(
markdown.postprocessors.AndSubstitutePostprocessor(), "amp_substitute", 15
)
postprocessors.register(markdown.postprocessors.UnescapePostprocessor(), "unescape", 10)
return postprocessors
def handle_zephyr_mirror(self) -> None:
if self.linkifiers_key == ZEPHYR_MIRROR_MARKDOWN_KEY:
# Disable almost all inline patterns for zephyr mirror
# users' traffic that is mirrored. Note that
# inline_interesting_links is a treeprocessor and thus is
# not removed
self.inlinePatterns = get_sub_registry(self.inlinePatterns, ["autolink"])
self.treeprocessors = get_sub_registry(
self.treeprocessors, ["inline_interesting_links", "rewrite_images_proxy"]
)
# insert new 'inline' processor because we have changed self.inlinePatterns
# but InlineProcessor copies md as self.md in __init__.
self.treeprocessors.register(
markdown.treeprocessors.InlineProcessor(self), "inline", 25
)
self.preprocessors = get_sub_registry(self.preprocessors, ["custom_text_notifications"])
self.parser.blockprocessors = get_sub_registry(
self.parser.blockprocessors, ["paragraph"]
)
md_engines: Dict[Tuple[int, bool], Markdown] = {}
linkifier_data: Dict[int, List[LinkifierDict]] = {}
def make_md_engine(linkifiers_key: int, email_gateway: bool) -> None:
md_engine_key = (linkifiers_key, email_gateway)
if md_engine_key in md_engines:
del md_engines[md_engine_key]
linkifiers = linkifier_data[linkifiers_key]
md_engines[md_engine_key] = Markdown(
linkifiers=linkifiers,
linkifiers_key=linkifiers_key,
email_gateway=email_gateway,
)
# Split the topic name into multiple sections so that we can easily use
# our common single link matching regex on it.
basic_link_splitter = re.compile(r"[ !;\?\),\'\"]")
# Security note: We don't do any HTML escaping in this
# function on the URLs; they are expected to be HTML-escaped when
# rendered by clients (just as links rendered into message bodies
# are validated and escaped inside `url_to_a`).
def topic_links(linkifiers_key: int, topic_name: str) -> List[Dict[str, str]]:
matches: List[Dict[str, Union[str, int]]] = []
linkifiers = linkifiers_for_realm(linkifiers_key)
for linkifier in linkifiers:
raw_pattern = linkifier["pattern"]
url_format_string = linkifier["url_format"]
pattern = prepare_linkifier_pattern(raw_pattern)
for m in re.finditer(pattern, topic_name):
match_details = m.groupdict()
match_text = match_details["linkifier_actual_match"]
# We format the linkifier's url string using the matched text.
# Also, we include the matched text in the response, so that our clients
# don't have to implement any logic of their own to get back the text.
matches += [
dict(
url=url_format_string % match_details,
text=match_text,
index=topic_name.find(match_text),
)
]
# Also make raw URLs navigable.
for sub_string in basic_link_splitter.split(topic_name):
link_match = re.match(get_web_link_regex(), sub_string)
if link_match:
actual_match_url = link_match.group("url")
result = urlsplit(actual_match_url)
if not result.scheme:
if not result.netloc:
i = (result.path + "/").index("/")
result = result._replace(netloc=result.path[:i], path=result.path[i:])
url = result._replace(scheme="https").geturl()
else:
url = actual_match_url
matches.append(
dict(url=url, text=actual_match_url, index=topic_name.find(actual_match_url))
)
# In order to preserve the order in which the links occur, we sort the matched text
# based on its starting index in the topic. We pop the index field before returning.
matches = sorted(matches, key=lambda k: k["index"])
return [{k: str(v) for k, v in match.items() if k != "index"} for match in matches]
def maybe_update_markdown_engines(linkifiers_key: int, email_gateway: bool) -> None:
global linkifier_data
linkifiers = linkifiers_for_realm(linkifiers_key)
if linkifiers_key not in linkifier_data or linkifier_data[linkifiers_key] != linkifiers:
# Linkifier data has changed, update `linkifier_data` and any
# of the existing Markdown engines using this set of linkifiers.
linkifier_data[linkifiers_key] = linkifiers
for email_gateway_flag in [True, False]:
if (linkifiers_key, email_gateway_flag) in md_engines:
                # Update only existing engines (if any); don't create new ones.
make_md_engine(linkifiers_key, email_gateway_flag)
if (linkifiers_key, email_gateway) not in md_engines:
        # The Markdown engine corresponding to this key doesn't exist, so create one.
make_md_engine(linkifiers_key, email_gateway)
# We want to log Markdown parser failures, but shouldn't log the actual input
# message for privacy reasons. The compromise is to replace all alphanumeric
# characters with 'x'.
#
# We also use repr() to improve reproducibility, and to escape terminal control
# codes, which can do surprisingly nasty things.
_privacy_re = re.compile("\\w", flags=re.UNICODE)
def privacy_clean_markdown(content: str) -> str:
return repr(_privacy_re.sub("x", content))
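# For example (illustrative):
#
#     >>> privacy_clean_markdown("my secret token is abc123")
#     "'xx xxxxxx xxxxx xx xxxxxx'"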
def get_possible_mentions_info(realm_id: int, mention_texts: Set[str]) -> List[FullNameInfo]:
if not mention_texts:
return []
q_list = set()
name_re = r"(?P<full_name>.+)?\|(?P<mention_id>\d+)$"
for mention_text in mention_texts:
name_syntax_match = re.match(name_re, mention_text)
if name_syntax_match:
full_name = name_syntax_match.group("full_name")
mention_id = name_syntax_match.group("mention_id")
if full_name:
# For **name|id** mentions as mention_id
# cannot be null inside this block.
q_list.add(Q(full_name__iexact=full_name, id=mention_id))
else:
# For **|id** syntax.
q_list.add(Q(id=mention_id))
else:
# For **name** syntax.
q_list.add(Q(full_name__iexact=mention_text))
rows = (
UserProfile.objects.filter(
realm_id=realm_id,
is_active=True,
)
.filter(
functools.reduce(lambda a, b: a | b, q_list),
)
.values(
"id",
"full_name",
"email",
)
)
return list(rows)
class MentionData:
def __init__(self, realm_id: int, content: str) -> None:
mention_texts, has_wildcards = possible_mentions(content)
possible_mentions_info = get_possible_mentions_info(realm_id, mention_texts)
self.full_name_info = {row["full_name"].lower(): row for row in possible_mentions_info}
self.user_id_info = {row["id"]: row for row in possible_mentions_info}
self.init_user_group_data(realm_id=realm_id, content=content)
self.has_wildcards = has_wildcards
def message_has_wildcards(self) -> bool:
return self.has_wildcards
def init_user_group_data(self, realm_id: int, content: str) -> None:
user_group_names = possible_user_group_mentions(content)
self.user_group_name_info = get_user_group_name_info(realm_id, user_group_names)
self.user_group_members: Dict[int, List[int]] = defaultdict(list)
group_ids = [group.id for group in self.user_group_name_info.values()]
if not group_ids:
# Early-return to avoid the cost of hitting the ORM,
# which shows up in profiles.
return
membership = UserGroupMembership.objects.filter(user_group_id__in=group_ids)
for info in membership.values("user_group_id", "user_profile_id"):
group_id = info["user_group_id"]
user_profile_id = info["user_profile_id"]
self.user_group_members[group_id].append(user_profile_id)
def get_user_by_name(self, name: str) -> Optional[FullNameInfo]:
# warning: get_user_by_name is not dependable if two
# users of the same full name are mentioned. Use
# get_user_by_id where possible.
return self.full_name_info.get(name.lower(), None)
def get_user_by_id(self, id: int) -> Optional[FullNameInfo]:
return self.user_id_info.get(id, None)
def get_user_ids(self) -> Set[int]:
"""
Returns the user IDs that might have been mentioned by this
content. Note that because this data structure has not parsed
the message and does not know about escaping/code blocks, this
will overestimate the list of user ids.
"""
return set(self.user_id_info.keys())
def get_user_group(self, name: str) -> Optional[UserGroup]:
return self.user_group_name_info.get(name.lower(), None)
def get_group_members(self, user_group_id: int) -> List[int]:
return self.user_group_members.get(user_group_id, [])
def get_user_group_name_info(realm_id: int, user_group_names: Set[str]) -> Dict[str, UserGroup]:
if not user_group_names:
return {}
rows = UserGroup.objects.filter(realm_id=realm_id, name__in=user_group_names)
dct = {row.name.lower(): row for row in rows}
return dct
def get_stream_name_info(realm: Realm, stream_names: Set[str]) -> Dict[str, FullNameInfo]:
if not stream_names:
return {}
q_list = {Q(name=name) for name in stream_names}
rows = (
get_active_streams(
realm=realm,
)
.filter(
functools.reduce(lambda a, b: a | b, q_list),
)
.values(
"id",
"name",
)
)
dct = {row["name"]: row for row in rows}
return dct
def do_convert(
content: str,
realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
message: Optional[Message] = None,
message_realm: Optional[Realm] = None,
sent_by_bot: bool = False,
translate_emoticons: bool = False,
mention_data: Optional[MentionData] = None,
email_gateway: bool = False,
no_previews: bool = False,
) -> str:
"""Convert Markdown to HTML, with Zulip-specific settings and hacks."""
# This logic is a bit convoluted, but the overall goal is to support a range of use cases:
# * Nothing is passed in other than content -> just run default options (e.g. for docs)
# * message is passed, but no realm is -> look up realm from message
# * message_realm is passed -> use that realm for Markdown purposes
if message is not None:
if message_realm is None:
message_realm = message.get_realm()
if message_realm is None:
linkifiers_key = DEFAULT_MARKDOWN_KEY
else:
linkifiers_key = message_realm.id
if message and hasattr(message, "id") and message.id:
logging_message_id = "id# " + str(message.id)
else:
logging_message_id = "unknown"
if message is not None and message_realm is not None:
if message_realm.is_zephyr_mirror_realm:
if message.sending_client.name == "zephyr_mirror":
# Use slightly customized Markdown processor for content
# delivered via zephyr_mirror
linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY
maybe_update_markdown_engines(linkifiers_key, email_gateway)
md_engine_key = (linkifiers_key, email_gateway)
_md_engine = md_engines[md_engine_key]
# Reset the parser; otherwise it will get slower over time.
_md_engine.reset()
# Filters such as UserMentionPattern need a message.
_md_engine.zulip_message = message
_md_engine.zulip_realm = message_realm
_md_engine.zulip_db_data = None # for now
_md_engine.image_preview_enabled = image_preview_enabled(message, message_realm, no_previews)
_md_engine.url_embed_preview_enabled = url_embed_preview_enabled(
message, message_realm, no_previews
)
# Pre-fetch data from the DB that is used in the Markdown thread
if message_realm is not None:
# Here we fetch the data structures needed to render
# mentions/stream mentions from the database, but only
# if there is syntax in the message that might use them, since
# the fetches are somewhat expensive and these types of syntax
# are uncommon enough that it's a useful optimization.
if mention_data is None:
mention_data = MentionData(message_realm.id, content)
stream_names = possible_linked_stream_names(content)
stream_name_info = get_stream_name_info(message_realm, stream_names)
if content_has_emoji_syntax(content):
active_realm_emoji = message_realm.get_active_emoji()
else:
active_realm_emoji = {}
_md_engine.zulip_db_data = {
"realm_alert_words_automaton": realm_alert_words_automaton,
"mention_data": mention_data,
"active_realm_emoji": active_realm_emoji,
"realm_uri": message_realm.uri,
"sent_by_bot": sent_by_bot,
"stream_names": stream_name_info,
"translate_emoticons": translate_emoticons,
}
try:
# Spend at most 5 seconds rendering; this protects the backend
# from being overloaded by bugs (e.g. Markdown logic that is
# extremely inefficient in corner cases) as well as user
# errors (e.g. a linkifier that makes some syntax
# infinite-loop).
rendered_content = timeout(5, lambda: _md_engine.convert(content))
# Throw an exception if the content is huge; this protects the
# rest of the codebase from any bugs where we end up rendering
# something huge.
if len(rendered_content) > MAX_MESSAGE_LENGTH * 10:
raise MarkdownRenderingException(
f"Rendered content exceeds {MAX_MESSAGE_LENGTH * 10} characters (message {logging_message_id})"
)
return rendered_content
except Exception:
cleaned = privacy_clean_markdown(content)
# NOTE: Don't change this message without also changing the
# logic in logging_handlers.py or we can create recursive
# exceptions.
markdown_logger.exception(
"Exception in Markdown parser; input (sanitized) was: %s\n (message %s)",
cleaned,
logging_message_id,
)
raise MarkdownRenderingException()
finally:
# These next three lines are slightly paranoid, since
# we always set these right before actually using the
        # engine, but better safe than sorry.
_md_engine.zulip_message = None
_md_engine.zulip_realm = None
_md_engine.zulip_db_data = None
markdown_time_start = 0.0
markdown_total_time = 0.0
markdown_total_requests = 0
def get_markdown_time() -> float:
return markdown_total_time
def get_markdown_requests() -> int:
return markdown_total_requests
def markdown_stats_start() -> None:
global markdown_time_start
markdown_time_start = time.time()
def markdown_stats_finish() -> None:
global markdown_total_time
global markdown_total_requests
global markdown_time_start
markdown_total_requests += 1
markdown_total_time += time.time() - markdown_time_start
def markdown_convert(
content: str,
realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
message: Optional[Message] = None,
message_realm: Optional[Realm] = None,
sent_by_bot: bool = False,
translate_emoticons: bool = False,
mention_data: Optional[MentionData] = None,
email_gateway: bool = False,
no_previews: bool = False,
) -> str:
markdown_stats_start()
ret = do_convert(
content,
realm_alert_words_automaton,
message,
message_realm,
sent_by_bot,
translate_emoticons,
mention_data,
email_gateway,
no_previews=no_previews,
)
markdown_stats_finish()
return ret
| apache-2.0 | -5,365,257,474,454,156,000 | 38.076582 | 160 | 0.587632 | false |
jiangdexiang/awesome-python-webapp | www/transwarp/db.py | 1 | 8617 | #!/usr/bin/env python
#-*-coding:utf8-*-
__author__ = 'jiangdexiang'
__version__ = '1.0'
__all__ = ['__author__', '__version__']
'''
Database operation module
'''
import threading
import time
import uuid
import functools
import logging
class Dict(dict):
"""
    Simple dict that supports access in x.y style.
    >>> d1 = Dict()
>>> d1['x'] = 100
>>> d1.x
100
>>> d1['y'] = 200
>>> d1.y
200
    >>> d2 = Dict(a=1, b=2, c='3')
>>> d2.c
'3'
>>> d2['empty']
    Traceback (most recent call last):
        ...
    KeyError: 'empty'
"""
def __init__(self, names=(), values=(), **kw):
super(Dict, self).__init__(**kw)
for k, v in zip(names, values):
self[k] = v
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
def __setattr__(self, key, value):
self[key] = value
def next_id(t=None):
"""
Return next id as 50-char string.
Args:
        t: unix timestamp; defaults to None, in which case time.time() is used.
:param t:
:return:
"""
if t is None:
t = time.time()
return '%015d%s000' % (int(t * 1000), uuid.uuid4().hex)
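# Example (illustrative): ids are fixed-width -- a 15-digit millisecond
# timestamp, 32 hex chars from uuid4, then '000' -- so they sort by time:
#     >>> len(next_id(1))
#     50
#     >>> next_id(1) < next_id(2)
#     True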
def _profiling(start, sql=''):
t = time.time() - start
if t > 0.1:
logging.warning('[PROFILING] [DB] %s: %s' % (t, sql))
else:
logging.info('[PROFILING] [DB] %s: %s' % (t, sql))
class DBError(Exception):
pass
class MultiColumnsError(DBError):
pass
class _LasyConnection(object):
def __init__(self):
self.connection = None
def cursor(self):
if self.connection is None:
connection = engine.connect()
logging.info('open connection <%s>...' % hex(id(connection)))
self.connection = connection
return self.connection.cursor()
def commit(self):
self.connection.commit()
def rollback(self):
self.connection.rollback()
def cleanup(self):
if self.connection:
connection = self.connection
self.connection = None
logging.info('close connection <%s>...' % hex(id(connection)))
connection.close()
class _DbCtx(threading.local):
"""
Thread local object that holds connection info.
"""
def __init__(self):
self.connection = None
self.transactions = 0
def is_init(self):
        return self.connection is not None
def init(self):
self.connection = _LasyConnection()
self.transactions = 0
def cleanup(self):
self.connection.cleanup()
self.connection = None
def cursor(self):
"""
return cursor
:return:
"""
return self.connection.cursor()
# thread-local db context:
_db_ctx = _DbCtx()
#global engine object:
engine = None
class _Engine(object):
def __init__(self, connect):
self._connect = connect
def connect(self):
return self._connect()
def create_engine(user, password, database, host='127.0.0.1', port=3306, **kw):
    import mysql.connector
global engine
if engine is not None:
raise DBError('Engine is already initialized.')
params = dict(user=user, password=password, database=database, host=host, port=port)
defaults = dict(use_unicode=True, charset='utf8', collation='utf8_general_ci', autocommit=False)
for k, v in defaults.iteritems():
params[k] = kw.pop(k, v)
params.update(kw)
params['buffered'] = True
    engine = _Engine(lambda: mysql.connector.connect(**params))
    # test connection...
logging.info('Init mysql engine <%s> ok.' % hex(id(engine)))
class _ConnectionCtx(object):
"""
    _ConnectionCtx object that can open and close a connection context. _ConnectionCtx objects
    can be nested; only the outermost connection has any effect.
with connection():
pass
with connection():
pass
"""
def __enter__(self):
global _db_ctx
self.should_cleanup = False
if not _db_ctx.is_init():
_db_ctx.init()
self.should_cleanup = True
return self
def __exit__(self, exctype, excvalue, traceback):
global _db_ctx
if self.should_cleanup:
_db_ctx.cleanup()
def connections():
return _ConnectionCtx()
def with_connection(func):
"""
Decorator for reuse connection.
    @with_connection
def foo(*args, **kw):
f1()
f2()
f3()
:return:
"""
@functools.wraps(func)
def _wrapper(*args, **kw):
with _ConnectionCtx():
return func(*args, **kw)
return _wrapper
class _TransactionCtx(object):
"""
_TransactionCtx object that can handle transactions.
with _TransactionCtx():
pass
"""
def __enter__(self):
global _db_ctx
self.should_close_conn = False
if not _db_ctx.is_init():
# needs open a connection first:
_db_ctx.init()
self.should_close_conn = True
_db_ctx.transactions += 1
logging.info('Begin transaction...' if _db_ctx.transactions == 1 else 'join current transaction...')
return self
def __exit__(self, exctype, excvalue, traceback):
global _db_ctx
_db_ctx.transactions -= 1
try:
if _db_ctx.transactions == 0:
if exctype is None:
self.commit()
else:
self.rollback()
finally:
if self.should_close_conn:
_db_ctx.cleanup()
@staticmethod
def commit():
global _db_ctx
logging.info('Commit transaction...')
try:
_db_ctx.connection.commit()
logging.info('Commit OK.')
except:
logging.warning('Commit failed. try rollback...')
_db_ctx.connection.rollback()
raise
@staticmethod
def rollback():
global _db_ctx
logging.warning('Rollback transaction...')
_db_ctx.connection.rollback()
logging.info('Rollback OK.')
def transaction():
return _TransactionCtx()
def with_transaction(func):
@functools.wraps(func)
    def _wrapper(*args, **kw):
        _start = time.time()
        try:
            with _TransactionCtx():
                return func(*args, **kw)
        finally:
            _profiling(_start)
return _wrapper
def _select(sql, first, *args):
"""execute select SQL and return unique result or list results."""
global _db_ctx
cursor = None
names = []
sql = sql.replace('?', '%s')
logging.info('SQL: %s, ARGS: %s' % (sql, args))
try:
cursor = _db_ctx.connection.cursor()
cursor.execute(sql, args)
if cursor.description:
names = [x[0] for x in cursor.description]
if first:
values = cursor.fetchone()
if not values:
return None
return Dict(names, values)
return [Dict(names, x) for x in cursor.fetchall()]
finally:
if cursor:
cursor.close()
@with_connection
def select_one(sql, *args):
return _select(sql, True, *args)
@with_connection
def select_int(sql, *args):
d = _select(sql, True, *args)
if len(d) != 1:
raise MultiColumnsError('Expect only one column.')
return d.values()[0]
@with_connection
def select(sql, *args):
return _select(sql, False, *args)
@with_connection
def _update(sql, *args):
global _db_ctx
cursor = None
sql = sql.replace('?', '%s')
logging.info('SQL: %s, ARGS: %s' % (sql, args))
try:
cursor = _db_ctx.connection.cursor()
cursor.execute(sql, args)
r = cursor.rowcount
if _db_ctx.transactions == 0:
logging.info('auto commit')
_db_ctx.connection.commit()
return r
finally:
if cursor:
cursor.close()
def insert(table, **kw):
cols, args = zip(*kw.iteritems())
sql = 'insert into %s (%s) values (%s)' % (table, ','.join(['`%s`' % col for col in cols]),
','.join(['?' for i in range(len(cols))]))
return _update(sql, *args)
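# For example (illustrative; column order follows the kw dict):
#     insert('user', id=1, name='Bob')
# builds and executes:
#     insert into user (`id`,`name`) values (?,?)   with args (1, 'Bob')
# (the '?' placeholders are rewritten to '%s' inside _update).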
def update(sql, *args):
return _update(sql, *args)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
create_engine('www-data', 'www-data', 'test')
update('drop table if exists user')
update('create table user (id int primary key, name text, email text, passwd text, last_modified real)')
import doctest
    doctest.testmod()
| gpl-2.0 | 7,174,816,158,842,612,000 | 23.413598 | 118 | 0.559011 | false |
dset0x/invenio-checker | invenio_checker/recids.py | 1 | 1661 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Record ID handling for checker."""
from intbitset import intbitset # pylint: disable=no-name-in-module
from .common import ALL
def ids_from_input(ids_input):
"""Return the list of IDs to check for from user-input.
:param ids_input: Comma-separated list of requested record IDs.
May contain, or be ALL.
:type ids_input: str
:returns: intbitset of IDs or ALL
:rtype: seq
:raises: ValueError
"""
    if ALL in ids_input.split(','):
        return ALL
else:
from invenio_utils.shell import split_cli_ids_arg
return intbitset(split_cli_ids_arg(ids_input), sanity_checks=True)
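# Illustrative usage (assumes split_cli_ids_arg expands hyphenated ranges):
#     ids_from_input('1,3-5')  ->  intbitset([1, 3, 4, 5])
#     ids_from_input(ALL)      ->  ALL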
| gpl-2.0 | 2,949,021,375,642,213,000 | 32.897959 | 76 | 0.717038 | false |
fabteam1/komsukomsuhuhu | komsukomsuhuu/profiles/forms.py | 1 | 2012 | from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import UserCreationForm
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from profiles.models import CustomUser, UserLocation
class LoginForm(forms.Form):
username = forms.CharField(required=True)
password = forms.CharField(widget=forms.PasswordInput, required=True)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if not username or not password:
return self.cleaned_data
user = authenticate(username=username, password=password)
if user:
self.user = user
else:
raise ValidationError('Wrong username or password !')
return self.cleaned_data
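# Illustrative usage in a view (names such as `request` and `login` are
# assumed context, not defined in this module):
#     form = LoginForm(data={'username': 'alice', 'password': 's3cret'})
#     if form.is_valid():
#         login(request, form.user)  # clean() stored the authenticated user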
class RegistrationForm(UserCreationForm):
pass
class AdvancedRegistrationForm(UserCreationForm):
first_name = forms.CharField(max_length=30, required=True)
last_name = forms.CharField(max_length=30, required=True)
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
def clean_email(self):
if not self.cleaned_data['email']:
raise forms.ValidationError(u'Enter email.')
if User.objects.filter(email__iexact=self.cleaned_data['email']):
            raise forms.ValidationError(
                u'This email is already in use. Please try a different email.'
            )
return self.cleaned_data['email']
class ChangeCustomUserDetails(forms.ModelForm):
class Meta:
model = CustomUser
fields = ('address', 'phone', 'birthDay')
class UserLocationForm(forms.ModelForm):
class Meta:
model = UserLocation
fields = ['longitude', 'latitude']
class UserStatusForm(forms.ModelForm):
class Meta:
model = CustomUser
fields = ('status',)
| mit | 8,524,575,782,871,746,000 | 25.473684 | 84 | 0.657555 | false |
induane/stomp.py3 | stomp/connect.py | 1 | 38302 | import math
import random
import re
import socket
import sys
import threading
import time
import types
import xml.dom.minidom
import errno
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
# Candidate SSL/TLS protocol constants. A tuple (rather than a set) keeps the
# iteration order deterministic when picking the default version below.
protocols = (
    'PROTOCOL_SSLv3',
    'PROTOCOL_TLSv1_2',
    'PROTOCOL_TLSv1_1',
    'PROTOCOL_TLSv1',
    'PROTOCOL_SSLv23',
    'PROTOCOL_SSLv2',
)
DEFAULT_SSL_VERSION = None
SSL_AVAILABLE = True
try:
import ssl
from ssl import SSLError
except ImportError:
SSL_AVAILABLE = False
if SSL_AVAILABLE:
for protocol in protocols:
try:
DEFAULT_SSL_VERSION = getattr(ssl, protocol)
except AttributeError:
continue
except SSLError:
continue
break
try:
from socket import SOL_SOCKET, SO_KEEPALIVE
from socket import SOL_TCP, TCP_KEEPIDLE, TCP_KEEPINTVL, TCP_KEEPCNT
    LINUX_KEEPALIVE_AVAIL = True
except ImportError:
    LINUX_KEEPALIVE_AVAIL = False
import exception
import listener
import utils
from backward import decode, encode, hasbyte, pack, socksend, NULL
try:
import uuid
except ImportError:
from backward import uuid
try:
from fractions import gcd
except ImportError:
from backward import gcd
import logging
log = logging.getLogger('stomp.py')
class Connection(object):
"""
Represents a STOMP client connection.
"""
# ========= PRIVATE MEMBERS =========
# List of all host names (unqualified, fully-qualified, and IP
# addresses) that refer to the local host (both loopback interface
# and external interfaces). This is used for determining
# preferred targets.
__localhost_names = [ "localhost", "127.0.0.1" ]
try:
__localhost_names.append(socket.gethostbyname(socket.gethostname()))
except:
pass
try:
__localhost_names.append(socket.gethostname())
except:
pass
try:
__localhost_names.append(socket.getfqdn(socket.gethostname()))
except:
pass
#
# Used to parse the STOMP "content-length" header lines,
#
__content_length_re = re.compile('^content-length[:]\\s*(?P<value>[0-9]+)', re.MULTILINE)
def __init__(self,
host_and_ports = [ ('localhost', 61613) ],
user = None,
passcode = None,
prefer_localhost = True,
try_loopback_connect = True,
reconnect_sleep_initial = 0.1,
reconnect_sleep_increase = 0.5,
reconnect_sleep_jitter = 0.1,
reconnect_sleep_max = 60.0,
reconnect_attempts_max = 3,
use_ssl = False,
ssl_key_file = None,
ssl_cert_file = None,
ssl_ca_certs = None,
ssl_cert_validator = None,
wait_on_receipt = False,
ssl_version = DEFAULT_SSL_VERSION,
timeout = None,
version = 1.0,
strict = True,
heartbeats = (0, 0),
keepalive = None,
vhost = None
):
"""
Initialize and start this connection.
\param host_and_ports
a list of (host, port) tuples.
\param prefer_localhost
if True and the local host is mentioned in the (host,
port) tuples, try to connect to this first
\param try_loopback_connect
if True and the local host is found in the host
tuples, try connecting to it using loopback interface
(127.0.0.1)
\param reconnect_sleep_initial
initial delay in seconds to wait before reattempting
to establish a connection if connection to any of the
hosts fails.
\param reconnect_sleep_increase
factor by which the sleep delay is increased after
each connection attempt. For example, 0.5 means
            to wait 50% longer than the previous attempt,
1.0 means wait twice as long, and 0.0 means keep
the delay constant.
\param reconnect_sleep_max
maximum delay between connection attempts, regardless
of the reconnect_sleep_increase.
\param reconnect_sleep_jitter
random additional time to wait (as a percentage of
the time determined using the previous parameters)
between connection attempts in order to avoid
stampeding. For example, a value of 0.1 means to wait
an extra 0%-10% (randomly determined) of the delay
calculated using the previous three parameters.
\param reconnect_attempts_max
maximum attempts to reconnect
\param use_ssl
connect using SSL to the socket. This wraps the
socket in a SSL connection. The constructor will
raise an exception if you ask for SSL, but it can't
find the SSL module.
\param ssl_cert_file
the path to a X509 certificate
\param ssl_key_file
the path to a X509 key file
\param ssl_ca_certs
the path to the a file containing CA certificates
to validate the server against. If this is not set,
server side certificate validation is not done.
\param ssl_cert_validator
function which performs extra validation on the client
certificate, for example checking the returned
certificate has a commonName attribute equal to the
hostname (to avoid man in the middle attacks).
The signature is:
(OK, err_msg) = validation_function(cert, hostname)
where OK is a boolean, and cert is a certificate structure
as returned by ssl.SSLSocket.getpeercert()
\param wait_on_receipt
if a receipt is specified, then the send method should wait
(block) for the server to respond with that receipt-id
before continuing
\param ssl_version
SSL protocol to use for the connection. This should be
one of the PROTOCOL_x constants provided by the ssl module.
            The default is DEFAULT_SSL_VERSION, the first protocol from the candidate list that the ssl module provides.
\param timeout
the timeout value to use when connecting the stomp socket
\param version
STOMP protocol version (1.0 or 1.1)
\param strict
if true, use the strict version of the protocol. For STOMP 1.1, this means
it will use the STOMP connect header, rather than CONNECT.
\param heartbeats
a tuple containing the heartbeat send and receive time in millis. (0,0)
if no heartbeats
\param keepalive
some operating systems support sending the occasional heart
beat packets to detect when a connection fails. This
            parameter can either be set to a boolean to turn on the
default keepalive options for your OS, or as a tuple of
values, which also enables keepalive packets, but specifies
options specific to your OS implementation
\param vhost
specify a virtual hostname to provide in the 'host' header of the connection
"""
sorted_host_and_ports = []
sorted_host_and_ports.extend(host_and_ports)
#
# If localhost is preferred, make sure all (host, port) tuples that refer to the local host come first in the list
#
if prefer_localhost:
sorted_host_and_ports.sort(key = self.is_localhost)
#
# If the user wishes to attempt connecting to local ports using the loopback interface, for each (host, port) tuple
# referring to a local host, add an entry with the host name replaced by 127.0.0.1 if it doesn't exist already
#
loopback_host_and_ports = []
if try_loopback_connect:
for host_and_port in sorted_host_and_ports:
if self.is_localhost(host_and_port) == 1:
port = host_and_port[1]
if (not ("127.0.0.1", port) in sorted_host_and_ports
and not ("localhost", port) in sorted_host_and_ports):
loopback_host_and_ports.append(("127.0.0.1", port))
#
# Assemble the final, possibly sorted list of (host, port) tuples
#
self.__host_and_ports = []
self.__host_and_ports.extend(loopback_host_and_ports)
self.__host_and_ports.extend(sorted_host_and_ports)
self.__recvbuf = ''
self.__listeners = {}
self.__reconnect_sleep_initial = reconnect_sleep_initial
self.__reconnect_sleep_increase = reconnect_sleep_increase
self.__reconnect_sleep_jitter = reconnect_sleep_jitter
self.__reconnect_sleep_max = reconnect_sleep_max
self.__reconnect_attempts_max = reconnect_attempts_max
self.__timeout = timeout
self.__connect_headers = {}
if user is not None and passcode is not None:
self.__connect_headers['login'] = user
self.__connect_headers['passcode'] = passcode
self.__socket = None
self.__socket_semaphore = threading.BoundedSemaphore(1)
self.__current_host_and_port = None
self.__receiver_thread_exit_condition = threading.Condition()
self.__receiver_thread_exited = False
self.__send_wait_condition = threading.Condition()
self.__connect_wait_condition = threading.Condition()
self.blocking = None
self.connected = False
# setup SSL
        if use_ssl and not SSL_AVAILABLE:
raise Exception("SSL connection requested, but SSL library not found.")
self.__ssl = use_ssl
self.__ssl_cert_file = ssl_cert_file
self.__ssl_key_file = ssl_key_file
self.__ssl_ca_certs = ssl_ca_certs
self.__ssl_cert_validator = ssl_cert_validator
self.__ssl_version = ssl_version
self.__receipts = {}
self.__wait_on_receipt = wait_on_receipt
# protocol version
self.version = version
self.__strict = strict
# setup heartbeating
if version < 1.1 and heartbeats != (0, 0):
raise exception.ProtocolException('Heartbeats can only be set on a 1.1+ connection')
self.heartbeats = heartbeats
# used for 1.1 heartbeat messages (set to true every time a heartbeat message arrives)
self.__received_heartbeat = time.time()
# flag used when we receive the disconnect receipt
self.__disconnect_receipt = None
# function for creating threads used by the connection
self.create_thread_fc = default_create_thread
self.__keepalive = keepalive
self.vhost = vhost
def is_localhost(self, host_and_port):
"""
        Return 1 if the specified host+port refers to the local host, 2 otherwise (used as a sort key).
"""
(host, port) = host_and_port
if host in Connection.__localhost_names:
return 1
else:
return 2
def override_threading(self, create_thread_fc):
"""
Override for thread creation. Use an alternate threading library by
setting this to a function with a single argument (which is the receiver loop callback).
The thread which is returned should be started (ready to run)
"""
self.create_thread_fc = create_thread_fc
#
# Manage the connection
#
def start(self):
"""
Start the connection. This should be called after all
listeners have been registered. If this method is not called,
no frames will be received by the connection.
"""
self.__running = True
self.__attempt_connection()
thread = self.create_thread_fc(self.__receiver_loop)
self.__notify('connecting')
def stop(self):
"""
Stop the connection. This is equivalent to calling
disconnect() but will do a clean shutdown by waiting for the
receiver thread to exit.
"""
self.disconnect()
self.__receiver_thread_exit_condition.acquire()
while not self.__receiver_thread_exited:
self.__receiver_thread_exit_condition.wait()
self.__receiver_thread_exit_condition.release()
def get_host_and_port(self):
"""
Return a (host, port) tuple indicating which STOMP host and
port is currently connected, or None if there is currently no
connection.
"""
return self.__current_host_and_port
def is_connected(self):
"""
Return true if the socket managed by this connection is connected
"""
try:
return self.__socket is not None and self.__socket.getsockname()[1] != 0 and self.connected
except socket.error:
return False
#
# Manage objects listening to incoming frames
#
def set_listener(self, name, listener):
"""
Set a named listener on this connection
\see listener::ConnectionListener
\param name the name of the listener
\param listener the listener object
"""
self.__listeners[name] = listener
def remove_listener(self, name):
"""
Remove a listener according to the specified name
\param name the name of the listener to remove
"""
del self.__listeners[name]
def get_listener(self, name):
"""
Return a named listener
\param name the listener to return
"""
if name in self.__listeners:
return self.__listeners[name]
else:
return None
#
# STOMP transmissions
#
def subscribe(self, headers={}, **keyword_headers):
"""
Send a SUBSCRIBE frame to subscribe to a queue
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
required_headers = [ 'destination' ]
if self.version >= 1.1:
required_headers.append('id')
self.__send_frame_helper('SUBSCRIBE', '', merged_headers, required_headers)
def unsubscribe(self, headers={}, **keyword_headers):
"""
Send an UNSUBSCRIBE frame to unsubscribe from a queue
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
self.__send_frame_helper('UNSUBSCRIBE', '', merged_headers, [ ('destination', 'id') ])
def send(self, message='', headers={}, **keyword_headers):
"""
Send a message (SEND) frame
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
wait_on_receipt = self.__wait_on_receipt and 'receipt' in merged_headers.keys()
if wait_on_receipt:
self.__send_wait_condition.acquire()
try:
self.__send_frame_helper('SEND', message, merged_headers, [ 'destination' ])
self.__notify('send', headers, message)
# if we need to wait-on-receipt, then block until the receipt frame arrives
if wait_on_receipt:
receipt = merged_headers['receipt']
while receipt not in self.__receipts:
self.__send_wait_condition.wait()
del self.__receipts[receipt]
finally:
if wait_on_receipt:
self.__send_wait_condition.release()
def ack(self, headers={}, **keyword_headers):
"""
Send an ACK frame, to acknowledge receipt of a message
"""
self.__send_frame_helper('ACK', '', utils.merge_headers([headers, keyword_headers]), [ 'message-id' ])
def nack(self, headers={}, **keyword_headers):
"""
Send an NACK frame, to acknowledge a message was not successfully processed
"""
if self.version < 1.1:
raise RuntimeError('NACK is not supported with 1.0 connections')
self.__send_frame_helper('NACK', '', utils.merge_headers([headers, keyword_headers]), [ 'message-id' ])
def begin(self, headers={}, **keyword_headers):
"""
Send a BEGIN frame to start a transaction
"""
use_headers = utils.merge_headers([headers, keyword_headers])
if not 'transaction' in use_headers.keys():
use_headers['transaction'] = str(uuid.uuid4())
self.__send_frame_helper('BEGIN', '', use_headers, [ 'transaction' ])
return use_headers['transaction']
def abort(self, headers={}, **keyword_headers):
"""
Send an ABORT frame to rollback a transaction
"""
self.__send_frame_helper('ABORT', '', utils.merge_headers([headers, keyword_headers]), [ 'transaction' ])
def commit(self, headers={}, **keyword_headers):
"""
Send a COMMIT frame to commit a transaction (send pending messages)
"""
self.__send_frame_helper('COMMIT', '', utils.merge_headers([headers, keyword_headers]), [ 'transaction' ])
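    # Hedged sketch of the transaction flow built from begin/commit/abort
    # above (conn and the destination are assumptions for illustration):
    #   txid = conn.begin()
    #   conn.send('work item', destination='/queue/jobs', transaction=txid)
    #   conn.commit(transaction=txid)   # or conn.abort(transaction=txid)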
def connect(self, headers={}, **keyword_headers):
"""
Send a CONNECT frame to start a connection
"""
wait = False
if 'wait' in keyword_headers and keyword_headers['wait']:
wait = True
del keyword_headers['wait']
if self.version >= 1.1:
if self.__strict:
cmd = 'STOMP'
else:
cmd = 'CONNECT'
if self.vhost is not None:
headers['host'] = self.vhost
headers['accept-version'] = self.version
headers['heart-beat'] = '%s,%s' % self.heartbeats
else:
cmd = 'CONNECT'
self.__send_frame_helper(cmd, '', utils.merge_headers([self.__connect_headers, headers, keyword_headers]), [ ])
if wait:
self.__connect_wait_condition.acquire()
while not self.is_connected():
self.__connect_wait_condition.wait()
self.__connect_wait_condition.release()
def disconnect_socket(self):
self.__running = False
if self.__socket is not None:
if self.__ssl:
#
# Even though we don't want to use the socket, unwrap is the only API method which does a proper SSL shutdown
#
try:
self.__socket = self.__socket.unwrap()
except Exception:
#
# unwrap seems flaky on Win with the backported ssl mod, so catch any exception and log it
#
_, e, _ = sys.exc_info()
log.warn(e)
elif hasattr(socket, 'SHUT_RDWR'):
try:
self.__socket.shutdown(socket.SHUT_RDWR)
except socket.error:
_, e, _ = sys.exc_info()
log.warn('Unable to issue SHUT_RDWR on socket because of error "%s"' % e)
#
# split this into a separate check, because sometimes the socket is nulled between shutdown and this call
#
if self.__socket is not None:
try:
self.__socket.close()
except socket.error:
_, e, _ = sys.exc_info()
log.warn('Unable to close socket because of error "%s"' % e)
self.__current_host_and_port = None
def disconnect(self, send_disconnect=True, headers={}, **keyword_headers):
"""
Send a DISCONNECT frame to finish a connection
"""
if self.version >= 1.1 and 'receipt' not in headers:
headers['receipt'] = str(uuid.uuid4())
try:
self.__send_frame_helper('DISCONNECT', '', utils.merge_headers([self.__connect_headers, headers, keyword_headers]), [ ])
except exception.NotConnectedException:
_, e, _ = sys.exc_info()
self.disconnect_socket()
raise e
if 'receipt' in headers:
self.__disconnect_receipt = headers['receipt']
else:
self.disconnect_socket()
def __convert_dict(self, payload):
"""
Encode a python dictionary as a <map>...</map> structure.
"""
xmlStr = "<map>\n"
for key in payload:
xmlStr += "<entry>\n"
xmlStr += "<string>%s</string>" % key
xmlStr += "<string>%s</string>" % payload[key]
xmlStr += "</entry>\n"
xmlStr += "</map>"
return xmlStr
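    # Illustrative example of the mapping above: {'a': '1'} is rendered as
    #   <map>\n<entry>\n<string>a</string><string>1</string></entry>\n</map>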
def __send_frame_helper(self, command, payload, headers, required_header_keys):
"""
Helper function for sending a frame after verifying that a
        given set of headers is present.
\param command
the command to send
\param payload
the frame's payload
\param headers
a dictionary containing the frame's headers
\param required_header_keys
a sequence enumerating all required header keys. If an element in this sequence is itself
a tuple, that tuple is taken as a list of alternatives, one of which must be present.
\throws ArgumentError
if one of the required header keys is not present in the header map.
"""
for required_header_key in required_header_keys:
if type(required_header_key) == tuple:
found_alternative = False
for alternative in required_header_key:
if alternative in headers.keys():
found_alternative = True
if not found_alternative:
raise KeyError("Command %s requires one of the following headers: %s" % (command, str(required_header_key)))
elif not required_header_key in headers.keys():
raise KeyError("Command %s requires header %r" % (command, required_header_key))
self.__send_frame(command, headers, payload)
def __send_frame(self, command, headers={}, payload=''):
"""
Send a STOMP frame.
\param command
the frame command
\param headers
a map of headers (key-val pairs)
\param payload
the message payload
"""
if type(payload) == dict:
headers["transformation"] = "jms-map-xml"
payload = self.__convert_dict(payload)
if payload:
payload = encode(payload)
if hasbyte(0, payload):
headers.update({'content-length': len(payload)})
if self.__socket is not None:
try:
frame = [ ]
if command is not None:
frame.append(command + '\n')
for key, val in headers.items():
frame.append('%s:%s\n' % (key, val))
frame.append('\n')
if payload:
frame.append(payload)
if command is not None:
# only send the terminator if we're sending a command (heartbeats have no term)
frame.append(NULL)
frame = pack(frame)
self.__socket_semaphore.acquire()
try:
socksend(self.__socket, frame)
log.debug("Sent frame: type=%s, headers=%r, body=%r" % (command, headers, payload))
finally:
self.__socket_semaphore.release()
except Exception:
_, e, _ = sys.exc_info()
log.error("Error sending frame: %s" % e)
raise e
else:
raise exception.NotConnectedException()
def __notify(self, frame_type, headers=None, body=None):
"""
Utility function for notifying listeners of incoming and outgoing messages
\param frame_type
the type of message
\param headers
the map of headers associated with the message
\param body
the content of the message
"""
if frame_type == 'receipt':
# logic for wait-on-receipt notification
receipt = headers['receipt-id']
self.__send_wait_condition.acquire()
try:
self.__receipts[receipt] = None
self.__send_wait_condition.notify()
finally:
self.__send_wait_condition.release()
# received a stomp 1.1 disconnect receipt
if receipt == self.__disconnect_receipt:
self.disconnect_socket()
if frame_type == 'connected':
self.__connect_wait_condition.acquire()
self.connected = True
self.__connect_wait_condition.notify()
self.__connect_wait_condition.release()
if 'version' not in headers.keys():
if self.version >= 1.1:
log.warn('Downgraded STOMP protocol version to 1.0')
self.version = 1.0
if 'heart-beat' in headers.keys():
self.heartbeats = utils.calculate_heartbeats(headers['heart-beat'].replace(' ', '').split(','), self.heartbeats)
if self.heartbeats != (0,0):
default_create_thread(self.__heartbeat_loop)
elif frame_type == 'disconnected':
self.__connect_wait_condition.acquire()
self.connected = False
self.__connect_wait_condition.release()
for listener in self.__listeners.values():
if not listener: continue
if not hasattr(listener, 'on_%s' % frame_type):
log.debug('listener %s has no method on_%s' % (listener, frame_type))
continue
if frame_type == 'connecting':
listener.on_connecting(self.__current_host_and_port)
continue
elif frame_type == 'disconnected':
listener.on_disconnected()
continue
notify_func = getattr(listener, 'on_%s' % frame_type)
notify_func(headers, body)
def __receiver_loop(self):
"""
Main loop listening for incoming data.
"""
log.debug("Starting receiver loop")
try:
try:
while self.__running:
if self.__socket is None:
break
try:
try:
while self.__running:
frames = self.__read()
for frame in frames:
(frame_type, headers, body) = utils.parse_frame(frame)
log.debug("Received frame: %r, headers=%r, body=%r" % (frame_type, headers, body))
frame_type = frame_type.lower()
if frame_type in [ 'connected', 'message', 'receipt', 'error' ]:
self.__notify(frame_type, headers, body)
elif frame_type == 'heartbeat':
# no notifications needed
pass
else:
log.warning('Unknown response frame type: "%s" (frame length was %d)' % (frame_type, len(frame)))
finally:
try:
self.__socket.close()
except:
pass # ignore errors when attempting to close socket
self.__socket = None
self.__current_host_and_port = None
except exception.ConnectionClosedException:
if self.__running:
log.error("Lost connection")
self.__notify('disconnected')
#
# Clear out any half-received messages after losing connection
#
self.__recvbuf = ''
self.__running = False
break
except:
log.exception("An unhandled exception was encountered in the stomp receiver loop")
finally:
self.__receiver_thread_exit_condition.acquire()
self.__receiver_thread_exited = True
self.__receiver_thread_exit_condition.notifyAll()
self.__receiver_thread_exit_condition.release()
log.debug("Receiver loop ended")
def __heartbeat_loop(self):
"""
Loop for sending (and monitoring received) heartbeats
"""
send_sleep = self.heartbeats[0] / 1000
        # the receive side gets an additional grace period of 3 seconds
receive_sleep = (self.heartbeats[1] / 1000) + 3
if send_sleep == 0:
sleep_time = receive_sleep
elif receive_sleep == 0:
sleep_time = send_sleep
else:
            # sleep for half the GCD of the send and receive times
sleep_time = gcd(send_sleep, receive_sleep) / 2.0
send_time = time.time()
receive_time = time.time()
while self.__running:
time.sleep(sleep_time)
if time.time() - send_time > send_sleep:
send_time = time.time()
log.debug('Sending a heartbeat message')
self.__send_frame(None)
if time.time() - receive_time > receive_sleep:
if time.time() - self.__received_heartbeat > receive_sleep:
log.debug('Heartbeat timeout')
# heartbeat timeout
for listener in self.__listeners.values():
listener.on_heartbeat_timeout()
self.disconnect_socket()
self.__connect_wait_condition.acquire()
self.connected = False
self.__connect_wait_condition.release()
def __read(self):
"""
Read the next frame(s) from the socket.
"""
fastbuf = StringIO()
while self.__running:
try:
try:
c = self.__socket.recv(1024)
except socket.error:
_, e, _ = sys.exc_info()
if e.args[0] in (errno.EAGAIN, errno.EINTR):
log.debug("socket read interrupted, restarting")
continue
raise
c = decode(c)
# reset the heartbeat for any received message
self.__received_heartbeat = time.time()
except Exception:
_, e, _ = sys.exc_info()
c = ''
if len(c) == 0:
raise exception.ConnectionClosedException()
fastbuf.write(c)
if '\x00' in c:
break
elif c == '\x0a':
# heartbeat (special case)
return c
self.__recvbuf += fastbuf.getvalue()
fastbuf.close()
result = []
if len(self.__recvbuf) > 0 and self.__running:
while True:
pos = self.__recvbuf.find('\x00')
if pos >= 0:
frame = self.__recvbuf[0:pos]
preamble_end = frame.find('\n\n')
if preamble_end >= 0:
content_length_match = Connection.__content_length_re.search(frame[0:preamble_end])
if content_length_match:
content_length = int(content_length_match.group('value'))
content_offset = preamble_end + 2
frame_size = content_offset + content_length
if frame_size > len(frame):
#
# Frame contains NUL bytes, need to read more
#
if frame_size < len(self.__recvbuf):
pos = frame_size
frame = self.__recvbuf[0:pos]
else:
#
# Haven't read enough data yet, exit loop and wait for more to arrive
#
break
result.append(frame)
self.__recvbuf = self.__recvbuf[pos+1:]
else:
break
return result
def __enable_keepalive(self):
def try_setsockopt(sock, name, fam, opt, val):
if val is None:
                return True # no value to set; nothing to do, so report success
try:
sock.setsockopt(fam, opt, val)
log.debug('keepalive: set %r option to %r on socket' % (name, val))
except:
log.error('keepalive: unable to set %r option to %r on socket' % (name,val))
return False
return True
ka = self.__keepalive
if not ka:
return
if ka == True:
ka_sig = 'auto'
ka_args = ()
else:
try:
ka_sig = ka[0]
ka_args = ka[1:]
except Exception:
log.error('keepalive: bad specification %r' % (ka,))
return
if ka_sig == 'auto':
if LINUX_KEEPALIVE_AVAIL:
ka_sig = 'linux'
ka_args = None
log.debug('keepalive: autodetected linux-style support')
else:
log.error('keepalive: unable to detect any implementation, DISABLED!')
return
if ka_sig == 'linux':
log.debug('keepalive: activating linux-style support')
if ka_args is None:
log.debug('keepalive: using system defaults')
ka_args = (None, None, None)
lka_idle, lka_intvl, lka_cnt = ka_args
if try_setsockopt(self.__socket, 'enable', SOL_SOCKET, SO_KEEPALIVE, 1):
try_setsockopt(self.__socket, 'idle time', SOL_TCP, TCP_KEEPIDLE, lka_idle)
try_setsockopt(self.__socket, 'interval', SOL_TCP, TCP_KEEPINTVL, lka_intvl)
try_setsockopt(self.__socket, 'count', SOL_TCP, TCP_KEEPCNT, lka_cnt)
else:
log.error('keepalive: implementation %r not recognized or not supported' % ka_sig)
def __attempt_connection(self):
"""
Try connecting to the (host, port) tuples specified at construction time.
"""
sleep_exp = 1
connect_count = 0
while self.__running and self.__socket is None and connect_count < self.__reconnect_attempts_max:
for host_and_port in self.__host_and_ports:
try:
log.debug("Attempting connection to host %s, port %s" % host_and_port)
self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__enable_keepalive()
if self.__ssl: # wrap socket
if self.__ssl_ca_certs:
cert_validation = ssl.CERT_REQUIRED
else:
cert_validation = ssl.CERT_NONE
self.__socket = ssl.wrap_socket(self.__socket, keyfile = self.__ssl_key_file,
certfile = self.__ssl_cert_file, cert_reqs = cert_validation,
ca_certs = self.__ssl_ca_certs, ssl_version = self.__ssl_version)
self.__socket.settimeout(self.__timeout)
if self.blocking is not None:
self.__socket.setblocking(self.blocking)
self.__socket.connect(host_and_port)
#
# Validate server cert
#
if self.__ssl and self.__ssl_cert_validator:
cert = self.__socket.getpeercert()
                        (ok, errmsg) = self.__ssl_cert_validator(cert, host_and_port[0])
if not ok:
raise SSLError("Server certificate validation failed: %s" % errmsg)
self.__current_host_and_port = host_and_port
log.info("Established connection to host %s, port %s" % host_and_port)
break
except socket.error:
self.__socket = None
if isinstance(sys.exc_info()[1], tuple):
exc = sys.exc_info()[1][1]
else:
exc = sys.exc_info()[1]
connect_count += 1
log.warning("Could not connect to host %s, port %s: %s" % (host_and_port[0], host_and_port[1], exc))
if self.__socket is None:
sleep_duration = (min(self.__reconnect_sleep_max,
((self.__reconnect_sleep_initial / (1.0 + self.__reconnect_sleep_increase))
* math.pow(1.0 + self.__reconnect_sleep_increase, sleep_exp)))
* (1.0 + random.random() * self.__reconnect_sleep_jitter))
sleep_end = time.time() + sleep_duration
log.debug("Sleeping for %.1f seconds before attempting reconnect" % sleep_duration)
while self.__running and time.time() < sleep_end:
time.sleep(0.2)
if sleep_duration < self.__reconnect_sleep_max:
sleep_exp += 1
if not self.__socket:
raise exception.ConnectFailedException()
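    # The sleep computed above is truncated exponential backoff with jitter:
    #   sleep = min(max, initial / (1 + inc) * (1 + inc) ** exp) * (1 + U * jitter)
    # where U is uniform in [0, 1); e.g. with initial=0.1, inc=0.5, jitter=0.1
    # the successive base delays grow roughly 0.1, 0.15, 0.225, ... seconds.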
def default_create_thread(callback):
"""
Default thread creation
"""
thread = threading.Thread(None, callback)
thread.daemon = True # Don't let receiver thread prevent termination
thread.start()
return thread
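if __name__ == '__main__':
    # Minimal end-to-end sketch, not part of the original module. It assumes
    # the Connection constructor accepts a list of (host, port) tuples (as in
    # stomp.py 3.x) and that a STOMP broker is listening on localhost:61613.
    conn = Connection([('localhost', 61613)])
    conn.start()                        # spawn the receiver thread
    conn.connect(wait=True)             # block until CONNECTED arrives
    conn.subscribe(destination='/queue/test', id='1', ack='auto')
    conn.send('hello world', destination='/queue/test')
    conn.stop()                         # DISCONNECT plus clean thread shutdown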
| apache-2.0 | -1,741,751,368,077,238,500 | 36.698819 | 137 | 0.534593 | false |
mode89/snn | izhikevich/da.py | 1 | 3623 | import matplotlib.pyplot as plt
import numpy
import random
random.seed(0)
T = 100000
N = 1000
Ne = int(N * 0.8)
Ni = N - Ne
M = int(N * 0.1)
D = 20
a = numpy.concatenate((
0.02 * numpy.ones(Ne),
0.1 * numpy.ones(Ni)
))
d = numpy.concatenate((
8 * numpy.ones(Ne),
2 * numpy.ones(Ni)
))
# generate post-synaptic connections
post = numpy.empty((N, M), dtype=numpy.int)
for i in range(Ne):
post[i,:] = random.sample(range(N), M)
for i in range(Ne, N):
post[i,:] = random.sample(range(Ne), M)
# find pre-synaptic connections to excitatory neurons
pre = [[] for i in range(N)]
for i in range(Ne):
for j in range(M):
pre[post[i,j]].append(i)
# generate delays
delays = [[[] for i in range(D)] for j in range(N)]
for i in range(Ne):
for j in range(M):
delays[i][int(D * random.random())].append(post[i,j])
for i in range(Ne, N):
for j in range(M):
delays[i][0].append(post[i,j])
# generate matrix of synaptic weights
s = numpy.zeros((N, N))
for i in range(Ne):
s[i, post[i,:]] = 6.0
for i in range(Ne, N):
s[i, post[i,:]] = -5.0
v = -65 * numpy.ones(N)
u = 0.2 * v
firings = []
STDP = numpy.zeros(N)
sm = 4.0
sd = numpy.zeros((N, N))
DA = 0
rew = []
n1 = 700
n2 = post[n1, 0]
s[n1, n2] = 0.0
interval = 20
n1f = []
n2f = []
class data:
n0 = 700
n1 = post[n0, 0]
stdp0 = []
stdp1 = []
s01 = []
da = []
for t in range(T):
print(t)
# provide random input
I = 13.0 * numpy.array([random.uniform(-0.5, 0.5) for i in range(N)])
# identify fired neurons
fired = numpy.argwhere(v >= 30)
if fired.size > 0:
v[fired] = -65.0
u[fired] = u[fired] + d[fired]
# deliver spikes to post-synaptic neurons
firings.append(fired)
for time in range(min(D, len(firings))):
for fired_neuron in firings[t - time]:
post_neurons = delays[fired_neuron][time]
if len(post_neurons) > 0:
I[post_neurons] += s[fired_neuron, post_neurons]
    # update membrane potentials (two 0.5 ms Euler steps for stability)
for i in range(2):
v += 0.5 * ((0.04 * v + 5.0) * v + 140.0 - u + I)
u += a * (0.2 * v - u)
# update synaptic weights
STDP[fired] = 0.1
for fired_neuron in fired:
if fired_neuron < Ne:
post_neurons = post[fired_neuron,:]
sd[fired_neuron, post_neurons] -= 1.2 * STDP[post_neurons]
pre_neurons = pre[fired_neuron]
sd[pre_neurons, fired_neuron] += STDP[pre_neurons]
STDP *= 0.95
DA *= 0.995
if t % 10 == 0:
s[0:Ne,:] = numpy.maximum(0.0, numpy.minimum(sm,
s[0:Ne,:] + (0.002 + DA) * sd[0:Ne,:]))
sd *= 0.99
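    # Hedged note: sd is an eligibility trace and DA a global dopamine signal,
    # so weights only move when reward arrives while the trace is still warm:
    # every 10 ms, s += (0.002 + DA) * sd, followed by the decay sd *= 0.99.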
if numpy.any(fired == n1):
n1f.append(t)
if numpy.any(fired == n2):
n2f.append(t)
    if len(n1f) > 0 and len(n2f) > 0:
        if t - n1f[-1] < interval and n2f[-1] > n1f[-1]:
print("Coincident spiking")
rew.append(t + 1000 + int(2000 * random.random()))
if any([it == t for it in rew]):
print("Rewarding")
DA += 0.5
data.stdp0.append(STDP[data.n0])
data.stdp1.append(STDP[data.n1])
data.s01.append(s[data.n0, data.n1])
data.da.append(DA)
x = []
y = []
for t in range(T):
for fired in firings[t]:
x.append(t)
y.append(fired)
plt.subplot(411)
plt.scatter(x, y, color="black", marker=".")
plt.xlim(0, T)
plt.ylim(0, N)
plt.subplot(412)
plt.plot(
range(T), data.stdp0,
range(T), data.stdp1)
plt.xlim(0, T)
plt.subplot(413)
plt.plot(range(T), data.s01)
plt.xlim(0, T)
plt.subplot(414)
plt.plot(range(T), data.da)
plt.xlim(0, T)
plt.show()
| mit | 6,536,851,437,615,077,000 | 21.226994 | 73 | 0.553133 | false |
MontpellierRessourcesImagerie/openmicroscopy | components/tools/OmeroPy/src/omero/plugins/import.py | 1 | 23476 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2016 Glencoe Software, Inc. All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Startup plugin for command-line importer.
"""
import os
import csv
import sys
import shlex
from omero.cli import BaseControl, CLI
import omero.java
from omero_ext.argparse import SUPPRESS
from path import path
START_CLASS = "ome.formats.importer.cli.CommandLineImporter"
TEST_CLASS = "ome.formats.test.util.TestEngine"
HELP = """Run the Java-based command-line importer
This is a Python wrapper around the Java importer. Login is handled by Python
OMERO.cli. To see more options, use "--javahelp".
Options marked with "**" are passed strictly to Java. If they interfere with
any of the Python arguments, you may need to precede your arguments with a
"--".
Bulk imports:
Rather than passing one or more files to the import command, a single
dictionary-like file (e.g. yml or json) can be passed to the `--bulk`
argument. Most keys in the bulk file will be treated like additional
command-line arguments. Special keys include:
* columns A list of columns for parsing the value of path
* continue Like the "-c" option, continues importing after errors
* dry_run Prints out additional arguments rather than running them
* include Relative path (from the bulk file) of a parent bulk file
* path A file which will be parsed line by line based on its file
ending. Each line contains zero or more keys along with a
single file to be imported. Options for formats include:
- .tsv and .csv files will be parsed by the csv library
- other files will be parsed with shlex, unless no columns
are specified, in which case each line is treated as a
single file path
"""
EXAMPLES = """
Examples:
# Display help
$ bin/omero import -h
# Import foo.tiff using current login
$ bin/omero import ~/Data/my_file.dv
# Import foo.tiff using input credentials
$ bin/omero import -s localhost -u user -w password foo.tiff
# Set Java debugging level to ALL
$ bin/omero import foo.tiff -- --debug=ALL
# Display used files for importing foo.tiff
$ bin/omero import foo.tiff -f
# Limit debugging output
$ bin/omero import -- --debug=ERROR foo.tiff
For additional information, see:
https://docs.openmicroscopy.org/latest/omero/users/cli/import.html
Report bugs to <[email protected]>
"""
TESTHELP = """Run the Importer TestEngine suite (devs-only)"""
DEBUG_CHOICES = ["ALL", "DEBUG", "ERROR", "FATAL", "INFO", "TRACE", "WARN"]
OUTPUT_CHOICES = ["ids", "legacy", "yaml"]
SKIP_CHOICES = ['all', 'checksum', 'minmax', 'thumbnails', 'upgrade']
NO_ARG = object()
class CommandArguments(object):
def __init__(self, ctx, args):
self.__ctx = ctx
self.__args = args
self.__accepts = set()
self.__added = dict()
self.__java_initial = list()
self.__java_additional = list()
self.__py_initial = list()
self.__py_additional = list()
# Python arguments
self.__py_keys = (
"javahelp", "skip", "file", "errs", "logback",
"port", "password", "group", "create", "func",
"bulk", "prog", "user", "key", "path", "logprefix",
"JAVA_DEBUG", "quiet", "server", "depth", "clientdir",
"sudo")
self.set_login_arguments(ctx, args)
self.set_skip_arguments(args)
for key in vars(args):
self.__accepts.add(key)
val = getattr(args, key)
if key in self.__py_keys:
# Place the Python elements on the CommandArguments
# instance so that it behaves like `args`
setattr(self, key, val)
self.append_arg(self.__py_initial, key, val)
elif not val:
# If there's no value, do nothing
pass
else:
self.append_arg(self.__java_initial, key, val)
def append_arg(self, cmd_list, key, val=NO_ARG):
arg_list = self.build_arg_list(key, val)
cmd_list.extend(arg_list)
def reset_arg(self, cmd_list, idx, key, val=NO_ARG):
arg_list = self.build_arg_list(key, val)
cmd_list[idx:idx+len(arg_list)] = arg_list
def build_arg_list(self, key, val=NO_ARG):
arg_list = []
if len(key) == 1:
arg_list.append("-"+key)
if val != NO_ARG:
if isinstance(val, (str, unicode)):
arg_list.append(val)
else:
key = key.replace("_", "-")
if val == NO_ARG:
arg_list.append("--%s" % key)
elif isinstance(val, (str, unicode)):
arg_list.append(
"--%s=%s" % (key, val))
else:
arg_list.append("--%s" % key)
return arg_list
def set_path(self, path):
if not isinstance(path, list):
self.__ctx.die(202, "Path is not a list")
else:
self.path = path
def java_args(self):
rv = list()
rv.extend(self.__java_initial)
rv.extend(self.__java_additional)
rv.extend(self.path)
if self.JAVA_DEBUG:
# Since "args.debug" is used by omero/cli.py itself,
# uses of "--debug" *after* the `import` command are
# handled by placing them in this special variable.
rv.append("--debug=%s" % self.JAVA_DEBUG)
return rv
def initial_args(self):
rv = list()
rv.extend(self.__py_initial)
rv.extend(self.__java_initial)
return rv
def added_args(self):
rv = list()
rv.extend(self.__py_additional)
rv.extend(self.__java_additional)
rv.extend(self.path)
return rv
def accepts(self, key):
return key in self.__accepts
def add(self, key, val=NO_ARG):
idx = None
if key in self.__added:
idx = self.__added[key]
if key in self.__py_keys:
# First we check if this is a Python argument, in which
# case it's set directly on the instance itself. This
# may need to be later set elsewhere if multiple bulk
# files are supported.
setattr(self, key, val)
where = self.__py_additional
elif not self.accepts(key):
self.__ctx.die(200, "Unknown argument: %s" % key)
else:
where = self.__java_additional
if idx is None:
idx = len(where)
self.append_arg(where, key, val)
self.__added[key] = idx
else:
self.reset_arg(where, idx, key, val)
def set_login_arguments(self, ctx, args):
"""Set the connection arguments"""
if args.javahelp:
self.__java_initial.append("-h")
# Connection is required unless help arguments or -f is passed
connection_required = ("-h" not in self.__java_initial and
not args.f and
not args.advanced_help)
if connection_required:
client = ctx.conn(args)
host = client.getProperty("omero.host")
port = client.getProperty("omero.port")
session = client.getSessionId()
self.__java_initial.extend(["-s", host])
self.__java_initial.extend(["-p", port])
self.__java_initial.extend(["-k", session])
def set_skip_arguments(self, args):
"""Set the arguments to skip steps during import"""
if not args.skip:
return
if ('all' in args.skip or 'checksum' in args.skip):
self.__java_initial.append("--checksum-algorithm=File-Size-64")
if ('all' in args.skip or 'thumbnails' in args.skip):
self.__java_initial.append("--no-thumbnails")
if ('all' in args.skip or 'minmax' in args.skip):
self.__java_initial.append("--no-stats-info")
if ('all' in args.skip or 'upgrade' in args.skip):
self.__java_initial.append("--no-upgrade-check")
def open_files(self):
# Open file handles for stdout/stderr if applicable
out = self.open_log(self.__args.file, self.__args.logprefix)
err = self.open_log(self.__args.errs, self.__args.logprefix)
return out, err
def open_log(self, file, prefix=None):
if not file:
return None
if prefix:
file = os.path.sep.join([prefix, file])
dir = os.path.dirname(file)
if not os.path.exists(dir):
os.makedirs(dir)
return open(file, "w")
class ImportControl(BaseControl):
COMMAND = [START_CLASS]
def _configure(self, parser):
parser.add_login_arguments()
parser.add_argument(
"--javahelp", "--java-help",
action="store_true", help="Show the Java help text")
# The following arguments are strictly used by Python
# The "---" form is kept for backwards compatibility.
py_group = parser.add_argument_group(
'Python arguments',
'Optional arguments which are used to configure import.')
def add_python_argument(*args, **kwargs):
py_group.add_argument(*args, **kwargs)
for name, help in (
("bulk", "Bulk YAML file for driving multiple imports"),
("logprefix", "Directory or file prefix for --file and --errs"),
("file", "File for storing the standard out of the Java process"),
("errs", "File for storing the standard err of the Java process")
):
add_python_argument("--%s" % name, nargs="?", help=help)
add_python_argument("---%s" % name, nargs="?", help=SUPPRESS)
add_python_argument(
"--clientdir", type=str,
help="Path to the directory containing the client JARs. "
" Default: lib/client")
add_python_argument(
"--logback", type=str,
help="Path to a logback xml file. "
" Default: etc/logback-cli.xml")
# The following arguments are strictly passed to Java
name_group = parser.add_argument_group(
'Naming arguments', 'Optional arguments passed strictly to Java.')
def add_java_name_argument(*args, **kwargs):
name_group.add_argument(*args, **kwargs)
add_java_name_argument(
"-n", "--name",
help="Image or plate name to use (**)",
metavar="NAME")
add_java_name_argument(
"-x", "--description",
help="Image or plate description to use (**)",
metavar="DESCRIPTION")
# Deprecated naming arguments
add_java_name_argument(
"--plate_name",
help=SUPPRESS)
add_java_name_argument(
"--plate_description",
help=SUPPRESS)
# Feedback options
feedback_group = parser.add_argument_group(
'Feedback arguments',
'Optional arguments passed strictly to Java allowing to report'
' errors to the OME team.')
def add_feedback_argument(*args, **kwargs):
feedback_group.add_argument(*args, **kwargs)
add_feedback_argument(
"--report", action="store_true",
help="Report errors to the OME team (**)")
add_feedback_argument(
"--upload", action="store_true",
help=("Upload broken files and log file (if any) with report."
" Required --report (**)"))
add_feedback_argument(
"--logs", action="store_true",
help=("Upload log file (if any) with report."
" Required --report (**)"))
add_feedback_argument(
"--email",
help="Email for reported errors. Required --report (**)",
metavar="EMAIL")
add_feedback_argument(
"--qa-baseurl",
help=SUPPRESS)
# Annotation options
annotation_group = parser.add_argument_group(
'Annotation arguments',
'Optional arguments passed strictly to Java allowing to annotate'
' imports.')
def add_annotation_argument(*args, **kwargs):
annotation_group.add_argument(*args, **kwargs)
add_annotation_argument(
"--annotation-ns", metavar="ANNOTATION_NS",
help="Namespace to use for subsequent annotation (**)")
add_annotation_argument(
"--annotation-text", metavar="ANNOTATION_TEXT",
help="Content for a text annotation (**)")
add_annotation_argument(
"--annotation-link",
metavar="ANNOTATION_LINK",
help="Comment annotation ID to link all images to (**)")
add_annotation_argument(
"--annotation_ns", metavar="ANNOTATION_NS",
help=SUPPRESS)
add_annotation_argument(
"--annotation_text", metavar="ANNOTATION_TEXT",
help=SUPPRESS)
add_annotation_argument(
"--annotation_link", metavar="ANNOTATION_LINK",
help=SUPPRESS)
java_group = parser.add_argument_group(
'Java arguments', 'Optional arguments passed strictly to Java')
def add_java_argument(*args, **kwargs):
java_group.add_argument(*args, **kwargs)
add_java_argument(
"-f", action="store_true",
help="Display the used files and exit (**)")
add_java_argument(
"-c", action="store_true",
help="Continue importing after errors (**)")
add_java_argument(
"-l",
help="Use the list of readers rather than the default (**)",
metavar="READER_FILE")
add_java_argument(
"-d",
help="OMERO dataset ID to import image into (**)",
metavar="DATASET_ID")
add_java_argument(
"-r",
help="OMERO screen ID to import plate into (**)",
metavar="SCREEN_ID")
add_java_argument(
"-T", "--target",
help="OMERO target specification (**)",
metavar="TARGET")
add_java_argument(
"--debug", choices=DEBUG_CHOICES,
help="Turn debug logging on (**)",
metavar="LEVEL", dest="JAVA_DEBUG")
add_java_argument(
"--output", choices=OUTPUT_CHOICES,
help="Set an alternative output style",
metavar="TYPE")
# Arguments previously *following" `--`
advjava_group = parser.add_argument_group(
'Advanced Java arguments', (
                'Optional arguments passed strictly to Java. '
'For more information, see --advanced-help'))
def add_advjava_argument(*args, **kwargs):
advjava_group.add_argument(*args, **kwargs)
add_advjava_argument(
"--advanced-help", action="store_true",
help="Show the advanced help text")
add_advjava_argument(
"--transfer", nargs="?", metavar="TYPE",
help="Transfer methods like in-place import")
add_advjava_argument(
"--exclude", nargs="?", metavar="TYPE",
help="Exclusion filters for preventing re-import")
add_advjava_argument(
"--checksum-algorithm", nargs="?", metavar="TYPE",
help="Alternative hashing mechanisms balancing speed & accuracy")
# Unsure on these.
add_python_argument(
"--depth", default=4, type=int,
help="Number of directories to scan down for files")
add_python_argument(
"--skip", type=str, choices=SKIP_CHOICES, action='append',
help="Optional step to skip during import")
add_python_argument(
"path", nargs="*",
help="Path to be passed to the Java process")
parser.set_defaults(func=self.importer)
def importer(self, args):
if args.clientdir:
client_dir = path(args.clientdir)
else:
client_dir = self.ctx.dir / "lib" / "client"
etc_dir = self.ctx.dir / "etc"
if args.logback:
xml_file = path(args.logback)
else:
xml_file = etc_dir / "logback-cli.xml"
logback = "-Dlogback.configurationFile=%s" % xml_file
try:
classpath = [file.abspath() for file in client_dir.files("*.jar")]
except OSError as e:
self.ctx.die(102, "Cannot get JAR files from '%s' (%s)"
% (client_dir, e.strerror))
if not classpath:
self.ctx.die(103, "No JAR files found under '%s'" % client_dir)
command_args = CommandArguments(self.ctx, args)
xargs = [logback, "-Xmx1024M", "-cp", os.pathsep.join(classpath)]
xargs.append("-Domero.import.depth=%s" % args.depth)
if args.bulk and args.path:
self.ctx.die(104, "When using bulk import, omit paths")
elif args.bulk:
self.bulk_import(command_args, xargs)
else:
self.do_import(command_args, xargs)
def do_import(self, command_args, xargs):
out = err = None
try:
import_command = self.COMMAND + command_args.java_args()
out, err = command_args.open_files()
p = omero.java.popen(
import_command, debug=False, xargs=xargs,
stdout=out, stderr=err)
self.ctx.rv = p.wait()
finally:
# Make sure file handles are closed
if out:
out.close()
if err:
err.close()
def bulk_import(self, command_args, xargs):
try:
from yaml import safe_load
except ImportError:
self.ctx.die(105, "yaml is unsupported")
old_pwd = os.getcwd()
try:
# Walk the .yml graph looking for includes
# and load them all so that the top parent
# values can be overwritten.
contents = list()
bulkfile = command_args.bulk
while bulkfile:
bulkfile = os.path.abspath(bulkfile)
parent = os.path.dirname(bulkfile)
with open(bulkfile, "r") as f:
data = safe_load(f)
contents.append((bulkfile, parent, data))
bulkfile = data.get("include")
os.chdir(parent)
# TODO: included files are updated based on the including
# file but other file paths aren't!
bulk = dict()
for bulkfile, parent, data in reversed(contents):
bulk.update(data)
os.chdir(parent)
failed = 0
total = 0
for cont in self.parse_bulk(bulk, command_args):
if command_args.dry_run:
rv = ['"%s"' % x for x in command_args.added_args()]
rv = " ".join(rv)
self.ctx.out(rv)
else:
self.do_import(command_args, xargs)
if self.ctx.rv:
failed += 1
total += self.ctx.rv
if cont:
msg = "Import failed with error code: %s. Continuing"
self.ctx.err(msg % self.ctx.rv)
else:
msg = "Import failed. Use -c to continue after errors"
self.ctx.die(106, msg)
# Fail if any import failed
self.ctx.rv = total
if failed:
self.ctx.err("%x failed imports" % failed)
finally:
os.chdir(old_pwd)
def parse_bulk(self, bulk, command_args):
# Known keys with special handling
cont = False
command_args.dry_run = False
if "dry_run" in bulk:
dry_run = bulk.pop("dry_run")
command_args.dry_run = dry_run
if "continue" in bulk:
cont = True
c = bulk.pop("continue")
if bool(c):
command_args.add("c")
if "path" not in bulk:
# Required until @file format is implemented
self.ctx.die(107, "No path specified")
path = bulk.pop("path")
cols = None
if "columns" in bulk:
cols = bulk.pop("columns")
if "include" in bulk:
bulk.pop("include")
# Now parse all other keys
for key in bulk:
command_args.add(key, bulk[key])
# All properties are set, yield for each path
# to be imported in turn. The value for `cont`
# is yielded so that the caller knows whether
# or not an error should be fatal.
if not cols:
# No parsing necessary
function = self.parse_text
else:
function = self.parse_shlex
if path.endswith(".tsv"):
function = self.parse_tsv
elif path.endswith(".csv"):
function = self.parse_csv
for parts in function(path):
if not cols:
command_args.set_path(parts)
else:
for idx, col in enumerate(cols):
if col == "path":
command_args.set_path([parts[idx]])
else:
command_args.add(col, parts[idx])
yield cont
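    # Hedged example of the column mapping above: with columns
    # ["target", "path"], a .tsv line such as
    #   Dataset:name:grant-2016<TAB>/data/plate1.fake
    # sets --target and the import path for that single import run.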
    def parse_text(self, path, parse=False):
        with open(path, "r") as o:
            for line in o:
                line = line.strip()
                if parse:
                    yield shlex.split(line)
                else:
                    yield [line]
def parse_shlex(self, path):
for line in self.parse_text(path, parse=True):
yield line
def parse_tsv(self, path, delimiter="\t"):
for line in self.parse_csv(path, delimiter):
yield line
def parse_csv(self, path, delimiter=","):
with open(path, "r") as data:
for line in csv.reader(data, delimiter=delimiter):
yield line
class TestEngine(ImportControl):
COMMAND = [TEST_CLASS]
try:
register("import", ImportControl, HELP, epilog=EXAMPLES)
register("testengine", TestEngine, TESTHELP)
except NameError:
if __name__ == "__main__":
cli = CLI()
cli.register("import", ImportControl, HELP, epilog=EXAMPLES)
cli.register("testengine", TestEngine, TESTHELP)
cli.invoke(sys.argv[1:])
| gpl-2.0 | -1,096,505,063,967,057,400 | 34.732116 | 78 | 0.553672 | false |
bellhops/TapeDeck | tapedeck/deck/models.py | 1 | 1249 | import os
from binascii import hexlify
from django.db import models
def _createId():
return hexlify(os.urandom(8))
class Deck(models.Model):
hash = models.CharField(max_length=256, primary_key=True, default=_createId)
branch = models.ForeignKey('branch.Branch', on_delete=models.CASCADE)
version = models.ForeignKey('version.Version', on_delete=models.CASCADE)
previous = models.OneToOneField('deck.Deck', on_delete=models.SET_NULL, blank=True, null=True, related_name='next')
file = models.FileField(upload_to='decks')
uploaded_at = models.DateTimeField(auto_now_add=True)
active = models.BooleanField(default=True)
def save(self, *args, **kwargs):
# Expire other decks for this branch/version combination, and put this one at the end of the list.
overlapping_decks = Deck.objects.filter(branch=self.branch, version=self.version)
if overlapping_decks.exists():
try:
active_deck = overlapping_decks.filter(active=True).latest('uploaded_at')
except Deck.DoesNotExist:
pass
else:
self.previous = active_deck
overlapping_decks.update(active=False)
super().save(*args, **kwargs)
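# Hedged sketch of the chaining behaviour implemented in save() above
# (b, v, f1 and f2 are assumed fixture objects):
#   d1 = Deck.objects.create(branch=b, version=v, file=f1)  # active
#   d2 = Deck.objects.create(branch=b, version=v, file=f2)  # active
#   assert d2.previous == d1 and not Deck.objects.get(pk=d1.pk).active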
| mit | -3,008,835,626,457,854,500 | 39.290323 | 119 | 0.669335 | false |
qualitio/qualitio | qualitio/settings.py | 1 | 5111 | import os
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Admin Qualitio', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_PATH, 'data.sqlite'),
}
}
TIME_ZONE = 'Europe/Warsaw'
LANGUAGE_CODE = 'en'
SITE_ID = 1
USE_I18N = True
DATE_FORMAT = "d-m-Y"
DATETIME_FORMAT = "d-m-Y, H:i:s"
DATE_INPUT_FORMATS = ('%d-%m-%Y',)
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'static')
MEDIA_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static_admin/'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'dbtemplates.loader.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'qualitio.organizations.middleware.OrganizationMiddleware',
'qualitio.organizations.middleware.ProjectMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'qualitio.core.middleware.LoginRequiredMiddleware',
'qualitio.core.middleware.QueriesCounterMiddleware',
'django.middleware.transaction.TransactionMiddleware',
)
ROOT_URLCONF = 'qualitio.urls'
LOGIN_REDIRECT_URL = "/"
LOGIN_URL = '/login/'
LOGIN_EXEMPT_URLS = (
(r'^$', lambda request: request.organization is None),
r'^r/.*',
r'^none/$',
r'^static/',
r'^login/',
r'^inactive/',
r'^admin/',
r'^register/.*',
r'^associate/*',
r'^complete/*',
r'^project/(?P<slug>[\w-]+)/report/external/*',
r'^__debug__/.*',
r'^api/.*',
r'^googleapps_setup/$',
r'^google_checkout/$',
r'^paypal_ipn/$',
)
PROJECT_EXEMPT_URLS = (
r'^static/.*',
r'^admin/.*',
r'^login/.*',
r'^register/.*',
r'^associate/*',
r'^complete/*',
r'^__debug__/.*',
r'^api/.*',
r'^project/new/.*',
)
ORGANIZATION_EXEMPT_URLS = (
r'^static/',
r'^admin/',
)
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.webdesign',
'django.contrib.markup',
'django.contrib.humanize',
'mptt',
'social_auth',
'django_nose',
'reversion',
'south',
'pagination',
'compressor',
'dbtemplates',
'tastypie',
'articles',
'django_extensions',
    'qualitio.core.custommodel', # internal core django application
'qualitio.core',
'qualitio.organizations',
'qualitio.require',
'qualitio.report',
'qualitio.execute',
'qualitio.store',
'qualitio.filter',
'qualitio.actions',
'qualitio.glossary',
'qualitio.payments',
'qualitio.customizations',
)
TEMPLATE_CONTEXT_PROCESSORS = ("django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"qualitio.core.context_processors.settings",
"qualitio.core.context_processors.development",
"qualitio.core.context_processors.core",
"qualitio.core.context_processors.module",
"qualitio.organizations.context_processors.main")
AUTH_PROFILE_MODULE = 'organizations.UserProfile'
SOCIAL_AUTH_IMPORT_BACKENDS = (
'qualitio.googleapps.backends',
)
AUTHENTICATION_BACKENDS = (
'qualitio.googleapps.backends.GoogleBackend',
'qualitio.googleapps.backends.GoogleAppsBackend',
'qualitio.organizations.auth.backends.OrganizationModelBackend',
)
MPTT_ADMIN_LEVEL_INDENT = 30
ISSUE_BACKEND = "qualitio.execute.backends.bugzilla"
ISSUE_BACKEND_ABSOLUTE_URL = "https://bugzilla.mozilla.org/show_bug.cgi?id=%s"
ISSUE_BACKEND_BUGZILLA_URL = "https://bugzilla.mozilla.org/"
SOUTH_TESTS_MIGRATE = False
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.CSSMinFilter']
COMPRESS = False
DBTEMPLATES_CACHE_BACKEND = 'dummy://127.0.0.1/'
DBTEMPLATES_USE_REVERSION = True
DBTEMPLATES_MEDIA_PREFIX = MEDIA_URL
DBTEMPLATES_USE_CODEMIRROR = False
DBTEMPLATES_AUTO_POPULATE_CONTENT = False
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_PORT = 587
DEFAULT_FROM_EMAIL = "Qualitio Notifications <[email protected]>"
try:
from local_settings import *
except ImportError:
pass
| gpl-3.0 | -5,309,156,436,536,011,000 | 24.683417 | 85 | 0.638231 | false |
nrupatunga/PY-GOTURN | goturn/loader/loader_imagenet.py | 1 | 5893 | # Date: Nrupatunga: Tuesday 04 July 2017
# Email: [email protected]
# Name: Nrupatunga
# Description: loading Imagenet dataset
from __future__ import print_function
import os
import cv2
import glob
from annotation import annotation
import xml.etree.ElementTree as ET
from ..logger.logger import setup_logger
from ..helper import config
kMaxRatio = 0.66
class loader_imagenet:
"""Docstring for loader_imagenetdet. """
def __init__(self, imagenet_folder, annotations_folder, logger):
"""TODO: to be defined1. """
self.logger = logger
self.imagenet_folder = imagenet_folder
self.annotations_folder = annotations_folder
if not os.path.isdir(imagenet_folder):
logger.error('{} is not a valid directory'.format(imagenet_folder))
def loaderImageNetDet(self):
"""TODO: Docstring for get_videos.
:returns: TODO
"""
logger = self.logger
imagenet_subdirs = sorted(self.find_subfolders(self.annotations_folder))
num_annotations = 0
list_of_annotations_out = []
for i, imgnet_sub_folder in enumerate(imagenet_subdirs):
annotations_files = sorted(glob.glob(os.path.join(self.annotations_folder, imgnet_sub_folder, '*.xml')))
logger.info('Loading {}/{} - annotation file from folder = {}'.format(i + 1, len(imagenet_subdirs), imgnet_sub_folder))
for ann in annotations_files:
list_of_annotations, num_ann_curr = self.load_annotation_file(ann)
num_annotations = num_annotations + num_ann_curr
if len(list_of_annotations) == 0:
continue
list_of_annotations_out.append(list_of_annotations)
logger.info('Found {} annotations from {} images'.format(num_annotations, len(list_of_annotations_out)))
# save it for future use
self.list_of_annotations_out = list_of_annotations_out
self.num_annotations = num_annotations
return list_of_annotations_out
def find_subfolders(self, imagenet_folder):
"""TODO: Docstring for find_subfolders.
:vot_folder: directory for vot videos
:returns: list of video sub directories
"""
return [dir_name for dir_name in os.listdir(imagenet_folder) if os.path.isdir(os.path.join(imagenet_folder, dir_name))]
def load_annotation_file(self, annotation_file):
"""TODO: Docstring for load_annotation_file.
:returns: TODO
"""
list_of_annotations = []
num_annotations = 0
root = ET.parse(annotation_file).getroot()
folder = root.find('folder').text
filename = root.find('filename').text
size = root.find('size')
disp_width = int(size.find('width').text)
disp_height = int(size.find('height').text)
for obj in root.findall('object'):
bbox = obj.find('bndbox')
xmin = int(bbox.find('xmin').text)
xmax = int(bbox.find('xmax').text)
ymin = int(bbox.find('ymin').text)
ymax = int(bbox.find('ymax').text)
width = xmax - xmin
height = ymax - ymin
if width > (kMaxRatio * disp_width) or height > (kMaxRatio * disp_height):
continue
if ((xmin < 0) or (ymin < 0) or (xmax <= xmin) or (ymax <= ymin)):
continue
objAnnotation = annotation()
objAnnotation.setbbox(xmin, xmax, ymin, ymax)
objAnnotation.setWidthHeight(disp_width, disp_height)
objAnnotation.setImagePath(os.path.join(folder, filename))
list_of_annotations.append(objAnnotation)
num_annotations = num_annotations + 1
return list_of_annotations, num_annotations
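    # Illustrative numbers for the kMaxRatio filter above: on a 500x375 image,
    # boxes wider than 330 px or taller than 247 px are skipped, so near
    # full-frame objects never enter the training set.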
def load_annotation(self, image_num, annotation_num):
"""TODO: Docstring for load_annotation.
:returns: TODO
"""
logger = self.logger
images = self.list_of_annotations_out
list_annotations = images[image_num]
random_ann = list_annotations[annotation_num]
img_path = os.path.join(self.imagenet_folder, random_ann.image_path + '.JPEG')
if config.DEBUG:
img_path = "/media/nrupatunga/Data-Backup/DL/goturn/ILSVRC2014/ILSVRC2014_DET_train/ILSVRC2014_train_0005/ILSVRC2014_train_00059375.JPEG"
random_ann.bbox.x1 = 243
random_ann.bbox.y1 = 157
random_ann.bbox.x2 = 278
random_ann.bbox.y2 = 176
random_ann.disp_height = 375
random_ann.disp_width = 500
image = cv2.imread(img_path)
img_height = image.shape[0]
img_width = image.shape[1]
        sc_factor_1 = 1.0
        sc_factor_2 = 1.0
if img_height != random_ann.disp_height or img_width != random_ann.disp_width:
logger.info('Image Number = {}, Annotation Number = {}, Image file = {}'.format(image_num, annotation_num, img_path))
logger.info('Image Size = {} x {}'.format(img_width, img_height))
logger.info('Display Size = {} x {}'.format(random_ann.disp_width, random_ann.disp_height))
sc_factor_1 = (img_height * 1.) / random_ann.disp_height
sc_factor_2 = (img_width * 1.) / random_ann.disp_width
logger.info('Factor: {} {}'.format(sc_factor_1, sc_factor_2))
bbox = random_ann.bbox
        # x coordinates scale with the width ratio, y with the height ratio
        bbox.x1 = bbox.x1 * sc_factor_2
        bbox.x2 = bbox.x2 * sc_factor_2
bbox.y1 = bbox.y1 * sc_factor_1
bbox.y2 = bbox.y2 * sc_factor_1
return image, bbox
if '__main__' == __name__:
logger = setup_logger(logfile=None)
objLoaderImgNet = loader_imagenet('/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_train/', '/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_bbox_train/', logger)
dict_list_of_annotations = objLoaderImgNet.loaderImageNetDet()
| mit | -4,537,307,078,829,376,500 | 36.535032 | 186 | 0.614628 | false |
behrtam/xpython | exercises/satellite/satellite_test.py | 1 | 1927 | import unittest
from satellite import tree_from_traversals
# Tests adapted from `problem-specifications//canonical-data.json` @ v2.0.0
class SatelliteTest(unittest.TestCase):
def test_empty_tree(self):
preorder = []
inorder = []
expected = {}
self.assertEqual(tree_from_traversals(preorder, inorder), expected)
def test_tree_with_one_item(self):
preorder = ["a"]
inorder = ["a"]
expected = {"v": "a", "l": {}, "r": {}}
self.assertEqual(tree_from_traversals(preorder, inorder), expected)
def test_tree_with_many_items(self):
preorder = ["a", "i", "x", "f", "r"]
inorder = ["i", "a", "f", "x", "r"]
expected = {
"v": "a",
"l": {"v": "i", "l": {}, "r": {}},
"r": {
"v": "x",
"l": {"v": "f", "l": {}, "r": {}},
"r": {"v": "r", "l": {}, "r": {}},
},
}
self.assertEqual(tree_from_traversals(preorder, inorder), expected)
def test_reject_traversals_of_different_length(self):
preorder = ["a", "b"]
inorder = ["b", "a", "r"]
with self.assertRaisesWithMessage(ValueError):
tree_from_traversals(preorder, inorder)
def test_reject_inconsistent_traversals_of_same_length(self):
preorder = ["x", "y", "z"]
inorder = ["a", "b", "c"]
with self.assertRaisesWithMessage(ValueError):
tree_from_traversals(preorder, inorder)
def test_reject_traversals_with_repeated_items(self):
preorder = ["a", "b", "a"]
inorder = ["b", "a", "a"]
with self.assertRaisesWithMessage(ValueError):
tree_from_traversals(preorder, inorder)
# Utility functions
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == "__main__":
unittest.main()
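# A hedged sketch of a tree_from_traversals implementation satisfying the
# tests above; the graded solution belongs in satellite.py:
#   def tree_from_traversals(preorder, inorder):
#       if sorted(preorder) != sorted(inorder) or \
#               len(set(preorder)) != len(preorder):
#           raise ValueError("traversals must describe the same unique items")
#       if not preorder:
#           return {}
#       root, rest = preorder[0], preorder[1:]
#       split = inorder.index(root)
#       return {"v": root,
#               "l": tree_from_traversals(rest[:split], inorder[:split]),
#               "r": tree_from_traversals(rest[split:], inorder[split + 1:])}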
| mit | -7,956,708,340,354,587,000 | 28.646154 | 75 | 0.532953 | false |
taschetto/computationalMethods | apoio/parse_svg.py | 1 | 1979 | import xml.etree.ElementTree as etree
import re
import sys
__author__ = 'Ramon Costi Fernandes <[email protected]>'
SVG_namespace = "http://www.w3.org/2000/svg"
SVG_fname = ''
OUTPUT_fname = 'output.txt'
coordinates_list = []
output_list = []
#Instrucoes de uso.
def usage():
print("Como executar:\n")
print("{} <{}>".format(sys.argv[0], "SVG input file"))
print("ou")
print("{} <{}> <{}>".format(sys.argv[0], "SVG input file", "OUTPUT file"))
sys.exit(1)
# Remove duplicate coordinates from the input SVG file.
def remove_duplicates(coord_list):
global coordinates_list
temp_list = []
for item in coord_list:
if item not in temp_list:
temp_list.append(item)
coordinates_list = temp_list
# Number the points.
def enumerate_coordinates():
count = 1
for item in coordinates_list:
coord = re.split(",", item)
output_list.append("{} {} {}\n".format(count, coord[0], coord[1]))
count += 1
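# Illustrative output produced by the function above (coordinates invented):
#   1 12.34 56.78
#   2 90.12 34.56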
# Generate the output file.
def write_output_file():
file = open(OUTPUT_fname, "w+")
    for item in output_list:
        file.write(item)
file.close()
# Parse the input XML file.
def parse_xml():
global coordinates_list
tree = etree.parse(SVG_fname)
root = tree.getroot()
coordinates = root.find('.//{%s}path' % SVG_namespace).get("d")
coordinates_list = re.findall("[0-9]+\.[0-9]+,[0-9]+\.[0-9]+", coordinates)
if __name__ == "__main__":
if len(sys.argv) < 2:
usage()
elif len(sys.argv) < 3:
print("Gravando resultados no arquivo de saida \"{}\"\n".format(OUTPUT_fname))
elif len(sys.argv) == 3:
OUTPUT_fname = sys.argv[2]
print("Gravando resultados no arquivo de saida \"{}\"\n".format(OUTPUT_fname))
else:
usage()
SVG_fname = sys.argv[1]
parse_xml()
remove_duplicates(coordinates_list)
enumerate_coordinates()
    write_output_file()
| mit | -5,315,818,081,049,413,000 | 25.756757 | 86 | 0.610915 | false |
nextcloud/appstore | nextcloudappstore/api/v1/serializers.py | 1 | 5412 | from django.contrib.auth import get_user_model
from parler_rest.fields import TranslatedFieldsField
from parler_rest.serializers import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.fields import SerializerMethodField, DateTimeField
from nextcloudappstore.core.models import PhpExtensionDependency, \
DatabaseDependency, Category, AppAuthor, AppRelease, Screenshot, \
AppRating, App, NextcloudRelease
from nextcloudappstore.core.validators import HttpsUrlValidator
class PhpExtensionDependencySerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField(source='php_extension.id')
version_spec = SerializerMethodField()
raw_version_spec = SerializerMethodField()
class Meta:
model = PhpExtensionDependency
fields = ('id', 'version_spec', 'raw_version_spec')
def get_version_spec(self, obj):
return obj.version_spec.replace(',', ' ')
def get_raw_version_spec(self, obj):
return obj.raw_version_spec.replace(',', ' ')
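    # Illustrative example for the two methods above: a stored spec such as
    # ">=5.6,<8.0" is serialized as ">=5.6 <8.0".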
class DatabaseDependencySerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField(source='database.id')
version_spec = SerializerMethodField()
raw_version_spec = SerializerMethodField()
class Meta:
model = DatabaseDependency
fields = ('id', 'version_spec', 'raw_version_spec')
def get_version_spec(self, obj):
return obj.version_spec.replace(',', ' ')
def get_raw_version_spec(self, obj):
return obj.raw_version_spec.replace(',', ' ')
class CategorySerializer(TranslatableModelSerializer):
translations = TranslatedFieldsField(shared_model=Category)
class Meta:
model = Category
fields = ('id', 'translations')
class NextcloudReleaseSerializer(serializers.ModelSerializer):
class Meta:
model = NextcloudRelease
fields = ('has_release', 'version', 'is_supported')
class AuthorSerializer(serializers.ModelSerializer):
class Meta:
model = AppAuthor
fields = ('name', 'mail', 'homepage')
class AppReleaseSerializer(serializers.ModelSerializer):
databases = DatabaseDependencySerializer(many=True, read_only=True,
source='databasedependencies')
php_extensions = \
PhpExtensionDependencySerializer(many=True, read_only=True,
source='phpextensiondependencies')
php_version_spec = SerializerMethodField()
platform_version_spec = SerializerMethodField()
raw_php_version_spec = SerializerMethodField()
raw_platform_version_spec = SerializerMethodField()
translations = TranslatedFieldsField(shared_model=AppRelease)
class Meta:
model = AppRelease
fields = (
'version', 'php_extensions', 'databases', 'shell_commands',
'php_version_spec', 'platform_version_spec', 'min_int_size',
'download', 'created', 'licenses', 'last_modified', 'is_nightly',
'raw_php_version_spec', 'raw_platform_version_spec', 'signature',
'translations', 'signature_digest'
)
def get_platform_version_spec(self, obj):
return obj.platform_version_spec.replace(',', ' ')
def get_php_version_spec(self, obj):
return obj.php_version_spec.replace(',', ' ')
def get_raw_platform_version_spec(self, obj):
return obj.raw_platform_version_spec.replace(',', ' ')
def get_raw_php_version_spec(self, obj):
return obj.raw_php_version_spec.replace(',', ' ')
class ScreenshotSerializer(serializers.ModelSerializer):
class Meta:
model = Screenshot
fields = ('url', 'small_thumbnail')
class AppSerializer(serializers.ModelSerializer):
releases = AppReleaseSerializer(many=True, read_only=True)
discussion = SerializerMethodField()
screenshots = ScreenshotSerializer(many=True, read_only=True)
authors = AuthorSerializer(many=True, read_only=True)
translations = TranslatedFieldsField(shared_model=App)
last_modified = DateTimeField(source='last_release')
class Meta:
model = App
fields = (
'id', 'categories', 'user_docs', 'admin_docs', 'developer_docs',
'issue_tracker', 'website', 'created', 'last_modified', 'releases',
'screenshots', 'translations', 'is_featured', 'authors',
'rating_recent', 'rating_overall', 'rating_num_recent',
'rating_num_overall', 'certificate', 'discussion'
)
def get_discussion(self, obj):
return obj.discussion_url
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = get_user_model()
fields = ('id', 'first_name', 'last_name')
class AppRatingSerializer(serializers.ModelSerializer):
user = UserSerializer(many=False, read_only=True)
translations = TranslatedFieldsField(shared_model=AppRating)
class Meta:
model = AppRating
fields = ('rating', 'rated_at', 'translations', 'user', 'app')
class AppReleaseDownloadSerializer(serializers.Serializer):
download = serializers.URLField(validators=[HttpsUrlValidator()])
signature = serializers.CharField()
nightly = serializers.BooleanField(required=False, default=False)
class AppRegisterSerializer(serializers.Serializer):
certificate = serializers.CharField()
signature = serializers.CharField()
| agpl-3.0 | -6,720,910,243,370,764,000 | 34.84106 | 79 | 0.683666 | false |
pomma89/Dessert | Dessert.Benchmarks/Common.py | 1 | 2245 | #
# Common.py
#
# Author(s):
# Alessio Parma <[email protected]>
#
# Copyright (c) 2012-2016 Alessio Parma <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import random
from Galois.MemoryRecorder import memory_usage
simTime = 1000
memRecFreq = simTime/5.0
minTimeout = simTime/100.0
maxTimeout = simTime/20.0
repetitionCount = 21
processCounts = range(500, 20500, 500)
import platform
if platform.system().lower().startswith("linux"):
tag = "simpy-linux"
else:
tag = "simpy-windows"
class Counter:
def __init__(self):
self._random = random.Random()
self._total = 0
def total(self):
return self._total
def increment(self):
self._total += 1
def randomDelay(self):
return self._random.uniform(minTimeout, maxTimeout)
class Result:
def __init__(self, eventCount, avgMemUsage):
self._eventCount = eventCount
self._avgMemUsage = avgMemUsage
def eventCount(self):
return self._eventCount
def averageMemUsage(self):
return self._avgMemUsage
def memoryRecorder(env, tally):
while True:
yield env.timeout(memRecFreq)
tally.observe(memory_usage())
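# A minimal usage sketch (commented out; assumes SimPy 3 and some tally
# object exposing an observe() method -- the names below are hypothetical):
#
#   import simpy
#   env = simpy.Environment()
#   env.process(memoryRecorder(env, tally))  # sample memory every memRecFreq
#   env.run(until=simTime)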
| mit | -4,538,445,810,943,743,500 | 30.591549 | 79 | 0.712885 | false |
google/clif | clif/python/proto.py | 1 | 5489 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Generate CLIF extension C++ source for a protobuf.
PROTO -cOUTPATH/CCNAME \
-hOUTPATH/HNAME \
--strip_dir=SYSPATH \
SYSPATH/PKGPATH/NAME.proto
reads NAME.proto and generates C++ CCNAME source and HNAME header files.
"""
import argparse
import itertools
import sys
from clif.python import gen
from clif.python import clif_types as types
from clif.python.utils import proto_util
VALID_EXT = ['.proto']
gen.PY3OUTPUT = None # Generate version-agnostic headers.
FLAGS = None
class _ParseError(Exception):
pass
def _ParseCommandline(doc, argv):
"""Define command-line flags and return parsed argv."""
parser = argparse.ArgumentParser(description=doc, add_help=False)
parser.add_argument('--source_dir', '-s', default='',
help=('The base of the source code tree to strip from'
' file names.'))
parser.add_argument('--strip_dir', '-d', default='',
help=('The base of the generated code tree to strip from'
' file names.'))
parser.add_argument('--ccdeps_out', '-c', help='output filename for base .cc')
parser.add_argument('--header_out', '-h', help='output filename for .h')
parser.add_argument('--allow_empty_package', action='store_true',
help=('Generate CLIF conversion library in ::clif '
'namespace, ADL will not work.'))
parser.add_argument('protobuf', nargs=1)
return parser.parse_args(argv[1:])
def _CppName(desc):
"""Return the fully qualified C++ name of the entity in |desc|."""
return '::'+desc.fqname.replace('.', '::')
def _PyName(desc, pkg):
"""Return the Python name of the entity in |desc| from proto package |pkg|."""
if not pkg: return desc.fqname
assert desc.fqname.startswith(pkg)
return desc.fqname[len(pkg)+1:] # Add 1 for '.' between pkg and name.
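# Illustration with a hypothetical descriptor: for pkg == 'foo.bar' and
# desc.fqname == 'foo.bar.Baz.Inner', _CppName() returns
# '::foo::bar::Baz::Inner' and _PyName() returns 'Baz.Inner'.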
def CreatePyTypeInfo(desc, path,
package_required=True, generate_service_info=False):
"""Create the type objects from the proto file descriptor in |desc|."""
  pypath = path.replace('/', '.').replace('-', '_') + '_pb2'
messages = [] # Proto messages.
p = desc.PackageName()
if p:
n = '::'+p.replace('.', '::') + '::'
else:
if package_required:
raise ValueError('Package statement required')
n = '::'
for m in desc.Messages():
messages.append(types.ProtoType(_CppName(m), _PyName(m, p), pypath, ns=n))
for e in desc.Enums():
messages.append(types.ProtoEnumType(_CppName(e), _PyName(e, p), ns=n))
if generate_service_info:
for s in desc.Services():
messages.append(types.CapsuleType(_CppName(s), _PyName(s, p), ns=n))
return messages
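# For example (hypothetical inputs), CreatePyTypeInfo(desc, 'pkg/my-proto')
# maps the generated types to the Python module 'pkg.my_proto_pb2'.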
def GenerateFrom(messages, proto_filename, clif_hdr, proto_hdr):
"""Traverse ast and generate output files."""
with open(FLAGS.header_out, 'w') as hout:
gen.WriteTo(hout, gen.Headlines(
proto_filename, [proto_hdr, 'clif/python/postconv.h']))
gen.WriteTo(hout, _GenHeader(messages))
with open(FLAGS.ccdeps_out, 'w') as cout:
gen.WriteTo(cout, gen.Headlines(
proto_filename, ['clif/python/runtime.h',
'clif/python/types.h',
clif_hdr]))
for ns, ts in itertools.groupby(messages, types.Namespace):
if ns == '::':
ns = 'clif'
gen.WriteTo(cout, gen.TypeConverters(ns, ts))
def _GenHeader(messages):
"""Helper function for GenerateFrom."""
for ns, ts in itertools.groupby(messages, types.Namespace):
yield ''
if ns == '::':
ns = 'clif'
yield gen.OpenNs(ns)
else:
yield gen.OpenNs(ns)
yield 'using namespace ::clif;'
yield ''
for t in ts:
for s in t.GenHeader():
yield s
yield ''
yield gen.CloseNs(ns)
def main(_):
assert FLAGS.ccdeps_out and FLAGS.header_out, ('Both output files '
'(-c, -h) must be specified.')
assert not FLAGS.strip_dir.endswith('/')
assert FLAGS.header_out.startswith(FLAGS.strip_dir)
strip_dir = len(FLAGS.strip_dir)+1 # +1 for '/'
hdr = FLAGS.header_out[strip_dir:]
name = src = FLAGS.protobuf[0]
assert not FLAGS.source_dir.endswith('/')
if FLAGS.source_dir and name.startswith(FLAGS.source_dir):
name = name[len(FLAGS.source_dir)+1:] # +1 for '/'
for ext in VALID_EXT:
if name.endswith(ext):
pypath = name[:-len(ext)]
break
else:
    raise NameError('Proto file should have one of the %s extensions' % VALID_EXT)
desc = proto_util.ProtoFileInfo(src, FLAGS.source_dir)
if not desc:
raise _ParseError(desc.ErrorMsg())
messages = CreatePyTypeInfo(desc, pypath, not FLAGS.allow_empty_package)
GenerateFrom(messages, name, hdr, pypath+'.pb.h')
def ParseFlags():
global FLAGS
FLAGS = _ParseCommandline(__doc__.splitlines()[0], sys.argv)
def Start():
ParseFlags()
main(0)
if __name__ == '__main__':
Start()
| apache-2.0 | -3,793,998,925,067,095,000 | 32.469512 | 80 | 0.638914 | false |
khchine5/book | lino_book/projects/homeworkschool/fixtures/demo.py | 1 | 1546 | # -*- coding: UTF-8 -*-
# Copyright 2012-2013 Luc Saffre
# License: BSD (see file COPYING for details)
from lino.utils.instantiator import Instantiator, i2d
from django.utils.translation import ugettext_lazy as _
from lino.api import dd
def objects():
#~ slot = Instantiator('courses.Slot','name start_time end_time').build
#~
#~ kw = dict(monday=True,tuesday=True,wednesday=False,thursday=True,friday=True)
#~ yield slot("Erste Stunde","16:00","17:00",**kw)
#~ yield slot("Zweite Stunde","17:00","18:00",**kw)
#~ yield slot("Dritte Stunde","18:00","19:00",**kw)
#~
#~ kw = dict(wednesday=True)
#~ yield slot("Mittwochs 13 Uhr","13:00","14:00",**kw)
#~ yield slot("Mittwochs 14 Uhr","14:00","15:00",**kw)
#~ yield slot("Mittwochs 15 Uhr","15:00","16:00",**kw)
#~ yield slot("Mittwochs 16 Uhr","16:00","17:00",**kw)
#~ yield slot("Mittwochs 17 Uhr","17:00","18:00",**kw)
#~ yield slot("Mittwochs 18 Uhr","18:00","19:00",**kw)
courses = dd.resolve_app('courses')
yield courses.Line(**dd.babelkw('name',
de=u"Deutsch Anfänger",
fr=u"Allemand débutants",
en=u"German beginners",
))
yield courses.Line(**dd.babelkw('name',
de=u"Französisch Anfänger",
fr=u"Français débutants",
en=u"French beginners",
))
| bsd-2-clause | 4,543,015,073,316,322,000 | 36.560976 | 84 | 0.529221 | false |
s0lst1c3/eaphammer | local/hostapd-eaphammer/tests/hwsim/test_radius.py | 1 | 70562 | # RADIUS tests
# Copyright (c) 2013-2016, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import binascii
import hashlib
import hmac
import logging
logger = logging.getLogger()
import os
import select
import struct
import subprocess
import threading
import time
import hostapd
from utils import HwsimSkip, require_under_vm, skip_with_fips, alloc_fail, fail_test, wait_fail_trigger
from test_ap_hs20 import build_dhcp_ack
from test_ap_ft import ft_params1
def connect(dev, ssid, wait_connect=True):
dev.connect(ssid, key_mgmt="WPA-EAP", scan_freq="2412",
eap="PSK", identity="[email protected]",
password_hex="0123456789abcdef0123456789abcdef",
wait_connect=wait_connect)
@remote_compatible
def test_radius_auth_unreachable(dev, apdev):
"""RADIUS Authentication server unreachable"""
params = hostapd.wpa2_eap_params(ssid="radius-auth")
params['auth_server_port'] = "18139"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-auth", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
if ev is None:
raise Exception("Timeout on EAP start")
logger.info("Checking for RADIUS retries")
time.sleep(4)
mib = hapd.get_mib()
if "radiusAuthClientAccessRequests" not in mib:
raise Exception("Missing MIB fields")
if int(mib["radiusAuthClientAccessRetransmissions"]) < 1:
raise Exception("Missing RADIUS Authentication retransmission")
if int(mib["radiusAuthClientPendingRequests"]) < 1:
raise Exception("Missing pending RADIUS Authentication request")
def test_radius_auth_unreachable2(dev, apdev):
"""RADIUS Authentication server unreachable (2)"""
subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
params = hostapd.wpa2_eap_params(ssid="radius-auth")
params['auth_server_addr'] = "192.168.213.17"
params['auth_server_port'] = "18139"
hapd = hostapd.add_ap(apdev[0], params)
subprocess.call(['ip', 'ro', 'del', '192.168.213.17', 'dev', 'lo'])
connect(dev[0], "radius-auth", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
if ev is None:
raise Exception("Timeout on EAP start")
logger.info("Checking for RADIUS retries")
time.sleep(4)
mib = hapd.get_mib()
if "radiusAuthClientAccessRequests" not in mib:
raise Exception("Missing MIB fields")
logger.info("radiusAuthClientAccessRetransmissions: " + mib["radiusAuthClientAccessRetransmissions"])
def test_radius_auth_unreachable3(dev, apdev):
"""RADIUS Authentication server initially unreachable, but then available"""
subprocess.call(['ip', 'ro', 'replace', 'blackhole', '192.168.213.18'])
params = hostapd.wpa2_eap_params(ssid="radius-auth")
params['auth_server_addr'] = "192.168.213.18"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-auth", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
if ev is None:
raise Exception("Timeout on EAP start")
subprocess.call(['ip', 'ro', 'del', 'blackhole', '192.168.213.18'])
time.sleep(0.1)
dev[0].request("DISCONNECT")
hapd.set('auth_server_addr_replace', '127.0.0.1')
dev[0].request("RECONNECT")
dev[0].wait_connected()
def test_radius_acct_unreachable(dev, apdev):
"""RADIUS Accounting server unreachable"""
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-acct")
logger.info("Checking for RADIUS retries")
time.sleep(4)
mib = hapd.get_mib()
if "radiusAccClientRetransmissions" not in mib:
raise Exception("Missing MIB fields")
if int(mib["radiusAccClientRetransmissions"]) < 2:
raise Exception("Missing RADIUS Accounting retransmissions")
if int(mib["radiusAccClientPendingRequests"]) < 2:
raise Exception("Missing pending RADIUS Accounting requests")
def test_radius_acct_unreachable2(dev, apdev):
"""RADIUS Accounting server unreachable(2)"""
subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "192.168.213.17"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
subprocess.call(['ip', 'ro', 'del', '192.168.213.17', 'dev', 'lo'])
connect(dev[0], "radius-acct")
logger.info("Checking for RADIUS retries")
time.sleep(4)
mib = hapd.get_mib()
if "radiusAccClientRetransmissions" not in mib:
raise Exception("Missing MIB fields")
if int(mib["radiusAccClientRetransmissions"]) < 1 and int(mib["radiusAccClientPendingRequests"]) < 1:
raise Exception("Missing pending or retransmitted RADIUS Accounting requests")
def test_radius_acct_unreachable3(dev, apdev):
"""RADIUS Accounting server initially unreachable, but then available"""
require_under_vm()
subprocess.call(['ip', 'ro', 'replace', 'blackhole', '192.168.213.18'])
as_hapd = hostapd.Hostapd("as")
as_mib_start = as_hapd.get_mib(param="radius_server")
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "192.168.213.18"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-acct")
subprocess.call(['ip', 'ro', 'del', 'blackhole', '192.168.213.18'])
time.sleep(0.1)
dev[0].request("DISCONNECT")
hapd.set('acct_server_addr_replace', '127.0.0.1')
dev[0].request("RECONNECT")
dev[0].wait_connected()
time.sleep(1)
as_mib_end = as_hapd.get_mib(param="radius_server")
req_s = int(as_mib_start['radiusAccServTotalResponses'])
req_e = int(as_mib_end['radiusAccServTotalResponses'])
if req_e <= req_s:
raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_unreachable4(dev, apdev):
"""RADIUS Accounting server unreachable and multiple STAs"""
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
for i in range(20):
connect(dev[0], "radius-acct")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
def test_radius_acct(dev, apdev):
"""RADIUS Accounting"""
as_hapd = hostapd.Hostapd("as")
as_mib_start = as_hapd.get_mib(param="radius_server")
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
params['radius_auth_req_attr'] = ["126:s:Operator", "77:s:testing",
"62:d:1"]
params['radius_acct_req_attr'] = ["126:s:Operator", "62:d:1",
"77:s:testing"]
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-acct")
dev[1].connect("radius-acct", key_mgmt="WPA-EAP", scan_freq="2412",
eap="PAX", identity="test-class",
password_hex="0123456789abcdef0123456789abcdef")
dev[2].connect("radius-acct", key_mgmt="WPA-EAP",
eap="GPSK", identity="gpsk-cui",
password="abcdefghijklmnop0123456789abcdef",
scan_freq="2412")
logger.info("Checking for RADIUS counters")
count = 0
while True:
mib = hapd.get_mib()
if int(mib['radiusAccClientResponses']) >= 3:
break
time.sleep(0.1)
count += 1
if count > 10:
raise Exception("Did not receive Accounting-Response packets")
if int(mib['radiusAccClientRetransmissions']) > 0:
raise Exception("Unexpected Accounting-Request retransmission")
as_mib_end = as_hapd.get_mib(param="radius_server")
req_s = int(as_mib_start['radiusAccServTotalRequests'])
req_e = int(as_mib_end['radiusAccServTotalRequests'])
if req_e < req_s + 2:
raise Exception("Unexpected RADIUS server acct MIB value")
acc_s = int(as_mib_start['radiusAuthServAccessAccepts'])
acc_e = int(as_mib_end['radiusAuthServAccessAccepts'])
if acc_e < acc_s + 1:
raise Exception("Unexpected RADIUS server auth MIB value")
def test_radius_acct_non_ascii_ssid(dev, apdev):
"""RADIUS Accounting and non-ASCII SSID"""
params = hostapd.wpa2_eap_params()
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
ssid2 = "740665007374"
params['ssid2'] = ssid2
hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid2=ssid2, key_mgmt="WPA-EAP", scan_freq="2412",
eap="PSK", identity="[email protected]",
password_hex="0123456789abcdef0123456789abcdef")
def test_radius_acct_pmksa_caching(dev, apdev):
"""RADIUS Accounting with PMKSA caching"""
as_hapd = hostapd.Hostapd("as")
as_mib_start = as_hapd.get_mib(param="radius_server")
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-acct")
dev[1].connect("radius-acct", key_mgmt="WPA-EAP", scan_freq="2412",
eap="PAX", identity="test-class",
password_hex="0123456789abcdef0123456789abcdef")
for d in [dev[0], dev[1]]:
d.request("REASSOCIATE")
d.wait_connected(timeout=15, error="Reassociation timed out")
count = 0
while True:
mib = hapd.get_mib()
if int(mib['radiusAccClientResponses']) >= 4:
break
time.sleep(0.1)
count += 1
if count > 10:
raise Exception("Did not receive Accounting-Response packets")
if int(mib['radiusAccClientRetransmissions']) > 0:
raise Exception("Unexpected Accounting-Request retransmission")
as_mib_end = as_hapd.get_mib(param="radius_server")
req_s = int(as_mib_start['radiusAccServTotalRequests'])
req_e = int(as_mib_end['radiusAccServTotalRequests'])
if req_e < req_s + 2:
raise Exception("Unexpected RADIUS server acct MIB value")
acc_s = int(as_mib_start['radiusAuthServAccessAccepts'])
acc_e = int(as_mib_end['radiusAuthServAccessAccepts'])
if acc_e < acc_s + 1:
raise Exception("Unexpected RADIUS server auth MIB value")
def test_radius_acct_interim(dev, apdev):
"""RADIUS Accounting interim update"""
as_hapd = hostapd.Hostapd("as")
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
params['radius_acct_interim_interval'] = "1"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-acct")
logger.info("Checking for RADIUS counters")
as_mib_start = as_hapd.get_mib(param="radius_server")
time.sleep(4.1)
as_mib_end = as_hapd.get_mib(param="radius_server")
req_s = int(as_mib_start['radiusAccServTotalRequests'])
req_e = int(as_mib_end['radiusAccServTotalRequests'])
if req_e < req_s + 3:
raise Exception("Unexpected RADIUS server acct MIB value (req_e=%d req_s=%d)" % (req_e, req_s))
def test_radius_acct_interim_unreachable(dev, apdev):
"""RADIUS Accounting interim update with unreachable server"""
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
params['radius_acct_interim_interval'] = "1"
hapd = hostapd.add_ap(apdev[0], params)
start = hapd.get_mib()
connect(dev[0], "radius-acct")
logger.info("Waiting for interium accounting updates")
time.sleep(3.1)
end = hapd.get_mib()
req_s = int(start['radiusAccClientTimeouts'])
req_e = int(end['radiusAccClientTimeouts'])
if req_e < req_s + 2:
raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_interim_unreachable2(dev, apdev):
"""RADIUS Accounting interim update with unreachable server (retry)"""
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
# Use long enough interim update interval to allow RADIUS retransmission
# case (3 seconds) to trigger first.
params['radius_acct_interim_interval'] = "4"
hapd = hostapd.add_ap(apdev[0], params)
start = hapd.get_mib()
connect(dev[0], "radius-acct")
logger.info("Waiting for interium accounting updates")
time.sleep(7.5)
end = hapd.get_mib()
req_s = int(start['radiusAccClientTimeouts'])
req_e = int(end['radiusAccClientTimeouts'])
if req_e < req_s + 2:
raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_ipaddr(dev, apdev):
"""RADIUS Accounting and Framed-IP-Address"""
try:
_test_radius_acct_ipaddr(dev, apdev)
finally:
subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
stderr=open('/dev/null', 'w'))
subprocess.call(['brctl', 'delbr', 'ap-br0'],
stderr=open('/dev/null', 'w'))
def _test_radius_acct_ipaddr(dev, apdev):
params = {"ssid": "radius-acct-open",
'acct_server_addr': "127.0.0.1",
'acct_server_port': "1813",
'acct_server_shared_secret': "radius",
'proxy_arp': '1',
'ap_isolate': '1',
'bridge': 'ap-br0'}
hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
try:
hapd.enable()
except:
# For now, do not report failures due to missing kernel support
raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
bssid = apdev[0]['bssid']
subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
addr0 = dev[0].own_addr()
pkt = build_dhcp_ack(dst_ll="ff:ff:ff:ff:ff:ff", src_ll=bssid,
ip_src="192.168.1.1", ip_dst="255.255.255.255",
yiaddr="192.168.1.123", chaddr=addr0)
if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
raise Exception("DATA_TEST_FRAME failed")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
hapd.disable()
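# The injected DHCP ACK above is how hostapd's proxyarp snooping learns the
# station's IPv4 address, which can then be reported as Framed-IP-Address in
# the accounting messages (192.168.1.123 is just a made-up test lease).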
def send_and_check_reply(srv, req, code, error_cause=0):
reply = srv.SendPacket(req)
logger.debug("RADIUS response from hostapd")
for i in list(reply.keys()):
logger.debug("%s: %s" % (i, reply[i]))
if reply.code != code:
raise Exception("Unexpected response code")
if error_cause:
if 'Error-Cause' not in reply:
raise Exception("Missing Error-Cause")
if reply['Error-Cause'][0] != error_cause:
raise Exception("Unexpected Error-Cause: {}".format(reply['Error-Cause']))
def test_radius_acct_psk(dev, apdev):
"""RADIUS Accounting - PSK"""
as_hapd = hostapd.Hostapd("as")
params = hostapd.wpa2_params(ssid="radius-acct", passphrase="12345678")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("radius-acct", psk="12345678", scan_freq="2412")
def test_radius_acct_psk_sha256(dev, apdev):
"""RADIUS Accounting - PSK SHA256"""
as_hapd = hostapd.Hostapd("as")
params = hostapd.wpa2_params(ssid="radius-acct", passphrase="12345678")
params["wpa_key_mgmt"] = "WPA-PSK-SHA256"
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("radius-acct", key_mgmt="WPA-PSK-SHA256",
psk="12345678", scan_freq="2412")
def test_radius_acct_ft_psk(dev, apdev):
"""RADIUS Accounting - FT-PSK"""
as_hapd = hostapd.Hostapd("as")
params = ft_params1(ssid="radius-acct", passphrase="12345678")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("radius-acct", key_mgmt="FT-PSK",
psk="12345678", scan_freq="2412")
def test_radius_acct_ieee8021x(dev, apdev):
"""RADIUS Accounting - IEEE 802.1X"""
skip_with_fips(dev[0])
as_hapd = hostapd.Hostapd("as")
params = hostapd.radius_params()
params["ssid"] = "radius-acct-1x"
params["ieee8021x"] = "1"
params["wep_key_len_broadcast"] = "13"
params["wep_key_len_unicast"] = "13"
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("radius-acct-1x", key_mgmt="IEEE8021X", eap="PSK",
identity="[email protected]",
password_hex="0123456789abcdef0123456789abcdef",
scan_freq="2412")
def test_radius_das_disconnect(dev, apdev):
"""RADIUS Dynamic Authorization Extensions - Disconnect"""
try:
import pyrad.client
import pyrad.packet
import pyrad.dictionary
import radius_das
except ImportError:
raise HwsimSkip("No pyrad modules available")
params = hostapd.wpa2_eap_params(ssid="radius-das")
params['radius_das_port'] = "3799"
params['radius_das_client'] = "127.0.0.1 secret"
params['radius_das_require_event_timestamp'] = "1"
params['own_ip_addr'] = "127.0.0.1"
params['nas_identifier'] = "nas.example.com"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-das")
addr = dev[0].p2p_interface_addr()
sta = hapd.get_sta(addr)
id = sta['dot1xAuthSessionId']
dict = pyrad.dictionary.Dictionary("dictionary.radius")
srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
secret=b"secret", dict=dict)
srv.retries = 1
srv.timeout = 1
logger.info("Disconnect-Request with incorrect secret")
req = radius_das.DisconnectPacket(dict=dict, secret=b"incorrect",
User_Name="foo",
NAS_Identifier="localhost",
Event_Timestamp=int(time.time()))
logger.debug(req)
try:
reply = srv.SendPacket(req)
raise Exception("Unexpected response to Disconnect-Request")
except pyrad.client.Timeout:
logger.info("Disconnect-Request with incorrect secret properly ignored")
logger.info("Disconnect-Request without Event-Timestamp")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
User_Name="[email protected]")
logger.debug(req)
try:
reply = srv.SendPacket(req)
raise Exception("Unexpected response to Disconnect-Request")
except pyrad.client.Timeout:
logger.info("Disconnect-Request without Event-Timestamp properly ignored")
logger.info("Disconnect-Request with non-matching Event-Timestamp")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
User_Name="[email protected]",
Event_Timestamp=123456789)
logger.debug(req)
try:
reply = srv.SendPacket(req)
raise Exception("Unexpected response to Disconnect-Request")
except pyrad.client.Timeout:
logger.info("Disconnect-Request with non-matching Event-Timestamp properly ignored")
logger.info("Disconnect-Request with unsupported attribute")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
User_Name="foo",
User_Password="foo",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 401)
logger.info("Disconnect-Request with invalid Calling-Station-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
User_Name="foo",
Calling_Station_Id="foo",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 407)
logger.info("Disconnect-Request with mismatching User-Name")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
User_Name="foo",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with mismatching Calling-Station-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Calling_Station_Id="12:34:56:78:90:aa",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with mismatching Acct-Session-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Acct_Session_Id="12345678-87654321",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with mismatching Acct-Session-Id (len)")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Acct_Session_Id="12345678",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with mismatching Acct-Multi-Session-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Acct_Multi_Session_Id="12345678+87654321",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with mismatching Acct-Multi-Session-Id (len)")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Acct_Multi_Session_Id="12345678",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
logger.info("Disconnect-Request with no session identification attributes")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection")
logger.info("Disconnect-Request with mismatching NAS-IP-Address")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="192.168.3.4",
Acct_Session_Id=id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 403)
logger.info("Disconnect-Request with mismatching NAS-Identifier")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_Identifier="unknown.example.com",
Acct_Session_Id=id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 403)
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection")
logger.info("Disconnect-Request with matching Acct-Session-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Acct_Session_Id=id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].wait_disconnected(timeout=10)
dev[0].wait_connected(timeout=10, error="Re-connection timed out")
logger.info("Disconnect-Request with matching Acct-Multi-Session-Id")
sta = hapd.get_sta(addr)
multi_sess_id = sta['authMultiSessionId']
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Acct_Multi_Session_Id=multi_sess_id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].wait_disconnected(timeout=10)
dev[0].wait_connected(timeout=10, error="Re-connection timed out")
logger.info("Disconnect-Request with matching User-Name")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_Identifier="nas.example.com",
User_Name="[email protected]",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].wait_disconnected(timeout=10)
dev[0].wait_connected(timeout=10, error="Re-connection timed out")
logger.info("Disconnect-Request with matching Calling-Station-Id")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
Calling_Station_Id=addr,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].wait_disconnected(timeout=10)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED", "CTRL-EVENT-CONNECTED"])
if ev is None:
raise Exception("Timeout while waiting for re-connection")
if "CTRL-EVENT-EAP-STARTED" not in ev:
raise Exception("Unexpected skipping of EAP authentication in reconnection")
dev[0].wait_connected(timeout=10, error="Re-connection timed out")
logger.info("Disconnect-Request with matching Calling-Station-Id and non-matching CUI")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Calling_Station_Id=addr,
Chargeable_User_Identity="[email protected]",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=503)
logger.info("Disconnect-Request with matching CUI")
dev[1].connect("radius-das", key_mgmt="WPA-EAP",
eap="GPSK", identity="gpsk-cui",
password="abcdefghijklmnop0123456789abcdef",
scan_freq="2412")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
Chargeable_User_Identity="gpsk-chargeable-user-identity",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[1].wait_disconnected(timeout=10)
dev[1].wait_connected(timeout=10, error="Re-connection timed out")
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection")
connect(dev[2], "radius-das")
logger.info("Disconnect-Request with matching User-Name - multiple sessions matching")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_Identifier="nas.example.com",
User_Name="[email protected]",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=508)
logger.info("Disconnect-Request with User-Name matching multiple sessions, Calling-Station-Id only one")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_Identifier="nas.example.com",
Calling_Station_Id=addr,
User_Name="[email protected]",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].wait_disconnected(timeout=10)
dev[0].wait_connected(timeout=10, error="Re-connection timed out")
ev = dev[2].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection")
logger.info("Disconnect-Request with matching Acct-Multi-Session-Id after disassociation")
sta = hapd.get_sta(addr)
multi_sess_id = sta['authMultiSessionId']
dev[0].request("DISCONNECT")
dev[0].wait_disconnected(timeout=10)
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Acct_Multi_Session_Id=multi_sess_id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
dev[0].request("RECONNECT")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
if ev is None:
raise Exception("Timeout on EAP start")
dev[0].wait_connected(timeout=15)
logger.info("Disconnect-Request with matching User-Name after disassociation")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected(timeout=10)
dev[2].request("DISCONNECT")
dev[2].wait_disconnected(timeout=10)
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
User_Name="[email protected]",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
logger.info("Disconnect-Request with matching CUI after disassociation")
dev[1].request("DISCONNECT")
dev[1].wait_disconnected(timeout=10)
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Chargeable_User_Identity="gpsk-chargeable-user-identity",
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
logger.info("Disconnect-Request with matching Calling-Station-Id after disassociation")
dev[0].request("RECONNECT")
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
if ev is None:
raise Exception("Timeout on EAP start")
dev[0].wait_connected(timeout=15)
dev[0].request("DISCONNECT")
dev[0].wait_disconnected(timeout=10)
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Calling_Station_Id=addr,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
logger.info("Disconnect-Request with mismatching Calling-Station-Id after disassociation")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Calling_Station_Id=addr,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=503)
def add_message_auth_req(req):
req.authenticator = req.CreateAuthenticator()
    # Message-Authenticator uses HMAC-MD5 (RFC 2869); digestmod is passed
    # explicitly since modern Python requires it.
    hmac_obj = hmac.new(req.secret, digestmod=hashlib.md5)
hmac_obj.update(struct.pack("B", req.code))
hmac_obj.update(struct.pack("B", req.id))
# request attributes
req.AddAttribute("Message-Authenticator", 16*b"\x00")
attrs = b''
for code, datalst in sorted(req.items()):
for data in datalst:
attrs += req._PktEncodeAttribute(code, data)
# Length
flen = 4 + 16 + len(attrs)
hmac_obj.update(struct.pack(">H", flen))
hmac_obj.update(16*b"\x00") # all zeros Authenticator in calculation
hmac_obj.update(attrs)
    del req[80]  # 80 = Message-Authenticator attribute type
req.AddAttribute("Message-Authenticator", hmac_obj.digest())
def test_radius_das_disconnect_time_window(dev, apdev):
"""RADIUS Dynamic Authorization Extensions - Disconnect - time window"""
try:
import pyrad.client
import pyrad.packet
import pyrad.dictionary
import radius_das
except ImportError:
raise HwsimSkip("No pyrad modules available")
params = hostapd.wpa2_eap_params(ssid="radius-das")
params['radius_das_port'] = "3799"
params['radius_das_client'] = "127.0.0.1 secret"
params['radius_das_require_event_timestamp'] = "1"
params['radius_das_require_message_authenticator'] = "1"
params['radius_das_time_window'] = "10"
params['own_ip_addr'] = "127.0.0.1"
params['nas_identifier'] = "nas.example.com"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-das")
addr = dev[0].own_addr()
sta = hapd.get_sta(addr)
id = sta['dot1xAuthSessionId']
dict = pyrad.dictionary.Dictionary("dictionary.radius")
srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
secret=b"secret", dict=dict)
srv.retries = 1
srv.timeout = 1
logger.info("Disconnect-Request with unsupported attribute")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Calling_Station_Id=addr,
Event_Timestamp=int(time.time()) - 50)
add_message_auth_req(req)
logger.debug(req)
try:
reply = srv.SendPacket(req)
raise Exception("Unexpected response to Disconnect-Request")
except pyrad.client.Timeout:
logger.info("Disconnect-Request with non-matching Event-Timestamp properly ignored")
logger.info("Disconnect-Request with unsupported attribute")
req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
NAS_IP_Address="127.0.0.1",
NAS_Identifier="nas.example.com",
Calling_Station_Id=addr,
Event_Timestamp=int(time.time()))
add_message_auth_req(req)
send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
def test_radius_das_coa(dev, apdev):
"""RADIUS Dynamic Authorization Extensions - CoA"""
try:
import pyrad.client
import pyrad.packet
import pyrad.dictionary
import radius_das
except ImportError:
raise HwsimSkip("No pyrad modules available")
params = hostapd.wpa2_eap_params(ssid="radius-das")
params['radius_das_port'] = "3799"
params['radius_das_client'] = "127.0.0.1 secret"
params['radius_das_require_event_timestamp'] = "1"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-das")
addr = dev[0].p2p_interface_addr()
sta = hapd.get_sta(addr)
id = sta['dot1xAuthSessionId']
dict = pyrad.dictionary.Dictionary("dictionary.radius")
srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
secret=b"secret", dict=dict)
srv.retries = 1
srv.timeout = 1
# hostapd does not currently support CoA-Request, so NAK is expected
logger.info("CoA-Request with matching Acct-Session-Id")
req = radius_das.CoAPacket(dict=dict, secret=b"secret",
Acct_Session_Id=id,
Event_Timestamp=int(time.time()))
send_and_check_reply(srv, req, pyrad.packet.CoANAK, error_cause=405)
def test_radius_ipv6(dev, apdev):
"""RADIUS connection over IPv6"""
params = {}
params['ssid'] = 'as'
params['beacon_int'] = '2000'
params['radius_server_clients'] = 'auth_serv/radius_clients_ipv6.conf'
params['radius_server_ipv6'] = '1'
params['radius_server_auth_port'] = '18129'
params['radius_server_acct_port'] = '18139'
params['eap_server'] = '1'
params['eap_user_file'] = 'auth_serv/eap_user.conf'
params['ca_cert'] = 'auth_serv/ca.pem'
params['server_cert'] = 'auth_serv/server.pem'
params['private_key'] = 'auth_serv/server.key'
hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="radius-ipv6")
params['auth_server_addr'] = "::0"
params['auth_server_port'] = "18129"
params['acct_server_addr'] = "::0"
params['acct_server_port'] = "18139"
params['acct_server_shared_secret'] = "radius"
params['own_ip_addr'] = "::0"
hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-ipv6")
def test_radius_macacl(dev, apdev):
"""RADIUS MAC ACL"""
params = hostapd.radius_params()
params["ssid"] = "radius"
params["macaddr_acl"] = "2"
hostapd.add_ap(apdev[0], params)
dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412")
# Invalid VLAN ID from RADIUS server
dev[2].connect("radius", key_mgmt="NONE", scan_freq="2412")
dev[2].request("REMOVE_NETWORK all")
dev[2].wait_disconnected()
dev[2].connect("radius", key_mgmt="NONE", scan_freq="2412")
def test_radius_macacl_acct(dev, apdev):
"""RADIUS MAC ACL and accounting enabled"""
params = hostapd.radius_params()
params["ssid"] = "radius"
params["macaddr_acl"] = "2"
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
hostapd.add_ap(apdev[0], params)
dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412")
dev[1].connect("radius", key_mgmt="NONE", scan_freq="2412")
dev[1].request("DISCONNECT")
dev[1].wait_disconnected()
dev[1].request("RECONNECT")
def test_radius_macacl_oom(dev, apdev):
"""RADIUS MAC ACL and OOM"""
params = hostapd.radius_params()
params["ssid"] = "radius"
params["macaddr_acl"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 1, "hostapd_allowed_address"):
dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412")
dev[1].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 2, "hostapd_allowed_address"):
dev[1].connect("radius", key_mgmt="NONE", scan_freq="2412")
dev[2].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 2, "=hostapd_allowed_address"):
dev[2].connect("radius", key_mgmt="NONE", scan_freq="2412")
def test_radius_macacl_unreachable(dev, apdev):
"""RADIUS MAC ACL and server unreachable"""
params = hostapd.radius_params()
params['auth_server_port'] = "18139"
params["ssid"] = "radius"
params["macaddr_acl"] = "2"
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=3)
if ev is not None:
raise Exception("Unexpected connection")
logger.info("Fix authentication server port")
hapd.set("auth_server_port", "1812")
hapd.disable()
hapd.enable()
dev[0].wait_connected()
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
def test_radius_failover(dev, apdev):
"""RADIUS Authentication and Accounting server failover"""
subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
as_hapd = hostapd.Hostapd("as")
as_mib_start = as_hapd.get_mib(param="radius_server")
params = hostapd.wpa2_eap_params(ssid="radius-failover")
params["auth_server_addr"] = "192.168.213.17"
params["auth_server_port"] = "1812"
params["auth_server_shared_secret"] = "testing"
params['acct_server_addr'] = "192.168.213.17"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "testing"
params['radius_retry_primary_interval'] = "20"
hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
hapd.set("auth_server_addr", "127.0.0.1")
hapd.set("auth_server_port", "1812")
hapd.set("auth_server_shared_secret", "radius")
hapd.set('acct_server_addr', "127.0.0.1")
hapd.set('acct_server_port', "1813")
hapd.set('acct_server_shared_secret', "radius")
hapd.enable()
ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=30)
if ev is None:
raise Exception("AP startup timed out")
if "AP-ENABLED" not in ev:
raise Exception("AP startup failed")
start = os.times()[4]
try:
subprocess.call(['ip', 'ro', 'replace', 'prohibit', '192.168.213.17'])
dev[0].request("SET EAPOL::authPeriod 5")
connect(dev[0], "radius-failover", wait_connect=False)
dev[0].wait_connected(timeout=20)
finally:
dev[0].request("SET EAPOL::authPeriod 30")
subprocess.call(['ip', 'ro', 'del', '192.168.213.17'])
as_mib_end = as_hapd.get_mib(param="radius_server")
req_s = int(as_mib_start['radiusAccServTotalRequests'])
req_e = int(as_mib_end['radiusAccServTotalRequests'])
if req_e <= req_s:
raise Exception("Unexpected RADIUS server acct MIB value")
end = os.times()[4]
try:
subprocess.call(['ip', 'ro', 'replace', 'prohibit', '192.168.213.17'])
dev[1].request("SET EAPOL::authPeriod 5")
if end - start < 21:
time.sleep(21 - (end - start))
connect(dev[1], "radius-failover", wait_connect=False)
dev[1].wait_connected(timeout=20)
finally:
dev[1].request("SET EAPOL::authPeriod 30")
subprocess.call(['ip', 'ro', 'del', '192.168.213.17'])
def run_pyrad_server(srv, t_events):
srv.RunWithStop(t_events)
def test_radius_protocol(dev, apdev):
"""RADIUS Authentication protocol tests with a fake server"""
try:
import pyrad.server
import pyrad.packet
import pyrad.dictionary
except ImportError:
raise HwsimSkip("No pyrad modules available")
class TestServer(pyrad.server.Server):
def _HandleAuthPacket(self, pkt):
pyrad.server.Server._HandleAuthPacket(self, pkt)
logger.info("Received authentication request")
reply = self.CreateReplyPacket(pkt)
reply.code = pyrad.packet.AccessAccept
if self.t_events['msg_auth'].is_set():
logger.info("Add Message-Authenticator")
if self.t_events['wrong_secret'].is_set():
logger.info("Use incorrect RADIUS shared secret")
pw = b"incorrect"
else:
pw = reply.secret
                hmac_obj = hmac.new(pw, digestmod=hashlib.md5)
hmac_obj.update(struct.pack("B", reply.code))
hmac_obj.update(struct.pack("B", reply.id))
# reply attributes
reply.AddAttribute("Message-Authenticator", 16*b"\x00")
attrs = reply._PktEncodeAttributes()
# Length
flen = 4 + 16 + len(attrs)
hmac_obj.update(struct.pack(">H", flen))
hmac_obj.update(pkt.authenticator)
hmac_obj.update(attrs)
if self.t_events['double_msg_auth'].is_set():
logger.info("Include two Message-Authenticator attributes")
else:
                    del reply[80]  # 80 = Message-Authenticator attribute type
reply.AddAttribute("Message-Authenticator", hmac_obj.digest())
self.SendReplyPacket(pkt.fd, reply)
def RunWithStop(self, t_events):
self._poll = select.poll()
self._fdmap = {}
self._PrepareSockets()
self.t_events = t_events
while not t_events['stop'].is_set():
for (fd, event) in self._poll.poll(1000):
if event == select.POLLIN:
try:
fdo = self._fdmap[fd]
self._ProcessInput(fdo)
except pyrad.server.ServerPacketError as err:
logger.info("pyrad server dropping packet: " + str(err))
except pyrad.packet.PacketError as err:
logger.info("pyrad server received invalid packet: " + str(err))
else:
logger.error("Unexpected event in pyrad server main loop")
srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
authport=18138, acctport=18139)
srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
b"radius",
"localhost")
srv.BindToAddress("")
t_events = {}
t_events['stop'] = threading.Event()
t_events['msg_auth'] = threading.Event()
t_events['wrong_secret'] = threading.Event()
t_events['double_msg_auth'] = threading.Event()
t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
t.start()
try:
params = hostapd.wpa2_eap_params(ssid="radius-test")
params['auth_server_port'] = "18138"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-test", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
if ev is None:
raise Exception("Timeout on EAP start")
time.sleep(1)
dev[0].request("REMOVE_NETWORK all")
time.sleep(0.1)
dev[0].dump_monitor()
t_events['msg_auth'].set()
t_events['wrong_secret'].set()
connect(dev[0], "radius-test", wait_connect=False)
time.sleep(1)
dev[0].request("REMOVE_NETWORK all")
time.sleep(0.1)
dev[0].dump_monitor()
t_events['wrong_secret'].clear()
connect(dev[0], "radius-test", wait_connect=False)
time.sleep(1)
dev[0].request("REMOVE_NETWORK all")
time.sleep(0.1)
dev[0].dump_monitor()
t_events['double_msg_auth'].set()
connect(dev[0], "radius-test", wait_connect=False)
time.sleep(1)
finally:
t_events['stop'].set()
t.join()
def build_tunnel_password(secret, authenticator, psk):
a = b"\xab\xcd"
psk = psk.encode()
padlen = 16 - (1 + len(psk)) % 16
if padlen == 16:
padlen = 0
p = struct.pack('B', len(psk)) + psk + padlen * b'\x00'
cc_all = bytes()
b = hashlib.md5(secret + authenticator + a).digest()
while len(p) > 0:
pp = bytearray(p[0:16])
p = p[16:]
bb = bytearray(b)
cc = bytearray(pp[i] ^ bb[i] for i in range(len(bb)))
cc_all += cc
b = hashlib.md5(secret + cc).digest()
data = b'\x00' + a + bytes(cc_all)
return data
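# An illustrative inverse of build_tunnel_password() (not used by the tests):
# a minimal sketch of RFC 2868 Tunnel-Password decryption, assuming the same
# fixed tag/salt layout produced above. Handy for round-trip checking the
# encoder.
def decrypt_tunnel_password(secret, authenticator, data):
    salt = data[1:3]           # data = Tag (1) + Salt (2) + ciphertext
    c = data[3:]
    b = hashlib.md5(secret + authenticator + salt).digest()
    p = b''
    while len(c) > 0:
        cc = c[0:16]
        c = c[16:]
        p += bytes(cc[i] ^ b[i] for i in range(len(cc)))
        b = hashlib.md5(secret + cc).digest()
    plen = p[0]                # first plaintext octet is the PSK length
    return p[1:1 + plen].decode()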
def start_radius_psk_server(psk, invalid_code=False, acct_interim_interval=0,
session_timeout=0, reject=False):
try:
import pyrad.server
import pyrad.packet
import pyrad.dictionary
except ImportError:
raise HwsimSkip("No pyrad modules available")
class TestServer(pyrad.server.Server):
def _HandleAuthPacket(self, pkt):
pyrad.server.Server._HandleAuthPacket(self, pkt)
logger.info("Received authentication request")
reply = self.CreateReplyPacket(pkt)
reply.code = pyrad.packet.AccessAccept
if self.t_events['invalid_code']:
reply.code = pyrad.packet.AccessRequest
if self.t_events['reject']:
reply.code = pyrad.packet.AccessReject
data = build_tunnel_password(reply.secret, pkt.authenticator,
self.t_events['psk'])
reply.AddAttribute("Tunnel-Password", data)
if self.t_events['acct_interim_interval']:
reply.AddAttribute("Acct-Interim-Interval",
self.t_events['acct_interim_interval'])
if self.t_events['session_timeout']:
reply.AddAttribute("Session-Timeout",
self.t_events['session_timeout'])
self.SendReplyPacket(pkt.fd, reply)
def RunWithStop(self, t_events):
self._poll = select.poll()
self._fdmap = {}
self._PrepareSockets()
self.t_events = t_events
while not t_events['stop'].is_set():
for (fd, event) in self._poll.poll(1000):
if event == select.POLLIN:
try:
fdo = self._fdmap[fd]
self._ProcessInput(fdo)
except pyrad.server.ServerPacketError as err:
logger.info("pyrad server dropping packet: " + str(err))
except pyrad.packet.PacketError as err:
logger.info("pyrad server received invalid packet: " + str(err))
else:
logger.error("Unexpected event in pyrad server main loop")
srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
authport=18138, acctport=18139)
srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
b"radius",
"localhost")
srv.BindToAddress("")
t_events = {}
t_events['stop'] = threading.Event()
t_events['psk'] = psk
t_events['invalid_code'] = invalid_code
t_events['acct_interim_interval'] = acct_interim_interval
t_events['session_timeout'] = session_timeout
t_events['reject'] = reject
t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
t.start()
return t, t_events
def hostapd_radius_psk_test_params():
params = hostapd.radius_params()
params['ssid'] = "test-wpa2-psk"
params["wpa"] = "2"
params["wpa_key_mgmt"] = "WPA-PSK"
params["rsn_pairwise"] = "CCMP"
params['macaddr_acl'] = '2'
params['wpa_psk_radius'] = '2'
params['auth_server_port'] = "18138"
return params
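# In the parameters above, macaddr_acl=2 tells hostapd to consult the RADIUS
# server for every connecting station, and wpa_psk_radius=2 requires the
# station's passphrase/PSK to arrive in the Access-Accept -- which the test
# server above delivers via the Tunnel-Password attribute.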
def test_radius_psk(dev, apdev):
"""WPA2 with PSK from RADIUS"""
t, t_events = start_radius_psk_server("12345678")
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412")
t_events['psk'] = "0123456789abcdef"
dev[1].connect("test-wpa2-psk", psk="0123456789abcdef",
scan_freq="2412")
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_invalid(dev, apdev):
"""WPA2 with invalid PSK from RADIUS"""
t, t_events = start_radius_psk_server("1234567")
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
wait_connect=False)
time.sleep(1)
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_invalid2(dev, apdev):
"""WPA2 with invalid PSK (hexstring) from RADIUS"""
t, t_events = start_radius_psk_server(64*'q')
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
wait_connect=False)
time.sleep(1)
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_hex_psk(dev, apdev):
"""WPA2 with PSK hexstring from RADIUS"""
t, t_events = start_radius_psk_server(64*'2', acct_interim_interval=19,
session_timeout=123)
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", raw_psk=64*'2', scan_freq="2412")
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_unknown_code(dev, apdev):
"""WPA2 with PSK from RADIUS and unknown code"""
t, t_events = start_radius_psk_server(64*'2', invalid_code=True)
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
wait_connect=False)
time.sleep(1)
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_reject(dev, apdev):
"""WPA2 with PSK from RADIUS and reject"""
t, t_events = start_radius_psk_server("12345678", reject=True)
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-AUTH-REJECT"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AUTH-REJECT event")
dev[0].request("DISCONNECT")
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_oom(dev, apdev):
"""WPA2 with PSK from RADIUS and OOM"""
t, t_events = start_radius_psk_server(64*'2')
try:
params = hostapd_radius_psk_test_params()
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 1, "=hostapd_acl_recv_radius"):
dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
wait_connect=False)
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
finally:
t_events['stop'].set()
t.join()
def test_radius_psk_default(dev, apdev):
"""WPA2 with default PSK"""
ssid = "test-wpa2-psk"
params = hostapd.radius_params()
params['ssid'] = ssid
params["wpa"] = "2"
params["wpa_key_mgmt"] = "WPA-PSK"
params["rsn_pairwise"] = "CCMP"
params['macaddr_acl'] = '2'
params['wpa_psk_radius'] = '1'
params['wpa_passphrase'] = 'qwertyuiop'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk="qwertyuiop", scan_freq="2412")
dev[0].dump_monitor()
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].dump_monitor()
hapd.disable()
hapd.set("wpa_psk_radius", "2")
hapd.enable()
dev[0].connect(ssid, psk="qwertyuiop", scan_freq="2412", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-AUTH-REJECT"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AUTH-REJECT event")
dev[0].request("DISCONNECT")
def test_radius_auth_force_client_addr(dev, apdev):
"""RADIUS client address specified"""
params = hostapd.wpa2_eap_params(ssid="radius-auth")
params['radius_client_addr'] = "127.0.0.1"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-auth")
@remote_compatible
def test_radius_auth_force_invalid_client_addr(dev, apdev):
"""RADIUS client address specified and invalid address"""
params = hostapd.wpa2_eap_params(ssid="radius-auth")
#params['radius_client_addr'] = "10.11.12.14"
params['radius_client_addr'] = "1::2"
hapd = hostapd.add_ap(apdev[0], params)
connect(dev[0], "radius-auth", wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
if ev is None:
raise Exception("Timeout on EAP start")
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected connection")
def add_message_auth(req):
req.authenticator = req.CreateAuthenticator()
    hmac_obj = hmac.new(req.secret, digestmod=hashlib.md5)
hmac_obj.update(struct.pack("B", req.code))
hmac_obj.update(struct.pack("B", req.id))
# request attributes
req.AddAttribute("Message-Authenticator", 16*b"\x00")
attrs = req._PktEncodeAttributes()
# Length
flen = 4 + 16 + len(attrs)
hmac_obj.update(struct.pack(">H", flen))
hmac_obj.update(req.authenticator)
hmac_obj.update(attrs)
    del req[80]  # 80 = Message-Authenticator attribute type
req.AddAttribute("Message-Authenticator", hmac_obj.digest())
def test_radius_server_failures(dev, apdev):
"""RADIUS server failure cases"""
try:
import pyrad.client
import pyrad.packet
import pyrad.dictionary
except ImportError:
raise HwsimSkip("No pyrad modules available")
dict = pyrad.dictionary.Dictionary("dictionary.radius")
client = pyrad.client.Client(server="127.0.0.1", authport=1812,
secret=b"radius", dict=dict)
client.retries = 1
client.timeout = 1
# unexpected State
req = client.CreateAuthPacket(code=pyrad.packet.AccessRequest,
User_Name="foo")
req['State'] = b'foo-state'
add_message_auth(req)
reply = client.SendPacket(req)
if reply.code != pyrad.packet.AccessReject:
raise Exception("Unexpected RADIUS response code " + str(reply.code))
# no EAP-Message
req = client.CreateAuthPacket(code=pyrad.packet.AccessRequest,
User_Name="foo")
add_message_auth(req)
try:
reply = client.SendPacket(req)
raise Exception("Unexpected response")
except pyrad.client.Timeout:
pass
def test_ap_vlan_wpa2_psk_radius_required(dev, apdev):
"""AP VLAN with WPA2-PSK and RADIUS attributes required"""
try:
import pyrad.server
import pyrad.packet
import pyrad.dictionary
except ImportError:
raise HwsimSkip("No pyrad modules available")
class TestServer(pyrad.server.Server):
def _HandleAuthPacket(self, pkt):
pyrad.server.Server._HandleAuthPacket(self, pkt)
logger.info("Received authentication request")
reply = self.CreateReplyPacket(pkt)
reply.code = pyrad.packet.AccessAccept
secret = reply.secret
if self.t_events['extra'].is_set():
reply.AddAttribute("Chargeable-User-Identity", "test-cui")
reply.AddAttribute("User-Name", "test-user")
if self.t_events['long'].is_set():
reply.AddAttribute("Tunnel-Type", 13)
reply.AddAttribute("Tunnel-Medium-Type", 6)
reply.AddAttribute("Tunnel-Private-Group-ID", "1")
self.SendReplyPacket(pkt.fd, reply)
def RunWithStop(self, t_events):
self._poll = select.poll()
self._fdmap = {}
self._PrepareSockets()
self.t_events = t_events
while not t_events['stop'].is_set():
for (fd, event) in self._poll.poll(1000):
if event == select.POLLIN:
try:
fdo = self._fdmap[fd]
self._ProcessInput(fdo)
except pyrad.server.ServerPacketError as err:
logger.info("pyrad server dropping packet: " + str(err))
except pyrad.packet.PacketError as err:
logger.info("pyrad server received invalid packet: " + str(err))
else:
logger.error("Unexpected event in pyrad server main loop")
srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
authport=18138, acctport=18139)
srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
b"radius",
"localhost")
srv.BindToAddress("")
t_events = {}
t_events['stop'] = threading.Event()
t_events['long'] = threading.Event()
t_events['extra'] = threading.Event()
t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
t.start()
try:
ssid = "test-wpa2-psk"
params = hostapd.radius_params()
params['ssid'] = ssid
params["wpa"] = "2"
params["wpa_key_mgmt"] = "WPA-PSK"
params["rsn_pairwise"] = "CCMP"
params['macaddr_acl'] = '2'
params['dynamic_vlan'] = "2"
params['wpa_passphrase'] = '0123456789abcdefghi'
params['auth_server_port'] = "18138"
hapd = hostapd.add_ap(apdev[0], params)
logger.info("connecting without VLAN")
dev[0].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
wait_connect=False)
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
if ev is None:
raise Exception("Timeout on connection attempt")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected success without vlan parameters")
logger.info("connecting without VLAN failed as expected")
logger.info("connecting without VLAN (CUI/User-Name)")
t_events['extra'].set()
dev[1].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
wait_connect=False)
ev = dev[1].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
if ev is None:
raise Exception("Timeout on connection attempt")
if "CTRL-EVENT-CONNECTED" in ev:
raise Exception("Unexpected success without vlan parameters(2)")
logger.info("connecting without VLAN failed as expected(2)")
t_events['extra'].clear()
t_events['long'].set()
logger.info("connecting with VLAN")
dev[2].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
wait_connect=False)
ev = dev[2].wait_event(["CTRL-EVENT-CONNECTED",
"CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
if ev is None:
raise Exception("Timeout on connection attempt")
if "CTRL-EVENT-SSID-TEMP-DISABLED" in ev:
raise Exception("Unexpected failure with vlan parameters")
logger.info("connecting with VLAN succeeded as expected")
finally:
t_events['stop'].set()
t.join()
def test_radius_mppe_failure(dev, apdev):
"""RADIUS failure when adding MPPE keys"""
params = {"ssid": "as", "beacon_int": "2000",
"radius_server_clients": "auth_serv/radius_clients.conf",
"radius_server_auth_port": '18127',
"eap_server": "1",
"eap_user_file": "auth_serv/eap_user.conf",
"ca_cert": "auth_serv/ca.pem",
"server_cert": "auth_serv/server.pem",
"private_key": "auth_serv/server.key"}
authsrv = hostapd.add_ap(apdev[1], params)
params = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
params['auth_server_port'] = "18127"
hapd = hostapd.add_ap(apdev[0], params)
with fail_test(authsrv, 1, "os_get_random;radius_msg_add_mppe_keys"):
dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", eap="TTLS",
identity="user", anonymous_identity="ttls",
password="password",
ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
wait_connect=False, scan_freq="2412")
dev[0].wait_disconnected()
dev[0].request("REMOVE_NETWORK all")
def test_radius_acct_failure(dev, apdev):
"""RADIUS Accounting and failure to add attributes"""
# Connection goes through, but Accounting-Request cannot be sent out due to
# NAS-Identifier being too long to fit into a RADIUS attribute.
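    # A RADIUS attribute's one-octet Length field includes the two-octet
    # Type/Length header, so a value can be at most 255 - 2 = 253 octets;
    # a 255-character NAS-Identifier therefore cannot be encoded.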
params = {"ssid": "radius-acct-open",
'acct_server_addr': "127.0.0.1",
'acct_server_port': "1813",
'acct_server_shared_secret': "radius",
'nas_identifier': 255*'A'}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
def test_radius_acct_failure_oom(dev, apdev):
"""RADIUS Accounting and failure to add attributes due to OOM"""
params = {"ssid": "radius-acct-open",
'acct_server_addr': "127.0.0.1",
'acct_server_port': "1813",
'acct_server_shared_secret': "radius",
'radius_acct_interim_interval': "1",
'nas_identifier': 250*'A',
'radius_acct_req_attr': ["126:s:" + 250*'B',
"77:s:" + 250*'C',
"127:s:" + 250*'D',
"181:s:" + 250*'E']}
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
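    # The alloc_fail/fail_test patterns below are matched against the call
    # backtrace (hwsim convention, as understood here): frames are separated
    # by ';', a '?' prefix marks an optional frame, and '=' names the function
    # in which the failure must trigger.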
with alloc_fail(hapd, 1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[1].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 1, "accounting_sta_report"):
dev[1].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
dev[1].request("REMOVE_NETWORK all")
dev[1].wait_disconnected()
tests = [(1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"),
(2, "radius_msg_add_attr;accounting_msg"),
(3, "radius_msg_add_attr;accounting_msg")]
for count, func in tests:
with fail_test(hapd, count, func):
dev[0].connect("radius-acct-open", key_mgmt="NONE",
scan_freq="2412")
wait_fail_trigger(hapd, "GET_FAIL")
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
with fail_test(hapd, 8,
"radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_sta_report"):
dev[0].request("REMOVE_NETWORK all")
dev[0].wait_disconnected()
wait_fail_trigger(hapd, "GET_FAIL")
with fail_test(hapd, 1, "radius_msg_add_attr;=accounting_report_state"):
hapd.disable()
def test_radius_acct_failure_oom_rsn(dev, apdev):
"""RADIUS Accounting in RSN and failure to add attributes due to OOM"""
params = hostapd.wpa2_eap_params(ssid="radius-acct")
params['acct_server_addr'] = "127.0.0.1"
params['acct_server_port'] = "1813"
params['acct_server_shared_secret'] = "radius"
params['radius_acct_interim_interval'] = "1"
params['nas_identifier'] = 250*'A'
params['radius_acct_req_attr'] = ["126:s:" + 250*'B',
"77:s:" + 250*'C',
"127:s:" + 250*'D',
"181:s:" + 250*'E']
hapd = hostapd.add_ap(apdev[0], params)
bssid = hapd.own_addr()
dev[0].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
connect(dev[0], "radius-acct")
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
dev[1].scan_for_bss(bssid, freq="2412")
with alloc_fail(hapd, 1, "accounting_sta_report"):
connect(dev[1], "radius-acct")
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
dev[2].scan_for_bss(bssid, freq="2412")
connect(dev[2], "radius-acct")
for i in range(1, 8):
with alloc_fail(hapd, i, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
for i in range(1, 15):
with alloc_fail(hapd, i, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_sta_report"):
wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
def test_radius_acct_failure_sta_data(dev, apdev):
"""RADIUS Accounting and failure to get STA data"""
params = {"ssid": "radius-acct-open",
'acct_server_addr': "127.0.0.1",
'acct_server_port': "1813",
'acct_server_shared_secret': "radius"}
hapd = hostapd.add_ap(apdev[0], params)
with fail_test(hapd, 1, "accounting_sta_update_stats"):
dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
dev[0].request("DISCONNECT")
dev[0].wait_disconnected()
hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=1)
| gpl-3.0 | -3,032,000,768,132,133,000 | 41.920925 | 108 | 0.593733 | false |
OpenEntityMap/oem-client-anidb | examples/anidb_example.py | 1 | 2245 | from __future__ import print_function
import logging
logging.basicConfig(level=logging.DEBUG)
from oem import OemClient
from oem.media.show.identifier import EpisodeIdentifier
log = logging.getLogger(__name__)
def run():
# Initialize client
client = OemClient(['anidb'], 'package')
#
# Basic
#
log.debug("\n%s\nBasic\n%s", '=' * 60, '=' * 60)
log.debug(client['anidb'].to('tvdb').map('3', EpisodeIdentifier(1, 2)))
log.debug(client['anidb'].to('tvdb').map('38', EpisodeIdentifier(1, 2)))
log.debug(client['anidb'].to('tvdb').map('818', EpisodeIdentifier(0, 1)))
log.debug(client['anidb'].to('tvdb').map('1041', EpisodeIdentifier(1, 45)))
#
# Timeline
#
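    # Note: progress appears to be the percentage of the episode watched;
    # timeline-mapped titles can resolve to different target episodes
    # depending on it (inferred from the varying results below).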
log.debug("\n%s\nTimeline\n%s", '=' * 60, '=' * 60)
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=34)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=49)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=50)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=51)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=64)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=99)))
log.debug(client['anidb'].to('tvdb').map('10648', EpisodeIdentifier(1, 1, progress=100)))
# Movies
log.debug("\n%s\nMovies\n%s", '=' * 60, '=' * 60)
log.debug(client['anidb'].to('imdb').get(7103))
log.debug(client['imdb'].to('anidb').get("tt1663145"))
# Shows
log.debug("\n%s\nShows\n%s", '=' * 60, '=' * 60)
log.debug(client['anidb'].to('tvdb').get(3))
log.debug(client['tvdb'].to('anidb').get( 70973))
log.debug(client['tvdb'].to('anidb').get( 71551))
log.debug(client['tvdb'].to('anidb').get(103691))
log.debug(client['tvdb'].to('anidb').get(136251))
log.debug(client['tvdb'].to('anidb').get(137151))
log.debug(client['tvdb'].to('anidb').get(138691))
if __name__ == '__main__':
# Run example
run()
# Display call statistics
from oem_framework.core.elapsed import Elapsed
for line in Elapsed.format_statistics():
print(line)
| bsd-3-clause | 7,731,758,222,099,678,000 | 34.634921 | 93 | 0.621381 | false |
mdmintz/SeleniumBase | examples/raw_parameter_script.py | 1 | 3034 | """ The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
    with Python or from a Python interactive interpreter. Based on
    whether relative imports work, the script autodetects
    how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
# Running with Pytest / (Finds test methods to run using autodiscovery)
# Example run command: "pytest raw_parameter_script.py"
from .my_first_test import MyTestClass # (relative imports work: ".~")
except (ImportError, ValueError):
# Running with pure Python OR from a Python interactive interpreter
# Example run command: "python raw_parameter_script.py"
from my_first_test import MyTestClass # (relative imports DON'T work)
sb = MyTestClass("test_basic")
sb.browser = "chrome"
sb.headless = False
sb.headed = False
sb.start_page = None
sb.locale_code = None
sb.servername = "localhost"
sb.port = 4444
sb.data = None
sb.environment = "test"
sb.user_agent = None
sb.incognito = False
sb.guest_mode = False
sb.devtools = False
sb.mobile_emulator = False
sb.device_metrics = None
sb.extension_zip = None
sb.extension_dir = None
sb.database_env = "test"
sb.log_path = "latest_logs/"
sb.archive_logs = False
sb.disable_csp = False
sb.enable_ws = False
sb.enable_sync = False
sb.use_auto_ext = False
sb.no_sandbox = False
sb.disable_gpu = False
sb._reuse_session = False
sb._crumbs = False
sb.visual_baseline = False
sb.maximize_option = False
sb.save_screenshot_after_test = False
sb.timeout_multiplier = None
sb.pytest_html_report = None
sb.with_db_reporting = False
sb.with_s3_logging = False
sb.js_checking_on = False
sb.report_on = False
sb.is_pytest = False
sb.slow_mode = False
sb.demo_mode = False
sb.time_limit = None
sb.demo_sleep = 1
sb.message_duration = 2
sb.block_images = False
sb.settings_file = None
sb.user_data_dir = None
sb.proxy_string = None
sb.swiftshader = False
sb.ad_block_on = False
sb.highlights = None
sb.check_js = False
sb.cap_file = None
sb.cap_string = None
sb.setUp()
try:
sb.test_basic()
finally:
sb.tearDown()
del sb
| mit | 8,546,046,157,528,285,000 | 33.477273 | 75 | 0.678972 | false |
fedora-infra/fedocal | fedocal/fedocallib/exceptions.py | 1 | 1076 | # -*- coding: utf-8 -*-
"""
exceptions - Different Exceptions classes used in the project.
Copyright (C) 2012 Pierre-Yves Chibon
Author: Pierre-Yves Chibon <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or (at
your option) any later version.
See http://www.gnu.org/copyleft/gpl.html for the full text of the
license.
"""
from __future__ import unicode_literals, absolute_import, print_function
# pylint: disable=R0903
class FedocalException(Exception):
""" Exception thrown when a user is not allowed to perform a specific
action.
"""
pass
# pylint: disable=R0903
class UserNotAllowed(FedocalException):
""" Exception thrown when a user is not allowed to perform a specific
action.
"""
pass
# pylint: disable=R0903
class InvalidMeeting(FedocalException):
""" Exception thrown when a user is not allowed to perform a specific
action.
"""
pass
| gpl-3.0 | 8,661,605,388,957,415,000 | 25.9 | 73 | 0.728625 | false |
RocketScienceAbteilung/git-grid | gitgrid/utils/names.py | 1 | 14825 | # -*- coding: utf-8 -*-
import random
adjectives = [
"admiring",
"adoring",
"agitated",
"angry",
"backstabbing",
"berserk",
"boring",
"clever",
"cocky",
"compassionate",
"condescending",
"cranky",
"desperate",
"determined",
"distracted",
"dreamy",
"drunk",
"ecstatic",
"elated",
"elegant",
"evil",
"fervent",
"focused",
"furious",
"gloomy",
"goofy",
"grave",
"happy",
"high",
"hopeful",
"hungry",
"insane",
"jolly",
"jovial",
"kickass",
"lonely",
"loving",
"mad",
"modest",
"naughty",
"nostalgic",
"pensive",
"prickly",
"reverent",
"romantic",
"sad",
"serene",
"sharp",
"sick",
"silly",
"sleepy",
"stoic",
"stupefied",
"suspicious",
"tender",
"thirsty",
"trusting",
]
names = [
# Muhammad ibn Jābir al-Ḥarrānī al-Battānī was a founding father of astronomy. https://en.wikipedia.org/wiki/Mu%E1%B8%A5ammad_ibn_J%C4%81bir_al-%E1%B8%A4arr%C4%81n%C4%AB_al-Batt%C4%81n%C4%AB
"albattani",
# June Almeida - Scottish virologist who took the first pictures of the rubella virus - https://en.wikipedia.org/wiki/June_Almeida
"almeida",
# Archimedes was a physicist, engineer and mathematician who invented too many things to list them here. https://en.wikipedia.org/wiki/Archimedes
"archimedes",
# Maria Ardinghelli - Italian translator, mathematician and physicist - https://en.wikipedia.org/wiki/Maria_Ardinghelli
"ardinghelli",
# Charles Babbage invented the concept of a programmable computer. https://en.wikipedia.org/wiki/Charles_Babbage.
"babbage",
# Stefan Banach - Polish mathematician, was one of the founders of modern functional analysis. https://en.wikipedia.org/wiki/Stefan_Banach
"banach",
# William Shockley, Walter Houser Brattain and John Bardeen co-invented the transistor (thanks Brian Goff).
# - https://en.wikipedia.org/wiki/John_Bardeen
# - https://en.wikipedia.org/wiki/Walter_Houser_Brattain
# - https://en.wikipedia.org/wiki/William_Shockley
"bardeen",
"brattain",
"shockley",
# Jean Bartik, born Betty Jean Jennings, was one of the original programmers for the ENIAC computer. https://en.wikipedia.org/wiki/Jean_Bartik
"bartik",
# Alexander Graham Bell - an eminent Scottish-born scientist, inventor, engineer and innovator who is credited with inventing the first practical telephone - https://en.wikipedia.org/wiki/Alexander_Graham_Bell
"bell",
# Elizabeth Blackwell - American doctor and first American woman to receive a medical degree - https://en.wikipedia.org/wiki/Elizabeth_Blackwell
"blackwell",
# Niels Bohr is the father of quantum theory. https://en.wikipedia.org/wiki/Niels_Bohr.
"bohr",
# Emmett Brown invented time travel. https://en.wikipedia.org/wiki/Emmett_Brown (thanks Brian Goff)
"brown",
# Rachel Carson - American marine biologist and conservationist, her book Silent Spring and other writings are credited with advancing the global environmental movement. https://en.wikipedia.org/wiki/Rachel_Carson
"carson",
# Jane Colden - American botanist widely considered the first female American botanist - https://en.wikipedia.org/wiki/Jane_Colden
"colden",
# Gerty Theresa Cori - American biochemist who became the third woman—and first American woman—to win a Nobel Prize in science, and the first woman to be awarded the Nobel Prize in Physiology or Medicine. Cori was born in Prague. https://en.wikipedia.org/wiki/Gerty_Cori
"cori",
# Seymour Roger Cray was an American electrical engineer and supercomputer architect who designed a series of computers that were the fastest in the world for decades. https://en.wikipedia.org/wiki/Seymour_Cray
"cray",
# Marie Curie discovered radioactivity. https://en.wikipedia.org/wiki/Marie_Curie.
"curie",
# Charles Darwin established the principles of natural evolution. https://en.wikipedia.org/wiki/Charles_Darwin.
"darwin",
# Leonardo Da Vinci invented too many things to list here. https://en.wikipedia.org/wiki/Leonardo_da_Vinci.
"davinci",
# Albert Einstein invented the general theory of relativity. https://en.wikipedia.org/wiki/Albert_Einstein
"einstein",
# Gertrude Elion - American biochemist, pharmacologist and the 1988 recipient of the Nobel Prize in Medicine - https://en.wikipedia.org/wiki/Gertrude_Elion
"elion",
# Douglas Engelbart gave the mother of all demos: https://en.wikipedia.org/wiki/Douglas_Engelbart
"engelbart",
# Euclid invented geometry. https://en.wikipedia.org/wiki/Euclid
"euclid",
# Pierre de Fermat pioneered several aspects of modern mathematics. https://en.wikipedia.org/wiki/Pierre_de_Fermat
"fermat",
# Enrico Fermi invented the first nuclear reactor. https://en.wikipedia.org/wiki/Enrico_Fermi.
"fermi",
# Richard Feynman was a key contributor to quantum mechanics and particle physics. https://en.wikipedia.org/wiki/Richard_Feynman
"feynman",
# Benjamin Franklin is famous for his experiments in electricity and the invention of the lightning rod.
"franklin",
# Galileo was a founding father of modern astronomy, and faced politics and obscurantism to establish scientific truth. https://en.wikipedia.org/wiki/Galileo_Galilei
"galileo",
# Adele Goldstine, born Adele Katz, wrote the complete technical description for the first electronic digital computer, ENIAC. https://en.wikipedia.org/wiki/Adele_Goldstine
"goldstine",
# Jane Goodall - British primatologist, ethologist, and anthropologist who is considered to be the world's foremost expert on chimpanzees - https://en.wikipedia.org/wiki/Jane_Goodall
"goodall",
# Stephen Hawking pioneered the field of cosmology by combining general relativity and quantum mechanics. https://en.wikipedia.org/wiki/Stephen_Hawking
"hawking",
# Werner Heisenberg was a founding father of quantum mechanics. https://en.wikipedia.org/wiki/Werner_Heisenberg
"heisenberg",
# Dorothy Hodgkin was a British biochemist, credited with the development of protein crystallography. She was awarded the Nobel Prize in Chemistry in 1964. https://en.wikipedia.org/wiki/Dorothy_Hodgkin
"hodgkin",
    # Erna Schneider Hoover revolutionized modern communication by inventing a computerized telephone switching method. https://en.wikipedia.org/wiki/Erna_Schneider_Hoover
"hoover",
# Grace Hopper developed the first compiler for a computer programming language and is credited with popularizing the term "debugging" for fixing computer glitches. https://en.wikipedia.org/wiki/Grace_Hopper
"hopper",
# Hypatia - Greek Alexandrine Neoplatonist philosopher in Egypt who was one of the earliest mothers of mathematics - https://en.wikipedia.org/wiki/Hypatia
"hypatia",
# Yeong-Sil Jang was a Korean scientist and astronomer during the Joseon Dynasty; he invented the first metal printing press and water gauge. https://en.wikipedia.org/wiki/Jang_Yeong-sil
"jang",
# Karen Spärck Jones came up with the concept of inverse document frequency, which is used in most search engines today. https://en.wikipedia.org/wiki/Karen_Sp%C3%A4rck_Jones
"jones",
    # Jack Kilby and Robert Noyce invented silicon integrated circuits and gave Silicon Valley its name.
# - https://en.wikipedia.org/wiki/Jack_Kilby
# - https://en.wikipedia.org/wiki/Robert_Noyce
"kilby",
"noyce",
# Maria Kirch - German astronomer and first woman to discover a comet - https://en.wikipedia.org/wiki/Maria_Margarethe_Kirch
"kirch",
# Sophie Kowalevski - Russian mathematician responsible for important original contributions to analysis, differential equations and mechanics - https://en.wikipedia.org/wiki/Sofia_Kovalevskaya
"kowalevski",
# Marie-Jeanne de Lalande - French astronomer, mathematician and cataloguer of stars - https://en.wikipedia.org/wiki/Marie-Jeanne_de_Lalande
"lalande",
# Mary Leakey - British paleoanthropologist who discovered the first fossilized Proconsul skull - https://en.wikipedia.org/wiki/Mary_Leakey
"leakey",
# Ada Lovelace invented the first algorithm. https://en.wikipedia.org/wiki/Ada_Lovelace (thanks James Turnbull)
"lovelace",
# Auguste and Louis Lumière - the first filmmakers in history - https://en.wikipedia.org/wiki/Auguste_and_Louis_Lumi%C3%A8re
"lumiere",
# Maria Mayer - American theoretical physicist and Nobel laureate in Physics for proposing the nuclear shell model of the atomic nucleus - https://en.wikipedia.org/wiki/Maria_Mayer
"mayer",
# John McCarthy invented LISP: https://en.wikipedia.org/wiki/John_McCarthy_(computer_scientist)
"mccarthy",
# Barbara McClintock - a distinguished American cytogeneticist, 1983 Nobel Laureate in Physiology or Medicine for discovering transposons. https://en.wikipedia.org/wiki/Barbara_McClintock
"mcclintock",
# Malcolm McLean invented the modern shipping container: https://en.wikipedia.org/wiki/Malcom_McLean
"mclean",
# Lise Meitner - Austrian/Swedish physicist who was involved in the discovery of nuclear fission. The element meitnerium is named after her - https://en.wikipedia.org/wiki/Lise_Meitner
"meitner",
# Johanna Mestorf - German prehistoric archaeologist and first female museum director in Germany - https://en.wikipedia.org/wiki/Johanna_Mestorf
"mestorf",
# Samuel Morse - contributed to the invention of a single-wire telegraph system based on European telegraphs and was a co-developer of the Morse code - https://en.wikipedia.org/wiki/Samuel_Morse
"morse",
# Isaac Newton invented classic mechanics and modern optics. https://en.wikipedia.org/wiki/Isaac_Newton
"newton",
# Alfred Nobel - a Swedish chemist, engineer, innovator, and armaments manufacturer (inventor of dynamite) - https://en.wikipedia.org/wiki/Alfred_Nobel
"nobel",
# Cecilia Payne-Gaposchkin was an astronomer and astrophysicist who, in 1925, proposed in her Ph.D. thesis an explanation for the composition of stars in terms of the relative abundances of hydrogen and helium. https://en.wikipedia.org/wiki/Cecilia_Payne-Gaposchkin
"payne",
# Ambroise Pare invented modern surgery. https://en.wikipedia.org/wiki/Ambroise_Par%C3%A9
"pare",
# Louis Pasteur discovered vaccination, fermentation and pasteurization. https://en.wikipedia.org/wiki/Louis_Pasteur.
"pasteur",
# Radia Perlman is a software designer and network engineer and most famous for her invention of the spanning-tree protocol (STP). https://en.wikipedia.org/wiki/Radia_Perlman
"perlman",
# Rob Pike was a key contributor to Unix, Plan 9, the X graphic system, utf-8, and the Go programming language. https://en.wikipedia.org/wiki/Rob_Pike
"pike",
# Henri Poincaré made fundamental contributions in several fields of mathematics. https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9
"poincare",
# Laura Poitras is a director and producer whose work, made possible by open source crypto tools, advances the causes of truth and freedom of information by reporting disclosures by whistleblowers such as Edward Snowden. https://en.wikipedia.org/wiki/Laura_Poitras
"poitras",
# Claudius Ptolemy - a Greco-Egyptian writer of Alexandria, known as a mathematician, astronomer, geographer, astrologer, and poet of a single epigram in the Greek Anthology - https://en.wikipedia.org/wiki/Ptolemy
"ptolemy",
# Dennis Ritchie and Ken Thompson created UNIX and the C programming language.
# - https://en.wikipedia.org/wiki/Dennis_Ritchie
# - https://en.wikipedia.org/wiki/Ken_Thompson
"ritchie",
"thompson",
# Rosalind Franklin - British biophysicist and X-ray crystallographer whose research was critical to the understanding of DNA - https://en.wikipedia.org/wiki/Rosalind_Franklin
"rosalind",
# Jean E. Sammet developed FORMAC, the first widely used computer language for symbolic manipulation of mathematical formulas. https://en.wikipedia.org/wiki/Jean_E._Sammet
"sammet",
# Françoise Barré-Sinoussi - French virologist and Nobel Prize Laureate in Physiology or Medicine; her work was fundamental in identifying HIV as the cause of AIDS. https://en.wikipedia.org/wiki/Fran%C3%A7oise_Barr%C3%A9-Sinoussi
"sinoussi",
# Richard Matthew Stallman - the founder of the Free Software movement, the GNU project, the Free Software Foundation, and the League for Programming Freedom. He also invented the concept of copyleft to protect the ideals of this movement, and enshrined this concept in the widely-used GPL (General Public License) for software. https://en.wikiquote.org/wiki/Richard_Stallman
"stallman",
# Aaron Swartz was influential in creating RSS, Markdown, Creative Commons, Reddit, and much of the internet as we know it today. He was devoted to freedom of information on the web. https://en.wikiquote.org/wiki/Aaron_Swartz
"swartz",
# Nikola Tesla invented the AC electric system and every gadget ever used by a James Bond villain. https://en.wikipedia.org/wiki/Nikola_Tesla
"tesla",
# Linus Torvalds invented Linux and Git. https://en.wikipedia.org/wiki/Linus_Torvalds
"torvalds",
# Alan Turing was a founding father of computer science. https://en.wikipedia.org/wiki/Alan_Turing.
"turing",
# Sophie Wilson designed the first Acorn Micro-Computer and the instruction set for ARM processors. https://en.wikipedia.org/wiki/Sophie_Wilson
"wilson",
# Steve Wozniak invented the Apple I and Apple II. https://en.wikipedia.org/wiki/Steve_Wozniak
"wozniak",
# The Wright brothers, Orville and Wilbur - credited with inventing and building the world's first successful airplane and making the first controlled, powered and sustained heavier-than-air human flight - https://en.wikipedia.org/wiki/Wright_brothers
"wright",
# Rosalyn Sussman Yalow - Rosalyn Sussman Yalow was an American medical physicist, and a co-winner of the 1977 Nobel Prize in Physiology or Medicine for development of the radioimmunoassay technique. https://en.wikipedia.org/wiki/Rosalyn_Sussman_Yalow
"yalow",
# Ada Yonath - an Israeli crystallographer, the first woman from the Middle East to win a Nobel prize in the sciences. https://en.wikipedia.org/wiki/Ada_Yonath
"yonath",
]
def name():
return random.choice(adjectives).capitalize() + " " + random.choice(names).capitalize()
def handle():
return random.choice(adjectives) + "_" + random.choice(names)
| mit | 8,545,689,404,790,956,000 | 46.617363 | 380 | 0.725842 | false |
jfecroft/DOS | data/alkalis/jfec_k2/rovib.py | 1 | 1600 | import subprocess
import numpy as np
import os
import numpy.ma as ma
import re
from tempfile import mkstemp, mkdtemp
import shutil
import scipy.constants
#########################################
#replace will search through a file for a specific word and then replace that line
def replace(file, pattern, subst):
p = re.compile(pattern)
#Create temp file
fh, abs_path = mkstemp()
new_file = open(abs_path,'w')
old_file = open(file)
for line in old_file:
        if p.match(line): #use match so that only the line for the currently used variable is replaced
line = pattern + ' = ' + str(subst) + ', \n'
new_file.write(line)
#close temp file
new_file.close()
os.close(fh)
old_file.close()
os.remove(file)
shutil.move(abs_path, file)
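#example (illustrative): set the " L = 3," line in the namelist input file:
# replace('input_K2.txt', ' L', 3)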
#routine which calls the 1d_schrodinger eqn solver; the solver writes
#all the levels below zero to its output file
def run_1d_schrodinger(inputfile_name,outputfile_name,L):
home = os.getcwd()
    replace(inputfile_name, ' L', L) #edit the input file so that this value of L is used
subprocess.call(home+"/1d_schrodinger.x < " + inputfile_name, stdout=open(os.devnull, 'w'), shell=True)
return()
############################################
lmax = 100
inputfile = 'input_K2.txt'
outputfile = 'fort.10'
sys = 'kk'
#generate the states of the dimer for each L up to lmax
for i in range(0,lmax+1):
run_1d_schrodinger(inputfile,outputfile,i)
# shutil.copyfile(outputfile,sys+'_results_j'+str(i)+'.dat')
try:
shutil.move(outputfile,sys+'_results_j'+str(i)+'.dat')
except IOError:
pass
| mit | 8,721,651,068,161,395,000 | 30.372549 | 111 | 0.646875 | false |
gkc1000/pyscf | pyscf/pbc/df/test/test_mdf_ao2mo.py | 1 | 4940 | # Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from pyscf.pbc.df import mdf
import pyscf.pbc.gto as pgto
from pyscf.pbc.lib import kpts_helper
from pyscf import ao2mo
L = 5.
n = 3
cell = pgto.Cell()
cell.a = numpy.diag([L,L,L])
cell.mesh = numpy.array([n,n,n])
cell.atom = '''He 3. 2. 3.
He 1. 1. 1.'''
cell.basis = 'ccpvdz'
cell.verbose = 0
cell.rcut = 17
cell.build(0,0)
nao = cell.nao_nr()
def finger(a):
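    # Deterministic scalar fingerprint of an array, used to compare results.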
    w = numpy.cos(numpy.arange(a.size))
    return numpy.dot(w, a.ravel())
class KnowValues(unittest.TestCase):
def test_eri1111(self):
kpts = numpy.random.random((4,3)) * .25
kpts[3] = -numpy.einsum('ij->j', kpts[:3])
with_df = mdf.MDF(cell).set(auxbasis='weigend')
with_df.linear_dep_threshold = 1e-7
with_df.kpts = kpts
mo =(numpy.random.random((nao,nao)) +
numpy.random.random((nao,nao))*1j)
eri = with_df.get_eri(kpts).reshape((nao,)*4)
eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo )
eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo )
eri1 = with_df.ao2mo(mo, kpts)
self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)
def test_eri0110(self):
kpts = numpy.random.random((4,3)) * .25
kpts[3] = kpts[0]
kpts[2] = kpts[1]
with_df = mdf.MDF(cell).set(auxbasis='weigend')
with_df.linear_dep_threshold = 1e-7
with_df.kpts = kpts
mo =(numpy.random.random((nao,nao)) +
numpy.random.random((nao,nao))*1j)
eri = with_df.get_eri(kpts).reshape((nao,)*4)
eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo )
eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo )
eri1 = with_df.ao2mo(mo, kpts)
self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 8)
def test_eri0000(self):
with_df = mdf.MDF(cell).set(auxbasis='weigend')
with_df.linear_dep_threshold = 1e-7
with_df.kpts = numpy.zeros((4,3))
mo =(numpy.random.random((nao,nao)) +
numpy.random.random((nao,nao))*1j)
eri = ao2mo.restore(1, with_df.get_eri(with_df.kpts), nao)
eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo )
eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo )
eri1 = with_df.ao2mo(mo, with_df.kpts)
self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)
mo = mo.real
eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo )
eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo )
eri1 = with_df.ao2mo(mo, with_df.kpts, compact=False)
self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)
def test_ao2mo_7d(self):
L = 3.
n = 6
cell = pgto.Cell()
cell.a = numpy.diag([L,L,L])
cell.mesh = [n,n,n]
cell.atom = '''He 2. 2.2 2.
He 1.2 1. 1.'''
cell.basis = {'He': [[0, (1.2, 1)], [1, (0.6, 1)]]}
cell.verbose = 0
cell.build(0,0)
kpts = cell.make_kpts([1,3,1])
nkpts = len(kpts)
nao = cell.nao_nr()
numpy.random.seed(1)
mo =(numpy.random.random((nkpts,nao,nao)) +
numpy.random.random((nkpts,nao,nao))*1j)
with_df = mdf.MDF(cell, kpts)
out = with_df.ao2mo_7d(mo, kpts)
ref = numpy.empty_like(out)
kconserv = kpts_helper.get_kconserv(cell, kpts)
for ki, kj, kk in kpts_helper.loop_kkk(nkpts):
kl = kconserv[ki, kj, kk]
tmp = with_df.ao2mo((mo[ki], mo[kj], mo[kk], mo[kl]), kpts[[ki,kj,kk,kl]])
ref[ki,kj,kk] = tmp.reshape([nao]*4)
self.assertAlmostEqual(abs(out-ref).max(), 0, 12)
if __name__ == '__main__':
print("Full Tests for mdf ao2mo")
unittest.main()
| apache-2.0 | -4,028,618,113,666,119,000 | 36.709924 | 86 | 0.571457 | false |
seleniumbase/SeleniumBase | examples/test_hack_search.py | 1 | 1313 | """ Testing the "self.set_attribute()" and "self.set_attributes()" methods
to modify a Google search into becoming a Bing search.
set_attribute() -> Modifies the attribute of the first matching element.
set_attributes() -> Modifies the attribute of all matching elements. """
from seleniumbase import BaseCase
class HackingTests(BaseCase):
def test_hack_search(self):
self.open("https://google.com/ncr")
self.assert_element('input[title="Search"]')
self.set_attribute('[action="/search"]', "action", "//bing.com/search")
self.set_attributes('[value="Google Search"]', "value", "Bing Search")
self.type('input[title="Search"]', "SeleniumBase GitHub")
self.sleep(0.5)
self.js_click('[value="Bing Search"]')
self.highlight("h1.b_logo")
self.highlight_click('a[href*="github.com/seleniumbase/SeleniumBase"]')
self.switch_to_newest_window()
self.assert_element('[href="/seleniumbase/SeleniumBase"]')
self.assert_true("seleniumbase/SeleniumBase" in self.get_current_url())
self.click('a[title="examples"]')
self.assert_text("examples", "strong.final-path")
self.highlight_click('[title="test_hack_search.py"]')
self.assert_text("test_hack_search.py", "strong.final-path")
| mit | 5,558,656,236,004,526,000 | 49.5 | 79 | 0.657273 | false |
campadrenalin/EJTP-lib-python | setup.py | 1 | 3110 | #!/usr/bin/env python
from setuptools import setup
long_desc = '''
Encrypted JSON Transport Protocol
---------------------------------
EJTP is an overlay protocol that allows the pluggable use of underlying transports, such as UDP, TCP, HTTP, IRC, Email and carrier pigeon to provide a cryptographically secure network of unreliable message forwarding. You can think of it as a bit like a more general-purpose and security-minded successor to XMPP, using JSON rather than XML as its frame medium.
On top of a simple frame format, EJTP boasts a consistent and simple format for describing encryption credentials, which is useful even without the rest of EJTP. The ejtp-crypto script makes it easy for other projects to take advantage of this pending a native port of ejtp.crypto to languages other than Python.
The intention of EJTP is to make it trivial to establish secure and NAT-oblivious distributed services across a common network of message relays. Your system only has to worry about exchanging encryption credentials and establishing a connection with a relay host, helping to pave the way toward distributed apps that run entirely in HTML5 (pending a port of the project to JS). You can be serverless *and* smartphone-friendly.
Optionally supports elliptic curve cryptography if the PyECC_ module is installed.
For more technical and in-depth information, visit the `Github project <https://github.com/campadrenalin/EJTP-lib-python>`_.
.. _PyECC: https://pypi.python.org/pypi/PyECC
'''
setup(
name = 'ejtp',
version = '0.9.7p1',
description = 'Encrypted JSON Transport Protocol library',
long_description = long_desc,
author = 'Philip Horger',
author_email = '[email protected]',
url = 'https://github.com/campadrenalin/EJTP-lib-python/',
package_data={
'ejtp.tests' : ['examplecache.json', 'idents/*']
},
install_requires = [
'pycrypto',
'persei',
'requests',
'streql',
],
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Security :: Cryptography',
],
scripts = [
'scripts/ejtpd',
'scripts/ejtp-keygen',
'scripts/ejtp-console',
'scripts/ejtp-crypto',
'scripts/ejtp-identity',
],
packages = [
'ejtp',
'ejtp.applications',
'ejtp.applications.ejforward',
'ejtp.crypto',
'ejtp.frame',
'ejtp.identity',
'ejtp.jacks',
'ejtp.tests',
'ejtp.util',
'ejtp.vendor',
],
)
| lgpl-3.0 | 5,118,365,220,916,456,000 | 39.38961 | 427 | 0.671704 | false |
juliakreger/bifrost | playbooks/roles/ironic-install/files/parse_zuul_changes.py | 1 | 2325 | #!/usr/bin/env python
# (c) 2015, Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import re
import subprocess
import sys
if len(sys.argv) == 1:
print("ERROR: This script requires arguments!\n"
"%s repository_path review_url repository_name "
"zuul_changes" % sys.argv[0])
sys.exit(1)
repo_path = sys.argv[1]
review_url = sys.argv[2]
repo_name = sys.argv[3]
change_list = str(sys.argv[4]).split('^')
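# Each '^'-separated entry has the form project:branch:ref, e.g.
# "openstack/bifrost:master:refs/changes/01/123401/2" (format implied by the
# split(':') below; the ref value here is only an illustration).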
applicable_changes = [x for x in change_list if repo_name in x]
try:
for change in applicable_changes:
(project, branch, ref) = change.split(':')
if re.search(repo_name, project):
if not re.search(branch, subprocess.check_output(
['git', '-C', repo_path, 'status', '-s', '-b'])):
command = ['git', '-C', repo_path, 'checkout', branch]
subprocess.call(command, stdout=True)
command = ['git', '-C', repo_path, 'fetch',
review_url + "/" + repo_name, ref]
            if subprocess.call(command, stdout=True) == 0:
if subprocess.call(
['git', '-C', repo_path, 'cherry-pick',
                         '-n', 'FETCH_HEAD'], stdout=True) == 0:
print("Applied %s" % ref)
else:
print("Failed to cherry pick %s on to %s branch %s"
% (ref, repo_name, branch))
sys.exit(1)
else:
print("Failed to download %s on to %s branch %s"
% (ref, repo_name, branch))
sys.exit(1)
except Exception as e:
print("Failed to process change: %s" % e)
| apache-2.0 | -3,221,562,445,962,822,000 | 37.75 | 78 | 0.581505 | false |
wavesoft/CCLib | Python/cclib/chip/cc2510.py | 1 | 14606 | #
# CS2510 Chip-Specific code for CCLib
#
# Copyright (c) 2015 Simon Schulz - github.com/fishpepper
# Copyright (c) 2014-2016 Ioannis Charalampidis
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
from cclib.chip import ChipDriver
import sys
import time
class CC2510(ChipDriver):
"""
Chip-specific code for CC2510 SOC
"""
@staticmethod
def test(chipID):
"""
Check if this ChipID can be handled by this class
"""
return ((chipID & 0xFF00) == 0x8100)
def chipName(self):
"""
Return Chip Name
"""
return "CC251x"
def initialize(self):
"""
Initialize chip driver
"""
# Update the CC.Debugger instruction set that arduino should use with
# the chip, because CC251xx chips use a different one
if self.instructionTableVersion != 2:
self.updateInstructionTable(2, [
0x44, # I_HALT
0x4C, # I_RESUME
0x24, # I_RD_CONFIG
0x1D, # I_WR_CONFIG
0x55, # I_DEBUG_INSTR_1
0x56, # I_DEBUG_INSTR_2
0x57, # I_DEBUG_INSTR_3
0x68, # I_GET_CHIP_ID
0x28, # I_GET_PC
0x34, # I_READ_STATUS
0x5C, # I_STEP_INSTR
0x14, # I_CHIP_ERASE
])
# Custom chip info for cc2510
self.chipInfo = {
'flash' : 16,
'usb' : 0,
'sram' : 2
}
# Populate variables
self.flashSize = self.chipInfo['flash'] * 1024
#all cc251x have 0x400 as flash page size
self.flashPageSize = 0x400
self.sramSize = self.chipInfo['sram'] * 1024
self.bulkBlockSize = 0x400 # < This should be the same as the flash page size
self.flashWordSize = 2 #cc251x have 2 bytes per word
###############################################
# Data reading
###############################################
def readXDATA( self, offset, size ):
"""
Read any size of buffer from the XDATA region
"""
# Setup DPTR
a = self.instri( 0x90, offset ) # MOV DPTR,#data16
# Prepare ans array
ans = bytearray()
# Read bytes
for i in range(0, size):
a = self.instr ( 0xE0 ) # MOVX A,@DPTR
ans.append(a)
a = self.instr ( 0xA3 ) # INC DPTR
# Return ans
return ans
def writeXDATA( self, offset, bytes ):
"""
Write any size of buffer in the XDATA region
"""
# Setup DPTR
a = self.instri( 0x90, offset ) # MOV DPTR,#data16
# Read bytes
for b in bytes:
a = self.instr ( 0x74, b ) # MOV A,#data
a = self.instr ( 0xF0 ) # MOVX @DPTR,A
a = self.instr ( 0xA3 ) # INC DPTR
# Return bytes written
return len(bytes)
def readCODE( self, offset, size ):
"""
Read any size of buffer from the XDATA+0x8000 (code-mapped) region
"""
# Pick the code bank this code chunk belongs to
fBank = int(offset / 0x8000 )
self.selectXDATABank( fBank )
# Recalibrate offset
offset -= fBank * 0x8000
# Setup DPTR
a = self.instri( 0x90, offset ) # MOV DPTR,#data16
# Prepare ans array
ans = bytearray()
# Read bytes
for i in range(0, size):
            a = self.instr ( 0xE4 ) # CLR A
            a = self.instr ( 0x93 ) # MOVC A,@A+DPTR
ans.append(a)
a = self.instr ( 0xA3 ) # INC DPTR
        # Return ans
return ans
def getRegister( self, reg ):
"""
Return the value of the given register
"""
return self.instr( 0xE5, reg ) # MOV A,direct
def setRegister( self, reg, v ):
"""
Update the value of the
"""
return self.instr( 0x75, reg, v ) # MOV direct,#data
def selectXDATABank(self, bank):
"""
Select XDATA bank from the Memory Arbiter Control register
"""
#a = self.getRegister( 0xC7 )
#a = (a & 0xF8) | (bank & 0x07)
#return self.setRegister( 0xC7, a )
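        # 0xC7 is the memory arbiter control (MEMCTR) register; bank*16 + 1
        # appears to place the bank number in the high nibble with bit 0 set.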
        return self.instr(0x75, 0xC7, bank*16 + 1)
def selectFlashBank(self, bank):
"""
Select a bank for
"""
return self.setRegister( 0x9F, bank & 0x07 )
###############################################
# Chip information
###############################################
def getSerial(self):
"""
Read the IEEE address from the 0x780E register
"""
# Serial number is 6 bytes, stored on 0x780E
bytes = self.readXDATA( 0x780E, 6 )
# Build serial number string
serial = ""
for i in range(5,-1,-1):
serial += "%02x" % bytes[i]
# Return serial
return serial
def getChipInfo(self):
"""
Analyze chip info registers
"""
# Get chip info registers
chipInfo = self.readXDATA(0x6276, 2)
# Extract the useful info
return {
'flash' : pow(2, 4 + ((chipInfo[0] & 0x70) >> 4)), # in Kb
'usb' : (chipInfo[0] & 0x08) != 0, # in Kb
'sram' : (chipInfo[1] & 0x07) + 1
}
def getInfoPage(self):
"""
Return the read-only information page (2kb)
"""
# Read XDATA
data = self.readXDATA( 0x7800, self.flashPageSize )
        # Return the info page contents
return data
def getLastCODEPage(self):
"""
Return the entire last flash page
"""
# Return the last page-size bytes
return self.readCODE( self.flashSize - self.flashPageSize, self.flashPageSize )
def writeLastCODEPage(self, pageData):
"""
Write the entire last flash code page
"""
# Validate page data
if len(pageData) > self.flashPageSize:
raise IOError("Data bigger than flash page size!")
# Write flash code page
return self.writeCODE( self.flashSize - self.flashPageSize, pageData, erase=True )
###############################################
# cc251x
###############################################
def readFlashPage(self, address):
if (not self.debug_active):
print("ERROR: not in debug mode! did you forget a enter() call?\n")
sys.exit(2)
return self.readCODE(address & 0x7FFFF, self.flashPageSize)
def writeFlashPage(self, address, inputArray, erase_page=True):
if len(inputArray) != self.flashPageSize:
raise IOError("input data size != flash page size!")
if (not self.debug_active):
print("ERROR: not in debug mode! did you forget a enter() call?\n")
sys.exit(2)
#calc words per flash page
        words_per_flash_page = self.flashPageSize // self.flashWordSize  # integer division (Python 2/3 safe)
#print "words_per_flash_page = %d" % (words_per_flash_page)
#print "flashWordSize = %d" % (self.flashWordSize)
if (erase_page):
print("[page erased]", end=' ')
routine8_1 = [
#see http://www.ti.com/lit/ug/swra124/swra124.pdf page 11
            0x75, 0xAD, ((address >> 8) // self.flashWordSize) & 0x7E, #MOV FADDRH, #imm;
0x75, 0xAC, 0x00 #MOV FADDRL, #00;
]
routine8_erase = [
0x75, 0xAE, 0x01, #MOV FLC, #01H; // ERASE
#; Wait for flash erase to complete
0xE5, 0xAE, #eraseWaitLoop: MOV A, FLC;
0x20, 0xE7, 0xFB #JB ACC_BUSY, eraseWaitLoop;
]
routine8_2 = [
#; Initialize the data pointer
0x90, 0xF0, 0x00, #MOV DPTR, #0F000H;
#; Outer loops
0x7F, (((words_per_flash_page)>>8)&0xFF), #MOV R7, #imm;
0x7E, ((words_per_flash_page)&0xFF), #MOV R6, #imm;
0x75, 0xAE, 0x02, #MOV FLC, #02H; // WRITE
#; Inner loops
0x7D, self.flashWordSize, #writeLoop: MOV R5, #imm;
0xE0, #writeWordLoop: MOVX A, @DPTR;
0xA3, #INC DPTR;
0xF5, 0xAF, #MOV FWDATA, A;
0xDD, 0xFA, #DJNZ R5, writeWordLoop;
#; Wait for completion
0xE5, 0xAE, #writeWaitLoop: MOV A, FLC;
0x20, 0xE6, 0xFB, #JB ACC_SWBSY, writeWaitLoop;
0xDE, 0xF1, #DJNZ R6, writeLoop;
0xDF, 0xEF, #DJNZ R7, writeLoop;
#set green led for debugging info (DO NOT USE THIS!)
#LED_GREEN_DIR |= (1<<LED_GREEN_PIN);
#0x43, 0xFF, 0x18, # [24] 935 orl _P2DIR,#0x10
#LED_GREEN_PORT = (1<<LED_GREEN_PIN);
#0x75, 0xA0, 0x18, # [24] 937 mov _P2,#0x10
#; Done with writing, fake a breakpoint in order to HALT the cpu
0xA5 #DB 0xA5;
]
#build routine
routine = routine8_1
if (erase_page):
routine += routine8_erase
routine += routine8_2
#add led code to flash code (for debugging)
#aroutine = led_routine + routine
#routine = routine + led_routine
#for x in routine:
# print "%02X" % (x),
#halt CPU
self.halt()
#send data to xdata memory:
if (self.show_debug_info): print("copying data to xdata")
self.writeXDATA(0xF000, inputArray)
#send program to xdata mem
if (self.show_debug_info): print("copying flash routine to xdata")
self.writeXDATA(0xF000 + self.flashPageSize, routine)
if (self.show_debug_info): print("executing code")
#execute MOV MEMCTR, (bank * 16) + 1;
self.instr(0x75, 0xC7, 0x51)
#set PC to start of program
self.setPC(0xF000 + self.flashPageSize)
#start program exec, will continue after routine exec due to breakpoint
self.resume()
if (self.show_debug_info): print("page write running", end=' ')
#set some timeout (2 seconds)
timeout = 200
while (timeout > 0):
#show progress
if (self.show_debug_info):
print(".", end=' ')
sys.stdout.flush()
#check status (bit 0x20 = cpu halted)
if ((self.getStatus() & 0x20 ) != 0):
if (self.show_debug_info): print("done")
break
#timeout increment
timeout -= 1
#delay (10ms)
time.sleep(0.01)
if (timeout <=0):
raise IOError("flash write timed out!")
self.halt()
if (self.show_debug_info): print("done")
###############################################
# Flash functions
###############################################
def setFlashWordOffset(self, address):
"""
Set the flash address offset in FADDRH:FADDRL
"""
# Split address in high/low order bytes
cHigh = (address >> 8) & 0xFF
cLow = (address & 0xFF)
# Place in FADDRH:FADDRL
self.writeXDATA( 0x6271, [cLow, cHigh])
def isFlashFull(self):
"""
Check if the FULL bit is set in the flash register
"""
# Read flash status register
a = self.readXDATA(0x6270, 1)
return (a[0] & 0x40 != 0)
def isFlashBusy(self):
"""
Check if the BUSY bit is set in the flash register
"""
# Read flash status register
a = self.readXDATA(0x6270, 1)
return (a[0] & 0x80 != 0)
def isFlashAbort(self):
"""
Check if the ABORT bit is set in the flash register
"""
# Read flash status register
a = self.readXDATA(0x6270, 1)
return (a[0] & 0x20 != 0)
def clearFlashStatus(self):
"""
Clear the flash status register
"""
# Read & mask-out status register bits
a = self.readXDATA(0x6270, 1)
a[0] &= 0x1F
return self.writeXDATA(0x6270, a)
def setFlashWrite(self):
"""
Set the WRITE bit in the flash control register
"""
# Set flash WRITE bit
a = self.readXDATA(0x6270, 1)
a[0] |= 0x02
return self.writeXDATA(0x6270, a)
def setFlashErase(self):
"""
Set the ERASE bit in the flash control register
"""
# Set flash ERASE bit
a = self.readXDATA(0x6270, 1)
a[0] |= 0x01
return self.writeXDATA(0x6270, a)
def writeCODE(self, offset, data, erase=False, verify=False, showProgress=False):
"""
Fully automated function for writing the Flash memory.
WARNING: This requires DMA operations to be unpaused ( use: self.pauseDMA(False) )
"""
# Prepare DMA-0 for DEBUG -> RAM (using DBG_BW trigger)
self.configDMAChannel( 0, 0x6260, 0x0000, 0x1F, tlen=self.bulkBlockSize, srcInc=0, dstInc=1, priority=1, interrupt=True )
# Prepare DMA-1 for RAM -> FLASH (using the FLASH trigger)
self.configDMAChannel( 1, 0x0000, 0x6273, 0x12, tlen=self.bulkBlockSize, srcInc=1, dstInc=0, priority=2, interrupt=True )
# Reset flags
self.clearFlashStatus()
self.clearDMAIRQ(0)
self.clearDMAIRQ(1)
self.disarmDMAChannel(0)
self.disarmDMAChannel(1)
flashRetries = 0
# Split in 2048-byte chunks
iOfs = 0
while (iOfs < len(data)):
# Check if we should show progress
if showProgress:
print("\r Progress %0.0f%%... " % (iOfs*100/len(data)), end=' ')
sys.stdout.flush()
# Get next page
iLen = min( len(data) - iOfs, self.bulkBlockSize )
# Update DMA configuration if we have less than bulk-block size data
if (iLen < self.bulkBlockSize):
self.configDMAChannel( 0, 0x6260, 0x0000, 0x1F, tlen=iLen, srcInc=0, dstInc=1, priority=1, interrupt=True )
self.configDMAChannel( 1, 0x0000, 0x6273, 0x12, tlen=iLen, srcInc=1, dstInc=0, priority=2, interrupt=True )
# Upload to RAM through DMA-0
self.armDMAChannel(0)
self.brustWrite( data[iOfs:iOfs+iLen] )
# Wait until DMA-0 raises interrupt
while not self.isDMAIRQ(0):
time.sleep(0.010)
# Clear DMA IRQ flag
self.clearDMAIRQ(0)
# Calculate the page where this data belong to
fAddr = offset + iOfs
fPage = int( fAddr / self.flashPageSize )
# Calculate FLASH address High/Low bytes
# for writing (addressable as 32-bit words)
fWordOffset = int(fAddr / 4)
cHigh = (fWordOffset >> 8) & 0xFF
cLow = fWordOffset & 0xFF
self.writeXDATA( 0x6271, [cLow, cHigh] )
# Debug
#print "[@%04x: p=%i, ofs=%04x, %02x:%02x]" % (fAddr, fPage, fWordOffset, cHigh, cLow),
#sys.stdout.flush()
# Check if we should erase page first
if erase:
# Select the page to erase using FADDRH[7:1]
#
# NOTE: Specific to (CC2530, CC2531, CC2540, and CC2541),
# the CC2533 uses FADDRH[6:0]
#
cHigh = (fPage << 1)
cLow = 0
self.writeXDATA( 0x6271, [cLow, cHigh] )
# Set the erase bit
self.setFlashErase()
# Wait until flash is not busy any more
while self.isFlashBusy():
time.sleep(0.010)
# Upload to FLASH through DMA-1
self.armDMAChannel(1)
self.setFlashWrite()
# Wait until DMA-1 raises interrupt
while not self.isDMAIRQ(1):
# Also check for errors
if self.isFlashAbort():
self.disarmDMAChannel(1)
raise IOError("Flash page 0x%02x is locked!" % fPage)
time.sleep(0.010)
# Clear DMA IRQ flag
self.clearDMAIRQ(1)
# Check if we should verify
if verify:
verifyBytes = self.readCODE(fAddr, iLen)
for i in range(0, iLen):
if verifyBytes[i] != data[iOfs+i]:
if flashRetries < 3:
print("\n[Flash Error at @0x%04x, will retry]" % (fAddr+i))
flashRetries += 1
continue
else:
raise IOError("Flash verification error on offset 0x%04x" % (fAddr+i))
flashRetries = 0
# Forward to next page
iOfs += iLen
if showProgress:
print("\r Progress 100%... OK")
| gpl-3.0 | 2,958,015,112,497,272,300 | 25.222621 | 123 | 0.628372 | false |
threedliams/CallbackBot | src/api/base.py | 1 | 10608 | import json
import os
#TODO: handle unicode better instead of just ignoring it
from unidecode import unidecode
from abc import ABC, abstractmethod
import src.util.callbackUtil
import src.data.messages
import src.data.polls
class API(ABC):
def __init__(self, token):
self.apiName = ""
self.client = None
self.isSavedReady = False
self.isLiveReady = False
self.savedChannelTextMap = {}
self.liveChannelTextMap = {}
self.markovModelCache = {}
self.callbackData = {}
self.polls = {}
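        # Maps a poll message's ID to its most recently edited poll message.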
super().__init__()
@abstractmethod
def author(self, payload):
pass
@abstractmethod
def authorName(self, payload):
pass
@abstractmethod
def content(self, payload):
pass
@abstractmethod
def messageChannel(self, payload):
pass
@abstractmethod
def emoji(self, payload):
pass
@abstractmethod
def reactionMessage(self, payload):
pass
@abstractmethod
def messageID(self, payload):
pass
@abstractmethod
def clientName(self):
pass
@abstractmethod
def clientID(self):
pass
@abstractmethod
def clientUser(self):
pass
@abstractmethod
def getServers(self):
pass
@abstractmethod
def serverName(self, server):
pass
@abstractmethod
def channels(self, server):
pass
@abstractmethod
def channelName(self, channel):
pass
@abstractmethod
def channelID(self, channel):
pass
@abstractmethod
async def getLogs(self, channel):
pass
@abstractmethod
async def editMessage(self, message, newContent):
pass
################################################################################
# onReady
#
# When the bot starts up, this runs all the startup functions
#
# Args:
#
# None
#
# Returns - nothing
################################################################################
async def onReady(self):
print('Logged in as')
print(self.clientName())
print(self.clientID())
print('------')
rootFolder = "./servers/" + self.apiName + "/"
callbackFile = "./callbacks/callbacks.json"
#load callbackFile
with open(callbackFile) as data_file:
self.callbackData = json.load(data_file)
servers = self.getServers()
#preload any saved channels
for server in servers:
underscoredServerName = self.serverName(server).replace(" ", "_")
if(os.path.isdir(rootFolder + underscoredServerName)):
for channel in server.text_channels:
underscoredChannelName = self.channelName(channel).replace(" ", "_")
#TODO: channels with the same name on one server?
if(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
if not(channel.id in list(self.savedChannelTextMap.keys())):
self.savedChannelTextMap[self.channelID(channel)] = {}
for fileName in os.listdir(rootFolder + underscoredServerName + "/" + underscoredChannelName):
f = open(rootFolder + underscoredServerName + "/" + underscoredChannelName + "/" + fileName, 'r')
#TODO: handle people with . in their name
self.savedChannelTextMap[self.channelID(channel)][fileName.split('.')[0]] = f.read()
self.isSavedReady = True
print("saved ready!")
#catch up to current logs
for server in servers:
for channel in server.text_channels:
if not(self.channelID(channel) in list(self.liveChannelTextMap.keys())):
self.liveChannelTextMap[self.channelID(channel)] = {}
await self.getLogs(channel)
#save current logs for next time
for server in servers:
underscoredServerName = self.serverName(server).replace(" ", "_")
if not(os.path.isdir(rootFolder + underscoredServerName)):
os.makedirs(rootFolder + underscoredServerName)
if(os.path.isdir(rootFolder + underscoredServerName)):
for channel in server.text_channels:
underscoredChannelName = self.channelName(channel).replace(" ", "_")
if not(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
os.makedirs(rootFolder + underscoredServerName + "/" + underscoredChannelName)
if(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
for username in self.liveChannelTextMap[self.channelID(channel)].keys():
f = open(rootFolder + underscoredServerName + "/" + underscoredChannelName + "/" + username + ".txt", 'w')
f.write(self.liveChannelTextMap[self.channelID(channel)][username])
self.isLiveReady = True
for server in servers:
for channel in server.text_channels:
src.app.attemptMarkovCacheRefresh(self, channel.id, True)
print("live ready!")
################################################################################
# onMessage
#
# When someone sends a message in a channel with a bot, this function fires
# so you can process the given message
#
# Args:
#
# message - a Message object
#
# Returns - nothing
################################################################################
async def onMessage(self, message):
await src.util.callbackUtil.functionSwitcher(message)
if(self.isSavedReady and not self.isLiveReady):
src.data.messages.saveMessage(message, self.savedChannelTextMap)
if(self.isLiveReady):
src.data.messages.saveMessage(message, self.liveChannelTextMap)
src.app.attemptMarkovCacheRefresh(message.api, message.channelID)
################################################################################
# onReactionAdd
#
# When someone adds a reaction in a channel with a bot, this function fires
# so you can process the given reaction
#
# Args:
#
# reaction - a Reaction object
#
# username - the reacting user
#
# Returns - nothing
################################################################################
async def onReactionAdd(self, reaction, username):
message = self.reactionMessage(reaction)
isPoll = False
for pollID in self.polls:
if(message.messageID == pollID):
isPoll = True
if not(isPoll):
return
newPoll = await self.editMessage(message, src.data.polls.addVote(message, reaction, username))
# This either replaces the old poll with the new, or adds the new one
self.polls[message.messageID] = newPoll
################################################################################
# onReactionRemove
#
# When someone removes a reaction in a channel with a bot, this function fires
# so you can process the given reaction
#
# Args:
#
# reaction - a Reaction object
#
# username - the reacting user
#
# Returns - nothing
################################################################################
async def onReactionRemove(self, reaction, username):
message = self.reactionMessage(reaction)
isPoll = False
for pollID in self.polls:
if(message.messageID == pollID):
isPoll = True
if not(isPoll):
return
newPoll = await self.editMessage(message, src.data.polls.removeVote(message, reaction, username))
# This either replaces the old poll with the new, or adds the new one
self.polls[message.messageID] = newPoll
################################################################################
# onReactionClear
#
# When someone clears a reaction in a channel with a bot, this function fires
# so you can process the given reaction
#
# Args:
#
# reaction - the Reaction object
#
# username - the reacting user
#
# Returns - nothing
################################################################################
async def onReactionClear(self, reaction, username):
message = self.reactionMessage(reaction)
isPoll = False
for pollID in self.polls:
if(message.messageID == pollID):
isPoll = True
if not(isPoll):
return
newPoll = await self.editMessage(message, src.data.polls.removeVote(message, reaction, username))
# This either replaces the old poll with the new, or adds the new one
self.polls[message.messageID] = newPoll
################################################################################
# sendFile
#
# Sends the given file to the given channel
#
# Args:
#
# message - a Message object
#
# fileToSend - a string with the path of the file to send
#
# Return - nothing
################################################################################
@abstractmethod
async def sendFile(self, message, fileToSend):
pass
################################################################################
# addReaction
#
# Adds the given reaction to the given message
#
# Args:
#
# message - a Message object
#
# reactionToAdd - a string with the name of the emoji to add, found in
# emojiDict
#
# Return - nothing
################################################################################
@abstractmethod
async def addReaction(self, message, reactionToAdd):
pass
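    ############################################################################
    # Illustrative sketch (not part of the original file): a concrete backend
    # would subclass API and fill in the abstract hooks above. `DiscordAPI`
    # and `discord.Client` are hypothetical names used only for illustration.
    #
    #   class DiscordAPI(API):
    #       def __init__(self, token):
    #           super().__init__(token)
    #           self.apiName = "discord"
    #           self.client = discord.Client()  # hypothetical backend client
    #       def author(self, payload):
    #           return payload.author
    #       async def sendMessage(self, message, messageToSend):
    #           await self.messageChannel(message).send(messageToSend)
    #       # ...and so on for the remaining abstract methods
    ############################################################################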
################################################################################
# sendMessage
#
# Sends the given message to the given channel
#
# Args:
#
# message - a Message object
#
# messageToSend - a string message to send
#
# Return - nothing
################################################################################
@abstractmethod
async def sendMessage(self, message, messageToSend):
pass | mit | -6,713,889,131,034,812,000 | 30.954819 | 134 | 0.523284 | false |
odahoda/noisicaa | noisicaa/builtin_nodes/beat_track/track_ui_test.py | 1 | 1702 | #!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
# from noisidev import uitest
# from noisicaa import music
# from . import beat_track_item
# from . import track_item_tests
# class BeatTrackEditorItemTest(track_item_tests.TrackEditorItemTestMixin, uitest.UITestCase):
# async def setup_testcase(self):
# await self.project_client.send_command(music.Command(
# target=self.project.id,
# add_track=music.AddTrack(
# track_type='beat',
# parent_group_id=self.project.master_group.id)))
# self.tool_box = beat_track_item.BeatToolBox(context=self.context)
# def _createTrackItem(self, **kwargs):
# return beat_track_item.BeatTrackEditorItem(
# track=self.project.master_group.tracks[0],
# player_state=self.player_state,
# editor=self.editor,
# context=self.context,
# **kwargs)
| gpl-2.0 | -5,135,278,701,477,630,000 | 36.822222 | 94 | 0.692714 | false |
tantexian/sps-2014-12-4 | sps/openstack/common/policy.py | 1 | 21606 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. This is the original way of
expressing policies, but there now exists a new way: the policy
language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct code to perform that check. However, conjunction
operators are available, allowing for more expressiveness in crafting
policies.
As an example, take the following rule, expressed in the list-of-lists
representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
In the policy language, this becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import re
import urllib
import urllib2
import six
from sps.openstack.common.gettextutils import _
from sps.openstack.common import jsonutils
from sps.openstack.common import log as logging
LOG = logging.getLogger(__name__)
_rules = None
_checks = {}
class Rules(dict):
"""
A store for rules. Handles the default_rule setting directly.
"""
@classmethod
def load_json(cls, data, default_rule=None):
"""
Allow loading of JSON rule data.
"""
# Suck in the JSON data and parse the rules
rules = dict((k, parse_rule(v)) for k, v in
jsonutils.loads(data).items())
return cls(rules, default_rule)
def __init__(self, rules=None, default_rule=None):
"""Initialize the Rules store."""
super(Rules, self).__init__(rules or {})
self.default_rule = default_rule
def __missing__(self, key):
"""Implements the default rule handling."""
# If the default rule isn't actually defined, do something
# reasonably intelligent
if not self.default_rule or self.default_rule not in self:
raise KeyError(key)
return self[self.default_rule]
def __str__(self):
"""Dumps a string representation of the rules."""
# Start by building the canonical strings for the rules
out_rules = {}
for key, value in self.items():
# Use empty string for singleton TrueCheck instances
if isinstance(value, TrueCheck):
out_rules[key] = ''
else:
out_rules[key] = str(value)
# Dump a pretty-printed JSON representation
return jsonutils.dumps(out_rules, indent=4)
# Really have to figure out a way to deprecate this
def set_rules(rules):
"""Set the rules in use for policy checks."""
global _rules
_rules = rules
# Ditto
def reset():
"""Clear the rules used for policy checks."""
global _rules
_rules = None
def check(rule, target, creds, exc=None, *args, **kwargs):
"""
Checks authorization of a rule against the target and credentials.
:param rule: The rule to evaluate.
:param target: As much information about the object being operated
on as possible, as a dictionary.
:param creds: As much information about the user performing the
action as possible, as a dictionary.
:param exc: Class of the exception to raise if the check fails.
Any remaining arguments passed to check() (both
positional and keyword arguments) will be passed to
the exception class. If exc is not provided, returns
False.
:return: Returns False if the policy does not allow the action and
exc is not provided; otherwise, returns a value that
evaluates to True. Note: for rules using the "case"
expression, this True value will be the specified string
from the expression.
"""
# Allow the rule to be a Check tree
if isinstance(rule, BaseCheck):
result = rule(target, creds)
elif not _rules:
# No rules to reference means we're going to fail closed
result = False
else:
try:
# Evaluate the rule
result = _rules[rule](target, creds)
except KeyError:
# If the rule doesn't exist, fail closed
result = False
# If it is False, raise the exception if requested
if exc and result is False:
raise exc(*args, **kwargs)
return result
class BaseCheck(object):
"""
Abstract base class for Check classes.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __str__(self):
"""
Retrieve a string representation of the Check tree rooted at
this node.
"""
pass
@abc.abstractmethod
def __call__(self, target, cred):
"""
Perform the check. Returns False to reject the access or a
true value (not necessary True) to accept the access.
"""
pass
class FalseCheck(BaseCheck):
"""
A policy check that always returns False (disallow).
"""
def __str__(self):
"""Return a string representation of this check."""
return "!"
def __call__(self, target, cred):
"""Check the policy."""
return False
class TrueCheck(BaseCheck):
"""
A policy check that always returns True (allow).
"""
def __str__(self):
"""Return a string representation of this check."""
return "@"
def __call__(self, target, cred):
"""Check the policy."""
return True
class Check(BaseCheck):
"""
A base class to allow for user-defined policy checks.
"""
def __init__(self, kind, match):
"""
:param kind: The kind of the check, i.e., the field before the
':'.
:param match: The match of the check, i.e., the field after
the ':'.
"""
self.kind = kind
self.match = match
def __str__(self):
"""Return a string representation of this check."""
return "%s:%s" % (self.kind, self.match)
class NotCheck(BaseCheck):
"""
A policy check that inverts the result of another policy check.
Implements the "not" operator.
"""
def __init__(self, rule):
"""
Initialize the 'not' check.
:param rule: The rule to negate. Must be a Check.
"""
self.rule = rule
def __str__(self):
"""Return a string representation of this check."""
return "not %s" % self.rule
def __call__(self, target, cred):
"""
Check the policy. Returns the logical inverse of the wrapped
check.
"""
return not self.rule(target, cred)
class AndCheck(BaseCheck):
"""
A policy check that requires that a list of other checks all
return True. Implements the "and" operator.
"""
def __init__(self, rules):
"""
Initialize the 'and' check.
:param rules: A list of rules that will be tested.
"""
self.rules = rules
def __str__(self):
"""Return a string representation of this check."""
return "(%s)" % ' and '.join(str(r) for r in self.rules)
def __call__(self, target, cred):
"""
Check the policy. Requires that all rules accept in order to
return True.
"""
for rule in self.rules:
if not rule(target, cred):
return False
return True
def add_check(self, rule):
"""
Allows addition of another rule to the list of rules that will
be tested. Returns the AndCheck object for convenience.
"""
self.rules.append(rule)
return self
class OrCheck(BaseCheck):
"""
A policy check that requires that at least one of a list of other
checks returns True. Implements the "or" operator.
"""
def __init__(self, rules):
"""
Initialize the 'or' check.
:param rules: A list of rules that will be tested.
"""
self.rules = rules
def __str__(self):
"""Return a string representation of this check."""
return "(%s)" % ' or '.join(str(r) for r in self.rules)
def __call__(self, target, cred):
"""
Check the policy. Requires that at least one rule accept in
order to return True.
"""
for rule in self.rules:
if rule(target, cred):
return True
return False
def add_check(self, rule):
"""
Allows addition of another rule to the list of rules that will
be tested. Returns the OrCheck object for convenience.
"""
self.rules.append(rule)
return self
def _parse_check(rule):
"""
Parse a single base check rule into an appropriate Check object.
"""
# Handle the special checks
if rule == '!':
return FalseCheck()
elif rule == '@':
return TrueCheck()
try:
kind, match = rule.split(':', 1)
except Exception:
LOG.exception(_("Failed to understand rule %(rule)s") % locals())
# If the rule is invalid, we'll fail closed
return FalseCheck()
# Find what implements the check
if kind in _checks:
return _checks[kind](kind, match)
elif None in _checks:
return _checks[None](kind, match)
else:
LOG.error(_("No handler for matches of kind %s") % kind)
return FalseCheck()
def _parse_list_rule(rule):
"""
Provided for backwards compatibility. Translates the old
list-of-lists syntax into a tree of Check objects.
"""
# Empty rule defaults to True
if not rule:
return TrueCheck()
# Outer list is joined by "or"; inner list by "and"
or_list = []
for inner_rule in rule:
# Elide empty inner lists
if not inner_rule:
continue
# Handle bare strings
if isinstance(inner_rule, basestring):
inner_rule = [inner_rule]
# Parse the inner rules into Check objects
and_list = [_parse_check(r) for r in inner_rule]
# Append the appropriate check to the or_list
if len(and_list) == 1:
or_list.append(and_list[0])
else:
or_list.append(AndCheck(and_list))
# If we have only one check, omit the "or"
if not or_list:
return FalseCheck()
elif len(or_list) == 1:
return or_list[0]
return OrCheck(or_list)
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
"""
Tokenizer for the policy language.
Most of the single-character tokens are specified in the
_tokenize_re; however, parentheses need to be handled specially,
because they can appear inside a check string. Thankfully, those
parentheses that appear inside a check string can never occur at
the very beginning or end ("%(variable)s" is the correct syntax).
"""
for tok in _tokenize_re.split(rule):
# Skip empty tokens
if not tok or tok.isspace():
continue
# Handle leading parens on the token
clean = tok.lstrip('(')
for i in range(len(tok) - len(clean)):
yield '(', '('
# If it was only parentheses, continue
if not clean:
continue
else:
tok = clean
# Handle trailing parens on the token
clean = tok.rstrip(')')
trail = len(tok) - len(clean)
# Yield the cleaned token
lowered = clean.lower()
if lowered in ('and', 'or', 'not'):
# Special tokens
yield lowered, clean
elif clean:
# Not a special token, but not composed solely of ')'
if len(tok) >= 2 and ((tok[0], tok[-1]) in
[('"', '"'), ("'", "'")]):
# It's a quoted string
yield 'string', tok[1:-1]
else:
yield 'check', _parse_check(clean)
# Yield the trailing parens
for i in range(trail):
yield ')', ')'
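# For illustration (hedged, not in the original): tokenizing the rule
# "role:admin or (not role:dunce)" yields, in order:
#   ('check', <role:admin>), ('or', 'or'), ('(', '('),
#   ('not', 'not'), ('check', <role:dunce>), (')', ')')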
class ParseStateMeta(type):
"""
Metaclass for the ParseState class. Facilitates identifying
reduction methods.
"""
def __new__(mcs, name, bases, cls_dict):
"""
Create the class. Injects the 'reducers' list, a list of
tuples matching token sequences to the names of the
corresponding reduction methods.
"""
reducers = []
for key, value in cls_dict.items():
if not hasattr(value, 'reducers'):
continue
for reduction in value.reducers:
reducers.append((reduction, key))
cls_dict['reducers'] = reducers
return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
"""
Decorator for reduction methods. Arguments are a sequence of
tokens, in order, which should trigger running this reduction
method.
"""
def decorator(func):
# Make sure we have a list of reducer sequences
if not hasattr(func, 'reducers'):
func.reducers = []
# Add the tokens to the list of reducer sequences
func.reducers.append(list(tokens))
return func
return decorator
class ParseState(object):
"""
Implement the core of parsing the policy language. Uses a greedy
reduction algorithm to reduce a sequence of tokens into a single
terminal, the value of which will be the root of the Check tree.
Note: error reporting is rather lacking. The best we can get with
this parser formulation is an overall "parse failed" error.
Fortunately, the policy language is simple enough that this
shouldn't be that big a problem.
"""
__metaclass__ = ParseStateMeta
def __init__(self):
"""Initialize the ParseState."""
self.tokens = []
self.values = []
def reduce(self):
"""
Perform a greedy reduction of the token stream. If a reducer
method matches, it will be executed, then the reduce() method
will be called recursively to search for any more possible
reductions.
"""
for reduction, methname in self.reducers:
if (len(self.tokens) >= len(reduction) and
self.tokens[-len(reduction):] == reduction):
# Get the reduction method
meth = getattr(self, methname)
# Reduce the token stream
results = meth(*self.values[-len(reduction):])
# Update the tokens and values
self.tokens[-len(reduction):] = [r[0] for r in results]
self.values[-len(reduction):] = [r[1] for r in results]
# Check for any more reductions
return self.reduce()
def shift(self, tok, value):
"""Adds one more token to the state. Calls reduce()."""
self.tokens.append(tok)
self.values.append(value)
# Do a greedy reduce...
self.reduce()
@property
def result(self):
"""
Obtain the final result of the parse. Raises ValueError if
the parse failed to reduce to a single result.
"""
if len(self.values) != 1:
raise ValueError("Could not parse rule")
return self.values[0]
@reducer('(', 'check', ')')
@reducer('(', 'and_expr', ')')
@reducer('(', 'or_expr', ')')
def _wrap_check(self, _p1, check, _p2):
"""Turn parenthesized expressions into a 'check' token."""
return [('check', check)]
@reducer('check', 'and', 'check')
def _make_and_expr(self, check1, _and, check2):
"""
Create an 'and_expr' from two checks joined by the 'and'
operator.
"""
return [('and_expr', AndCheck([check1, check2]))]
@reducer('and_expr', 'and', 'check')
def _extend_and_expr(self, and_expr, _and, check):
"""
Extend an 'and_expr' by adding one more check.
"""
return [('and_expr', and_expr.add_check(check))]
@reducer('check', 'or', 'check')
def _make_or_expr(self, check1, _or, check2):
"""
Create an 'or_expr' from two checks joined by the 'or'
operator.
"""
return [('or_expr', OrCheck([check1, check2]))]
@reducer('or_expr', 'or', 'check')
def _extend_or_expr(self, or_expr, _or, check):
"""
Extend an 'or_expr' by adding one more check.
"""
return [('or_expr', or_expr.add_check(check))]
@reducer('not', 'check')
def _make_not_expr(self, _not, check):
"""Invert the result of another check."""
return [('check', NotCheck(check))]
def _parse_text_rule(rule):
"""
Translates a policy written in the policy language into a tree of
Check objects.
"""
# Empty rule means always accept
if not rule:
return TrueCheck()
# Parse the token stream
state = ParseState()
for tok, value in _parse_tokenize(rule):
state.shift(tok, value)
try:
return state.result
except ValueError:
# Couldn't parse the rule
LOG.exception(_("Failed to understand rule %(rule)r") % locals())
# Fail closed
return FalseCheck()
def parse_rule(rule):
"""
Parses a policy rule into a tree of Check objects.
"""
# If the rule is a string, it's in the policy language
if isinstance(rule, basestring):
return _parse_text_rule(rule)
return _parse_list_rule(rule)
def register(name, func=None):
"""
Register a function or Check class as a policy check.
:param name: Gives the name of the check type, e.g., 'rule',
'role', etc. If name is None, a default check type
will be registered.
:param func: If given, provides the function or class to register.
If not given, returns a function taking one argument
to specify the function or class to register,
allowing use as a decorator.
"""
# Perform the actual decoration by registering the function or
# class. Returns the function or class for compliance with the
# decorator interface.
def decorator(func):
_checks[name] = func
return func
# If the function or class is given, do the registration
if func:
return decorator(func)
return decorator
@register("rule")
class RuleCheck(Check):
def __call__(self, target, creds):
"""
Recursively checks credentials based on the defined rules.
"""
try:
return _rules[self.match](target, creds)
except KeyError:
# We don't have any matching rule; fail closed
return False
@register("role")
class RoleCheck(Check):
def __call__(self, target, creds):
"""Check that there is a matching role in the cred dict."""
return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
def __call__(self, target, creds):
"""
Check http: rules by calling to a remote server.
This example implementation simply verifies that the response
is exactly 'True'.
"""
url = ('http:' + self.match) % target
data = {'target': jsonutils.dumps(target),
'credentials': jsonutils.dumps(creds)}
post_data = urllib.urlencode(data)
f = urllib2.urlopen(url, post_data)
return f.read() == "True"
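# A hedged example (not in the original): a rule such as
#   "http://authz.example.com/check/%(project_id)s"
# (placeholder hostname) POSTs the urlencoded 'target' and 'credentials'
# JSON blobs to that URL and grants access only when the response body is
# exactly "True".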
@register(None)
class GenericCheck(Check):
def __call__(self, target, creds):
"""
Check an individual match.
Matches look like:
tenant:%(tenant_id)s
role:compute:admin
"""
# TODO(termie): do dict inspection via dot syntax
match = self.match % target
if self.kind in creds:
return match == six.text_type(creds[self.kind])
return False
| apache-2.0 | 9,064,018,407,252,972,000 | 26.7 | 78 | 0.594048 | false |
N-Parsons/exercism-python | exercises/book-store/book_store_test.py | 1 | 2174 | import unittest
from book_store import calculate_total
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.4.0
class BookStoreTest(unittest.TestCase):
def test_only_a_single_book(self):
self.assertEqual(calculate_total([1]), 800)
def test_two_of_the_same_book(self):
self.assertEqual(calculate_total([2, 2]), 1600)
def test_empty_basket(self):
self.assertEqual(calculate_total([]), 0)
def test_two_different_books(self):
self.assertEqual(calculate_total([1, 2]), 1520)
def test_three_different_books(self):
self.assertEqual(calculate_total([1, 2, 3]), 2160)
def test_four_different_books(self):
self.assertEqual(calculate_total([1, 2, 3, 4]), 2560)
def test_five_different_books(self):
self.assertEqual(calculate_total([1, 2, 3, 4, 5]), 3000)
def test_two_groups_of_4_is_cheaper_than_group_of_5_plus_group_of_3(self):
self.assertEqual(calculate_total([1, 1, 2, 2, 3, 3, 4, 5]), 5120)
def test_two_groups_of_4_is_cheaper_than_groups_of_5_and_3(self):
self.assertEqual(calculate_total([1, 1, 2, 3, 4, 4, 5, 5]), 5120)
def test_group_of_4_plus_group_of_2_is_cheaper_than_2_groups_of_3(self):
self.assertEqual(calculate_total([1, 1, 2, 2, 3, 4]), 4080)
def test_two_each_of_first_4_books_and_1_copy_each_of_rest(self):
self.assertEqual(calculate_total([1, 1, 2, 2, 3, 3, 4, 4, 5]), 5560)
def test_two_copies_of_each_book(self):
self.assertEqual(calculate_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5]), 6000)
def test_three_copies_of_first_book_and_2_each_of_remaining(self):
self.assertEqual(
calculate_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1]), 6800)
def test_three_each_of_first_2_books_and_2_each_of_remaining_books(self):
self.assertEqual(
calculate_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1, 2]), 7520)
def test_four_groups_of_4_are_cheaper_than_2_groups_each_of_5_and_3(self):
self.assertEqual(
calculate_total([1, 1, 2, 2, 3, 3, 4, 5, 1, 1, 2, 2, 3, 3, 4, 5]),
10240)
if __name__ == '__main__':
unittest.main()
| mit | -9,220,156,458,202,986,000 | 35.233333 | 79 | 0.618215 | false |
codecakes/algorithms_monk | search/numof_occurence_sorted.py | 1 | 1383 | def firstOccur(arr, N, x):
lastmid = lefti = mid = 0
righti = N-1
while lefti<righti:
mid = (lefti+righti)/2
if lastmid == mid:
mid += 1
        if mid == righti:
            return righti if arr[righti] == x else -1
if arr[mid] >= x:
righti = mid
else:
lefti = mid
lastmid = mid
# print lefti, righti
return righti if arr[righti] == x else -1
def lastOccur(arr, N, x):
lastmid = lefti = mid = 0
righti = N-1
while lefti<righti:
mid = (lefti+righti)/2
if lastmid == mid:
mid += 1
        if mid == lefti:
            return lefti if arr[lefti] == x else -1
if arr[mid] <= x:
lefti = mid
else:
righti = mid
lastmid = mid
# print lefti, righti
return lefti if arr[lefti] == x else -1
def numOccur(arr, N, x):
left_index = firstOccur(arr, N, x)
right_index = lastOccur(arr, N, x)
    # print left_index, right_index
    if left_index == -1 or right_index == -1:
        return -1
    return right_index - left_index + 1
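# Worked example (a sketch, not in the original): with the sorted array
# [1, 2, 2, 2, 3, 5, 46, 90, 90, 90] and x = 2, firstOccur returns index 1,
# lastOccur returns index 3, so numOccur returns 3 - 1 + 1 = 3.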
if __name__ == "__main__":
arr = [2,2, 2, 3,46,1,5,90, 90, 90]
arr.sort()
print firstOccur(arr, len(arr), 2)
print lastOccur(arr, len(arr), 2)
print numOccur(arr, len(arr), 2)
print firstOccur(arr, len(arr), 90)
print lastOccur(arr, len(arr), 90)
print numOccur(arr, len(arr), 90)
| mit | 6,805,885,785,412,038,000 | 25.596154 | 97 | 0.52133 | false |
freelan-developers/teapot | teapot/path.py | 1 | 5726 | """
A teapot path-handling class.
"""
import os
import stat
import shutil
import errno
from contextlib import contextmanager
from functools import wraps
from teapot.log import LOGGER
from teapot.log import Highlight as hl
def from_user_path(path):
"""
Perform all variables substitutions from the specified user path.
"""
return os.path.normpath(os.path.expanduser(os.path.expandvars(path)))
def resolve_user_path(func):
"""
A decorator that resolves user paths in the return value.
"""
@wraps(func)
def wrapped_func(*args, **kwargs):
return from_user_path(func(*args, **kwargs))
return wrapped_func
def read_path(value, base_path, default_path):
"""
Read a path value from a string.
If `value` is a string, the Cache is default created at the location
specified by `value`.
If `value` is falsy, the Cache is default created.
"""
if not value:
cache_path = from_user_path(default_path)
else:
cache_path = from_user_path(value)
if not os.path.isabs(cache_path):
cache_path = os.path.normpath(os.path.join(base_path, cache_path))
return cache_path
@contextmanager
def chdir(path):
"""
Changes the directory temporarily.
"""
path = os.path.abspath(path)
saved_dir = os.getcwd()
if os.path.abspath(saved_dir) != os.path.abspath(path):
LOGGER.debug(
"Temporarily changing current directory from %s to %s",
hl(saved_dir),
hl(path),
)
os.chdir(path)
try:
yield
finally:
if os.path.abspath(saved_dir) != os.path.abspath(path):
LOGGER.debug(
"Changing back current directory from %s to %s",
hl(path),
hl(saved_dir),
)
os.chdir(saved_dir)
def mkdir(path):
"""
Create the specified path.
Does nothing if the path exists.
"""
try:
if not os.path.isdir(path):
LOGGER.debug('Creating directory at %s.', hl(path))
os.makedirs(path)
except OSError as ex:
if ex.errno != errno.EEXIST or not os.path.isdir(path):
raise
def rmdir(path):
"""
Delete the specified path if it exists.
Does nothing if the path doesn't exist.
"""
try:
LOGGER.info('Removing directory at %s.', hl(path))
def onerror(func, path, excinfo):
if os.path.exists(path):
LOGGER.debug('Was unable to delete "%s": %s', hl(path), excinfo[1])
LOGGER.debug('Trying again after changing permissions...')
os.chmod(path, stat.S_IWUSR)
try:
func(path)
except Exception as ex:
LOGGER.error('Unable to delete "%s": %s', hl(path), excinfo[1])
raise
shutil.rmtree(path, ignore_errors=False, onerror=onerror)
except Exception as ex:
LOGGER.warning(ex)
def copytree(src, dst, symlinks=False, ignore=None, copy_function=shutil.copy2):
    """
    Recursively copy a directory tree, like shutil.copytree, but with a
    pluggable copy function (so hard links can be used when available).
    """
    names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore, copy_function)
else:
try:
copy_function(srcname, dstname)
except (IOError, WindowsError):
shutil.copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error) as why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        if getattr(why, 'winerror', None) is not None:
            # can't copy file access times on Windows
            pass
        else:
            errors.append((src, dst, str(why)))
    if errors:
        raise shutil.Error(errors)
@contextmanager
def temporary_copy(source_path, target_path, persistent=False):
"""
Copy a source path to a target path.
The target will be deleted upon function exist, unless `persistent`
is truthy.
"""
try:
if os.path.exists(target_path):
rmdir(target_path)
LOGGER.info('Copying %s to %s...', hl(source_path), hl(target_path))
copytree(source_path, target_path, copy_function=getattr(os, 'link', shutil.copy2))
yield target_path
finally:
if not persistent:
rmdir(target_path)
else:
LOGGER.info('Not erasing temporary directory at %s.', hl(target_path))
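# Illustrative usage (hedged; the paths and the build_in() helper are
# placeholders, not part of this module):
#
#   with temporary_copy('/tmp/src-tree', '/tmp/work-tree') as path:
#       build_in(path)
#   # '/tmp/work-tree' is removed on exit unless persistent=True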
def windows_to_unix_path(path):
"""
Convert a Windows path to a UNIX path, in such a way that it can be used in
MSys or Cygwin.
"""
drive, tail = os.path.splitdrive(path)
if drive:
drive = '/' + drive[0]
return drive + tail.replace('\\', '/')
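# For example (a sketch, not in the original):
#   windows_to_unix_path(r'C:\Users\foo\bar') -> '/C/Users/foo/bar'
#   windows_to_unix_path(r'relative\path')    -> 'relative/path'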
@contextmanager
def chdir(path):
"""
Change the current directory.
"""
old_path = os.getcwd()
LOGGER.debug('Moving to: %s', hl(path))
os.chdir(path)
try:
yield path
finally:
LOGGER.debug('Moving back to: %s', hl(old_path))
os.chdir(old_path)
| mit | 7,732,962,308,856,751,000 | 23.470085 | 91 | 0.580685 | false |
dstenb/pylaunchr-emulator | emulator/actions/__init__.py | 1 | 1172 | from .bookmark import EmulatorAddBookmarkFactory, EmulatorRemoveBookmarkFactory
from .dialog import EmulatorShowRomDialogFactory
from .launch import EmulatorLaunchFactory
from .run import EmulatorRunCommandFactory
from .screen import EmulatorConsoleSelectFactory
from .search import (EmulatorSearchDialogRunnerFactory,
EmulatorSearchClearFactory)
from .yank import EmulatorYankPathFactory, EmulatorYankTitleFactory
def get_action_factories(context):
return {
"EmulatorAddBookmark": EmulatorAddBookmarkFactory(context),
"EmulatorConsoleSelect": EmulatorConsoleSelectFactory(context),
"EmulatorLaunch": EmulatorLaunchFactory(context),
"EmulatorRemoveBookmark": EmulatorRemoveBookmarkFactory(context),
"EmulatorRunCommand": EmulatorRunCommandFactory(context),
"EmulatorShowRomDialog": EmulatorShowRomDialogFactory(context),
"EmulatorSearchDialog": EmulatorSearchDialogRunnerFactory(context),
"EmulatorSearchClear": EmulatorSearchClearFactory(context),
"EmulatorYankPath": EmulatorYankPathFactory(context),
"EmulatorYankTitle": EmulatorYankTitleFactory(context),
}
| mit | -7,874,199,933,425,081,000 | 49.956522 | 79 | 0.790102 | false |
rsepassi/tensor2tensor | tensor2tensor/data_generators/translate_enmk.py | 1 | 2362 | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data generators for translation data-sets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.data_generators import translate
from tensor2tensor.utils import registry
import tensorflow as tf
FLAGS = tf.flags.FLAGS
# End-of-sentence marker.
EOS = text_encoder.EOS_ID
# For Macedonian-English the SETimes corpus
# from http://nlp.ffzg.hr/resources/corpora/setimes/ is used.
# The original dataset has 207,777 parallel sentences.
# For training the first 205,777 sentences are used.
_MKEN_TRAIN_DATASETS = [[
"https://github.com/stefan-it/nmt-mk-en/raw/master/data/setimes.mk-en.train.tgz", # pylint: disable=line-too-long
("train.mk", "train.en")
]]
# For development 1000 parallel sentences are used.
_MKEN_TEST_DATASETS = [[
"https://github.com/stefan-it/nmt-mk-en/raw/master/data/setimes.mk-en.dev.tgz", # pylint: disable=line-too-long
("dev.mk", "dev.en")
]]
@registry.register_problem
class TranslateEnmkSetimes32k(translate.TranslateProblem):
"""Problem spec for SETimes Mk-En translation."""
@property
def approx_vocab_size(self):
return 2**15 # 32768
@property
def vocab_filename(self):
return "vocab.mken.%d" % self.approx_vocab_size
def source_data_files(self, dataset_split):
train = dataset_split == problem.DatasetSplit.TRAIN
datasets = _MKEN_TRAIN_DATASETS if train else _MKEN_TEST_DATASETS
source_datasets = [[item[0], [item[1][0]]] for item in datasets]
target_datasets = [[item[0], [item[1][1]]] for item in datasets]
return source_datasets + target_datasets
| apache-2.0 | -664,401,956,210,390,800 | 33.231884 | 118 | 0.738781 | false |
Larpon/DeadAscend | sbin/translation.py | 1 | 1945 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import re
import sys
import json
import argparse
import fileinput
def translation_qml(file_path):
j = json.load(open(file_path))
layers = j['layers']
qml_out = """import QtQuick 2.0
/*
* Auto-generated by sbin/translation.py
* Will export any scene editor descriptions to the game
*/
QtObject {
Component.onCompleted: {
"""
for layer in layers:
if layer['type'] == "objectgroup":
objects = layer['objects']
for obj in objects:
if 'properties' in obj and 'description' in obj['properties']:
desc = obj['properties']['description']
pad = " "
if desc[0] == '[':
desc = json.loads(desc)
for d in desc:
qml_out += pad+"qsTranslate(\"fromEditor\",\""+d+"\")"+"\n"
continue
qml_out += pad+"qsTranslate(\"fromEditor\",\""+desc+"\")"+"\n"
qml_out += """
}
}"""
return qml_out
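# For illustration (hedged, not in the original file): a scene object whose
# editor description is "an old door" contributes the line
#     qsTranslate("fromEditor","an old door")
# inside the generated Component.onCompleted block written to Extra.qml.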
def main(arguments):
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
#parser.add_argument('flag', help="flags (on, off)", type=str)
parser.add_argument('scene_file', help="Scene (json) file") #, type=argparse.FileType('rw')
#parser.add_argument('-i', '--infile', help="Input file",
# default=sys.stdout, type=argparse.FileType('w'))
args = parser.parse_args(arguments)
qml = translation_qml(args.scene_file)
with open("Extra.qml", "w") as text_file:
text_file.write(qml)
#print(args.infile)
#if args.flag == "on":
# comment(args.scene_file)
#else:
# uncomment(args.infile)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit | 3,238,838,331,313,436,700 | 24.933333 | 95 | 0.548586 | false |
google-research/google-research | hal/labeler/captioning_model/train_captioning_model.py | 1 | 9186 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Script for training a captioning model."""
# pylint: disable=wildcard-import
# pylint: disable=unused-variable
# pylint: disable=undefined-variable
# pylint: disable=g-import-not-at-top
from __future__ import absolute_import
from __future__ import division
import pickle
import sys
import time
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v2 as tf
from hal.labeler.labeler_utils import *
from hal.learner.language_utils import pad_to_max_length
import hal.utils.word_vectorization as wv
if 'gfile' not in sys.modules:
  gfile = tf.io.gfile
FLAGS = flags.FLAGS
flags.DEFINE_string('save_dir', None, 'directory for saving models')
_TRANSITION_PATH = None
_TRANSITION_STATE_PATH = None
_TRANSITION_LABEL_PATH = None
_VOCAB_PATH = None
def main(_):
tf.enable_v2_behavior()
##############################################################################
######################### Data loading and processing ########################
##############################################################################
print('Loading data')
  with gfile.GFile(_TRANSITION_STATE_PATH, 'rb') as f:
state_transitions = np.load(f)
state_transitions = np.float32(state_transitions)
  with gfile.GFile(_TRANSITION_LABEL_PATH, 'rb') as f:
captions = pickle.load(f)
  with gfile.GFile(_VOCAB_PATH, 'rb') as f:
vocab_list = f.readlines()
vocab_list = [w[:-1].decode('utf-8') for w in vocab_list]
vocab_list = ['eos', 'sos', 'nothing'] + vocab_list
vocab_list[-1] = 'to'
v2i, i2v = wv.create_look_up_table(vocab_list)
encode_fn = wv.encode_text_with_lookup_table(v2i)
decode_fn = wv.decode_with_lookup_table(i2v)
for caption in captions:
if len(caption) == 1:
caption[0] = 'nothing'
encoded_captions = []
for all_cp in captions:
for cp in all_cp:
cp = 'sos ' + cp + ' eos'
encoded_captions.append(np.array(encode_fn(cp)))
all_caption_n = len(encoded_captions)
encoded_captions = np.array(encoded_captions)
encoded_captions = pad_to_max_length(encoded_captions)
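  # For example (a sketch): a caption "red block" becomes "sos red block eos"
  # and is encoded as [1, v2i['red'], v2i['block'], 0] before padding, since
  # 'eos' -> 0 and 'sos' -> 1 in vocab_list above.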
obs_idx, caption_idx = [], []
curr_caption_idx = 0
for i, _ in enumerate(state_transitions):
for cp in captions[i]:
obs_idx.append(i)
caption_idx.append(curr_caption_idx)
curr_caption_idx += 1
assert curr_caption_idx == all_caption_n
obs_idx = np.array(obs_idx)
caption_idx = np.array(caption_idx)
all_idx = np.arange(len(caption_idx))
train_idx = all_idx[:int(len(all_idx) * 0.7)]
test_idx = all_idx[int(len(all_idx) * 0.7):]
print('Number of training examples: {}'.format(len(train_idx)))
print('Number of test examples: {}\n'.format(len(test_idx)))
##############################################################################
############################# Training Setup #################################
##############################################################################
embedding_dim = 32
units = 64
vocab_size = len(vocab_list)
batch_size = 128
max_sequence_length = 21
encoder_config = {'name': 'state', 'embedding_dim': 8}
decoder_config = {
'name': 'state',
'word_embedding_dim': 64,
'hidden_units': 512,
'vocab_size': len(vocab_list),
}
encoder = get_captioning_encoder(encoder_config)
decoder = get_captioning_decoder(decoder_config)
optimizer = tf.keras.optimizers.Adam()
loss_object = tf.keras.losses.SparseCategoricalCrossentropy(
from_logits=True, reduction='none')
def _loss_function(real, pred, sos_symbol=1):
"""Compute the loss given prediction and ground truth."""
mask = tf.math.logical_not(tf.math.equal(real, sos_symbol))
loss_ = loss_object(real, pred)
mask = tf.cast(mask, dtype=loss_.dtype)
loss_ *= mask
return tf.reduce_mean(loss_)
@tf.function
def _train_step(input_tensor, target):
"""Traing on a batch of data."""
loss = 0
# initializing the hidden state for each batch
# because the captions are not related from image to image
hidden = decoder.reset_state(batch_size=target.shape[0])
dec_input = tf.expand_dims([1] * target.shape[0], 1)
with tf.GradientTape() as tape:
features = encoder(input_tensor, training=True)
for i in range(1, target.shape[1]):
# passing the features through the decoder
predictions, hidden, _ = decoder(
dec_input, features, hidden, training=True)
loss += _loss_function(target[:, i], predictions)
# using teacher forcing
dec_input = tf.expand_dims(target[:, i], 1)
total_loss = (loss / int(target.shape[1]))
trainable_variables = encoder.trainable_variables + decoder.trainable_variables
gradients = tape.gradient(loss, trainable_variables)
optimizer.apply_gradients(zip(gradients, trainable_variables))
return loss, total_loss
@tf.function
def evaluate_batch(input_tensor, target):
"""Evaluate loss on a batch of data."""
loss = 0
# initializing the hidden state for each batch
# because the captions are not related from image to image
hidden = decoder.reset_state(batch_size=target.shape[0])
dec_input = tf.expand_dims([1] * target.shape[0], 1)
features = encoder(input_tensor, training=False)
for i in range(1, target.shape[1]):
# passing the features through the decoder
predictions, hidden, _ = decoder(
dec_input, features, hidden, training=False)
loss += _loss_function(target[:, i], predictions)
# using teacher forcing
dec_input = tf.expand_dims(target[:, i], 1)
total_loss = (loss / int(target.shape[1]))
return total_loss
##############################################################################
############################# Training Loop ##################################
##############################################################################
print('Start training...\n')
start_epoch = 0
if FLAGS.save_dir:
checkpoint_path = FLAGS.save_dir
ckpt = tf.train.Checkpoint(
encoder=encoder, decoder=decoder, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(
ckpt, checkpoint_path, max_to_keep=5)
if ckpt_manager.latest_checkpoint:
start_epoch = int(ckpt_manager.latest_checkpoint.split('-')[-1])
epochs = 400
step_per_epoch = int(len(captions) / batch_size)
previous_best = 100.
for epoch in range(start_epoch, epochs):
start = time.time()
total_loss = 0
for batch in range(step_per_epoch):
batch_idx = np.random.choice(train_idx, size=batch_size)
input_tensor = state_transitions[obs_idx[batch_idx], :]
input_tensor = encoder.preprocess(input_tensor)
target = encoded_captions[caption_idx[batch_idx]]
batch_loss, t_loss = _train_step(input_tensor, target)
total_loss += t_loss
if batch % 100 == 0:
print('Epoch {} Batch {} Loss {:.4f}'.format(
epoch + 1, batch,
batch_loss.numpy() / int(target.shape[1])))
if epoch % 5 == 0 and FLAGS.save_dir:
test_total_loss = 0
for batch in range(10):
batch_idx = np.arange(batch_size) + batch * batch_size
idx = test_idx[batch_idx]
input_tensor = state_transitions[obs_idx[idx], :]
target = encoded_captions[caption_idx[idx]]
input_tensor = input_tensor[:, 0] - input_tensor[:, 1]
t_loss = evaluate_batch(input_tensor, target)
test_total_loss += t_loss
test_total_loss /= 10.
if test_total_loss < previous_best:
previous_best = test_total_loss
ckpt_manager.save(checkpoint_number=epoch)
print('Epoch {} | Loss {:.6f} | Val loss {:.6f}'.format(
epoch + 1, total_loss / step_per_epoch, previous_best))
print('Time taken for 1 epoch {:.6f} sec\n'.format(time.time() - start))
if epoch % 20 == 0:
total_loss = 0
for batch in range(len(test_idx) // batch_size):
batch_idx = np.arange(batch_size) + batch * batch_size
idx = test_idx[batch_idx]
input_tensor = state_transitions[obs_idx[idx], :]
target = encoded_captions[caption_idx[idx]]
input_tensor = input_tensor[:, 0] - input_tensor[:, 1]
t_loss = evaluate_batch(input_tensor, target)
total_loss += t_loss
print('====================================================')
print('Test Loss {:.6f}'.format(total_loss /
(len(test_idx) // batch_size)))
print('====================================================\n')
if __name__ == '__main__':
app.run(main)
| apache-2.0 | 1,221,338,990,994,775,300 | 35.023529 | 83 | 0.60135 | false |
mattrobenolt/invoke | integration/main.py | 1 | 4239 | import os
import sys
from spec import Spec, trap, eq_, skip, ok_
from invoke import run
from invoke._version import __version__
from invoke.platform import WINDOWS
def _output_eq(cmd, expected):
return eq_(run(cmd, hide=True).stdout, expected)
class Main(Spec):
def setup(self):
# Enter integration/ so Invoke loads its local tasks.py
os.chdir(os.path.dirname(__file__))
@trap
def basic_invocation(self):
_output_eq("invoke print_foo", "foo\n")
@trap
def version_output(self):
_output_eq("invoke --version", "Invoke {0}\n".format(__version__))
@trap
def help_output(self):
ok_("Usage: inv[oke] " in run("invoke --help").stdout)
@trap
def shorthand_binary_name(self):
_output_eq("inv print_foo", "foo\n")
@trap
def explicit_task_module(self):
_output_eq("inv --collection _explicit foo", "Yup\n")
@trap
def invocation_with_args(self):
_output_eq(
"inv print_name --name whatevs",
"whatevs\n"
)
@trap
def bad_collection_exits_nonzero(self):
result = run("inv -c nope -l", warn=True)
eq_(result.exited, 1)
assert not result.stdout
assert result.stderr
def loads_real_user_config(self):
path = os.path.expanduser("~/.invoke.yaml")
try:
with open(path, 'w') as fd:
fd.write("foo: bar")
_output_eq("inv print_config", "bar\n")
finally:
try:
os.unlink(path)
except OSError:
pass
def complex_nesting_under_ptys_doesnt_break(self):
if WINDOWS: # Not sure how to make this work on Windows
return
# GH issue 191
substr = " hello\t\t\nworld with spaces"
cmd = """ eval 'echo "{0}" ' """.format(substr)
expected = ' hello\t\t\r\nworld with spaces\r\n'
eq_(run(cmd, pty=True, hide='both').stdout, expected)
def KeyboardInterrupt_on_stdin_doesnt_flake(self):
# E.g. inv test => Ctrl-C halfway => shouldn't get buffer API errors
skip()
class funky_characters_in_stdout:
def basic_nonstandard_characters(self):
os.chdir('_support')
# Crummy "doesn't explode with decode errors" test
if WINDOWS:
cmd = "type tree.out"
else:
cmd = "cat tree.out"
run(cmd, hide='both')
def nonprinting_bytes(self):
# Seriously non-printing characters (i.e. non UTF8) also don't
# asplode
run("echo '\xff'", hide='both')
def nonprinting_bytes_pty(self):
if WINDOWS:
return
# PTY use adds another utf-8 decode spot which can also fail.
run("echo '\xff'", pty=True, hide='both')
def pty_puts_both_streams_in_stdout(self):
if WINDOWS:
return
os.chdir('_support')
err_echo = "{0} err.py".format(sys.executable)
command = "echo foo && {0} bar".format(err_echo)
r = run(command, hide='both', pty=True)
eq_(r.stdout, 'foo\r\nbar\r\n')
eq_(r.stderr, '')
def simple_command_with_pty(self):
"""
Run command under PTY
"""
# Most Unix systems should have stty, which asplodes when not run under
# a pty, and prints useful info otherwise
result = run('stty -a', hide=True, pty=True)
# PTYs use \r\n, not \n, line separation
ok_("\r\n" in result.stdout)
eq_(result.pty, True)
def pty_size_is_realistic(self):
# When we don't explicitly set pty size, 'stty size' sees it as 0x0.
# When we do set it, it should be some non 0x0, non 80x24 (the default)
# value. (yes, this means it fails if you really do have an 80x24
# terminal. but who does that?)
size = run('stty size', hide=True, pty=True).stdout.strip()
assert size != ""
assert size != "0 0"
# Apparently true-headless execution like Travis does that!
if os.environ.get('TRAVIS', False):
assert size == "24 80"
else:
assert size != "24 80"
| bsd-2-clause | 6,658,644,813,651,164,000 | 30.87218 | 79 | 0.557915 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/operations/_express_route_cross_connections_operations.py | 1 | 43844 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCrossConnectionsOperations(object):
"""ExpressRouteCrossConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRouteCrossConnectionListResult"]
"""Retrieves all the ExpressRouteCrossConnections in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCrossConnectionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCrossConnectionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCrossConnections'} # type: ignore
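    # Illustrative call pattern (a hedged sketch, not generated code):
    #   client = NetworkManagementClient(credential, subscription_id)
    #   for cc in client.express_route_cross_connections.list():
    #       print(cc.name)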
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRouteCrossConnectionListResult"]
"""Retrieves all the ExpressRouteCrossConnections in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCrossConnectionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCrossConnectionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections'} # type: ignore
def get(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteCrossConnection"
"""Gets details about the specified ExpressRouteCrossConnection.
:param resource_group_name: The name of the resource group (peering location of the circuit).
:type resource_group_name: str
:param cross_connection_name: The name of the ExpressRouteCrossConnection (service key of the
circuit).
:type cross_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCrossConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'} # type: ignore
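    # Hedged usage sketch (client construction as sketched above): `get` is a
    # plain synchronous call that returns a deserialized model instance.
    #
    #   cc = client.express_route_cross_connections.get(
    #       "my-resource-group", "my-cross-connection")
    #   print(cc.service_provider_provisioning_state)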
def _create_or_update_initial(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
parameters, # type: "_models.ExpressRouteCrossConnection"
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteCrossConnection"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExpressRouteCrossConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
parameters, # type: "_models.ExpressRouteCrossConnection"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteCrossConnection"]
"""Update the specified ExpressRouteCrossConnection.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cross_connection_name: The name of the ExpressRouteCrossConnection.
:type cross_connection_name: str
:param parameters: Parameters supplied to the update express route crossConnection operation.
:type parameters: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCrossConnection or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
cross_connection_name=cross_connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'} # type: ignore
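    # Hedged usage sketch: `begin_*` methods return an LROPoller, so callers
    # typically block on `.result()` to obtain the final resource.
    #
    #   poller = client.express_route_cross_connections.begin_create_or_update(
    #       "my-resource-group", "my-cross-connection", parameters)
    #   cc = poller.result()  # waits for the long-running operation to finish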
def update_tags(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
cross_connection_parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteCrossConnection"
"""Updates an express route cross connection tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cross_connection_name: The name of the cross connection.
:type cross_connection_name: str
:param cross_connection_parameters: Parameters supplied to update express route cross
connection tags.
:type cross_connection_parameters: ~azure.mgmt.network.v2020_08_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCrossConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(cross_connection_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'} # type: ignore
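    # Hedged usage sketch: tags are patched through a TagsObject body.
    #
    #   from azure.mgmt.network.v2020_08_01 import models
    #
    #   updated = client.express_route_cross_connections.update_tags(
    #       "my-resource-group", "my-cross-connection",
    #       models.TagsObject(tags={"env": "prod"}))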
def _list_arp_table_initial(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._list_arp_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
def begin_list_arp_table(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteCircuitsArpTableListResult"]
"""Gets the currently advertised ARP table associated with the express route cross connection in a
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cross_connection_name: The name of the ExpressRouteCrossConnection.
:type cross_connection_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCircuitsArpTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_arp_table_initial(
resource_group_name=resource_group_name,
cross_connection_name=cross_connection_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
def _list_routes_table_summary_initial(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_summary_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCrossConnectionsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
def begin_list_routes_table_summary(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
"""Gets the route table summary associated with the express route cross connection in a resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cross_connection_name: The name of the ExpressRouteCrossConnection.
:type cross_connection_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCrossConnectionsRoutesTableSummaryListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
cross_connection_name=cross_connection_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCrossConnectionsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
def _list_routes_table_initial(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
def begin_list_routes_table(
self,
resource_group_name, # type: str
cross_connection_name, # type: str
peering_name, # type: str
device_path, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]
"""Gets the currently advertised routes table associated with the express route cross connection
in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param cross_connection_name: The name of the ExpressRouteCrossConnection.
:type cross_connection_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCircuitsRoutesTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_routes_table_initial(
resource_group_name=resource_group_name,
cross_connection_name=cross_connection_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
| mit | -2,148,993,289,227,831,800 | 51.257449 | 277 | 0.653043 | false |
jolyonb/edx-platform | common/djangoapps/microsite_configuration/migrations/0001_initial.py | 1 | 5579 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.db.models.deletion
import django.utils.timezone
import jsonfield.fields
import model_utils.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='HistoricalMicrositeOrganizationMapping',
fields=[
('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
('organization', models.CharField(max_length=63, db_index=True)),
('history_id', models.AutoField(serialize=False, primary_key=True)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
('history_user', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical microsite organization mapping',
},
),
migrations.CreateModel(
name='HistoricalMicrositeTemplate',
fields=[
('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
('template_uri', models.CharField(max_length=255, db_index=True)),
('template', models.TextField()),
('history_id', models.AutoField(serialize=False, primary_key=True)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
('history_user', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical microsite template',
},
),
migrations.CreateModel(
name='Microsite',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(unique=True, max_length=63, db_index=True)),
('values', jsonfield.fields.JSONField(blank=True)),
('site', models.OneToOneField(related_name='microsite', to='sites.Site', on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='MicrositeHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
('key', models.CharField(unique=True, max_length=63, db_index=True)),
('values', jsonfield.fields.JSONField(blank=True)),
('site', models.OneToOneField(related_name='microsite_history', to='sites.Site', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Microsite histories',
},
),
migrations.CreateModel(
name='MicrositeOrganizationMapping',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('organization', models.CharField(unique=True, max_length=63, db_index=True)),
('microsite', models.ForeignKey(to='microsite_configuration.Microsite', on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='MicrositeTemplate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('template_uri', models.CharField(max_length=255, db_index=True)),
('template', models.TextField()),
('microsite', models.ForeignKey(to='microsite_configuration.Microsite', on_delete=models.CASCADE)),
],
),
migrations.AddField(
model_name='historicalmicrositetemplate',
name='microsite',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, db_constraint=False, blank=True, to='microsite_configuration.Microsite', null=True),
),
migrations.AddField(
model_name='historicalmicrositeorganizationmapping',
name='microsite',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, db_constraint=False, blank=True, to='microsite_configuration.Microsite', null=True),
),
migrations.AlterUniqueTogether(
name='micrositetemplate',
unique_together=set([('microsite', 'template_uri')]),
),
]
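
# Usage note (the surrounding project wiring is an assumption): generated
# migrations like this one are applied with Django's standard management
# command, e.g.
#
#   python manage.py migrate microsite_configuration 0001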
| agpl-3.0 | -6,011,930,392,601,583,000 | 51.140187 | 186 | 0.589353 | false |
sinhrks/chainer | tests/chainer_tests/functions_tests/activation_tests/test_softmax.py | 1 | 3585 | import unittest
import numpy
import six
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
class TestSoftmax(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_forward(self, x_data, use_cudnn=True):
x = chainer.Variable(x_data)
y = functions.softmax(x, use_cudnn)
self.assertEqual(y.data.dtype, numpy.float32)
y_expect = numpy.empty_like(self.x)
for i in six.moves.range(y_expect.shape[0]):
x = self.x[i]
            log_z = numpy.logaddexp.reduce(x)  # log-sum-exp over the row, for numerical stability
x -= log_z
y_expect[i] = numpy.exp(x)
gradient_check.assert_allclose(y_expect, y.data)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.cudnn
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x))
@attr.gpu
@condition.retry(3)
def test_forward_gpu_no_cudnn(self):
self.check_forward(cuda.to_gpu(self.x), False)
def check_backward(self, x_data, gy_data, use_cudnn=True):
gradient_check.check_backward(
functions.Softmax(use_cudnn), x_data, gy_data, eps=1e-2)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.cudnn
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
@attr.gpu
@condition.retry(3)
def test_backward_gpu_no_cudnn(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy), False)
class TestSoftmaxUnstable(TestSoftmax):
def setUp(self):
self.x = numpy.array([[-1000, 1]], dtype=numpy.float32)
self.gy = numpy.random.uniform(-1, 1, (1, 2)).astype(numpy.float32)
class TestReplicatedSoftmax1(TestSoftmax):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
def check_forward(self, x_data, use_cudnn=True):
x = chainer.Variable(x_data)
y = functions.softmax(x, use_cudnn)
self.assertEqual(y.data.dtype, numpy.float32)
y_expect = numpy.exp(self.x)
for i in six.moves.range(y_expect.shape[0]):
for k in six.moves.range(y_expect.shape[2]):
y_expect[i, :, k] /= y_expect[i, :, k].sum()
gradient_check.assert_allclose(y_expect, y.data)
class TestReplicatedSoftmax2(TestSoftmax):
def setUp(self):
self.x = numpy.random.uniform(
-1, 1, (2, 3, 4, 5)).astype(numpy.float32)
self.gy = numpy.random.uniform(
-1, 1, (2, 3, 4, 5)).astype(numpy.float32)
def check_forward(self, x_data, use_cudnn=True):
x = chainer.Variable(x_data)
y = functions.softmax(x, use_cudnn)
self.assertEqual(y.data.dtype, numpy.float32)
y_expect = numpy.exp(self.x)
for i in six.moves.range(y_expect.shape[0]):
for k in six.moves.range(y_expect.shape[2]):
for l in six.moves.range(y_expect.shape[3]):
y_expect[i, :, k, l] /= y_expect[i, :, k, l].sum()
gradient_check.assert_allclose(y_expect, y.data)
testing.run_module(__name__, __file__)
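
# Usage note: `testing.run_module` makes this file directly executable, so a
# single module's tests can be run without invoking the whole suite:
#
#   python test_softmax.py
#
# The GPU/cuDNN cases above are gated by the `attr.gpu` / `attr.cudnn` markers.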
| mit | -5,965,677,899,038,104,000 | 30.173913 | 78 | 0.617852 | false |
mbohlool/client-python | kubernetes/client/apis/apis_api.py | 1 | 4259 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class ApisApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_api_versions(self, **kwargs):
"""
get available API versions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_versions(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroupList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_api_versions_with_http_info(**kwargs)
else:
(data) = self.get_api_versions_with_http_info(**kwargs)
return data
def get_api_versions_with_http_info(self, **kwargs):
"""
get available API versions
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_versions_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroupList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_versions" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIGroupList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
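
# Minimal usage sketch, not part of the generated client (the config helper
# comes from the sibling `kubernetes.config` package):
#
#   from kubernetes import client, config
#
#   config.load_kube_config()  # or config.load_incluster_config()
#   groups = client.ApisApi().get_api_versions()
#   for group in groups.groups:
#       print(group.name, group.preferred_version.group_version)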
| apache-2.0 | -4,531,586,328,895,382,000 | 33.346774 | 119 | 0.540268 | false |
raphaelvalentin/Utils | spectre/syntax/analyse.py | 1 | 10005 | import os
from spectre.syntax import Instance, common
from string import *
import numpy as np
__all__ = ['Noise', 'Xf', 'Ac', 'Dc', 'Sweep', 'MonteCarlo', 'Sp', 'Transient', 'Pss' ]
class values(list):
def __init__(self, it):
if isinstance(it, (list, np.ndarray)):
list.__init__(self, it)
else:
list.__init__(self, [it])
def __str__(self):
l = []
for v in self:
if type(v) == str:
l.append(v)
else:
l.append("%g"%v)
return "[{it}]".format(it=" ".join(l))
class Noise(Instance):
__type__ = "analyse"
__name__ = "noise"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = {'oprobe':'', 'iprobe':'', 'annotate':'status', 'oppoint':'raw_file' }
def __init__(self, name='noise1', **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Xf(Instance):
__type__ = "analyse"
__name__ = "xf"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = {'annotate':'status', 'oppoint':'raw_file' }
def __init__(self, name='xf1', **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Ac(Instance):
__type__ = "analyse"
__name__ = "ac"
__pattern__ = "{name} %s {**parameters}" % __name__
def __init__(self, name='ac1', **parameters):
self['name'] = name
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Dc(Instance):
__type__ = "analyse"
__name__ = "dc"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = { 'oppoint':'rawfile', 'maxiters':150, 'maxsteps':10000, 'annotate':'status' }
def __init__(self, name='dc1', **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Sweep(Instance):
__type__ = "analyse"
__name__ = "sweep"
__pattern__ = "{name} %s {**parameters} {child}" % __name__
__default__ = { 'oppoint':'rawfile' }
def __init__(self, name='swp1', child=None, **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
self['child'] = child
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
child = ''
if self['child']:
child = "{\n" + str(self['child']) + "\n}"
return self.__pattern__.format(**{'name': self['name'],
'child': child,
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name', 'child')])
})
def getNumberOfPoints(self):
if 'values' in self:
return len(self['values'])
elif 'stop' in self:
if 'lin' in self:
return self['lin']+1
elif 'log' in self:
return self['log']
elif 'step' in self:
if 'start' in self:
return (self['stop']-self['start'])/self['step']+1
else:
return self['stop']/self['step']+1
else:
return 50
else:
return 1
def getRawFiles(self):
raw_files = []
if self['child']:
for i in xrange(int(self.getNumberOfPoints())):
raw_files.append("{name}-{number}_{childname}".format(name=self['name'], number=zfill(i,3), childname=self['child'].getRawFiles()[0]))
else:
            raw_files.append('{name}.{extension}'.format(name=self['name'], extension=self.__name__))
return raw_files
class MonteCarlo(Instance):
__type__ = "analyse"
__name__ = "montecarlo"
__pattern__ = "{name} %s {**parameters} {child}" % __name__
__default__ = { 'variations':'all', 'numruns':3, 'savefamilyplots':'yes', 'saveprocessparams':'yes', 'processscalarfile':'\"process.dat\"' }
def __init__(self, name='mc1', child=None, **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
self['child'] = child
def __str__(self):
child = ''
if self['child']:
child = "{\n" + str(self['child']) + "\n}"
return self.__pattern__.format(**{'name': self['name'],
'child': child,
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name','child')])
})
def getRawFiles(self):
raw_files = []
if self['child']:
for i in xrange(self.getNumberOfPoints()):
raw_files.append("{name}-{number}_{childname}".format(name=self['name'], number=zfill(i+1,3), childname=self['child'].getRawFiles()[0]))
else:
            raw_files.append('{name}.{extension}'.format(name=self['name'], extension='mc'))
return raw_files
def getNumberOfPoints(self):
return self['numruns']
class Sp(Instance):
__type__ = "analyse"
__name__ = "sp"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = {'annotate':'status', 'paramtype':'yz',
'oppoint':'screen', 'datatype':'realimag'}
def __init__(self, name='sp1', **parameters):
self['name'] = name
if isinstance(parameters['ports'], list):
parameters['ports'] = "[" + " ".join(parameters['ports']) + "]"
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
parameters = ["%s=%s" % (k, v) \
for k, v in self.iteritems() \
if k not in ('name', 'donoise')]
if self.get('donoise',False):
parameters.append('donoise=yes')
return self.__pattern__.format(**{'name': self['name'],
'**parameters': " ".join(parameters),
})
def getRawFiles(self):
if 'donoise' in self:
if self['donoise'] in ('True', True):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__),
'{name}.noise.{extension}'.format(name=self['name'], extension=self.__name__)]
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Transient(Instance):
__type__ = "analyse"
__name__ = "tran"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = { 'errpreset':'conservative', 'write':'spectre.ic', 'writefinal':'spectre.fc', 'annotate':'status', 'maxiters':5 }
def __init__(self, name='tran1', **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Pss(Instance):
__type__ = "analyse"
__name__ = "pss"
__pattern__ = "{name} %s {**parameters}" % __name__
__default__ = { 'errpreset':'conservative', 'annotate':'status'}
def __init__(self, name='pss1', **parameters):
self['name'] = name
self.update(self.__default__)
self.update(parameters)
def __str__(self):
if 'values' in self:
self['values'] = values(self['values'])
return self.__pattern__.format(**{'name': self['name'],
'**parameters':" ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name',)])
})
def getRawFiles(self):
return ['{name}.fd.{extension}'.format(name=self['name'], extension=self.__name__)]
"""
liberal 1e-3 sigglobal traponly 3.5 0.001 period/50
moderate 1e-3 alllocal gear2only 3.5 0.001 period/200
conservative 1e-4 alllocal gear2only * 0.01 period/200
"""
| gpl-2.0 | 4,857,034,511,372,369,000 | 39.836735 | 152 | 0.50015 | false |
google-research/ssl_detection | third_party/tensorpack/tensorpack/input_source/input_source.py | 1 | 25606 | # -*- coding: utf-8 -*-
# File: input_source.py
import threading
from contextlib import contextmanager
from itertools import chain
import tensorflow as tf
from ..compat import tfv1
from ..callbacks.base import Callback, CallbackFactory
from ..callbacks.graph import RunOp
from ..dataflow import DataFlow, MapData, RepeatedData, DataFlowTerminated
from ..tfutils.common import get_op_tensor_name
from ..tfutils.dependency import dependency_of_fetches
from ..tfutils.summary import add_moving_summary
from ..tfutils.tower import get_current_tower_context
from ..utils import logger
from ..utils.concurrency import ShareSessionThread
from .input_source_base import InputSource, build_or_reuse_placeholder
try:
from tensorflow.python.ops.data_flow_ops import StagingArea
except ImportError:
pass
__all__ = ['PlaceholderInput', 'FeedInput', 'FeedfreeInput',
'QueueInput', 'BatchQueueInput',
'DummyConstantInput', 'TensorInput',
'ZMQInput', 'TFDatasetInput',
'StagingInput']
def _get_reset_callback(df):
return CallbackFactory(setup_graph=lambda _: df.reset_state())
def _make_feeds(placeholders, datapoint):
assert len(datapoint) == len(placeholders), \
"Size of datapoint and placeholders are different: {} != {}".format(
len(datapoint), len(placeholders))
if isinstance(datapoint, (list, tuple)):
return dict(zip(placeholders, datapoint))
elif isinstance(datapoint, dict):
ret = {p: datapoint[p.op.name] for p in placeholders}
return ret
else:
raise TypeError("Got a datapoint of type {}!".format(type(datapoint)))
class PlaceholderInput(InputSource):
"""
Just produce placeholders as input tensors.
"""
def __init__(self):
pass
def _setup(self, inputs):
self._all_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
def _get_input_tensors(self):
return self._all_placehdrs
class FeedInput(InputSource):
"""
    Input that iterates over a DataFlow and feeds datapoints.
Note:
If `get_input_tensors()` is called more than one time, it will return the same placeholders (i.e. feed points)
as the first time.
Therefore you can't use it for data-parallel training.
"""
class _FeedCallback(Callback):
def __init__(self, ds, placeholders):
self._ds = ds
self._itr = self._ds.__iter__()
self._placeholders = placeholders
def _before_run(self, _):
dp = next(self._itr)
assert len(dp) == len(self._placeholders), "[FeedInput] datapoints and inputs are of different length!"
feed = _make_feeds(self._placeholders, dp)
return tfv1.train.SessionRunArgs(fetches=[], feed_dict=feed)
def _reset(self):
self._itr = self._ds.__iter__()
def __init__(self, ds, infinite=True):
"""
Args:
ds (DataFlow): the input DataFlow.
infinite (bool): When set to False, will raise StopIteration when
ds is exhausted.
"""
if not isinstance(ds, DataFlow):
raise ValueError("FeedInput takes a DataFlow! Got {}".format(ds))
self.ds = ds
if infinite:
self._iter_ds = RepeatedData(self.ds, -1)
else:
self._iter_ds = self.ds
def _size(self):
return len(self.ds)
def _setup(self, inputs):
# placeholders as input are always safe to reuse.
self._all_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
self._cb = self._FeedCallback(self._iter_ds, self._all_placehdrs)
def _get_input_tensors(self):
return self._all_placehdrs
def _reset_state(self):
self._cb._reset()
def _get_callbacks(self):
return [self._cb, _get_reset_callback(self._iter_ds)]
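
# Hedged sketch: FeedInput trades throughput for simplicity -- each step feeds
# one datapoint through placeholders, which suits inference or debugging
# (MyDataFlow below is a placeholder name):
#
#   data = FeedInput(MyDataFlow(), infinite=False)  # raises StopIteration when exhausted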
class FeedfreeInput(InputSource):
""" Abstract base for input without feed,
e.g. by queue or other operations. """
def _reset_state(self):
pass
# TODO enqueue_many? https://github.com/tensorflow/tensorflow/issues/7817#issuecomment-282053155
class EnqueueThread(ShareSessionThread):
def __init__(self, queue, ds, placehdrs):
super(EnqueueThread, self).__init__()
self.name = 'EnqueueThread ' + queue.name
self.daemon = True
self.dataflow = ds
self.queue = queue
self.placehdrs = placehdrs
self.op = self.queue.enqueue(self.placehdrs)
self.close_op = self.queue.close(cancel_pending_enqueues=True)
self._running = threading.Event()
self._running.set()
# self._size = queue.size()
def run(self):
with self.default_sess():
try:
self.reinitialize_dataflow()
while True:
# pausable loop
if not self._running.is_set():
self._running.wait()
dp = next(self._itr)
feed = _make_feeds(self.placehdrs, dp)
# _, sz = sess.run([self.op, self._sz], feed_dict=feed)
self.op.run(feed_dict=feed)
except (tf.errors.CancelledError, tf.errors.OutOfRangeError):
pass
except DataFlowTerminated:
logger.info("[EnqueueThread] DataFlow has terminated.")
except Exception as e:
if isinstance(e, RuntimeError) and 'closed Session' in str(e):
pass
else:
logger.exception("[EnqueueThread] Exception in thread {}:".format(self.name))
finally:
try:
self.close_op.run()
except Exception:
pass
logger.info("[EnqueueThread] Thread {} Exited.".format(self.name))
def reinitialize_dataflow(self):
self._itr = self.dataflow.__iter__()
def pause(self):
self._running.clear()
def resume(self):
self._running.set()
class QueueInput(FeedfreeInput):
""" Enqueue datapoints from a DataFlow to a TF queue.
    The model then receives the dequeued tensors.
"""
def __init__(self, ds, queue=None):
"""
Args:
ds(DataFlow): the input DataFlow.
queue (tf.QueueBase): A :class:`tf.QueueBase` whose type
should match the corresponding input signature of the model.
Defaults to a FIFO queue of size 50.
"""
if not isinstance(ds, DataFlow):
raise ValueError("QueueInput takes a DataFlow! Got {}".format(ds))
self.queue = queue
self.ds = ds
self._inf_ds = RepeatedData(ds, -1)
self._started = False
def _size(self):
return len(self.ds)
def _setup(self, inputs):
self._input_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
assert len(self._input_placehdrs) > 0, \
"QueueInput has to be used with some inputs!"
with self.cached_name_scope():
if self.queue is None:
self.queue = tfv1.FIFOQueue(
50, [x.dtype for x in self._input_placehdrs],
name='input_queue')
logger.info("Setting up the queue '{}' for CPU prefetching ...".format(self.queue.name))
self.thread = EnqueueThread(self.queue, self._inf_ds, self._input_placehdrs)
self._dequeue_op = self.queue.dequeue(name='dequeue_for_reset')
def refill_queue(self):
"""
        Clear the queue, then call dataflow.__iter__() again and refill the queue.
"""
self.thread.pause() # pause enqueue
opt = tfv1.RunOptions()
opt.timeout_in_ms = 2000 # 2s
sess = tfv1.get_default_session()
# dequeue until empty
try:
while True:
sess.run(self._dequeue_op, options=opt)
except tf.errors.DeadlineExceededError:
pass
# reset dataflow, start thread
self.thread.reinitialize_dataflow()
self.thread.resume()
def _create_ema_callback(self):
"""
        Create a hook-only callback which maintains an EMA of the queue size.
        The EMA is also written out as a scalar summary.
"""
with self.cached_name_scope():
# in TF there is no API to get queue capacity, so we can only summary the size
size = tf.cast(self.queue.size(), tf.float32, name='queue_size')
size_ema_op = add_moving_summary(size, collection=None, decay=0.5)[0].op
ret = RunOp(
lambda: size_ema_op,
run_before=False,
run_as_trigger=False,
run_step=True)
ret.name_scope = "InputSource/EMA"
return ret
def _get_callbacks(self):
from ..callbacks.concurrency import StartProcOrThread
cb = StartProcOrThread(self.thread)
return [cb, self._create_ema_callback(), _get_reset_callback(self._inf_ds)]
def _get_input_tensors(self):
with tf.device('/cpu:0'), self.cached_name_scope():
ret = self.queue.dequeue(name='input_deque')
if isinstance(ret, tf.Tensor): # only one input
ret = [ret]
assert len(ret) == len(self._input_placehdrs)
for qv, v in zip(ret, self._input_placehdrs):
qv.set_shape(v.get_shape())
return ret
class BatchQueueInput(QueueInput):
""" Enqueue datapoints from a DataFlow to a TF queue.
And the model receives batches formed by concatenating
dequeued tensors.
"""
def __init__(self, ds, batch_size, queue=None):
"""
Args:
ds(DataFlow): the input DataFlow.
batch_size(int): the batch size.
queue (tf.QueueBase): A :class:`tf.QueueBase` whose type
should match the corresponding input signature of the model.
Defaults to a FIFO queue of size 3000.
"""
super(BatchQueueInput, self).__init__(ds, queue)
self.batch_size = int(batch_size)
def _size(self):
return len(self.ds) // self.batch_size
def _setup(self, inputs):
logger.info("Setting up the queue for CPU prefetching ...")
self.input_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
assert len(self.input_placehdrs) > 0, \
"BatchQueueInput has to be used with some input signature!"
# prepare placeholders without the first dimension
placehdrs_nobatch = []
for p in self.input_placehdrs:
placehdrs_nobatch.append(tfv1.placeholder(
dtype=p.dtype, shape=p.get_shape().as_list()[1:],
name=get_op_tensor_name(p.name)[0] + '-nobatch'))
# dequeue_many requires fully-defined shapes
shape_err = "Use of BatchQueueInput requires inputs to have fully-defined "
"shapes except for the batch dimension"
shapes = []
for p in placehdrs_nobatch:
assert p.get_shape().is_fully_defined(), shape_err
shapes.append(p.get_shape())
with self.cached_name_scope():
if self.queue is None:
self.queue = tf.FIFOQueue(
3000, [x.dtype for x in self.input_placehdrs],
shapes=shapes,
name='input_queue')
for shp in self.queue.shapes:
assert shp.is_fully_defined(), shape_err
self.thread = EnqueueThread(self.queue, self._inf_ds, placehdrs_nobatch)
def _get_input_tensors(self):
with tf.device('/cpu:0'), self.cached_name_scope():
ret = self.queue.dequeue_many(self.batch_size, name='input_deque')
if isinstance(ret, tf.Tensor): # only one input
ret = [ret]
assert len(ret) == len(self.input_placehdrs)
for qv, v in zip(ret, self.input_placehdrs):
shp = v.get_shape().as_list()
shp[0] = self.batch_size
qv.set_shape(shp)
return ret
# TODO tensor inputs can be drained? look at the new dataset API.
class TensorInput(FeedfreeInput):
""" Use inputs from a list of tensors, e.g. a TF data reading pipeline.
The PTB training example shows how to use it.
"""
def __init__(self, get_tensor_fn, size=None):
"""
Args:
get_tensor_fn ( -> [tf.Tensor]): a function which returns a list of input tensors
(for example, [image, label]) when called.
It will be called under a TowerContext and should return the inputs to be used in that tower.
                The returned tensors will be evaluated every iteration; it is your job to make sure that is possible.
size(int): size of this input. Use None to leave it undefined.
"""
if not callable(get_tensor_fn):
raise ValueError("get_tensor_fn has to be a function! Got {}".format(get_tensor_fn))
self.get_tensor_fn = get_tensor_fn
if size is not None:
size = int(size)
assert size > 0
self._fixed_size = size
def _setup(self, input_signature):
self._spec = input_signature
def _size(self):
if self._fixed_size is None:
raise NotImplementedError("size of TensorInput is undefined!")
return self._fixed_size
def _get_input_tensors(self):
with self.cached_name_scope():
ret = self.get_tensor_fn()
assert isinstance(ret, (list, tuple)), "get_tensor_fn needs to return a list!"
assert len(ret) == len(self._spec), \
"get_tensor_fn returns {} tensors but there are {} inputs".format(len(ret), len(self._spec))
return ret
class DummyConstantInput(TensorInput):
""" Input with a constant zero tensor placed on GPU.
Useful for debugging performance issues """
def __init__(self, shapes):
"""
Args:
shapes (list[list]): a list of fully-specified shapes.
"""
self.shapes = shapes
logger.warn("Using dummy input for debug!")
def fn():
tlist = []
ctx = get_current_tower_context()
assert ctx is not None
assert len(self.shapes) == len(self._spec)
for idx, p in enumerate(self._spec):
tlist.append(tf.constant(
0, dtype=p.dtype,
name='dummy-{}-{}'.format(p.name, ctx.index),
shape=self.shapes[idx]))
return tlist
super(DummyConstantInput, self).__init__(fn)
class ZMQInput(TensorInput):
"""
Receive tensors from a ZMQ endpoint, with ops from https://github.com/tensorpack/zmq_ops.
It works with :func:`dataflow.remote.send_dataflow_zmq(format='zmq_ops')`.
"""
def __init__(self, end_point, hwm, bind=True):
"""
Args:
end_point (str): the ZMQ endpoint
            hwm (int): the ZMQ high-water-mark
            bind (bool): whether to bind (True) or connect (False) the socket
"""
self._end_point = end_point
self._hwm = int(hwm)
self._bind = bind
def fn():
ret = self._zmq_pull_socket.pull()
assert len(ret) == len(self._spec)
for qv, v in zip(ret, self._spec):
qv.set_shape(v.shape)
return ret
super(ZMQInput, self).__init__(fn)
def _setup(self, input_signature):
super(ZMQInput, self)._setup(input_signature)
assert len(input_signature) > 0, \
"ZMQInput has to be used with input signature!"
import zmq_ops
self._zmq_pull_socket = zmq_ops.ZMQPullSocket(
self._end_point,
[x.dtype for x in input_signature],
hwm=self._hwm,
bind=self._bind)
class TFDatasetInput(FeedfreeInput):
"""
Use a :class:`tf.data.Dataset` instance as input.
Note:
1. In training, the given dataset or dataflow has to be infinite
(you can use :func:`repeat()`, or :class:`RepeatedData` ).
2. TensorFlow may keep the dataflow alive even if the dataset is no
longer used.
"""
def __init__(self, dataset):
"""
Args:
dataset (tf.data.Dataset or DataFlow):
"""
if isinstance(dataset, tf.data.Dataset):
self._dataset = dataset
self._dataflow = None
elif isinstance(dataset, DataFlow):
self._dataset = None
self._dataflow = dataset
else:
raise ValueError("TFDatasetInput takes a tf.data.Dataset or DataFlow! Got {}".format(dataset))
def _setup(self, input_signature):
self._spec = input_signature
if self._dataset is not None:
types = self._dataset.output_types
spec_types = tuple(k.dtype for k in input_signature)
assert len(types) == len(spec_types), \
"Dataset and input signature have different length! {} != {}".format(
len(types), len(spec_types))
assert types == spec_types, \
"Data types of dataset and input signature don't match! {} != {}".format(
str(types), str(spec_types))
shapes = self._dataset.output_shapes
spec_shapes = [k.shape for k in input_signature]
for idx, (s1, s2) in enumerate(zip(shapes, spec_shapes)):
s2 = tf.TensorShape(s2)
assert s2.is_compatible_with(s1), \
"Input signature '{}' has incompatible shape with dataset! {} vs {}".format(
input_signature[idx].name, s2, s1)
else:
self._dataset = TFDatasetInput.dataflow_to_dataset(self._dataflow, [x.dtype for x in input_signature])
self._iterator = self._dataset.make_initializable_iterator()
self._init_op = self._iterator.initializer
def _reset_state(self):
self._init_op.run()
def _get_input_tensors(self):
spec_shapes = [k.shape for k in self._spec]
ret = self._iterator.get_next()
assert len(ret) == len(spec_shapes), \
"Dataset returns {} tensors but there are {} inputs!".format(len(ret), len(spec_shapes))
for t, shp in zip(ret, spec_shapes):
t.set_shape(shp)
return ret
@staticmethod
def dataflow_to_dataset(df, types):
"""
Wrap a dataflow to tf.data.Dataset.
This function will also reset the dataflow.
If the dataflow itself is finite, the returned dataset is also finite.
Therefore, if used for training, you'll need to add `.repeat()` on the returned
dataset.
Args:
df (DataFlow): a dataflow which produces lists
types([tf.DType]): list of types
Returns:
(tf.data.Dataset)
Note:
TensorFlow may keep the dataflow alive even if the dataset is no
longer used.
"""
# TODO theoretically it can support dict
assert isinstance(df, DataFlow), df
assert isinstance(types, (list, tuple)), types
df = MapData(df, tuple)
df.reset_state()
ds = tf.data.Dataset.from_generator(
df.get_data, tuple(types))
return ds
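    # Example (a sketch, assuming `my_dataflow` yields [image, label] lists):
    #
    #   ds = TFDatasetInput.dataflow_to_dataset(my_dataflow,
    #                                           [tf.float32, tf.int32])
    #   ds = ds.repeat()  # needed for training if the dataflow is finite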
class StagingInput(FeedfreeInput):
"""
A wrapper around a feedfree input,
to prefetch the input in StagingArea (on GPUs).
It works by registering hooks to put & get tensors into the StagingArea.
If `get_input_tensors` gets called multiple times,
it requires that all outputs ever produced by this InputSource will be fetched together.
This means that in multi-GPU training, you should ensure that each call on `hooked_sess.run`
depends on either all input tensors on all GPUs, or no input tensors at all.
As a result you cannot use this InputSource for :class:`InferenceRunner`.
    Multiple StagingInputs cannot be used together.
"""
class StagingCallback(Callback):
"""
A callback registered by this input source, to make sure stage/unstage
is run at each step.
"""
def __init__(self, input, nr_stage):
self.nr_stage = nr_stage
self._input = input
self._initialized = False
def _setup_graph(self):
self.stage_op = self._input._get_stage_op()
unstage_ops = self._input._get_unstage_ops()
unstage_op = tf.group(*unstage_ops, name='unstage_all')
self._check_dependency_op = unstage_ops[0]
self.fetches = tfv1.train.SessionRunArgs(
fetches=[self.stage_op, unstage_op])
def _prefill(self, sess):
logger.info("Pre-filling StagingArea ...")
for _ in range(self.nr_stage):
self.stage_op.run(session=sess)
logger.info("{} element{} put into StagingArea on each tower.".format(
self.nr_stage, "s were" if self.nr_stage > 1 else " was"))
def _before_run(self, ctx):
            # This has to happen once, right before the first iteration.
            # Doing it in `before_train` may not work because QueueInput's own
            # setup also happens in before_train.
if not self._initialized:
self._initialized = True
self._prefill(ctx.session)
# Only step the stagingarea when the input is evaluated in this sess.run
fetches = ctx.original_args.fetches
if dependency_of_fetches(fetches, self._check_dependency_op):
                # note: this disables nesting of StagingInput
return self.fetches
def __init__(self, input, nr_stage=1, device=None):
"""
Args:
input (FeedfreeInput):
nr_stage (int): number of elements to prefetch into each StagingArea, at the beginning.
Since enqueue and dequeue are synchronized, prefetching 1 element should be sufficient.
device (str or None): if not None, place the StagingArea on a specific device. e.g., '/cpu:0'.
Otherwise, they are placed under where `get_inputs_tensors`
gets called, which could be unspecified in case of simple trainers.
"""
if not isinstance(input, FeedfreeInput):
raise ValueError("StagingInput takes a FeedfreeInput! Got {}".format(input))
if isinstance(input, StagingInput):
raise ValueError("StagingInput cannot be nested!")
self._input = input
self._nr_stage = nr_stage
self._areas = []
self._stage_ops = []
self._unstage_ops = []
self._device = device
def _setup(self, inputs):
self._input.setup(inputs)
with self.cached_name_scope():
pass # just to cache the correct ns to use
def _get_callbacks(self):
cbs = self._input.get_callbacks()
# this callback has to happen after others, so StagingInput can be stacked together
cbs.append(
StagingInput.StagingCallback(self, self._nr_stage))
return cbs
def _size(self):
return self._input.size()
@contextmanager
def _device_ctx(self):
if not self._device:
yield
else:
with tf.device(self._device):
yield
def _get_input_tensors(self):
inputs = self._input.get_input_tensors()
with self._device_ctx():
with self.cached_name_scope():
                # Putting variables into a StagingArea will cause trouble
dtypes = []
for idx in range(len(inputs)):
dtype = inputs[idx].dtype
if dtype.base_dtype != dtype: # is reference type
inputs[idx] = tf.identity(inputs[idx])
dtypes.append(dtype.base_dtype)
# TODO tensorflow/benchmarks use static shapes here,
# though it doesn't seem to help. We can use it when it's known.
# Setting capacity to 1 to potentially save some memory, because we should
# expect the consumers to run slower than the producer.
stage = StagingArea(dtypes, shapes=None, capacity=1)
# put & get automatically inherit the name scope from the area
self._stage_ops.append(stage.put(inputs))
self._areas.append(stage)
outputs = stage.get()
if isinstance(outputs, tf.Tensor): # when size=1, TF doesn't return a list
outputs = [outputs]
for vin, vout in zip(inputs, outputs):
vout.set_shape(vin.get_shape())
self._unstage_ops.append(outputs)
# self._size_ops.append(stage.size())
return outputs
def _get_stage_op(self):
with self.cached_name_scope():
return tf.group(*self._stage_ops)
def _get_unstage_ops(self):
with self.cached_name_scope():
all_outputs = list(chain.from_iterable(self._unstage_ops))
return all_outputs
# for debugging only
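    # Note: this relies on `self._size_ops`, which is only populated by the
    # commented-out `stage.size()` line in `_get_input_tensors`; re-enable
    # that line before wiring up this callback.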
def _create_ema_callback(self):
def create_ema_op():
with self.cached_name_scope():
avg_size = tf.truediv(tf.add_n(self._size_ops), len(self._size_ops), name='avg_stagingarea_size')
return add_moving_summary(avg_size, collection=None)[0].op
return RunOp(
create_ema_op,
run_before=False,
run_as_trigger=False,
run_step=True)
| apache-2.0 | 7,025,043,686,877,972,000 | 36.218023 | 118 | 0.582012 | false |
themattrix/bashup | bashup/test/test_compile/test_elements.py | 1 | 9454 | import textwrap
from ...compile import elements
from ... import parse
from ... import test
#
# Tests
#
def test_compile_fn_spec_to_bash_without_args():
expected = textwrap.dedent("""
#
# usage: hello [ARGS]
#
hello() {
""").strip()
actual = elements.compile_fn_spec_to_bash(fn_spec=parse.FnSpec(
name='hello',
args=()))
test.assert_eq(actual, expected)
def test_compile_fn_spec_to_bash_with_args():
expected = textwrap.dedent("""
#
# usage: enable_ramdisk --size=<SIZE> [--path=<PATH>] [ARGS]
#
enable_ramdisk() {
local size
local size__set=0
local path='/ramdisk'
local args=()
while (( $# )); do
if [[ "${1}" == --size=* ]]; then
size=${1#--size=}
size__set=1
elif [[ "${1}" == --path=* ]]; then
path=${1#--path=}
else
args+=("${1}")
fi
shift
done
if ! (( size__set )); then
echo "[ERROR] The --size parameter must be given."
return 1
fi
__enable_ramdisk "${size}" "${path}" "${args[@]}"
}
__enable_ramdisk() {
local size=${1}
local path=${2}
shift 2
""").lstrip()
actual = elements.compile_fn_spec_to_bash(fn_spec=parse.FnSpec(
name='enable_ramdisk',
args=(parse.FnArgSpec(name='size', value=None),
parse.FnArgSpec(name='path', value="'/ramdisk'"))))
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_single_fn_without_args():
expected = textwrap.dedent("""
#
# usage: hello [ARGS]
#
hello() {
echo "hello!"
}
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
@fn hello {
echo "hello!"
}
""").strip())
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_multiple_fns_without_args():
expected = textwrap.dedent("""
#!/bin/bash
set -e -o pipefail
#
# usage: hello [ARGS]
#
hello() { echo "hello!"; }
hello
#
# usage: world [ARGS]
#
world() {
echo "world!"
}
world
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
#!/bin/bash
set -e -o pipefail
@fn hello { echo "hello!"; }
hello
@fn world {
echo "world!"
}
world
""").strip())
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_multiple_fns_with_args():
expected = textwrap.dedent("""
#!/bin/bash
set -e -o pipefail
#
# usage: enable_ramdisk --size=<SIZE> [--path=<PATH>] [ARGS]
#
enable_ramdisk() {
local size
local size__set=0
local path='/ramdisk'
local args=()
while (( $# )); do
if [[ "${1}" == --size=* ]]; then
size=${1#--size=}
size__set=1
elif [[ "${1}" == --path=* ]]; then
path=${1#--path=}
else
args+=("${1}")
fi
shift
done
if ! (( size__set )); then
echo "[ERROR] The --size parameter must be given."
return 1
fi
__enable_ramdisk "${size}" "${path}" "${args[@]}"
}
__enable_ramdisk() {
local size=${1}
local path=${2}
shift 2
if ! grep "^tmpfs ${path}" /etc/fstab; then
echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
mkdir -p "${path}"
mount "${path}"
fi
}
#
# usage: ensure_root [ARGS]
#
ensure_root() {
if [ ${EUID} -ne 0 ]; then
echo "[ERROR] Script must be run as root."
return 1
fi
}
ensure_root
enable_ramdisk --size="4G"
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
#!/bin/bash
set -e -o pipefail
@fn enable_ramdisk size, path='/ramdisk' {
if ! grep "^tmpfs ${path}" /etc/fstab; then
echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
mkdir -p "${path}"
mount "${path}"
fi
}
@fn ensure_root {
if [ ${EUID} -ne 0 ]; then
echo "[ERROR] Script must be run as root."
return 1
fi
}
ensure_root
enable_ramdisk --size="4G"
""").strip())
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_custom_indents():
expected = textwrap.dedent("""
{
\t#
\t# usage: enable_ramdisk --size=<SIZE> [--path=<PATH>] [ARGS]
\t#
\tenable_ramdisk() {
\t local size
\t local size__set=0
\t local path='/ramdisk'
\t local args=()
\t while (( $# )); do
\t if [[ "${1}" == --size=* ]]; then
\t size=${1#--size=}
\t size__set=1
\t elif [[ "${1}" == --path=* ]]; then
\t path=${1#--path=}
\t else
\t args+=("${1}")
\t fi
\t shift
\t done
\t if ! (( size__set )); then
\t echo "[ERROR] The --size parameter must be given."
\t return 1
\t fi
\t __enable_ramdisk "${size}" "${path}" "${args[@]}"
\t}
\t__enable_ramdisk() {
\t local size=${1}
\t local path=${2}
\t shift 2
\t if ! grep "^tmpfs ${path}" /etc/fstab; then
\t echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
\t mkdir -p "${path}"
\t mount "${path}"
\t fi
\t}
}
{
#
# usage: ensure_root [ARGS]
#
ensure_root() {
\tif [ ${EUID} -ne 0 ]; then
\t\techo "[ERROR] Script must be run as root."
\t\treturn 1
\tfi
}
}
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
{
\t@fn enable_ramdisk size, path='/ramdisk' {
\t if ! grep "^tmpfs ${path}" /etc/fstab; then
\t echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
\t mkdir -p "${path}"
\t mount "${path}"
\t fi
\t}
}
{
@fn ensure_root {
\tif [ ${EUID} -ne 0 ]; then
\t\techo "[ERROR] Script must be run as root."
\t\treturn 1
\tfi
}
}
""").strip())
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_custom_indents_with_blank_lines():
expected = textwrap.dedent("""
{
\t#
\t# usage: enable_ramdisk --size=<SIZE> [--path=<PATH>] [ARGS]
\t#
\tenable_ramdisk() {
\t local size
\t local size__set=0
\t local path='/ramdisk'
\t local args=()
\t while (( $# )); do
\t if [[ "${1}" == --size=* ]]; then
\t size=${1#--size=}
\t size__set=1
\t elif [[ "${1}" == --path=* ]]; then
\t path=${1#--path=}
\t else
\t args+=("${1}")
\t fi
\t shift
\t done
\t if ! (( size__set )); then
\t echo "[ERROR] The --size parameter must be given."
\t return 1
\t fi
\t __enable_ramdisk "${size}" "${path}" "${args[@]}"
\t}
\t__enable_ramdisk() {
\t local size=${1}
\t local path=${2}
\t shift 2
\t if ! grep "^tmpfs ${path}" /etc/fstab; then
\t echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
\t mkdir -p "${path}"
\t mount "${path}"
\t fi
\t}
}
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
{
\t@fn enable_ramdisk size, path='/ramdisk' {
\t if ! grep "^tmpfs ${path}" /etc/fstab; then
\t echo "tmpfs ${path} tmpfs rw,size=${size} 0 0" >> /etc/fstab
\t mkdir -p "${path}"
\t mount "${path}"
\t fi
\t}
}
""").strip())
test.assert_eq(actual, expected)
def test_compile_fns_to_bash_nested_fns():
expected = textwrap.dedent("""
#
# usage: level_1 [ARGS]
#
level_1() {
#
# usage: level_2 [ARGS]
#
level_2() {
#
# usage: level_3 [ARGS]
#
level_3() {
:
}
}
}
""").strip()
actual = elements.compile_fns_to_bash(bashup_str=textwrap.dedent("""
@fn level_1 {
@fn level_2 {
@fn level_3 {
:
}
}
}
""").strip())
test.assert_eq(actual, expected)
| mit | 1,873,154,353,824,667,100 | 22.753769 | 76 | 0.416649 | false |
lief-project/LIEF | tests/elf/add_segment.py | 1 | 8927 | #!/usr/bin/env python
import logging
import os
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import unittest
from subprocess import Popen
from unittest import TestCase
import lief
from lief.ELF import Segment
from utils import get_sample, has_recent_glibc, is_linux, is_x86_64, is_aarch64
lief.logging.set_level(lief.logging.LOGGING_LEVEL.INFO)
CURRENT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
class TestAddSegment(TestCase):
def setUp(self):
self.logger = logging.getLogger(__name__)
self.tmp_dir = tempfile.mkdtemp(suffix='_lief_test_add_segment')
self.logger.debug("temp dir: {}".format(self.tmp_dir))
@unittest.skipUnless(is_linux() and is_x86_64(), "requires Linux x86-64")
@unittest.skipUnless(has_recent_glibc(), "Need a recent GLIBC version")
def test_simple(self):
sample_path = get_sample('ELF/ELF64_x86-64_binary_ls.bin')
stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief.bin"))
output = os.path.join(self.tmp_dir, "ls.segment")
target = lief.parse(sample_path)
for i in range(4):
segment = stub.segments[0]
original_va = segment.virtual_address
segment.virtual_address = 0
segment = target.add(segment)
new_ep = (stub.header.entrypoint - original_va) + segment.virtual_address
target.header.entrypoint = new_ep
target.write(output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
p = Popen(output, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
self.logger.debug(stdout.decode("utf8"))
self.assertIsNotNone(re.search(r'LIEF is Working', stdout.decode("utf8")))
@unittest.skipUnless(is_linux() and is_x86_64(), "requires Linux x86-64")
@unittest.skipUnless(has_recent_glibc(), "Need a recent GLIBC version")
def test_gcc(self):
sample_path = get_sample('ELF/ELF64_x86-64_binary_gcc.bin')
stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief.bin"))
output = os.path.join(self.tmp_dir, "gcc.segment")
target = lief.parse(sample_path)
segment = stub.segments[0]
original_va = segment.virtual_address
segment.virtual_address = 0
segment = target.add(segment)
new_ep = (stub.header.entrypoint - original_va) + segment.virtual_address
target.header.entrypoint = new_ep
target.write(output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
p = Popen(output, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
self.logger.debug(stdout.decode("utf8"))
self.assertIsNotNone(re.search(r'LIEF is Working', stdout.decode("utf8")))
@unittest.skipUnless(is_linux() and is_x86_64(), "requires Linux x86-64")
def test_static(self):
sample_path = get_sample('ELF/ELF64_x86-64_binary_static-binary.bin')
stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief.bin"))
output = os.path.join(self.tmp_dir, "static.segment")
target = lief.parse(sample_path)
segment = stub.segments[0]
original_va = segment.virtual_address
segment.virtual_address = 0
segment = target.add(segment)
new_ep = (stub.header.entrypoint - original_va) + segment.virtual_address
target.header.entrypoint = new_ep
target.write(output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
p = Popen(output, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
self.logger.debug(stdout.decode("utf8"))
self.assertIsNotNone(re.search(r'LIEF is Working', stdout.decode("utf8")))
@unittest.skipUnless(is_linux(), "requires Linux")
def test_misc(self):
list_binaries = [
'/usr/bin/ls',
'/usr/bin/ssh',
'/usr/bin/nm',
'/usr/bin/openssl',
'/usr/bin/bc',
'/usr/bin/bzip2',
'/usr/bin/cp',
'/usr/bin/find',
'/usr/bin/file',
]
for binary in list_binaries:
self.logger.debug("Test with '{}'".format(binary))
self.run_add_segment(binary)
def run_add_segment(self, target):
if not os.path.isfile(target):
self.logger.debug("%s does not exists. Skip!", target)
return
stub = None
if is_x86_64():
stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief.bin"))
elif is_aarch64():
stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief_aarch64.bin"))
name = os.path.basename(target)
target = lief.parse(target)
output = os.path.join(self.tmp_dir, "{}.segment".format(name))
for i in range(6):
stub_segment = stub.segments[0]
segment = lief.ELF.Segment()
segment.content = stub.segments[0].content
segment.type = stub_segment.type
segment.alignment = stub_segment.alignment
segment.flags = stub_segment.flags
new_segment = target.add(segment)
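            # Redirect the entrypoint into the injected stub: take the stub
            # entrypoint's offset from its image base and its first segment's
            # file offset (assuming the stub maps that segment 1:1), then
            # rebase it onto the segment's new virtual address in the target.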
new_ep = (stub.header.entrypoint - stub.imagebase - stub_segment.file_offset) + new_segment.virtual_address
target.header.entrypoint = new_ep
target.write(output)
st = os.stat(output)
os.chmod(output, st.st_mode | stat.S_IEXEC)
p = Popen(output, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = p.communicate()
self.logger.debug(stdout.decode("utf8"))
self.assertIsNotNone(re.search(r'LIEF is Working', stdout.decode("utf8")))
# TODO(romain): To fix
#@unittest.skipUnless(is_linux(), "requires Linux x86-64")
#def test_libc(self):
# stub = None
# if is_x86_64():
# stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief.bin"))
# elif is_aarch64():
# stub = lief.parse(os.path.join(CURRENT_DIRECTORY, "hello_lief_aarch64.bin"))
# tmp_dir = tempfile.mkdtemp(suffix='_lief_test_add_segment_libc')
# self.logger.debug("temp dir: {}".format(tmp_dir))
# libc_name = "libc.so.6"
# for e in lief.parse("/bin/ls").libraries:
# if e.startswith("libc."):
# libc_name = e
# break
# libc_path = '/usr/lib/{}'.format(libc_name)
# if not os.path.isfile(libc_path):
# libc_path = '/usr/lib/aarch64-linux-gnu/{}'.format(libc_name)
# self.logger.debug("libc used: {}".format(libc_path))
# libc = lief.parse(libc_path)
# out = os.path.join(tmp_dir, libc_name)
# stub_segment = stub.segments[0]
# for i in range(10):
# segment = lief.ELF.Segment()
# segment.content = stub.segments[0].content
# segment.type = stub_segment.type
# segment.alignment = stub_segment.alignment
# segment.flags = stub_segment.flags
# new_segment = libc.add(segment)
# new_ep = (stub.header.entrypoint - stub.imagebase - stub_segment.file_offset) + new_segment.virtual_address
# libc.header.entrypoint = new_ep
# if libc.has(lief.ELF.DYNAMIC_TAGS.INIT_ARRAY):
# init_array = libc.get(lief.ELF.DYNAMIC_TAGS.INIT_ARRAY)
# callbacks = init_array.array
# callbacks[0] = new_ep
# init_array.array = callbacks
# if libc.has(lief.ELF.DYNAMIC_TAGS.INIT):
# init = libc.get(lief.ELF.DYNAMIC_TAGS.INIT)
# init.value = new_ep
# libc.write(out)
# st = os.stat(out)
# os.chmod(out, st.st_mode | stat.S_IEXEC)
# p = Popen(["/usr/bin/ls"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"LD_LIBRARY_PATH": tmp_dir})
# stdout, _ = p.communicate()
# self.logger.debug(stdout.decode("utf8"))
# self.assertIsNotNone(re.search(r'LIEF is Working', stdout.decode("utf8")))
# if os.path.isdir(tmp_dir):
# shutil.rmtree(tmp_dir)
def tearDown(self):
# Delete it
if os.path.isdir(self.tmp_dir):
shutil.rmtree(self.tmp_dir)
if __name__ == '__main__':
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
root_logger.addHandler(ch)
unittest.main(verbosity=2)
| apache-2.0 | -2,289,807,504,114,586,600 | 36.351464 | 137 | 0.581494 | false |
r3n0us/irhelper | modules/cmds/vol_pslist_module.py | 1 | 23357 | import re
import collections
import json
import sys
sys.path.append(sys.path[0]+"/../../")
from modules.utils.helper import *
from modules.db import DBops as dbops
##TODO remove sqlite and create dbops
import sqlite3
result = {'status': True, 'message': '', 'cmd_results': '', 'errors': []}
def vol_pslist(project):
global result
print_header("Executing vol_pslist...")
rdb = dbops.DBOps(project.db_name)
if not rdb.table_exists("PSList"):
rc, result = execute_volatility_plugin(plugin_type="default",
plugin_name="pslist",
output="db",
result=result,
project=project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
print("Gathering more process info...")
if not rdb.table_exists("psinfo2"):
rc, result = execute_volatility_plugin(plugin_type="contrib",
plugin_name="psinfo2",
output="stdout",
result=result,
project=project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
if result['status']:
processinfo_data = []
for line in result['cmd_results'].split("\n"):
try:
psinfo_line = line.rstrip("\n").split("|")
psinfo = {}
psinfo['process'] = psinfo_line[0]
psinfo['process_fullname'] = psinfo_line[1]
psinfo['pid'] = psinfo_line[2]
psinfo['ppid'] = psinfo_line[3]
psinfo['imagepath'] = psinfo_line[4]
psinfo['cmdline'] = psinfo_line[5].replace(" ","/").split("//")[0].replace("\/\"","|").replace("\"","")
if psinfo_line[2] == "4":
psinfo['process_fullname'] = "system"
processinfo_data.append(psinfo.copy())
except Exception, e:
err(e)
debug(line)
_table_name = "psinfo2"
rdb = dbops.DBOps(project.db_name)
rdb.new_table(_table_name, {'process':'text','process_fullname':'text',
'pid':'integer', 'ppid':'text','imagepath':'text',
'cmdline':'text'})
rdb.insert_into_table(_table_name, processinfo_data)
if not rdb.table_exists("VerInfo"):
rc, result = execute_volatility_plugin(plugin_type="default",
plugin_name="verinfo",
output="db",
result=result,
project=project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
###Dump pslist processes in dump dir and run checks
rc, result = execute_volatility_plugin(plugin_type="default",
plugin_name="procdump",
output="stdout",
result=result,
project=project,
shell=True,
dump=True,
plugin_parms=None)
##Run exiftool and store information
if not rdb.table_exists("exiftool"):
#cmd = "exiftool -j pslist_dump/*"
cmd_array = []
cmd_array.append('exiftool')
cmd_array.append('-j')
cmd_array.append('-q')
cmd_array.append(project.dump_dir)
debug(cmd_array)
try:
rc = subprocess.check_output(cmd_array)
result['status'] = True
cmd_out = rc
except subprocess.CalledProcessError as e:
result['status'] = False
result['message'] = "Exception: exiftool plugin failed!"
err(result['message'])
if result['status']:
debug("Loading exiftool results to DB")
try:
jdata = json.loads(cmd_out)
jdata_keys = []
for i in jdata:
for n in i.keys():
if n not in jdata_keys:
jdata_keys.append(n)
table_columns = {}
for x in jdata_keys:
table_columns[x] = "text"
_table_name = "exiftool"
rdb = dbops.DBOps(project.db_name)
rdb.new_table_from_keys(_table_name, table_columns)
rdb.insert_into_table(_table_name, jdata)
result['cmd_results'] = "PS info finished"
except Exception as e:
err("Error running exiftool")
result['errors'].append(e)
##Now run the analyser code
violations, plist = analyse_processes(project)
result['cmd_results'] = {'violations': [], 'plist': [],
'plist_extended': [],
'suspicious_processes': [],}
result['cmd_results']['plist'] = plist
result['cmd_results']['violations'] = violations
enrich_exif_with_shanon_entropy()
calculate_md5()
epslist_data = enrich_pslist(project, plist)
result['cmd_results']['plist_extended'] = epslist_data
risk_list = analyse_scan_processes(project)
suspicious_plist = []
for p in risk_list:
suspicious_process = {}
suspicious_process['pid'] = p
suspicious_process['risk'] = risk_list[p]
for i in plist:
if str(i['pid']) == str(p):
suspicious_process['name'] = i['name']
break
suspicious_plist.append(suspicious_process.copy())
result['cmd_results']['suspicious_processes'] = suspicious_plist
def enrich_pslist(project, plist):
rdb = dbops.DBOps(project.db_name)
query = "select FileName,CompanyName,OriginalFileName," \
"FileDescription,FileSize,LegalCopyright,FileDescription,md5," \
"InternalName,sentropy from exiftool"
jdata = rdb.sqlite_query_to_json(query)
for entry in jdata:
new_entry = {}
pid = entry['FileName'].split(".")[1]
entry['pid'] = pid
for e in plist:
if str(pid) == str(e['pid']):
entry['process_name'] = e['name']
entry['sn_level'] = check_entropy_level(entry['sentropy'])
return jdata
def calculate_md5():
print_header("Calculating MD5 of dumped files. This may take a while")
rdb = dbops.DBOps("results.db")
rdb.patch_table('exiftool','md5','text')
rows = rdb.get_all_rows('exiftool')
for rs in rows:
try:
md5 = md5sum(rs['SourceFile'])
table_name = "exiftool"
column_name = "md5"
value = str(md5)
key_name = "SourceFile"
_key = rs[key_name]
rdb.update_value(table_name, column_name, value, key_name, _key)
except Exception as e:
err(e)
def enrich_exif_with_shanon_entropy():
'''
    The information returned by exiftool and psinfo already describes the
    extracted files in detail. To complete the picture, this routine also
    computes the Shannon entropy of each dumped file and stores it in the
    exiftool table. Takes no parameters; file paths are read from results.db.
'''
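    # For reference, a minimal sketch of the byte-level Shannon entropy that a
    # helper like calculate_shanon_entropy_file is assumed to compute,
    # H = -sum(p_i * log2(p_i)) over the 256 possible byte values:
    #
    #   def byte_entropy(data):
    #       from math import log
    #       counts = [0] * 256
    #       for b in bytearray(data):
    #           counts[b] += 1
    #       total = float(len(data))
    #       return -sum((c / total) * log(c / total, 2)
    #                   for c in counts if c)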
print_header("Calculating entropy of dumped files. This may take a while")
get_a_cofee()
rdb = dbops.DBOps("results.db")
rdb.patch_table('exiftool','sentropy','REAL')
rows = rdb.get_all_rows('exiftool')
for rs in rows:
try:
sn = str(calculate_shanon_entropy_file(rs['SourceFile']))
table_name = "exiftool"
column_name = "sentropy"
value = sn
key_name = "SourceFile"
_key = rs[key_name]
rdb.update_value(table_name, column_name, value, key_name, _key)
except Exception as e:
pass
def analyse_processes(project):
'''
    Check each running process against a set of per-OS rules to verify that it
    was spawned by the expected parent, runs from the expected image path and
    respects singleton constraints.
    Some ideas, such as the rules format, are taken from DAMM by 504ENSICS Labs.
    @param project: the current Project (used to select the rule set)
'''
print_header("Analysing processes")
violations = []
violations_count = 0
violation_message = {'process':'','rule': '','details':''}
known_processes_XP = {
'system' : { 'pid' : 4, 'imagepath' : '', 'user_account' : 'Local System', 'parent' : 'none', 'singleton' : True, 'prio' : '8' },
'smss.exe' : {'imagepath' : 'windows\System32\smss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'system', 'singleton' : True, 'session' : '', 'prio' : '11' },
'lsass.exe' : {'imagepath' : 'windows\system32\lsass.exe', 'user_account' : 'Local System', 'parent' : 'winlogon.exe', 'singleton' : True, 'session' : '0', 'prio' : '9', 'childless' : True, 'starts_at_boot' : True, 'starts_at_boot' : True },
'winlogon.exe' : {'imagepath' : 'windows\system32\winlogon.exe', 'user_account' : 'Local System', 'session' : '0', 'prio' : '13' },
'csrss.exe' : {'imagepath' : 'windows\system32\csrss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'session' : '0', 'prio' : '13', 'starts_at_boot' : True },
'services.exe' : {'imagepath' : 'windows\system32\services.exe' , 'parent' : 'winlogon.exe', 'session' : '0', 'prio' : '9', 'starts_at_boot' : True },
'svchost.exe' : {'imagepath' : 'windows\System32\svchost.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM', 'LOCAL SERVICE', 'NETWORK SERVICE'], 'parent' : 'services.exe', 'singleton' : False, 'session' : '0', 'prio' : '8', 'starts_at_boot' : True },
'explorer.exe' : {'imagepath' : 'windows\explorer.exe' , 'prio' : '8' },
}
###Notes:
###wininit.exe starts from an instance of smss.exe that exits so most likely the parent does not exist
known_processes_Vista = {
'system' : { 'pid' : 4, 'image_path' : '', 'user_account' : 'Local System', 'parent' : 'none', 'singleton' : True, 'prio' : '8' },
'smss.exe' : {'image_path' : 'windows\System32\smss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'system', 'singleton' : True, 'session' : '', 'prio' : '11' },
'wininit.exe' : {'image_path' : 'windows\System32\wininit.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'none', 'session' : '0', 'children' : False, 'prio' : '13', 'starts_at_boot' : True },
'lsass.exe' : {'image_path' : 'windows\system32\lsass.exe' , 'user_account' : 'Local System', 'parent' : 'wininit.exe', 'singleton' : True, 'session' : '0', 'prio' : '9', 'childless' : True, 'starts_at_boot' : True },
'winlogon.exe' : {'image_path' : 'windows\system32\winlogon.exe' , 'user_account' : 'Local System', 'session' : '1' , 'prio' : '13'},
'csrss.exe' : {'image_path' : 'windows\system32\csrss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'prio' : '13', 'starts_at_boot' : True },
'services.exe' : {'image_path' : 'windows\system32\services.exe' , 'parent' : 'wininit.exe', 'session' : '0', 'prio' : '9', 'starts_at_boot' : True },
'svchost.exe' : {'image_path' : 'windows\System32\svchost.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM', 'LOCAL SERVICE', 'NETWORK SERVICE'], 'parent' : 'services.exe', 'singleton' : False, 'session' : '0', 'prio' : '8', 'starts_at_boot' : True },
'lsm.exe' : {'image_path' : 'windows\System32\lsm.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'wininit.exe', 'session' : '0', 'prio' : '8', 'childless' : True, 'starts_at_boot' : True },
'explorer.exe' : {'image_path' : 'windows\explorer.exe' , 'prio' : '8' },
}
##First we need to construct relevant process information structure so
#we can easily verify them
##for every process in our running list
##{pid:2,ppid:3,path:xxx}
## check by name
## example:
## get the element with name system from our list and check if each key matches the required value
#process_fullname|process |pid|ppid|imagepath |Hnds|Sess|Thds
#NoPEB |System |4 |0 |NoPEB |1003|-1 |65
##TODO: here we need a more novel approach for the violation checks
    ## to minimise false positives. Not all information is available in every dump.
##First put all processes from pslist with enriched info into an array
con = sqlite3.connect('results.db')
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute('select psinfo2.process_fullname,psinfo2.process,psinfo2.pid,psinfo2.ppid,'
'psinfo2.imagepath,pslist.hnds,pslist.sess,pslist.thds, '
'(SELECT ps2.process_fullname FROM psinfo2 ps2 WHERE ps2.pid = psinfo2.ppid) AS parentname'
' from psinfo2 inner join pslist on psinfo2.pid = pslist.pid')
rows = cur.fetchall()
target_process_list = []
full_pslist_dict = {}
for rs in rows:
ps = {}
ps['pid'] = rs['pid']
ps['imagepath'] = str(rs['imagepath']).lower().lstrip("c:/\/")
ps['imagepath'] = str(ps['imagepath']).lstrip('??/\/\c:/\/\/')
ps['imagepath'] = str(ps['imagepath']).replace('systemroot','windows')
if ps['imagepath'] == "nopeb":
ps['imagepath'] = ''
ps['ppid'] = rs['ppid']
ps['parent'] = str(rs['parentname']).lower()
if rs['ppid'] == "4":
ps['parent'] = "system"
ps['name'] = rs['process'].lower()
if rs['process'].lower() == "system":
ps['fullname'] = str(rs['process']).lower()
else:
ps['fullname'] = rs['process_fullname'].lower()
target_process_list.append(ps.copy())
full_pslist_dict[ps['name']] = ps.copy()
if str(project.get_volatility_profile()).startswith("WinXP") \
or str(project.get_volatility_profile()).startswith("Win2003"):
rule_list = known_processes_XP
else:
rule_list = known_processes_Vista
for key in rule_list:
for process in target_process_list:
if re.search(process['name'], key, re.IGNORECASE):
for check in rule_list[key]:
if check in process:
###NOt all have peb information
                        if str(process[check]).lower() != str(rule_list[key][check]).lower() and str(process[check]).lower() != "nopeb":
print("Violation detected on: [%s] Actual value: [%s] Expected value: [%s]" %(check, process[check], rule_list[key][check]))
print(process)
violations_count += 1
violation_message['id'] = violations_count
violation_message['process'] = process
violation_message['rule'] = check
violation_message['details'] = ("Violation detected on: [%s] Actual value: [%s] Expected value: [%s]" %(check,process[check],rule_list[key][check]))
violations.append(violation_message.copy())
##Check for singleton violations as DAMM call it
processes = []
for process in target_process_list:
processes.append(str(process['name']).lower())
counter=collections.Counter(processes)
for key in rule_list:
if key in processes and "singleton" in rule_list[key]:
if int(counter[key]) > 1 and rule_list[key]['singleton']:
print("Violation detected on: [singleton] condition from [%s] Actual value: [%s]" %(key,int(counter[key])))
violations_count += 1
violation_message['id'] = violations_count
violation_message['process'] = full_pslist_dict[key]
violation_message['rule'] = "[Singleton]"
violation_message['details'] = ("Violation detected on: [singleton] condition from [%s] Actual value: [%s]" %(key,int(counter[key])))
violations.append(violation_message.copy())
print(full_pslist_dict[key])
####Lets try to detect similar wording in well known processes
usual_suspects = ['smss.exe', 'wininit.exe','csrss.exe','svchost.exe',
'lsass.exe','lsm.exe','wmpnetwk.exe','wuauclt.exe']
##Injecting bad process names
    #processes.append("scvhost.exe")
    #processes.append("lsa.exe")
    for process in processes:
for suspect in usual_suspects:
flag, score = score_jaro_distance(process,suspect)
if flag:
print("Possible culrpit process detected: [%s] resembles to: [%s] Score: [%s]" %(process,suspect,score))
violations_count += 1
violation_message['id'] = violations_count
violation_message['process'] = process
violation_message['rule'] = "[Culrpit]"
violation_message['details'] = ("Possible culrpit process detected: [%s] resembles to: [%s] Score: [%s]" %(process,suspect,score))
violations.append(violation_message.copy())
return violations, target_process_list
def analyse_scan_processes(_project):
## First we retrieve psxview all processes
global result
print_header("Gathering information from scan process")
rdb = dbops.DBOps(_project.db_name)
if not rdb.table_exists("PsXview"):
rc, result = execute_volatility_plugin(plugin_type="contrib",
plugin_name="psxview",
output="db",
result=result,
project=_project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
if not rdb.table_exists("ApiHooks"):
rc, result = execute_volatility_plugin(plugin_type="contrib",
plugin_name="apihooks",
output="db",
result=result,
project=_project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
if not rdb.table_exists("Malfind"):
rc, result = execute_volatility_plugin(plugin_type="contrib",
plugin_name="malfind",
output="db",
result=result,
project=_project,
shell=False,
dump=False,
plugin_parms=None)
if result['status']:
debug("CMD completed")
else:
err(result['message'])
##Three arrays
psxview = []
apihooked = []
malfinded = []
process_risk = {}
## Analyse further the ones with PID=false psscan=True and ExitTime null
#select * from psxview where pslist="False" and psscan="True" and exittime="";
if rdb.table_exists("PsXview"):
jdata = {}
#query = 'select * from psxview where pslist=\"False\"' \
# ' and psscan=\"True\" and not ExitTime '
query = "select * from psxview where psscan=\"True\""
jdata = rdb.sqlite_query_to_json(query)
for entry in jdata:
psxview.append(entry['PID'])
process_risk[entry['PID']] = 1
else:
err("No PSXView data")
if rdb.table_exists("ApiHooks"):
jdata = {}
query = "select PID, Process, VictimModule, Function from ApiHooks"
jdata = rdb.sqlite_query_to_json(query)
for entry in jdata:
apihooked.append(entry['PID'])
if entry['PID'] in psxview:
process_risk[entry['PID']] = 2
else:
process_risk[entry['PID']] = 1
else:
err("No ApiHooks data")
if rdb.table_exists("Malfind"):
jdata = {}
query = "select Pid, Process from Malfind group by Pid"
jdata = rdb.sqlite_query_to_json(query)
for entry in jdata:
malfinded.append(entry['Pid'])
            if entry['Pid'] in apihooked and entry['Pid'] in psxview:
                process_risk[entry['Pid']] = 3
            elif entry['Pid'] in apihooked or entry['Pid'] in psxview:
                process_risk[entry['Pid']] = 2
            else:
                # flagged by malfind alone still carries some risk
                process_risk[entry['Pid']] = 1
else:
err("No Malfind data")
    ##Then for every process found above, check the following:
    #1. apihooks
    #2. malfind
    # more to come; this is just a very simple approach (there will be false positives as well)
    ##Finally we assign a cumulative risk score of 1-3:
    # +1 for turning up in psscan
    # +1 for API hooks (apihooks)
    # +1 for malfind hits (next version we identify shellcode with ML ! :)
debug("Process risk list:%s " %process_risk)
return process_risk
def get_result():
return result
def show_json(in_response):
##Function to test json output
print(json.dumps(in_response, sort_keys=False, indent=4))
if __name__ == "__main__":
#
print("Python version: %s\n " %sys.version)
DB_NAME = "results.db"
set_debug(True)
##Get module parameters
image = sys.argv[1]
profile = sys.argv[2]
##Call the actual command
current_wd = sys.path[0]
project = Project(current_wd)
project.init_db(DB_NAME)
project.set_volatility_profile(profile)
project.set_image_name(image)
vol_pslist(project)
show_json(get_result())
| gpl-3.0 | 8,066,239,497,995,908,000 | 39.550347 | 258 | 0.518046 | false |
jim-cooley/abletonremotescripts | remote-scripts/samples/Twister Ableton Script v1.2.2/Twister/Twister.py | 1 | 78676 | from __future__ import with_statement
import Live
import time
import math
import sys
from _Framework.ButtonElement import ButtonElement # Class representing a button a the controller
from _Framework.ButtonMatrixElement import ButtonMatrixElement # Class representing a 2-dimensional set of buttons
from _Framework.ChannelStripComponent import ChannelStripComponent # Class attaching to the mixer of a given track
from _Framework.ClipSlotComponent import ClipSlotComponent # Class representing a ClipSlot within Live
from _Framework.CompoundComponent import CompoundComponent # Base class for classes encompasing other components to form complex components
from _Framework.ControlElement import ControlElement # Base class for all classes representing control elements on a controller
from _Framework.ControlSurface import ControlSurface # Central base class for scripts based on the new Framework
from _Framework.ControlSurfaceComponent import ControlSurfaceComponent # Base class for all classes encapsulating functions in Live
from _Framework.DeviceComponent import DeviceComponent # Class representing a device in Live
from _Framework.EncoderElement import EncoderElement # Class representing a continuous control on the controller
from _Framework.InputControlElement import * # Base class for all classes representing control elements on a controller
from _Framework.MixerComponent import MixerComponent # Class encompassing several channel strips to form a mixer
from _Framework.ModeSelectorComponent import ModeSelectorComponent # Class for switching between modes, handle several functions with few controls
from _Framework.NotifyingControlElement import NotifyingControlElement # Class representing control elements that can send values
from _Framework.SceneComponent import SceneComponent # Class representing a scene in Live
from _Framework.SessionComponent import SessionComponent # Class encompassing several scene to cover a defined section of Live's session
from _Framework.SessionZoomingComponent import DeprecatedSessionZoomingComponent as SessionZoomingComponent # Class using a matrix of buttons to choose blocks of clips in the session
from _Framework.SliderElement import SliderElement # Class representing a slider on the controller
from VCM600.MixerComponent import MixerComponent
from VCM600.TrackFilterComponent import TrackFilterComponent
from _Framework.TransportComponent import TransportComponent # Class encapsulating all functions in Live's transport section
from _Mono_Framework.CodecEncoderElement import CodecEncoderElement
from _Mono_Framework.EncoderMatrixElement import EncoderMatrixElement
from _Mono_Framework.MonoChopperComponent import MonoChopperComponent
from _Mono_Framework.MonoBridgeElement import MonoBridgeElement
from _Mono_Framework.MonoButtonElement import MonoButtonElement
from _Mono_Framework.MonoEncoderElement import MonoEncoderElement
from _Mono_Framework.ResetSendsComponent import ResetSendsComponent
from _Mono_Framework.DetailViewControllerComponent import DetailViewControllerComponent
from _Mono_Framework.DeviceSelectorComponent import DeviceSelectorComponent
from _Mono_Framework.MonomodComponent import MonomodComponent
from _Mono_Framework.MonoDeviceComponent import MonoDeviceComponent
from _Mono_Framework.SwitchboardElement import SwitchboardElement
from _Mono_Framework.MonoClient import MonoClient
from _Mono_Framework.LiveUtils import *
from _Generic.Devices import *
from ModDevices import *
from Map import *
class ShiftModeComponent(ModeSelectorComponent):
def __init__(self, script, callback, *a, **k):
super(ShiftModeComponent, self).__init__(*a, **k)
self._script = script
self.update = callback
self._modes_buttons = []
self._last_mode = 0
self._set_protected_mode_index(0)
def set_mode_buttons(self, buttons):
for button in self._modes_buttons:
button.remove_value_listener(self._mode_value)
self._modes_buttons = []
if (buttons != None):
for button in buttons:
                assert isinstance(button, ButtonElement)
identify_sender = True
button.add_value_listener(self._mode_value, identify_sender)
self._modes_buttons.append(button)
def number_of_modes(self):
return 5
def set_mode(self, mode):
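        # Buttons map to modes 1..4 (hence the offset below); pressing the
        # button for the already-active mode falls back to mode 0, so each
        # mode button behaves as a toggle.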
assert isinstance(mode, int)
mode += 1
assert (mode in range(self.number_of_modes()))
if (self._mode_index != mode):
self._mode_index = mode
self.update()
elif (self._mode_index != 0):
self._mode_index = 0
self.update()
def _mode_value(self, value, sender):
assert (len(self._modes_buttons) > 0)
assert isinstance(value, int)
assert isinstance(sender, ButtonElement)
assert (self._modes_buttons.count(sender) == 1)
        if ((value != 0) or (not sender.is_momentary())):
self.set_mode(self._modes_buttons.index(sender))
class MonomodModeComponent(ModeSelectorComponent):
__module__ = __name__
__doc__ = ' Class for switching between modes, handle several functions with few controls '
def __init__(self, script, *a, **k):
super(MonomodModeComponent, self).__init__(*a, **k)
self._script = script
self._set_protected_mode_index(0)
def set_mode_buttons(self, buttons):
for button in self._modes_buttons:
button.remove_value_listener(self._mode_value)
self._modes_buttons = []
if (buttons != None):
for button in buttons:
assert isinstance(button, ButtonElement)
identify_sender = True
button.add_value_listener(self._mode_value, identify_sender)
self._modes_buttons.append(button)
for index in range(len(self._modes_buttons)):
if (index == self._mode_index):
self._modes_buttons[index].turn_on()
else:
self._modes_buttons[index].turn_off()
def set_mode_toggle(self, button):
        assert ((button == None) or isinstance(button, ButtonElement))
if (self._mode_toggle != None):
self._mode_toggle.remove_value_listener(self._toggle_value)
self._mode_toggle = button
if (self._mode_toggle != None):
self._mode_toggle.add_value_listener(self._toggle_value)
def number_of_modes(self):
return 2
class CntrlrDetailViewControllerComponent(DetailViewControllerComponent):
def __init__(self, script, *a, **k):
super(CntrlrDetailViewControllerComponent, self).__init__(*a, **k)
self._script = script
def _nav_value(self, value, sender):
super(CntrlrDetailViewControllerComponent, self)._nav_value(value, sender)
if (self.is_enabled() and (not self._shift_pressed)):
if ((not sender.is_momentary()) or (value != 0)):
modifier_pressed = True
if not ((not self.application().view.is_view_visible('Detail')) or (not self.application().view.is_view_visible('Detail/DeviceChain'))):
self._script._update_selected_device()
class CntrlrSwitchboardElement(SwitchboardElement):
def __init__(self, *a, **k):
        super(CntrlrSwitchboardElement, self).__init__(*a, **k)
class CntrlrMonoDevice(MonoDeviceComponent):
def __init__(self, *a, **k):
super(CntrlrMonoDevice, self).__init__(*a, **k)
class CntrlrMonoClient(MonoClient):
def __init__(self, *a, **k):
super(CntrlrMonoClient, self).__init__(*a, **k)
self._raw = False
def _banner(self):
pass
def disconnect_client(self, *a, **k):
super(CntrlrMonoClient, self).disconnect_client(*a, **k)
if not self._mod_dial == None:
if self._mod_dial._parameter is self._mod_dial_parameter:
self._mod_dial.release_parameter()
def _send_c_key(self, index, value, *a, **k):
self._send('key', index, value)
if self._raw is True:
control = self._host._host._keys[index]
if control != None:
self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)
def _send_c_grid(self, column, row, value, *a, **k):
self._send('grid', column, row, value)
if self._raw is True:
control = self._host._host._grid.get_button(column, row)
if control != None:
self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)
def _send_c_dial(self, column, row, value, *a, **k):
self._send('dial', column, row, value)
if self._raw is True:
control = self._host._host._dial_matrix.get_dial(column, row)
if control != None:
self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)
def _send_c_dial_button(self, column, row, value, *a, **k):
if row > 0:
self._send('dial_button', column, row-1, value)
if self._raw is True:
control = self._host._host._dial_button_matrix.get_button(column, row)
if control != None:
self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)
def _send_key(self, *a):
pass
def _send_grid(self, *a):
pass
def _send_dial(self, *a):
pass
def _send_dial_button(self, *a):
pass
"""receive methods (from m4l)"""
def receive_key(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_key(*a, **k)
def receive_grid(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_grid(*a, **k)
def receive_grid_row(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_grid_row(*a, **k)
def receive_grid_column(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_grid_column(*a, **k)
def receive_grid_all(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_grid_all(*a, **k)
def receive_mask_key(self, *a, **k):
super(CntrlrMonoClient, self).receive_mask_c_key(*a, **k)
def receive_mask_grid(self, *a, **k):
super(CntrlrMonoClient, self).receive_mask_c_grid(*a, **k)
def receive_mask_column(self, *a, **k):
super(CntrlrMonoClient, self).receive_mask_c_column(*a, **k)
    def receive_mask_row(self, *a, **k):
        super(CntrlrMonoClient, self).receive_mask_c_row(*a, **k)
def receive_mask_all(self, *a, **k):
super(CntrlrMonoClient, self).receive_mask_c_all(*a, **k)
def receive_wheel(self, *a, **k):
super(CntrlrMonoClient, self).receive_c_wheel(*a, **k)
def set_local_ring_control(self, *a, **k):
super(CntrlrMonoClient, self).set_c_local_ring_control(*a, **k)
def set_absolute_mode(self, *a, **k):
super(CntrlrMonoClient, self).set_c_absolute_mode(*a, **k)
def receive_mod_color(self, val):
if val != 1:
self._mod_color = val
self._host.shift_update()
"""raw data integration"""
def set_raw_enabled(self, value):
self._raw = value > 0
#self._host.log_message('raw enabled' + str(self._raw))
if(self._raw is True):
self._update_controls_dictionary()
def receive_raw(self, Type, Identifier, value):
#self._host.log_message('recieve raw' + str(Type) + str(Identifier) + str(value))
if self._controls[Type]:
if Identifier in self._controls[Type]:
self._controls[Type][Identifier](value)
def _update_controls_dictionary(self):
if self._host._host != None:
self._controls = [{}, {}]
if self._control_defs['grid'] != None:
for column in range(self._control_defs['grid'].width()):
for row in range(self._control_defs['grid'].height()):
button = self._control_defs['grid'].get_button(column, row)
if button != None:
self._controls[0][button._original_identifier]=self._make_grid_call(column, row)
if self._control_defs['keys'] != None:
for index in range(len(self._control_defs['keys'])):
key = self._control_defs['keys'][index]
if key != None:
self._controls[0][key._original_identifier]=self._make_key_call(index)
if self._control_defs['dials'] != None:
for index in range(12):
column = index%4
row = int(index/4)
dial = self._control_defs['dials'].get_dial(column, row)
if dial != None:
self._controls[1][dial._original_identifier]=self._make_dial_call(index)
if self._control_defs['buttons'] != None:
for index in range(8):
column = index%4
row = int(index/4)+1
button = self._control_defs['buttons'].get_button(column, row)
if button != None:
self._controls[0][button._original_identifier]=self._make_dial_button_call(index+4)
def _make_grid_call(self, column, row):
        def receive_grid(value):
            #self._host.log_message('receive grid' + str(value) + str(column) + str(row))
            self.receive_c_grid(column, row, value)
        return receive_grid
def _make_key_call(self, number):
def receive_key(value):
#self._host.log_message('receive key' + str(number) + str(value))
self.receive_c_key(number, value)
return receive_key
def _make_dial_call(self, number):
        def receive_wheel(value):
            self.receive_wheel(number, 'value', value)
        return receive_wheel
    def _make_dial_button_call(self, number):
        def receive_wheel(value):
            self.receive_wheel(number, 'white', value)
        return receive_wheel
class CntrlrMonomodComponent(MonomodComponent):
def __init__(self, *a, **k):
super(CntrlrMonomodComponent, self).__init__(*a, **k)
def _send_grid(self, *a):
pass
def _send_key(self, *a):
pass
def disconnect(self, *a, **k):
self._release_mod_dials()
super(CntrlrMonomodComponent, self).disconnect(*a, **k)
def connect_to_clients(self, *a, **k):
super(CntrlrMonomodComponent, self).connect_to_clients(*a, **k)
for index in range(4):
self._client[index]._mod_dial = (self._script._encoder[index]) #assign it a modDial so that we can control its modVolume from the unshifted CNTRLR
def _select_client(self, *a, **k):
super(CntrlrMonomodComponent, self)._select_client(*a, **k)
self._script.set_local_ring_control(self._active_client._c_local_ring_control)
self._script.set_absolute_mode(self._active_client._c_absolute_mode)
self._active_client._device_component.update()
def on_enabled_changed(self, *a, **k):
super(CntrlrMonomodComponent, self).on_enabled_changed(*a, **k)
if self._active_client != None:
if self.is_enabled():
self._active_client._device_component.update()
self._script.set_absolute_mode(self._active_client._c_absolute_mode)
self._script.set_local_ring_control(self._active_client._c_local_ring_control)
else:
for control in self._parameter_controls:
control.release_parameter()
self._script.set_absolute_mode(1)
self._script.set_local_ring_control(1)
def _set_button_matrix(self, grid):
assert isinstance(grid, (ButtonMatrixElement, type(None)))
if grid != self._grid:
if self._grid != None:
self._grid.remove_value_listener(self._matrix_value)
self._grid = grid
if self._grid != None:
self._grid.add_value_listener(self._matrix_value)
self.update()
return None
def _matrix_value(self, value, x, y, is_momentary): #to be sent to client from controller
assert (self._grid != None)
assert (value in range(128))
assert isinstance(is_momentary, type(False))
if (self.is_enabled()):
self._active_client._send_c_grid(x + self._x, y + self._y, value)
def _update_grid(self):
if self.is_enabled() and self._grid != None:
for column in range(4):
for row in range(4):
self._send_c_grid(column, row, self._active_client._c_grid[column][row])
def _alt_value(self, value):
if self._shift_pressed == 0:
self._alt_pressed = value != 0
self._active_client._send('alt', int(self._alt_pressed))
self.update()
def _set_key_buttons(self, buttons):
assert (buttons == None) or (isinstance(buttons, tuple))
for key in self._keys:
if key.value_has_listener(self._key_value):
key.remove_value_listener(self._key_value)
self._keys = []
if buttons != None:
assert len(buttons) == 32
for button in buttons:
assert isinstance(button, MonoButtonElement)
self._keys.append(button)
button.add_value_listener(self._key_value, True)
def _key_value(self, value, sender):
if self.is_enabled():
self._active_client._send_c_key(self._keys.index(sender), int(value!=0))
def _update_keys(self):
for index in range(32):
self._send_c_key(index, self._active_client._c_key[index])
def _set_knobs(self, knobs):
assert (knobs == None) or (isinstance(knobs, tuple))
for knob in self._knobs:
if knob.has_value_listener(self._knob_value):
knob.remove_value_listener(self._knob_value)
self._knobs = []
if knobs != None:
assert len(knobs) == 24
for knob in knobs:
assert isinstance(knob, EncoderElement)
self._knobs.append(knob)
knob.add_value_listener(self._knob_value, True)
def _knob_value(self, value, sender):
if self.is_enabled():
self._active_client._send_c_knob(self._knobs.index(sender), value)
def _dial_matrix_value(self, value, x, y):
if self.is_enabled() and self._active_client != None:
if self._script._absolute_mode == 0:
value = RELATIVE[int(value == 1)]
self._active_client._send_c_dial(x, y, value)
def _dial_button_matrix_value(self, value, x, y, force):
if (self.is_enabled()) and (self._active_client != None):
self._active_client._send_c_dial_button(x, y, value)
def _reset_encoder(self, coord):
self._dial_matrix.get_dial(coord[0], coord[1])._reset_to_center()
"""CNTRLR specific methods"""
def _send_c_grid(self, column, row, value): #to be sent to controller from client
if self.is_enabled() and self._grid != None:
if column in range(self._x, self._x + 4):
if row in range(self._y, self._y + 4):
self._grid.get_button(column - self._x, row - self._y).send_value(int(self._colors[value]))
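 """_send_c_grid windows the client's full grid into the physical 4x4: only cells whose (column, row) fall inside [self._x, self._x + 4) x [self._y, self._y + 4) are drawn, translated back to controller coordinates - e.g. with self._x == 4, client column 5 lights physical column 1"""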
def _send_c_key(self, index, value):
if self.is_enabled():
#if (self._shift_pressed > 0) or (self._locked > 0):
# self._grid.get_button(index, 7).send_value(int(self._colors[value]))
if self._keys != None and len(self._keys) > index:
self._keys[index].send_value(int(self._colors[value]))
def _send_c_wheel(self, column, row, wheel, parameter=None): #to be sent to controller from client
if self.is_enabled() and wheel != None:
if column < 4 and row < 3:
dial = self._dial_matrix.get_dial(column, row)
if(parameter=='value'):
dial._ring_value = int(wheel['value'])
dial._ring_mode = int(wheel['mode'])
dial._ring_green = int(wheel['green']!=0)
dial._ring_log = int(wheel['log'])
if(parameter=='custom'):
dial._ring_custom = dial._calculate_custom(str(wheel['custom']))
self._dial_button_matrix.send_value(column, row, wheel['white'])
if(self._script._absolute_mode > 0) and (not self._active_client._device_component.is_enabled()):
dial.send_value(wheel['log'], True)
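 """inferred from the accesses above: each client wheel is a dict carrying at least the keys 'value', 'mode', 'green', 'log' and 'custom' (which drive the encoder ring display) plus 'white' (which drives the dial button LED)"""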
def _update_c_wheel(self):
if self._dial_button_matrix != None:
for column in range(4):
for row in range(3):
self._send_c_wheel(column, row, self._active_client._c_wheel[column][row])
if not self._active_client._device_component.is_enabled():
self._send_to_lcd(column, row, self._active_client._c_wheel[column][row])
#self._script.log_message('dial value update' +str(column) + str(row) + str(self._active_client._wheel[column][row]['value']))
def set_c_local_ring_control(self, val = 1):
self._c_local_ring_control = (val!=0)
self._script.set_local_ring_control(self._c_local_ring_control)
def set_c_absolute_mode(self, val=1):
self._c_absolute_mode = (val!=0)
self._script.set_absolute_mode(self._c_absolute_mode)
def _release_mod_dials(self):
if not self._client is None:
for index in range(4): #for each of our 4 clients:
if not self._client[index]._mod_dial == None: #if the client has a modDial assigned to it
self._client[index]._mod_dial.release_parameter() #remove the modDial's parameter assignment
def _assign_mod_dials(self):
if not self._client is None:
for index in range(4): #recursion to contain all available clients
param = self._client[index]._mod_dial_parameter() #param is a local variable, and we assign its value to the mod_dial_parameter (this is handled by each individual client module)
#self._script.log_message('mod dial param ' + str(param))
if not self._client[index]._mod_dial == None: #if the client has been assigned a mod dial (which it should have been in setup_mod() )
if not param == None: #if the param variable was properly assigned in the client module
self._client[index]._mod_dial.connect_to(param) #connect the physical control to the parameter (this should be the moddial parameter in the m4l patch)
else:
self._client[index]._mod_dial.release_parameter() #if the param was None, release the physical control from any assignments
self._script.request_rebuild_midi_map()
def _display_mod_colors(self):
if not self._client is None:
for index in range(4): #set up a recursion of 4
self._script._shift_mode._modes_buttons[index].send_value(self._client[index]._mod_color) #update the modLEDs to display the color assigned to its contained mod
if self._is_enabled:
self._script._shift_mode._modes_buttons[self._client.index(self._active_client)].send_value(8)
else:
for index in range(4):
self._script._shift_mode._modes_buttons[index].send_value(0)
class Twister(ControlSurface):
__module__ = __name__
__doc__ = " Monomodular controller script for Twister "
def __init__(self, *a, **k):
super(Twister, self).__init__(*a, **k)
"""MonoComponent specific variables - best not change these unless you know what you're doing"""
#self._version_check = 'b994'
self._host_name = 'Twister'
self._color_type = 'OhmRGB'
self._hosts = []
self.hosts = []
self._client = [None for index in range(4)]
self._active_client = None
self._rgb = 0 ##will change which color scheme is used, 0 is Livid 1 is AumHaa 2 is Monochrome(deprecated)
self._timer = 0 #used for flashing states, and is incremented by each call from self._update_display()
self._touched = 0 #used by the LCD patch to determine the last time a control was changed
self._local_ring_control = False #used by CodecEncoderElement to determine whether individual ring LEDs are addressable
self.set_local_ring_control(1) #initialize the local_control state of the encoder rings
self._absolute_mode = 1 #used by CodecEncoderElement to determine whether inc/dec or absolute changes are sent from CNTRLR
self.flash_status = 1 #used to determine whether button LED's use flashing states or not
self._device_selection_follows_track_selection = FOLLOW
with self.component_guard():
"""Initialization methods - comments included in the corresponding method"""
self._setup_monobridge()
self._setup_controls()
self._setup_transport_control()
self._setup_mixer_control()
self._setup_session_control()
self._assign_session_colors()
self._setup_device_control()
self._setup_device_selector()
self._setup_mod()
self._setup_switchboard()
self._setup_chopper()
self._setup_modes()
self.schedule_message(1, self._open_log)
   self.song().view.add_selected_track_listener(self._update_selected_device) #Add a listener so that when the track content changes our device selection will also be updated
"""script initialization methods"""
def _open_log(self):
self.log_message("<<<<<<<<<<<<<<<<<<<<= " + str(self._host_name) + " log opened =>>>>>>>>>>>>>>>>>>>")
self.show_message(str(self._host_name) + ' Control Surface Loaded')
"""monobridge is used to send parameter names and values to the m4l LCD patch"""
def _setup_monobridge(self):
self._monobridge = MonoBridgeElement(self)
self._monobridge.name = 'MonoBridge'
def _setup_controls(self):
is_momentary = True #this variable will be used when sending arguments to the __init__ function of the modules we are creating instances of
self._fader = [None for index in range(8)]
self._dial_left = [None for index in range(12)]
self._dial_right = [None for index in range(12)]
self._encoder = [None for index in range(12)]
self._encoder_button = [None for index in range(12)]
self._grid = [None for index in range(16)]
self._button = [None for index in range(32)]
#self._side = [None for index in range(6)]
"""Now that we have our arrays, we can fill them with the controltypes that we'll be using."""
for index in range(8):
self._fader[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_FADERS[index], Live.MidiMap.MapMode.absolute, 'Fader_' + str(index), index, self)
self._knobs = []
for index in range(12):
self._dial_left[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_KNOBS_LEFT[index], Live.MidiMap.MapMode.absolute, 'Dial_Left_' + str(index), TW_KNOBS_LEFT[index], self)
self._knobs.append(self._dial_left[index])
for index in range(12):
self._dial_right[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_KNOBS_RIGHT[index], Live.MidiMap.MapMode.absolute, 'Dial_Right_' + str(index), TW_KNOBS_RIGHT[index], self)
self._knobs.append(self._dial_right[index])
for index in range(12):
self._encoder[index] = CodecEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_DIALS[index], Live.MidiMap.MapMode.absolute, 'Encoder_' + str(index), TW_DIALS[index], self)
for index in range(12):
self._encoder_button[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_DIAL_BUTTONS[index], 'Encoder_Button_' + str(index), self)
for index in range(16):
self._grid[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_GRID[index], 'Grid' + str(index), self)
for index in range(32):
self._button[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_BUTTONS[index], 'Button_' + str(index), self)
"""We'll also need to assign some of our controls to ButtonMatrixElements so that we can use them with the Session Zoom and the Mod components"""
"""We use the same formula here: first we create the holders:"""
self._matrix = ButtonMatrixElement() #this is a standard _Framework object used by many of the other scripts
self._matrix.name = 'Matrix'
self._dial_matrix = EncoderMatrixElement(self) #this is a special Mono object, used specifically for the mod components
self._dial_matrix.name = 'Dial_Matrix'
self._dial_button_matrix = ButtonMatrixElement() #this is a special Mono object, used specifically for the mod components
self._dial_button_matrix.name = 'Dial_Button_Matrix'
"""And then we fill the with the control elements that are assigned to them"""
for row in range(4): #we have 4 rows, and 4 columns, forming the 4x4 grid in the center of the controller
button_row = [] #since the matrix is two dimensional, first we create the outer array,
for column in range(4):
button_row.append(self._grid[(row*4) + column]) #then we create the inner array. The process is the same for the other controls here.
self._matrix.add_row(tuple(button_row)) #add_row() is a method of the ButtonMatrixElement. You can look in its parent module to see how it works
for row in range(3):
dial_row = []
for column in range(4):
dial_row.append(self._encoder[(row*4) + column])
self._dial_matrix.add_row(tuple(dial_row))
for row in range(3):
dial_button_row = []
for column in range(4):
dial_button_row.append(self._encoder_button[(row*4) + column])
self._dial_button_matrix.add_row(tuple(dial_button_row))
self._key_matrix = ButtonMatrixElement()
button_row = [] #since we only use one row for the chopper, we can define a 1 dimensional button matrix for this one.
for column in range(16): #We use the ButtonMatrixObject because it takes care of setting up callbacks for all the buttons easily when we need them later
button_row.append(self._button[16 + column])
self._key_matrix.add_row(tuple(button_row))
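 """index math used when filling the matrices above: the flat self._grid list maps onto the 4x4 matrix as index = (row * 4) + column, so row 2, column 1 is self._grid[9]; the same formula recurs wherever the grid is addressed later in this script"""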
"""the transport component allows us to assign controls to transport functions in Live"""
def _setup_transport_control(self):
self._transport = TransportComponent()
self._transport.name = 'Transport'
"""the mixer component corresponds and moves with our selection in Live, and allows us to assign physical controls"""
"""to Live's mixer functions without having to make all the links ourselves"""
def _setup_mixer_control(self):
is_momentary = True
  self._num_tracks = (4) #A mixer is one-dimensional; we only need to specify how many tracks it spans
self._mixer = MixerComponent(4, 2, True, False) #These values represent the (Number_of_tracks, Number_of_returns, EQ_component, Filter_component)
self._mixer.name = 'Mixer' #We name everything that we might want to access in m4l
self._mixer.set_track_offset(0) #Sets start point for mixer strip (offset from left)
for index in range(4):
self._mixer.channel_strip(index).set_volume_control(self._fader[index]) #Since we gave our mixer 4 tracks above, we'll now assign our fader controls to it
self._mixer.channel_strip(index).name = 'Mixer_ChannelStrip_' + str(index) #We also name the individual channel_strip so we can access it
self._mixer.track_eq(index).name = 'Mixer_EQ_' + str(index) #We also name the individual EQ_component so we can access it
self._mixer.channel_strip(index)._invert_mute_feedback = True #This makes it so that when a track is muted, the corresponding button is turned off
self.song().view.selected_track = self._mixer.channel_strip(0)._track #set the selected strip to the first track, so that we don't, for example, try to assign a button to arm the master track, which would cause an assertion error
self._send_reset = ResetSendsComponent(self) #This creates a custom MonoComponent that allows us to reset all the sends on a track to zero with a single button
self._send_reset.name = 'Sends_Reset' #We name it so that we can access it from m4l
"""the session component represents a grid of buttons that can be used to fire, stop, and navigate clips in the session view"""
def _setup_session_control(self):
is_momentary = True
num_tracks = 4 #we are working with a 4x4 grid,
num_scenes = 4 #so the height and width are both set to 4
right_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 12)
left_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 9)
up_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 11)
down_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 13)
right_button.name = 'Bank_Select_Right_Button'
left_button.name = 'Bank_Select_Left_Button'
up_button.name = 'Bank_Select_Up_Button'
down_button.name = 'Bank_Select_Down_Button'
self._session = SessionComponent(num_tracks, num_scenes) #we create our SessionComponent with the variables we set above it
self._session.name = "Session" #we name it so we can access it in m4l
  self._session.set_offsets(0, 0) #we set the initial offset to the far left, top of the session grid
  self._session.set_track_bank_buttons(right_button, left_button) #assign the buttons that move the session selection left and right
  self._session.set_scene_bank_buttons(down_button, up_button) #assign the buttons that move the session selection up and down
#self._session.set_stopped_value(STOP_CLIP[self._rgb]) #we assign the colors that will be displayed when the stop_clip button is pressed. This value comes from CNTRLR_Map.py, which is imported in the header of our script
self._scene = [None for index in range(4)] #we create an array to hold the Scene subcomponents so that we can get to them if we need them.
for row in range(num_scenes): #now we'll fill the array with different objects that were created when we called the SessionComponent() module
self._scene[row] = self._session.scene(row) #each session row is a SceneComponent
self._scene[row].name = 'Scene_' + str(row) #name it so we can access it in m4l
for column in range(num_tracks): #now we'll create holders and names for the contents of each scene
    clip_slot = self._scene[row].clip_slot(column) #we use our assignment of the scene above to gain access to the individual clipslots. Here, we are just assigning 'clip_slot' each time as a local variable so we can manipulate its properties
    clip_slot.name = str(column) + '_Clip_Slot' + str(row) #name it so that we can access it in m4l
clip_slot.set_triggered_to_play_value(CLIP_TRG_PLAY[self._rgb]) #set its triggered to play color
clip_slot.set_triggered_to_record_value(CLIP_TRG_REC[self._rgb])#set its triggered to record color
clip_slot.set_stopped_value(CLIP_STOP[self._rgb]) #set its stop color
clip_slot.set_started_value(CLIP_STARTED[self._rgb]) #set its started color
clip_slot.set_recording_value(CLIP_RECORDING[self._rgb]) #set its recording value
self.set_highlighting_session_component(self._session)
self._session.set_mixer(self._mixer) #now we link the MixerComponent we created in _setup_mixer_control() to our session component so that they will follow each other when either is navigated
self._session_zoom = SessionZoomingComponent(self._session) #this creates the ZoomingComponent that allows navigation when the shift button is pressed
self._session_zoom.name = 'Session_Overview' #name it so we can access it in m4l
self._session_zoom.set_stopped_value(ZOOM_STOPPED[self._rgb]) #set the zooms stopped color
self._session_zoom.set_playing_value(ZOOM_PLAYING[self._rgb]) #set the zooms playing color
self._session_zoom.set_selected_value(ZOOM_SELECTED[self._rgb]) #set the zooms selected color
self._session_zoom.set_button_matrix(self._matrix) #assign the ButtonMatrixElement that we created in _setup_controls() to the zooming component so that we can control it
self._session_zoom.set_zoom_button(self._button[31]) #assign a shift button so that we can switch states between the SessionComponent and the SessionZoomingComponent
"""this section is used so that we can reassign the color properties of each state. Legacy, from the OhmModes script, to support either RGB or Monochrome"""
def _assign_session_colors(self):
num_tracks = 4
num_scenes = 4
#self._session.set_stopped_value(STOP_ALL[self._rgb])
for row in range(num_scenes):
for column in range(num_tracks):
self._scene[row].clip_slot(column).set_triggered_to_play_value(CLIP_TRG_PLAY[self._rgb])
self._scene[row].clip_slot(column).set_triggered_to_record_value(CLIP_TRG_REC[self._rgb])
self._scene[row].clip_slot(column).set_stopped_value(CLIP_STOP[self._rgb])
self._scene[row].clip_slot(column).set_started_value(CLIP_STARTED[self._rgb])
self._scene[row].clip_slot(column).set_recording_value(CLIP_RECORDING[self._rgb])
self._session_zoom.set_stopped_value(ZOOM_STOPPED[self._rgb])
self._session_zoom.set_playing_value(ZOOM_PLAYING[self._rgb])
self._session_zoom.set_selected_value(ZOOM_SELECTED[self._rgb])
self.refresh_state()
"""the device component allows us to assign encoders to the selected device in Live"""
def _setup_device_control(self):
self._device = DeviceComponent() #create the device component
self._device.name = 'Device_Component' #name it so we can access it in m4l
self._device._is_banking_enabled = self.device_is_banking_enabled(self._device) #we do this to defeat some undesirable behavior in the DeviceComponent which defeats banking if no controls are assigned
self._device.set_device = self._device_set_device(self._device)
self._device.update = self._device_update(self._device)
self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)])) #set its controls to the bottom 8 encoders; we use [index+4] to offset past the first 4 encoders
self.set_device_component(self._device) #assign our component to the control_surface main script; this allows special updating, like being able to lock the devicecomponent to the currently selected device
self._device_navigator = CntrlrDetailViewControllerComponent(self) #this is a special component taken out of the APC scripts; its used to move from one device to another with the controller
self._device_navigator.name = 'Device_Navigator' #name it so that we can access it in m4l
self._device_selection_follows_track_selection = FOLLOW #_device_selection_follows_track_selection is a property of the main ControlSurface script, and does what it says it does. The FOLLOW variable is taken from CNTRLR_Map.py
"""the device selector component allows the user to set buttons that will automatically select a device based on its name"""
"""its not used in the stock CNTRLR script, but it could easily be assigned to any buttons using the correct syntax"""
"""for more information, check out the documentation for the MonOhm script"""
def _setup_device_selector(self):
self._device_selector = DeviceSelectorComponent(self)
self._device_selector.name = 'Device_Selector'
"""this section sets up the host environment that allows the controller to access different mods from the modButtons"""
def _setup_mod(self):
self._host = CntrlrMonomodComponent(self) #the MonomodComponent is the bridge between the CNTRLR's controls and the client patches that connect to m4l
self._host.name = 'Cntrlr_Host' #name it so we can access it
  self.hosts = [self._host] #since some controllers can have more than one grid for accessing their clients, we create an array to hold all of the hosts that are included in this script. The CNTRLR only holds one.
self._hosts = [self._host] #this is redundant, and needs to be fixed
self._host._set_parameter_controls(self._encoder)
for index in range(4): #now we create our clients that will be connected to the actual m4l mods
self._client[index] = CntrlrMonoClient(self, index) #create an instance, and pass it its index
self._client[index].name = 'Client_' + str(index) #name it so we can access it
self._client[index]._mod_dial = (self._encoder[index]) #assign it a modDial so that we can control its modVolume from the unshifted CNTRLR
self._client[index]._device_component = MonoDeviceComponent(self._client[index], MOD_BANK_DICT, MOD_TYPES)
self._client[index]._control_defs = {'dials':self._dial_matrix, 'buttons':self._dial_button_matrix, 'grid':self._matrix, 'keys':self._button, 'knobs':self._knobs} #assign controls that raw data will be addressed at
self._active_client = self._client[0] #select the first client as our active client
self._active_client._is_active = True #initialize its active state, used by MonomodComponent to determine its status when sending it messages
self._host.connect_to_clients(self) #connect our MonomodComponent to our clients now that they are set up and ready to go.
"""the switchboard allows us to manage connections and disconnections between our clients and any mods that are currently installed in our Live project"""
def _setup_switchboard(self):
self._switchboard = SwitchboardElement(self, self._client) #here we are passing the main script and the array of client modules we created above to create an instance of the switchboard controlelement
self._switchboard.name = 'Switchboard' #name it so we can access it in m4l
"""the clipchopper component is a custom component we can access by switching modes"""
def _setup_chopper(self):
self._chopper = MonoChopperComponent(self, self._mixer) #create the chopper module, and pass it our mixer so that we can use it to navigate which clip is being manipulated
self._chopper.name = 'Chopper' #name it so we can access it via m4l
self._chopper._set_button_matrix(self._key_matrix) #set its controls to the ButtonMatrixElement we created in _setup_controls()
"""since there are many different configurations possible with the modButtons, we'll need to create a ModeSelectorComponent"""
"""to manage the different states of our controller"""
def _setup_modes(self):
self._shift_mode = ShiftModeComponent(self, self.shift_update) #here we call a new component by passing this module and its shift_update method
self._shift_mode.name = 'Mod_Mode' #name it so we can access it
self._shift_mode.set_mode_buttons([self._encoder_button[index] for index in range(4)]) #set the mode buttons that we will use to change states
"""cntrlr modes"""
"""here we set up some methods that will be used to update the control assignments when we change between different modes"""
"""this method is called everytime we change modes. If we make any assignments in the other mode assignment methods, we"""
"""have to be sure to remove them in this function. This creates a 'blank slate' for all the CNTRLRs control elements"""
def deassign_live_controls(self):
#for index in range(4):
# if self._encoder[index].value_has_listener(self._client[index]._mod_dial_value):
# self._encoder[index].remove_value_listener(self._client[index]._mod_dial_value)
"""THIS SECTION IS MISSING FROM THE ORIGINAL SCRIPT AND NEEDS TO BE FIXED...THE ASSIGNMENTS WERE MADE AT __init__"""
for index in range(4):
   self._mixer.channel_strip(index).set_volume_control(None) #remove the fader assignments from the track channel strips
  for index in range(2):
   self._mixer.return_strip(index).set_volume_control(None) #remove the fader assignments from the return strips
  self._mixer.master_strip().set_volume_control(None) #remove the fader assignment from the master channel strip
  self._mixer.set_prehear_volume_control(None) #remove the fader assignment from the prehear volume control
for index in range(4): #for the left side of the mixer
self._mixer.channel_strip(index).set_solo_button(None) #remove the solo button assignments
self._mixer.channel_strip(index).set_arm_button(None) #remove the arm button assignments
self._mixer.channel_strip(index).set_mute_button(None) #remove the mute button assignments
self._mixer.channel_strip(index).set_select_button(None) #remove the select button assignments
for column in range(4):
for row in range(4):
self._scene[row].clip_slot(column).set_launch_button(None) #remove the clip launch assignments
self._send_reset.set_buttons(tuple([None for index in range(4)])) #remove the send_reset button assignments - this has to be sent as a tuple
self._session.set_stop_track_clip_buttons(None) #remove the clip_stop button assignments
self._transport.set_play_button(None) #remove the play button assignment
self._transport.set_record_button(None) #remove the record button assignment
self._transport.set_stop_button(None) #remove the stop button assignment
for index in range(16):
self._grid[index].set_on_off_values(127, 0) #reset the on/off values of the grid buttons
self._grid[index].reset() #turn the buttons LEDs off
for index in range(32):
self._button[index].set_on_off_values(127, 0) #reset the on/off values for the key buttons
self._button[index].reset() #turn the buttons LEDs off
self._button[index].release_parameter() #remove the parameter assignment that was assigned to the keys
for client in self._client: #for each of our 4 clients:
if not client._mod_dial == None: #if the client has a modDial assigned to it
client._mod_dial.release_parameter() #remove the modDial's parameter assignment
  self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)])) #assign the encoders to the device component controls
self._device_navigator.set_device_nav_buttons(None, None) #remove the assignment of the device nav buttons
self._device_navigator.set_enabled(False) #turn off the device navigator
self._device.set_on_off_button(None) #remove the assignment of the on/off button from the device component
self._device.set_lock_button(None) #remove the assignment of the lock button from the device component
self._device.set_bank_nav_buttons(None, None) #remove the assignment of the navigation buttons from the device component
self._device.set_enabled(False) #turn off the device component
self._session.set_enabled(False) #turn off the session component
self._session_zoom.set_enabled(False) #turn off the zoom component
for index in range(16):
self._grid[index].clear_send_cache() #set the last_sent value of the grid to -1, so that the next value it receives will always be transmitted to the CNTRLR
for index in range(32):
self._button[index].clear_send_cache() #set the last_sent value of the keys to -1, so that the next value it receives will always be transmitted to the CNTRLR
for index in range(12):
self._device._parameter_controls = None
self._encoder[index].release_parameter()
self._encoder[index].send_value(0, True) #turn off all the encoder rings. We send it the second argument, True, so that it is forced to update regardless of its last_sent property
self._encoder[index].clear_send_cache() #set the last_sent value of the encoder rings to -1, so that the next value it receives will always be transmitted to the CNTRLR
for index in range(8):
self._encoder_button[index+4].send_value(0, True) #turn off all the encoder LEDs. We send it the second argument, True, so that it is forced to update regardless of its last_sent property
self._encoder_button[index+4].clear_send_cache() #set the last_sent value of the encoder LEDs to -1, so that the next value it receives will always be transmitted to the CNTRLR
self._session_zoom.set_zoom_button(None) #remove the assignment of the shift button from the ZoomingComponent
self._host._release_mod_dials()
self.request_rebuild_midi_map() #now that we've finished deassigning all of our controls, we tell the main script to rebuild its MIDI map and update the values in Live
def assign_live_controls(self):
"""the following lines update all of the controls' last_sent properties, so that they forward the next value they receive regardless of whether or not it is the same as the last it recieved"""
"""we also reset the encoder rings and buttons, since the device component will not update them if it is not locked to a device in Live"""
for index in range(16):
self._grid[index].clear_send_cache()
for index in range(32):
self._button[index].clear_send_cache()
for index in range(8):
self._encoder_button[index+4].send_value(0, True)
self._encoder_button[index+4].clear_send_cache()
for index in range(8):
self._encoder[index+4].send_value(0, True)
for index in range(12):
self._encoder[index].clear_send_cache()
"""here we assign the top encoders to the mod_dial, if it exists, in any connected mods"""
self.schedule_message(4, self._assign_mod_dials)
"""here we assign the left side of our mixer's buttons on the lower 32 keys"""
for index in range(4): #we set up a recursive loop to assign all four of our track channel strips' controls
self._button[index].set_on_value(SOLO[self._rgb]) #set the solo color from the Map.py
self._mixer.channel_strip(index).set_solo_button(self._button[index]) #assign the solo buttons to our mixer channel strips
self._button[index+4].set_on_value(ARM[self._rgb]) #set the arm color from the Map.py
self._mixer.channel_strip(index).set_arm_button(self._button[index+4]) #assign the arm buttons to our mixer channel strips
self._button[index+16].set_on_value(MUTE[self._rgb]) #set the mute color from the Map.py
self._mixer.channel_strip(index).set_mute_button(self._button[index+16]) #assign the mute buttons to our mixer channel strips
self._button[index+20].set_on_value(SELECT[self._rgb]) #set the select color from the Map.py
self._mixer.channel_strip(index).set_select_button(self._button[index+20]) #assign the select buttons to our mixer channel strips
self._send_reset.set_buttons(tuple(self._button[index + 8] for index in range(4))) #this is yet another way to quickly assign multiple elements conveniently in-place. We are creating a recursion inside an assignment. The tuple() method creates an immutable array. It can't be modified until it gets where it's going and is unpacked.
self._session.set_stop_track_clip_buttons(tuple(self._button[index+24] for index in range(4))) #these last two lines assign the send_reset buttons and the stop_clip buttons for each track
for index in range(4):
self._button[index+8].send_value(SEND_RESET[self._rgb], True) #now we are going to send a message to turn the LEDs on for the send_reset buttons
   self._button[index+24].set_on_off_values(STOP_CLIP[self._rgb], STOP_CLIP[self._rgb]) #this assigns the custom colors defined in the Map.py file to the stop_clip buttons. They have separate on/off values, but we assign them both the same value so we can always identify them
   self._button[index+24].send_value(STOP_CLIP[self._rgb], True) #finally, we send the on/off colors out to turn the LEDs on for the stop clip buttons
  self._button[28].set_on_off_values(PLAY_ON[self._rgb], PLAY[self._rgb]) #assign the on/off colors for play. These are two separate values, dependent upon whether play is engaged or not
self._transport.set_play_button(self._button[28]) #set the transports play control to the corresponding button on the CNTRLR
self._button[30].set_on_off_values(RECORD_ON[self._rgb], RECORD[self._rgb]) #set the on/off colors for the transport record buttons
self._transport.set_record_button(self._button[30]) #assign the correct button for the transport record control
self._button[29].set_on_value(STOP[self._rgb]) #set the on value for the Stop button
self._transport.set_stop_button(self._button[29]) #assign the correct button for the transport stop control
self._button[29].send_value(STOP_OFF[self._rgb], True) #turn on the LED for the stop button
#for index in range(4): #set up a for loop to generate an index for assigning the session nav buttons' colors
# self._button[index + 12].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb]) #assign the colors from Map.py to the session nav buttons
#for index in range(4):
# self._button[index].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb]) #assign the colors from Map.py to the session nav buttons
#self._session.set_track_bank_buttons(self._side[2], self._side[5]) #set the track bank buttons for the Session navigation controls
#self._session.set_scene_bank_buttons(self._side[4], self._side[6]) #set the scnee bank buttons for the Session navigation controls
"""this section assigns the grid to the clip launch functionality of the SessionComponent"""
for column in range(4): #we need to set up a double recursion so that we can generate the indexes needed to assign the grid buttons
for row in range(4): #the first recursion passes the column index, the second the row index
self._scene[row].clip_slot(column).set_launch_button(self._grid[(row*4)+column]) #we use the indexes to grab the first the scene and then the clip we assigned above, and then we use them again to define the button held in the grid array that we want to assign to the clip slot from the session component
"""this section assigns the faders and knobs"""
for index in range(4):
self._mixer.channel_strip(index).set_volume_control(self._fader[index]) #Since we gave our mixer 4 tracks above, we'll now assign our fader controls to it
for index in range(2):
self._mixer.return_strip(index).set_volume_control(self._fader[index+4]) #assign the right faders to control the volume of our return strips
self._mixer.master_strip().set_volume_control(self._fader[7]) #assign the far right fader to control our master channel strip
  self._mixer.set_prehear_volume_control(self._fader[6]) #assign the remaining fader to control the prehear volume of the master channel strip
for track in range(4): #we set up a recursive loop to assign all four of our track channel strips' controls
channel_strip_send_controls = [] #the channelstripcomponent requires that we pass the send controls in an array, so we create a local variable, channel_strip_send_controls, to hold them
for control in range(2): #since we are going to assign two controls to the sends, we create a recursion
channel_strip_send_controls.append(self._dial_left[track + (control * 4)]) #then use the append __builtin__ method to add them to the array
self._mixer.channel_strip(track).set_send_controls(tuple(channel_strip_send_controls)) #now that we have an array containing the send controls, we pass it to the channelstrip component with its set_send_controls() method
self._mixer.channel_strip(track).set_pan_control(self._dial_left[track + 8]) #now we set the pan control to the bottom
   self._mixer.track_eq(track).set_gain_controls(tuple([self._dial_right[track+8], self._dial_right[track+4], self._dial_right[track]])) #here's another way of doing the same thing, but instead of creating the array beforehand, we define it in-place. It's probably bad practice to mix styles like this, but I'll leave it for those of you trying to figure this stuff out
self._mixer.track_eq(track).set_enabled(True) #turn the eq component on
"""this section assigns the encoders and encoder buttons"""
  self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)])) #assign the encoders to the device component controls
self._encoder_button[7].set_on_value(DEVICE_LOCK[self._rgb]) #set the on color for the Device lock encoder button
self._device.set_lock_button(self._encoder_button[7]) #assign encoder button 7 to the device lock control
self._encoder_button[4].set_on_value(DEVICE_ON[self._rgb]) #set the on color for the Device on/off encoder button
  self._device.set_on_off_button(self._encoder_button[4]) #assign encoder button 4 to the device on/off control
  for index in range(2): #set up a recursion to generate indexes so that we can reference the correct controls to assign to the device_navigator functions
self._encoder_button[index + 8].set_on_value(DEVICE_NAV[self._rgb]) #assign the on color for the device navigator
self._encoder_button[index + 10].set_on_value(DEVICE_BANK[self._rgb]) #assign the on color for the device bank controls
self._device_navigator.set_device_nav_buttons(self._encoder_button[10], self._encoder_button[11]) #set the device navigators controls to encoder buttons 10 and 11
self._device.set_bank_nav_buttons(self._encoder_button[8], self._encoder_button[9]) #set the device components bank nav controls to encoder buttons 8 and 9
self._session_zoom.set_zoom_button(self._button[31]) #assign the lower right key button to the shift function of the Zoom component
"""now we turn on and update some of the components we've just made assignments to"""
self._device.set_enabled(True) #enable the Device Component
self._device_navigator.set_enabled(True) #enable the Device Navigator
self._session.set_enabled(True) #enable the Session Component
self._session_zoom.set_enabled(True) #enable the Session Zoom
  self._device.update() #tell the Device component to update its assignments so that it will detect the currently selected device parameters and display them on the encoder rings
self._session.update() #tell the Session component to update so that the grid will display the currently selected session region
"""this assigns the CNTRLR's controls on for 4th empty modSlot"""
"""these assignments mirror the main section; commenting is restricted to the differences"""
def assign_chopper_controls(self):
"""the following lines update all of the controls' last_sent properties, so that they forward the next value they receive regardless of whether or not it is the same as the last it recieved"""
"""we also reset the encoder rings and buttons, since the device component will not update them if it is not locked to a device in Live"""
for index in range(16):
self._grid[index].clear_send_cache()
for index in range(32):
self._button[index].clear_send_cache()
for index in range(8):
self._encoder_button[index+4].send_value(0, True)
self._encoder_button[index+4].clear_send_cache()
for index in range(12):
self._encoder[index].send_value(0, True)
self._encoder[index].clear_send_cache()
"""here we assign the top encoders to the mod_dial, if it exists, in any connected mods"""
self.schedule_message(4, self._assign_mod_dials)
"""the following lines differ from the assignments in self.assign_live_controls()"""
"""the assignments merely moving certain elements from their original positions"""
for index in range(4):
self._button[index].set_on_value(MUTE[self._rgb])
self._mixer.channel_strip(index).set_mute_button(self._button[index])
self._button[index+4].set_on_value(SELECT[self._rgb])
self._mixer.channel_strip(index).set_select_button(self._button[index+4])
self._session.set_stop_track_clip_buttons(tuple(self._button[index+8] for index in range(4)))
for index in range(4):
self._button[index + 8].set_on_off_values(STOP_CLIP[self._rgb], STOP_CLIP[self._rgb])
self._button[index+8].send_value(STOP_CLIP[self._rgb], True)
#for index in range(4):
# self._button[index + 12].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
#self._session.set_scene_bank_buttons(self._button[13], self._button[12])
#self._session.set_track_bank_buttons(self._button[15], self._button[14])
#for index in range(4):
# self._side[index].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
#self._session.set_scene_bank_buttons(self._side[4], self._side[6])
#self._session.set_track_bank_buttons(self._side[2], self._side[5])
"""the rest of this method mirrors self._assign_live_controls, comments can be found there"""
for index in range(2):
self._mixer.return_strip(index).set_volume_control(self._fader[index+4])
self._mixer.master_strip().set_volume_control(self._fader[7])
self._mixer.set_prehear_volume_control(self._fader[6])
for track in range(4):
channel_strip_send_controls = []
for control in range(2):
channel_strip_send_controls.append(self._dial_left[track + (control * 4)])
self._mixer.channel_strip(track).set_send_controls(tuple(channel_strip_send_controls))
self._mixer.channel_strip(track).set_pan_control(self._dial_left[track + 8])
   self._mixer.track_eq(track).set_gain_controls(tuple([self._dial_right[track+8], self._dial_right[track+4], self._dial_right[track]]))
self._mixer.track_eq(track).set_enabled(True)
for column in range(4):
for row in range(4):
self._scene[row].clip_slot(column).set_launch_button(self._grid[(row*4)+column])
self._encoder_button[7].set_on_value(DEVICE_LOCK[self._rgb])
self._device.set_lock_button(self._encoder_button[7])
self._encoder_button[4].set_on_value(DEVICE_ON[self._rgb])
self._device.set_on_off_button(self._encoder_button[4])
for index in range(2):
self._encoder_button[index + 8].set_on_value(DEVICE_NAV[self._rgb])
self._encoder_button[index + 10].set_on_value(DEVICE_BANK[self._rgb])
self._device_navigator.set_device_nav_buttons(self._encoder_button[10], self._encoder_button[11])
self._device.set_bank_nav_buttons(self._encoder_button[8], self._encoder_button[9])
self._device.set_enabled(True)
self._device_navigator.set_enabled(True)
self._session.set_enabled(True)
self._session_zoom.set_enabled(True)
self._device.update()
self._session.update()
self.request_rebuild_midi_map()
"""function mode callbacks"""
def display_mod_colors(self):
for index in range(4): #set up a recursion of 4
self._shift_mode._modes_buttons[index].send_value(self._client[index]._mod_color) #update the modLEDs to display the color assigned to its contained mod
"""this method changes modes when we press a modButton. It is also called from Monomod when it needs to update the modDial assignments"""
def shift_update(self):
#self.log_message('shift_update')
  self.assign_alternate_mappings(0) #first, we remove any channel reassignments we might have made by assigning alternate mappings back to channel 0 (the original channel)
self._chopper.set_enabled(False) #disable the chopper, we will enable it later if we are in chopper mode
for index in range(4): #set up a recursion of 4
self._shift_mode._modes_buttons[index].send_value(self._client[index]._mod_color) #update the modLEDs to display the color assigned to its contained mod
  if self._shift_mode._mode_index == 0: #if the shift mode is 0, meaning we've selected the main script mode:
self._host._set_dial_matrix(None, None) #deassign the Monomod Components dial matrix
#self._host._set_knobs(None)
self._host._set_button_matrix(None) #deassign the Monomod Component's button matrix
self._host._set_key_buttons(None) #deassign the Monomod Component's key matrix
self._host.set_enabled(False) #disable the Monomod Component
self.set_local_ring_control(1) #send sysex to the CNTRLR to put it in local ring mode
self.assign_live_controls() #assign our top level control assignments
  elif CHOPPER_ENABLE and not self._host._client[3].is_connected() and self._shift_mode._mode_index == 4: #if the chopper is enabled, the fourth mod button has been pressed, and there is no mod installed in the fourth slot
self.deassign_live_controls() #deassign the top level assignments
for index in range(4): #set up a recursion of 4
if self._shift_mode._mode_index == (index + 1): #for each recursion, if the recursion number is the same as the shift_mode_index +1
self._shift_mode._modes_buttons[index].send_value(1) #turn on the LED below the modButton
self.schedule_message(4, self._assign_mod_dials)
self._host._set_dial_matrix(None, None) #deassign the Monomod Components dial matrix
self._host._set_button_matrix(None) #deassign the Monomod Component's button matrix
self._host._set_key_buttons(None) #deassign the Monomod Component's key matrix
self._host.set_enabled(False) #disable the Monomod Component
self.set_local_ring_control(1) #send sysex to the CNTRLR to put it in local ring mode
self.assign_chopper_controls() #assign the controls for the Chopper Component
self._chopper.set_enabled(True) #turn the Chopper Component on
else: #otherwise, if we are in modMode
self.deassign_live_controls() #remove all of our assignments from the controls and refresh their caches
self.assign_mixer_controls()
self._host.set_enabled(True) #turn on the Monomod Component
self._host._set_dial_matrix(self._dial_matrix, self._dial_button_matrix) #assign the encoders to it
#self._host._set_knobs(tuple(self._knobs))
self._host._set_button_matrix(self._matrix) #assign the 4x4 to it
self._host._set_key_buttons(tuple(self._button)) #assign the lower buttons to it
self._host._select_client(self._shift_mode._mode_index-1) #select the client corresponding to the button we pressed
self._host.display_active_client() #tell Monomod Component to update the LEDs on the CNTRLR corresponding to the client that is selected
for index in range(4): #set up a recursion for each of our modButtons
if self._shift_mode._mode_index == (index + 1): #if the button is the mode we've chosen
self._shift_mode._modes_buttons[index].send_value(1) #turn the LED white
if not self._host._active_client.is_connected(): #if there is not a mod in the currently selected modSlot
   self.assign_alternate_mappings(self._shift_mode._mode_index) #assign a different MIDI channel for the controls to be translated to when entering Live
def assign_mixer_controls(self):
for index in range(4):
self._mixer.channel_strip(index).set_volume_control(self._fader[index]) #Since we gave our mixer 4 tracks above, we'll now assign our fader controls to it
for index in range(2):
self._mixer.return_strip(index).set_volume_control(self._fader[index+4]) #assign the right faders to control the volume of our return strips
self._mixer.master_strip().set_volume_control(self._fader[7]) #assign the far right fader to control our master channel strip
  self._mixer.set_prehear_volume_control(self._fader[6]) #assign the remaining fader to control the prehear volume of the master channel strip
"""assign alternate mappings to the controls when a modSlot is selected that doesn't contain a mod"""
def assign_alternate_mappings(self, chan):
  chan = min(15, max(chan, 0)) #clamp to a valid zero-based MIDI channel (0-15)
for index in range(8):
self._encoder_button[index + 4].set_channel(chan) #set the contols channel to the methods second argument
   self._encoder_button[index + 4].set_enabled(chan == 0) #if the channel is not 0, we need to disable the control so that it
self._encoder_button[index + 4].force_next_send()
for encoder in self._encoder: #is forwarded to Live, but not used by the script for internal processing
encoder.set_channel(chan)
   encoder.set_enabled(chan == 0)
encoder.force_next_send()
for button in self._button:
button.set_channel(chan)
   button.set_enabled(chan == 0)
button.force_next_send()
for cell in self._grid:
cell.set_channel(chan)
   cell.set_enabled(chan == 0)
cell.force_next_send()
self.request_rebuild_midi_map()
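 """for example, selecting an empty mod slot 2 calls assign_alternate_mappings(2): every remappable control is retransmitted on zero-based MIDI channel 2 and disabled for internal processing, so Live receives it as plain MIDI that can be mapped directly (or grabbed by an m4l patch)"""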
"""reassign the original channel and identifier to all the controls that can be remapped through assign_alternate_mappings"""
def assign_original_mappings(self):
for index in range(8):
self._encoder_button[index + 4].set_channel(self._encoder_button[index + 4]._original_channel)
self._encoder_button[index + 4].set_enabled(True)
self._encoder_button[index + 4].force_next_send()
for encoder in self._encoder:
encoder.set_channel(encoder._original_channel)
encoder.set_enabled(True)
encoder.force_next_send()
for button in self._button:
button.set_channel(button._original_channel)
button.set_enabled(True)
button.force_next_send()
for cell in self._grid:
cell.set_channel(cell._original_channel)
cell.set_enabled(True)
cell.force_next_send()
self.request_rebuild_midi_map()
"""called on timer"""
def update_display(self):
super(Twister, self).update_display() #since we are overriding this from the inherited method, we need to call the original routine as well
  self._timer = (self._timer + 1) % 256 #on each timer tick (roughly every 100 ms), increment self._timer, wrapping back to 0 at 256
if(self._local_ring_control is False): #if local rings are turned off, then we need to send the new values if they've changed
self.send_ring_leds()
self.flash() #call the flash method below
"""this method recurses through all the controls, causing them to flash depending on their stored values"""
def flash(self):
if(self.flash_status > 0):
for control in self.controls:
if isinstance(control, MonoButtonElement):
control.flash(self._timer)
"""m4l bridge"""
"""this is a method taken and modified from the MackieControl scripts"""
"""it takes a display string and modifies it to be a specified length"""
def generate_strip_string(self, display_string):
NUM_CHARS_PER_DISPLAY_STRIP = 12
if (not display_string):
return (' ' * NUM_CHARS_PER_DISPLAY_STRIP)
if ((len(display_string.strip()) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.endswith('dB') and (display_string.find('.') != -1))):
display_string = display_string[:-2]
if (len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)):
for um in [' ',
'i',
'o',
'u',
'e',
'a']:
while ((len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.rfind(um, 1) != -1)):
um_pos = display_string.rfind(um, 1)
display_string = (display_string[:um_pos] + display_string[(um_pos + 1):])
else:
display_string = display_string.center((NUM_CHARS_PER_DISPLAY_STRIP - 1))
ret = u''
for i in range((NUM_CHARS_PER_DISPLAY_STRIP - 1)):
if ((ord(display_string[i]) > 127) or (ord(display_string[i]) < 0)):
ret += ' '
else:
ret += display_string[i]
ret += ' '
ret = ret.replace(' ', '_')
assert (len(ret) == NUM_CHARS_PER_DISPLAY_STRIP)
return ret
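 """for instance, generate_strip_string('Track Volume') drops the space (the rightmost strippable character) to reach 11 characters, then appends the trailing pad and swaps spaces for underscores, yielding 'TrackVolume_'"""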
"""this method forwards display information from control elements to the LCD patch"""
def notification_to_bridge(self, name, value, sender):
if(isinstance(sender, (MonoEncoderElement, CodecEncoderElement))):
pn = str(self.generate_strip_string(name))
pv = str(self.generate_strip_string(value))
self._monobridge._send(sender.name, 'lcd_name', pn)
self._monobridge._send(sender.name, 'lcd_value', pv)
"""this method regulates parameter values from being sent on updates if the control has not actually been changed"""
def touched(self):
  if self._touched == 0:
   self._monobridge._send('touch', 'on')
   self.schedule_message(2, self.check_touch)
  self._touched += 1
"""this method is called by the LCD patch to determine whether any controls have been changed"""
def check_touch(self):
if self._touched > 5:
self._touched = 5
elif self._touched > 0:
self._touched -= 1
   if self._touched == 0:
self._monobridge._send('touch', 'off')
else:
self.schedule_message(2, self.check_touch)
"""this is an unnused method. It provides a way to retrieve all the clip names belonging to the current session views clips"""
def get_clip_names(self):
clip_names = []
for scene in self._session._scenes:
for clip_slot in scene._clip_slots:
if clip_slot.has_clip() is True:
clip_names.append(clip_slot._clip_slot)##.clip.name)
return clip_slot._clip_slot
return clip_names
"""midi functionality"""
"""this method needs to be here so that Live knows what to do (nothing, in this case) when it receives sysex from the CNTRLR"""
def handle_sysex(self, midi_bytes):
pass
"""this method can be linked to from m4l, and provides a way to update the parameter value of an assigned DeviceComponent parameter control"""
def to_encoder(self, num, val):
rv=int(val*127)
self._device._parameter_controls[num].receive_value(rv)
p = self._device._parameter_controls[num]._parameter_to_map_to
newval = (val * (p.max - p.min)) + p.min
p.value = newval
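 """the scaling above maps a normalized m4l value onto the parameter's native range: newval = val * (p.max - p.min) + p.min, so val = 0.5 on a parameter spanning -1.0..1.0 lands at 0.0, while rv = int(val * 127) mirrors the same position back to the assigned encoder"""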
"""this method sets the instance variable for local ring control, and sends the appropriate sysex string to change states on the CNTRLR"""
def set_local_ring_control(self, val = 1):
self._local_ring_control = (val!=0)
if(self._local_ring_control is True):
self._send_midi(tuple([240, 0, 1, 97, 8, 32, 0, 247]))
else:
self._send_midi(tuple([240, 0, 1, 97, 8, 32, 1, 247]))
"""this method sets the instance variable for absolute encoder changes, and sends the appropriate sysex string to change states on the CNTRLR"""
def set_absolute_mode(self, val = 1):
self._absolute_mode = (val!=0)
if self._absolute_mode is True:
self._send_midi(tuple([240, 0, 1, 97, 8, 17, 0, 0, 0, 0, 0, 0, 0, 0, 247]))
else:
self._send_midi(tuple([240, 0, 1, 97, 8, 17, 127, 127, 127, 127, 127, 127, 127, 127, 247]))
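 """both sysex strings above share the header (240, 0, 1, 97, 8, ...) - F0 followed by what appears to be Livid's manufacturer ID (00 01 61) plus device and command bytes - and terminate with 247 (F7); only the payload bytes differ between the two states"""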
"""this method is used to update the individual elements of the encoder rings when the CNTRLR is in local ring control mode"""
def send_ring_leds(self):
if self._host._is_enabled == True:
leds = [240, 0, 1, 97, 8, 31]
for index in range(12):
wheel = self._encoder[index]
bytes = wheel._get_ring()
leds.append(bytes[0])
leds.append(int(bytes[1]) + int(bytes[2]))
leds.append(247)
self._send_midi(tuple(leds))
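 """message layout, as built above: the header (240, 0, 1, 97, 8, 31) is followed by two bytes per encoder - the first byte returned by _get_ring(), then the sum of its second and third bytes - and the stream is closed with 247"""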
def _release_mod_dials(self):
if not self._client is None:
for client in self._client: #for each of our 4 clients:
if not client._mod_dial == None: #if the client has a modDial assigned to it
client._mod_dial.release_parameter() #remove the modDial's parameter assignment
def _assign_mod_dials(self):
if not self._client is None:
for client in self._client: #recursion to contain all available clients
param = client._mod_dial_parameter() #param is a local variable, and we assign its value to the mod_dial_parameter (this is handled by each individual client module)
if not client._mod_dial == None: #if the client has been assigned a mod dial (which it should have been in setup_mod() )
if not param == None: #if the param variable was properly assigned in the client module
client._mod_dial.connect_to(param) #connect the physical control to the parameter (this should be the moddial parameter in the m4l patch)
else:
client._mod_dial.release_parameter() #if the param was None, release the physical control from any assignments
self.request_rebuild_midi_map()
"""general functionality"""
"""this method is called by Live when it needs to disconnect. It's very important that any observers that were set up in the script are removed here"""
def disconnect(self):
"""clean things up on disconnect"""
if self.song().view.selected_track_has_listener(self._update_selected_device):
self.song().view.remove_selected_track_listener(self._update_selected_device)
self._hosts = []
self.log_message("<<<<<<<<<<<<<<<<<<<<<<<<< " + str(self._host_name) + " log closed >>>>>>>>>>>>>>>>>>>>>>>>>") #Create entry in log file
  super(Twister, self).disconnect()
"""this provides a hook that can be called from m4l to change the DeviceComponent's behavior"""
def device_follows_track(self, val):
self._device_selection_follows_track_selection = (val == 1)
return self
"""this is a customizationo of the inherited behavior of ControlSurface"""
def _update_selected_device(self):
		if self._device_selection_follows_track_selection:
track = self.song().view.selected_track
device_to_select = track.view.selected_device
			if device_to_select is None and len(track.devices) > 0:
device_to_select = track.devices[0]
			if device_to_select is not None:
self.song().view.select_device(device_to_select)
#self._device.set_device(device_to_select)
self.set_appointed_device(device_to_select)
#self._device_selector.set_enabled(True)
self.request_rebuild_midi_map()
return None
"""this provides a hook to get the current tracks length from other modules"""
def _get_num_tracks(self):
return self.num_tracks
"""device component methods and overrides"""
"""this closure replaces the default DeviceComponent update() method without requiring us to build an override class"""
"""it calls the _update_selected_device method of this script in addition to its normal routine"""
"""it also ensures a rebuilt midi_map; for some reason the Abe's pulled that part out of the post 8.22 scripts, and under certain circumstances"""
"""things don't work as expected anymore."""
def _device_update(self, device):
def _update():
#for client in self._client:
# if (device._device != None) and (client.device == device._device):
# device._bank_index = max(client._device_component._cntrl_offset, device._bank_index)
DeviceComponent.update(device)
self.request_rebuild_midi_map()
return _update
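	# The same closure-patching idiom can be applied to other _Framework
	# components; a minimal sketch (the names _make_update, SomeComponent and
	# do_extra_work are illustrative, not part of this script):
	#
	# def _make_update(component):
	# 	def _update():
	# 		SomeComponent.update(component)	#stock behavior first
	# 		do_extra_work()	#then the custom additions
	# 	return _update
	#
	# component.update = _make_update(component)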
def _device_set_device(self, device_component):
def set_device(device):
is_monodevice = False
for client in self._client:
if (device != None) and (client.device == device):
is_monodevice = client
if is_monodevice != False:
#device = client._device_component._device
self.log_message('is monodevice' + str(device.name))
assert ((device == None) or isinstance(device, Live.Device.Device))
if ((not device_component._locked_to_device) and (device != device_component._device)):
if (device_component._device != None):
device_component._device.remove_name_listener(device_component._on_device_name_changed)
device_component._device.remove_parameters_listener(device_component._on_parameters_changed)
parameter = device_component._on_off_parameter()
if (parameter != None):
parameter.remove_value_listener(device_component._on_on_off_changed)
if (device_component._parameter_controls != None):
for control in device_component._parameter_controls:
control.release_parameter()
device_component._device = device
if (device_component._device != None):
device_component._bank_index = 0
device_component._device.add_name_listener(self._on_device_name_changed)
device_component._device.add_parameters_listener(self._on_parameters_changed)
parameter = device_component._on_off_parameter()
if (parameter != None):
parameter.add_value_listener(device_component._on_on_off_changed)
for key in device_component._device_bank_registry.keys():
if (key == device_component._device):
device_component._bank_index = device_component._device_bank_registry.get(key, 0)
del device_component._device_bank_registry[key]
break
device_component._bank_name = '<No Bank>' #added
device_component._bank_index = max(is_monodevice._cntrl_offset, device_component._bank_index)
device_component._on_device_name_changed()
device_component.update()
else:
DeviceComponent.set_device(device_component, device)
return set_device
"""this closure replaces the default ChannelStripComponent _on_cf_assign_changed() method without requiring us to build an override class"""
"""it allows us to change different colors to its assigned controls based on the crossfade assignment, which the default _Framework doesn't support"""
def mixer_on_cf_assign_changed(self, channel_strip):
def _on_cf_assign_changed():
if (channel_strip.is_enabled() and (channel_strip._crossfade_toggle != None)):
if (channel_strip._track != None) and (channel_strip._track in (channel_strip.song().tracks + channel_strip.song().return_tracks)):
if channel_strip._track.mixer_device.crossfade_assign == 1: #modified
channel_strip._crossfade_toggle.turn_off()
elif channel_strip._track.mixer_device.crossfade_assign == 0:
channel_strip._crossfade_toggle.send_value(1)
else:
channel_strip._crossfade_toggle.send_value(2)
return _on_cf_assign_changed
"""a closure fix for banking when we deassign the bank buttons and still want to change bank indexes"""
def device_is_banking_enabled(self, device):
def _is_banking_enabled():
return True
return _is_banking_enabled
# a | apache-2.0 | -6,756,718,938,802,655,000 | 51.451333 | 368 | 0.711958 | false |
zenwarr/microhex | src/hex/struct.py | 1 | 1291 | class AbstractDataType(object):
def __init__(self, name):
self.name = name
self.fixedSize = True
def parse(self, cursor):
"""Should return Value structure"""
raise NotImplementedError()
class Integer(AbstractDataType):
    def __init__(self, binary_format=None, signed=True):
        self.binaryFormat = binary_format
        self.signed = signed
def parse(self, cursor):
return struct.unpack(...)
class ZeroString(AbstractDataType):
def __init__(self, encoding):
        self.encoding = encoding
def parse(self, cursor):
offset = 0
while not cursor.atEnd(offset) and cursor[offset] != 0:
            offset += 1
return self.fromEncoding(cursor[:offset])
class PascalString(AbstractDataType):
def __init__(self, encoding):
        self.encoding = encoding
def parse(self, cursor):
string_length = Integer(signed=False).parse(cursor).value
return self.fromEncoding(cursor[:string_length])
class Win32_UnicodeString(AbstractDataType):
pass
class Enumeration(AbstractDataType):
def __init__(self, primary_type, members):
        self.primaryType = primary_type
        self.members = members
def parse(self, cursor):
value = self.primaryType.parse(cursor).value
if value in self.members:
return self.members[value]
class Structure(AbstractDataType):
def __init__(self, members):
        self.members = members
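

# A rough usage sketch for the draft parsers above. These classes are a design
# sketch: 'cursor' is assumed to support atEnd(), indexing and slicing, and
# fromEncoding() is assumed to decode raw bytes using the stored encoding.
#
#   parser = ZeroString('utf-8')
#   value = parser.parse(cursor)  # consumes bytes up to the first NUL byte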
| mit | -4,922,127,411,357,640,000 | 22.053571 | 65 | 0.630519 | false |
bitmazk/django-multilingual-news | multilingual_news/south_migrations/0014_auto__add_field_newsentry_author_user.py | 1 | 15887 | # flake8: noqa
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
no_dry_run = True
def forwards(self, orm):
# Adding field 'NewsEntry.author_user'
db.add_column(u'multilingual_news_newsentry', 'author_user',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='user_entries', null=True, to=orm['auth.User']),
keep_default=False)
        for entry in orm['multilingual_news.NewsEntry'].objects.all():
            entry.author_user = entry.author
            entry.save()
def backwards(self, orm):
        for entry in orm['multilingual_news.NewsEntry'].objects.all():
            entry.author = entry.author_user
            entry.save()
# Deleting field 'NewsEntry.author_user'
db.delete_column(u'multilingual_news_newsentry', 'author_user_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'multilingual_news.category': {
'Meta': {'object_name': 'Category'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['multilingual_news.Category']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '512'})
},
u'multilingual_news.categoryplugin': {
'Meta': {'object_name': 'CategoryPlugin', '_ormbases': ['cms.CMSPlugin']},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['multilingual_news.Category']", 'symmetrical': 'False'}),
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'template_argument': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'multilingual_news.categorytranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'CategoryTranslation', 'db_table': "u'multilingual_news_category_translation'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['multilingual_news.Category']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'multilingual_news.newsentry': {
'Meta': {'ordering': "('-pub_date',)", 'object_name': 'NewsEntry'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_entries'", 'null': 'True', 'to': u"orm['auth.User']"}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'newsentries'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['multilingual_news.Category']"}),
'content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'multilingual_news_contents'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'excerpt': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'multilingual_news_excerpts'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'blank': 'True'}),
'image_float': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'image_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'image_source_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_source_url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'multilingual_news.newsentrytranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'NewsEntryTranslation', 'db_table': "u'multilingual_news_newsentry_translation'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['multilingual_news.NewsEntry']"}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'meta_title': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '512'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
u'multilingual_news.recentplugin': {
'Meta': {'object_name': 'RecentPlugin', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'current_language_only': ('django.db.models.fields.BooleanField', [], {}),
'limit': ('django.db.models.fields.PositiveIntegerField', [], {})
}
}
complete_apps = ['multilingual_news']
| mit | 4,539,886,842,019,949,000 | 83.957219 | 218 | 0.563102 | false |
bit0001/chumme | database_manager/interest_manager.py | 1 | 1225 | from sqlite3 import IntegrityError, OperationalError
from .db_context_manager import DBContextManager
QUERIES = {
'insert_interest':
"""
INSERT INTO interests
(interest)
VALUES
(?)
""",
'select_interest_id':
"""
SELECT id FROM interests
WHERE interest = ?
""",
'select_all_interests':
"""
SELECT interest FROM interests
ORDER BY interest
"""
}
class InterestManager:
def __init__(self, db_path: str):
self.db_path = db_path
def get_interests(self):
with DBContextManager(self.db_path) as cursor:
try:
cursor.execute(QUERIES['select_all_interests'])
except OperationalError:
return []
else:
return [row[0] for row in cursor.fetchall()]
def add_interest(self, interest: str):
with DBContextManager(self.db_path) as cursor:
cursor.execute(QUERIES['insert_interest'], (interest,))
def get_interest_id(self, interest) -> int:
with DBContextManager(self.db_path) as cursor:
cursor.execute(QUERIES['select_interest_id'],
(interest,))
return cursor.fetchone()[0]
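

# A minimal usage sketch (assumes a SQLite database file 'chumme.db' whose
# 'interests' table already exists; schema creation happens elsewhere in the
# project, so both the path and the preexisting table are assumptions here):
if __name__ == '__main__':
    manager = InterestManager('chumme.db')
    try:
        manager.add_interest('python')
    except IntegrityError:
        pass  # the interest was already stored
    print(manager.get_interests())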
| apache-2.0 | 434,678,613,978,248,770 | 25.06383 | 67 | 0.582857 | false |
gabberthomson/fm_finpy | ois_products.py | 1 | 4520 | from date_conventions import *
class OvernightIndexSwap:
''' We define the product by its:
- startDate
- endDate
- floatingLegNominal: the nominal used to compute the flows of the floating leg:
if positive the flows are received, negative paid
- fixedLegDates: the union of the start and end dates of all of the flows paid by the fixed leg (each flow has two
dates needed to computed the accrual period, the end date of the i-th flow coincides with the start
date of the i+1-th flow)
- fixedRate: the coupon paid/received in the fixed leg
- fixedLegNominal: the nominal used to compute the flows of the floating leg:
if positive the flows are received, negative paid
'''
def __init__(self, startDate, endDate, floatingLegNominal, fixedLegDates, fixedRate, fixedLegNominal):
# we want opposite signs for the two nominals: if one leg is paid, the other is received
        # if this is not the case, we raise an error that stops the program
if floatingLegNominal * fixedLegNominal > 0:
raise "Nominal must have opposite sign"
# store the input variables
self.startDate = startDate
self.endDate = endDate
self.fixedRate = fixedRate
self.fixedLegDates = fixedLegDates
self.floatingLegNominal = floatingLegNominal
self.fixedLegNominal = fixedLegNominal
# With this method we compute the value of the floating leg at the observation date of the discount curve
def npv_floating_leg(self, discountCurve):
        # this formula comes from the fact that for OIS the valuation method is still the same as in
        # the "old" single-curve world, where one curve serves both forward rate estimation and flow discounting
floatingleg_npv = discountCurve.df(self.startDate) - discountCurve.df(self.endDate)
        # We multiply the result by the nominal before returning it
return floatingleg_npv * self.floatingLegNominal
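    # Why two discount factors suffice: the floating leg compounds the overnight
    # rate over [startDate, endDate], and under single-curve pricing the sum of
    # its discounted flows telescopes to
    #   NPV_float = P(0, startDate) - P(0, endDate)
    # with P(0, T) the discount factor for maturity T.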
def npv_fixed_leg(self, discountCurve):
# we now evaluate the fixed leg
fixed_npv = 0
for i in range(len(self.fixedLegDates) - 1):
startPeriod = self.fixedLegDates[i]
endPeriod = self.fixedLegDates[i+1]
tau = dc_act360(startPeriod, endPeriod)
df = discountCurve.df(endPeriod)
fixed_npv = fixed_npv + df * tau * self.fixedRate
        # We multiply the result by the nominal before returning it
return fixed_npv * self.fixedLegNominal
def npv(self, discountCurve):
# the npv is the sum of the floating and fixed leg values (taken with their sign)
floatingleg_npv = self.npv_floating_leg(discountCurve)
fixed_npv = self.npv_fixed_leg(discountCurve)
# we sum them (the nominal have opposite sign)
npv = fixed_npv + floatingleg_npv
return npv
# This function just makes life easier; it allows to create a standard OIS with less
# parameters because it uses some common conventions:
# - startDate: the start date of the swap
# - maturity: the maturity of the swap expressed as a number of months (2 years: 24 months)
# - fixedTenor: the frequency of the fixed leg expressed in months: semi-annual payments -> fixedTenor = 6
# Market convention is 12 months
# - nominal: the absolute value nominal of the swap (1 is 1 Eur for example)
# - swapType: a string that can be "receiver" (it means that the fixed rate is received) or payer
def buildOIS(startDate, maturity, fixedTenor, fixedRate, nominal = 1, swapType = "receiver"):
endDate = startDate + relativedelta(months = maturity)
fixedLegDates = dates_generator(fixedTenor, startDate, endDate)
if swapType == "receiver":
fixedLegNominal = nominal
floatingLegNominal = - nominal
elif swapType == "payer":
fixedLegNominal = - nominal
floatingLegNominal = nominal
else:
raise "SwapType not supported"
ois = OvernightIndexSwap(startDate, endDate, floatingLegNominal, fixedLegDates, fixedRate, fixedLegNominal)
return ois
from ir_curves import DiscountCurve
if __name__ == '__main__':
obsdate = date(2010,1,1)
pillars = [date(2011,1,1), date(2012,1,1)]
dfs = [0.9, 0.8]
dc = DiscountCurve(obsdate, pillars, dfs)
# we build an Overnight Index Swap with 1 year maturity and strike 8%
startSwap = date(2010,2,1)
maturity = 12
ois = buildOIS(startSwap, maturity, 12, 0.08)
print "Swap NPV:", ois.npv(dc)
| mit | -8,323,141,087,552,210,000 | 46.578947 | 120 | 0.685619 | false |
lichengshuang/createvhost | python/others/others/scan.py | 1 | 2273 | #!/usr/bin/python
#-*- coding:utf-8 -*-
import nmap
import re
import mytools as tool
import sys
from multiprocessing import Pool
from functools import partial
reload(sys)
sys.setdefaultencoding('utf8')
def nmScan(host,portrange,whitelist):
p = re.compile("^(\d*)\-(\d*)$")
# if type(hostlist) != list:
# help()
portmatch = re.match(p,portrange)
    if not portmatch:
        help()
        return None
if host == '121.42.32.172':
whitelist = [25,]
result = ''
nm = nmap.PortScanner()
tmp = nm.scan(host,portrange)
result = result + "<h2>ip地址:%s 主机名:[%s] ...... %s</h2><hr>" %(host,tmp['scan'][host]['hostname'],tmp['scan'][host]['status']['state'])
try:
ports = tmp['scan'][host]['tcp'].keys()
for port in ports:
info = ''
if port not in whitelist:
                info = '<strong><font color=red>Alert: unexpected port</font></strong> '
else:
                info = '<strong><font color=green>Info: expected open port</font></strong> '
portinfo = "%s <strong>port</strong> : %s <strong>state</strong> : %s <strong>product<strong/> : %s <br>" %(info,port,tmp['scan'][host]['tcp'][port]['state'], tmp['scan'][host]['tcp'][port]['product'])
result = result + portinfo
except KeyError,e:
if whitelist:
            whitestr = ','.join(str(port) for port in whitelist)
result = result + "未扫到开放端口!请检查%s端口对应的服务状态" %whitestr
else:
result = result + "扫描结果正常,无暴漏端口"
return result
def help():
print "Usage: nmScan(['127.0.0.1',],'0-65535')"
return None
if __name__ == "__main__":
hostlist = ['115.231.79.2']
pool = Pool(5)
nmargu = partial(nmScan,portrange='0-65535',whitelist=[])
results = pool.map(nmargu,hostlist)
#send email
sender = '[email protected]'
receiver = ['[email protected]',]
    subject = 'Server port scan'
smtpserver = 'smtp.exmail.qq.com'
smtpuser = '[email protected]'
smtppass = 'gccmx163'
mailcontent = '<br>'.join(results)
tool.sendemail(sender,receiver,subject,mailcontent,smtpserver,smtpuser,smtppass)
| apache-2.0 | -457,130,135,378,470,200 | 36.982456 | 349 | 0.553349 | false |
ubuntunux/PyEngine3D | PyEngine3D/Render/RenderInfo.py | 1 | 2991 | import math
from PyEngine3D.Utilities import *
def always_pass(*args):
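    # never culls anything: returning False means "keep rendering this object"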
return False
def cone_sphere_culling_actor(camera, actor):
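    # cone/sphere test: returns True (cull) when the actor's bounding sphere
    # lies entirely outside the camera's view cone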
to_actor = actor.transform.pos - camera.transform.pos
dist = length(to_actor)
if 0.0 < dist:
to_actor /= dist
rad = math.acos(np.dot(to_actor, -camera.transform.front)) - camera.half_cone
projected_dist = dist * math.sin(rad)
radius = actor.model.mesh.radius * max(actor.transform.scale)
if 0.0 < rad and radius < projected_dist:
return True
elif HALF_PI < rad and radius < dist:
return True
return False
def view_frustum_culling_geometry(camera, light, actor, geometry_bound_box):
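    # tests the bound sphere against the four side planes of the view frustum
    # (frustum_vectors are taken to be outward-facing plane normals); returns
    # True (cull) when the sphere is completely outside one of them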
to_geometry = geometry_bound_box.bound_center - camera.transform.pos
for i in range(4):
d = np.dot(camera.frustum_vectors[i], to_geometry)
if geometry_bound_box.radius < d:
return True
return False
def shadow_culling(camera, light, actor, geometry_bound_box):
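    # projects the bound box extremes into the light's clip space; returns
    # True (cull) when the projected box falls entirely outside the [-1, 1]
    # normalized range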
bound_min = np.dot(np.array([geometry_bound_box.bound_min[0], geometry_bound_box.bound_min[1], geometry_bound_box.bound_min[2], 1.0], dtype=np.float32), light.shadow_view_projection)[: 3]
bound_max = np.dot(np.array([geometry_bound_box.bound_max[0], geometry_bound_box.bound_max[1], geometry_bound_box.bound_max[2], 1.0], dtype=np.float32), light.shadow_view_projection)[: 3]
minimum = np.minimum(bound_min, bound_max)
maximum = np.maximum(bound_min, bound_max)
if any(x < -1.0 for x in maximum) or any(1.0 < x for x in minimum):
return True
return False
def gather_render_infos(culling_func, camera, light, actor_list, solid_render_infos, translucent_render_infos):
for actor in actor_list:
for i in range(actor.get_geometry_count()):
if not actor.visible:
continue
if culling_func(camera, light, actor, actor.get_geometry_bound_box(i)):
continue
material_instance = actor.get_material_instance(i)
render_info = RenderInfo()
render_info.actor = actor
render_info.geometry = actor.get_geometry(i)
render_info.geometry_data = actor.get_geometry_data(i)
render_info.gl_call_list = actor.get_gl_call_list(i)
render_info.material = material_instance.material if material_instance else None
render_info.material_instance = material_instance
if render_info.material_instance is not None and render_info.material_instance.is_translucent():
if translucent_render_infos is not None:
translucent_render_infos.append(render_info)
elif solid_render_infos is not None:
solid_render_infos.append(render_info)
class RenderInfo:
def __init__(self):
self.actor = None
self.geometry = None
self.geometry_data = None
self.gl_call_list = None
self.material = None
self.material_instance = None
| bsd-2-clause | 8,852,743,141,399,126,000 | 37.844156 | 191 | 0.647944 | false |
MarkusHackspacher/unknown-horizons | horizons/gui/tabs/tabwidget.py | 1 | 5897 | # ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# [email protected]
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ###################################################
import logging
import traceback
import weakref
from fife.extensions.pychan.widgets import Container, Icon
from horizons.gui.util import load_uh_widget
from horizons.gui.widgets.imagebutton import ImageButton
from horizons.util.changelistener import metaChangeListenerDecorator
from horizons.util.python.callback import Callback
@metaChangeListenerDecorator('remove')
class TabWidget:
"""The TabWidget class handles widgets which consist of many
different tabs(subpanels, switchable via buttons(TabButtons).
"""
log = logging.getLogger("gui.tabs.tabwidget")
def __init__(self, ingame_gui, tabs=None, name=None, active_tab=None):
"""
@param ingame_gui: IngameGui instance
@param tabs: tab instances to show
@param name: optional name for the tabwidget
@param active_tab: int id of tab, 0 <= active_tab < len(tabs)
"""
super().__init__() #TODO: check if this call is needed
self.name = name
self.ingame_gui = ingame_gui
self._tabs = [] if not tabs else tabs
self.current_tab = self._tabs[0] # Start with the first tab
self.current_tab.ensure_loaded() # loading current_tab widget
self.widget = load_uh_widget("tab_base.xml")
self.widget.position_technique = 'right-239:top+209'
self.content = self.widget.findChild(name='content')
self._init_tab_buttons()
# select a tab to show (first one is default)
if active_tab is not None:
self.show_tab(active_tab)
def _init_tab_buttons(self):
"""Add enough tabbuttons for all widgets."""
def on_tab_removal(tabwidget):
# called when a tab is being removed (via weakref since tabs shouldn't have references to the parent tabwidget)
# If one tab is removed, the whole tabwidget will die..
			# This is usually the desired behavior.
if tabwidget():
tabwidget().on_remove()
# Load buttons
for index, tab in enumerate(self._tabs):
			# don't keep a hard reference to the tabwidget in the tab callback (hence the weakref below)
tab.add_remove_listener(Callback(on_tab_removal, weakref.ref(self)))
container = Container(name="container_{}".format(index))
background = Icon(name="bg_{}".format(index))
button = ImageButton(name=str(index), size=(50, 50))
if self.current_tab is tab:
background.image = tab.button_background_image_active
button.path = tab.path_active
else:
background.image = tab.button_background_image
button.path = tab.path
button.capture(Callback(self.show_tab, index))
if hasattr(tab, 'helptext') and tab.helptext:
button.helptext = tab.helptext
container.size = (50, 52)
container.addChild(background)
container.addChild(button)
self.content.addChild(container)
self.widget.size = (54, 55 * len(self._tabs))
self.widget.adaptLayout()
self._apply_layout_hack()
def show_tab(self, number):
"""Used as callback function for the TabButtons.
@param number: tab number that is to be shown.
"""
if number >= len(self._tabs):
# this usually indicates a non-critical error, therefore we can handle it without crashing
traceback.print_stack()
self.log.warning("Invalid tab number %s, available tabs: %s", number, self._tabs)
return
if self.current_tab.is_visible():
self.current_tab.hide()
new_tab = self._tabs[number]
old_bg = self.content.findChild(name="bg_{}".format(self._tabs.index(self.current_tab)))
old_bg.image = self.current_tab.button_background_image
name = str(self._tabs.index(self.current_tab))
old_button = self.content.findChild(name=name)
old_button.path = self.current_tab.path
new_bg = self.content.findChild(name="bg_{}".format(number))
new_bg.image = self.current_tab.button_background_image_active
new_button = self.content.findChild(name=str(number))
new_button.path = new_tab.path_active
self.current_tab = new_tab
# important to display the tabs correctly in front
self.widget.hide()
self.show()
self._apply_layout_hack()
def _apply_layout_hack(self):
# pychan layouting depends on time, it's usually in a better mood later.
# this introduces some flickering, but fixes #916
from horizons.extscheduler import ExtScheduler
def do_apply_hack():
# just query widget when executing, since if lazy loading is used, the widget
# does not exist yet in the outer function
self.current_tab.widget.adaptLayout()
ExtScheduler().add_new_object(do_apply_hack, self, run_in=0)
def _draw_widget(self):
"""Draws the widget, but does not show it automatically"""
self.current_tab.position = (self.widget.position[0] + self.widget.size[0] - 11,
self.widget.position[1] - 52)
self.current_tab.refresh()
def show(self):
"""Show the current widget"""
# show before drawing so that position_technique properly sets
# button positions (which we want to draw our tabs relative to)
self.widget.show()
self._draw_widget()
self.current_tab.show()
self.ingame_gui.minimap_to_front()
def hide(self, caller=None):
"""Hides current tab and this widget"""
self.current_tab.hide()
self.widget.hide()
| gpl-2.0 | 8,626,222,870,787,321,000 | 37.542484 | 114 | 0.709852 | false |
brclark-usgs/flopy | flopy/utils/reference.py | 1 | 57544 | """
Module spatial referencing for flopy model objects
"""
import sys
import os
import numpy as np
import warnings
class SpatialReference(object):
"""
a class to locate a structured model grid in x-y space
Parameters
----------
delr : numpy ndarray
the model discretization delr vector
(An array of spacings along a row)
delc : numpy ndarray
the model discretization delc vector
(An array of spacings along a column)
lenuni : int
the length units flag from the discretization package
(default 2)
xul : float
the x coordinate of the upper left corner of the grid
Enter either xul and yul or xll and yll.
yul : float
the y coordinate of the upper left corner of the grid
Enter either xul and yul or xll and yll.
xll : float
the x coordinate of the lower left corner of the grid
Enter either xul and yul or xll and yll.
yll : float
the y coordinate of the lower left corner of the grid
Enter either xul and yul or xll and yll.
rotation : float
the counter-clockwise rotation (in degrees) of the grid
proj4_str: str
a PROJ4 string that identifies the grid in space. warning: case
sensitive!
units : string
Units for the grid. Must be either feet or meters
epsg : int
EPSG code that identifies the grid in space. Can be used in lieu of
proj4. PROJ4 attribute will auto-populate if there is an internet
connection(via get_proj4 method).
See https://www.epsg-registry.org/ or spatialreference.org
length_multiplier : float
multiplier to convert model units to spatial reference units.
delr and delc above will be multiplied by this value. (default=1.)
Attributes
----------
xedge : ndarray
array of column edges
yedge : ndarray
array of row edges
xgrid : ndarray
numpy meshgrid of xedges
ygrid : ndarray
numpy meshgrid of yedges
xcenter : ndarray
array of column centers
ycenter : ndarray
array of row centers
xcentergrid : ndarray
numpy meshgrid of column centers
ycentergrid : ndarray
numpy meshgrid of row centers
vertices : 1D array
1D array of cell vertices for whole grid in C-style (row-major) order
(same as np.ravel())
Notes
-----
xul and yul can be explicitly (re)set after SpatialReference
instantiation, but only before any of the other attributes and methods are
accessed
"""
xul, yul = None, None
xll, yll = None, None
rotation = 0.
length_multiplier = 1.
origin_loc = 'ul' # or ll
defaults = {"xul": None, "yul": None, "rotation": 0.,
"proj4_str": None,
"units": None, "lenuni": 2, "length_multiplier": None}
lenuni_values = {'undefined': 0,
'feet': 1,
'meters': 2,
'centimeters': 3}
lenuni_text = {v:k for k, v in lenuni_values.items()}
def __init__(self, delr=np.array([]), delc=np.array([]), lenuni=2,
xul=None, yul=None, xll=None, yll=None, rotation=0.0,
proj4_str=None, epsg=None, units=None,
length_multiplier=None):
for delrc in [delr, delc]:
if isinstance(delrc, float) or isinstance(delrc, int):
msg = ('delr and delcs must be an array or sequences equal in '
'length to the number of rows/columns.')
raise TypeError(msg)
self.delc = np.atleast_1d(np.array(delc)).astype(np.float64) # * length_multiplier
self.delr = np.atleast_1d(np.array(delr)).astype(np.float64) # * length_multiplier
if self.delr.sum() == 0 or self.delc.sum() == 0:
if xll is None or yll is None:
msg = ('Warning: no grid spacing or lower-left corner '
'supplied. Setting the offset with xul, yul requires '
'arguments for delr and delc. Origin will be set to '
'zero.')
print(msg)
xll, yll = 0, 0
xul, yul = None, None
self._lenuni = lenuni
self._proj4_str = proj4_str
self._epsg = epsg
if epsg is not None:
self._proj4_str = getproj4(self._epsg)
self.supported_units = ["feet", "meters"]
self._units = units
self._length_multiplier = length_multiplier
self._reset()
self.set_spatialreference(xul, yul, xll, yll, rotation)
@property
def xll(self):
if self.origin_loc == 'll':
xll = self._xll if self._xll is not None else 0.
elif self.origin_loc == 'ul':
# calculate coords for lower left corner
xll = self._xul - (np.sin(self.theta) * self.yedge[0] *
self.length_multiplier)
return xll
@property
def yll(self):
if self.origin_loc == 'll':
yll = self._yll if self._yll is not None else 0.
elif self.origin_loc == 'ul':
# calculate coords for lower left corner
yll = self._yul - (np.cos(self.theta) * self.yedge[0] *
self.length_multiplier)
return yll
@property
def xul(self):
if self.origin_loc == 'll':
# calculate coords for upper left corner
xul = self._xll + (np.sin(self.theta) * self.yedge[0] *
self.length_multiplier)
if self.origin_loc == 'ul':
# calculate coords for lower left corner
xul = self._xul if self._xul is not None else 0.
return xul
@property
def yul(self):
if self.origin_loc == 'll':
# calculate coords for upper left corner
yul = self._yll + (np.cos(self.theta) * self.yedge[0] *
self.length_multiplier)
if self.origin_loc == 'ul':
# calculate coords for lower left corner
yul = self._yul if self._yul is not None else 0.
return yul
@property
def proj4_str(self):
if self._proj4_str is not None and \
"epsg" in self._proj4_str.lower():
if "init" not in self._proj4_str.lower():
proj4_str = "+init=" + self._proj4_str
else:
proj4_str = self._proj4_str
# set the epsg if proj4 specifies it
tmp = [i for i in self._proj4_str.split() if 'epsg' in i.lower()]
self._epsg = int(tmp[0].split(':')[1])
else:
proj4_str = self._proj4_str
return proj4_str
@property
def epsg(self):
#don't reset the proj4 string here
#because proj4 attribute may already be populated
#(with more details than getproj4 would return)
#instead reset proj4 when epsg is set
#(on init or setattr)
return self._epsg
@property
def lenuni(self):
return self._lenuni
def _parse_units_from_proj4(self):
units = None
try:
# need this because preserve_units doesn't seem to be
# working for complex proj4 strings. So if an
# epsg code was passed, we have no choice, but if a
# proj4 string was passed, we can just parse it
if "EPSG" in self.proj4_str.upper():
import pyproj
                crs = pyproj.Proj(self.proj4_str,
                                  preserve_units=True,
                                  errcheck=True)
proj_str = crs.srs
else:
proj_str = self.proj4_str
# http://proj4.org/parameters.html#units
# from proj4 source code
# "us-ft", "0.304800609601219", "U.S. Surveyor's Foot",
# "ft", "0.3048", "International Foot",
if "units=m" in proj_str:
units = "meters"
elif "units=ft" in proj_str or \
"units=us-ft" in proj_str or \
"to_meters:0.3048" in proj_str:
units = "feet"
return units
except:
pass
@property
def units(self):
if self._units is not None:
units = self._units.lower()
else:
units = self._parse_units_from_proj4()
if units is None:
#print("warning: assuming SpatialReference units are meters")
units = 'meters'
assert units in self.supported_units
return units
@property
def length_multiplier(self):
"""Attempt to identify multiplier for converting from
model units to sr units, defaulting to 1."""
lm = None
if self._length_multiplier is not None:
lm = self._length_multiplier
else:
if self.model_length_units == 'feet':
if self.units == 'meters':
lm = 0.3048
elif self.units == 'feet':
lm = 1.
elif self.model_length_units == 'meters':
if self.units == 'feet':
lm = 1/.3048
elif self.units == 'meters':
lm = 1.
elif self.model_length_units == 'centimeters':
if self.units == 'meters':
lm = 1/100.
elif self.units == 'feet':
lm = 1/30.48
else: # model units unspecified; default to 1
lm = 1.
return lm
@property
def model_length_units(self):
return self.lenuni_text[self.lenuni]
@property
def bounds(self):
"""Return bounding box in shapely order."""
xmin, xmax, ymin, ymax = self.get_extent()
return xmin, ymin, xmax, ymax
@staticmethod
def load(namefile=None, reffile='usgs.model.reference'):
"""Attempts to load spatial reference information from
the following files (in order):
1) usgs.model.reference
2) NAM file (header comment)
3) SpatialReference.default dictionary
"""
reffile = os.path.join(os.path.split(namefile)[0], reffile)
d = SpatialReference.read_usgs_model_reference_file(reffile)
if d is not None:
return d
d = SpatialReference.attribs_from_namfile_header(namefile)
if d is not None:
return d
else:
return SpatialReference.defaults
@staticmethod
def attribs_from_namfile_header(namefile):
# check for reference info in the nam file header
d = SpatialReference.defaults.copy()
if namefile is None:
return None
header = []
with open(namefile, 'r') as f:
for line in f:
if not line.startswith('#'):
break
header.extend(line.strip().replace('#', '').split(';'))
for item in header:
if "xul" in item.lower():
try:
d['xul'] = float(item.split(':')[1])
except:
pass
elif "yul" in item.lower():
try:
d['yul'] = float(item.split(':')[1])
except:
pass
elif "rotation" in item.lower():
try:
d['rotation'] = float(item.split(':')[1])
except:
pass
elif "proj4_str" in item.lower():
try:
proj4_str = ':'.join(item.split(':')[1:]).strip()
if proj4_str.lower() == 'none':
proj4_str = None
d['proj4_str'] = proj4_str
except:
pass
elif "start" in item.lower():
try:
d['start_datetime'] = item.split(':')[1].strip()
except:
pass
# spatial reference length units
elif "units" in item.lower():
d['units'] = item.split(':')[1].strip()
# model length units
elif "lenuni" in item.lower():
d['lenuni'] = int(item.split(':')[1].strip())
# multiplier for converting from model length units to sr length units
elif "length_multiplier" in item.lower():
d['length_multiplier'] = float(item.split(':')[1].strip())
return d
@staticmethod
def read_usgs_model_reference_file(reffile='usgs.model.reference'):
"""read spatial reference info from the usgs.model.reference file
https://water.usgs.gov/ogw/policy/gw-model/modelers-setup.html"""
ITMUNI = {0: "undefined", 1: "seconds", 2: "minutes", 3: "hours", 4: "days",
5: "years"}
itmuni_values = {v: k for k, v in ITMUNI.items()}
d = SpatialReference.defaults.copy()
d.pop('proj4_str') # discard default to avoid confusion with epsg code if entered
if os.path.exists(reffile):
with open(reffile) as input:
for line in input:
if line.strip()[0] != '#':
info = line.strip().split('#')[0].split()
if len(info) > 1:
d[info[0].lower()] = ' '.join(info[1:])
d['xul'] = float(d['xul'])
d['yul'] = float(d['yul'])
d['rotation'] = float(d['rotation'])
# convert the model.reference text to a lenuni value
# (these are the model length units)
if 'length_units' in d.keys():
d['lenuni'] = SpatialReference.lenuni_values[d['length_units']]
if 'time_units' in d.keys():
d['itmuni'] = itmuni_values[d['time_units']]
if 'start_date' in d.keys():
start_datetime = d.pop('start_date')
if 'start_time' in d.keys():
start_datetime += ' {}'.format(d.pop('start_time'))
d['start_datetime'] = start_datetime
if 'epsg' in d.keys():
try:
d['epsg'] = int(d['epsg'])
except Exception as e:
raise Exception(
"error reading epsg code from file:\n" + str(e))
# this prioritizes epsg over proj4 if both are given
# (otherwise 'proj4' entry will be dropped below)
elif 'proj4' in d.keys():
d['proj4_str'] = d['proj4']
# drop any other items that aren't used in sr class
d = {k:v for k, v in d.items() if k.lower() in SpatialReference.defaults.keys()
or k.lower() in {'epsg', 'start_datetime', 'itmuni'}}
return d
else:
return None
def __setattr__(self, key, value):
reset = True
if key == "delr":
super(SpatialReference, self). \
__setattr__("delr", np.atleast_1d(np.array(value)))
elif key == "delc":
super(SpatialReference, self). \
__setattr__("delc", np.atleast_1d(np.array(value)))
elif key == "xul":
super(SpatialReference, self). \
__setattr__("_xul", float(value))
self.origin_loc = 'ul'
elif key == "yul":
super(SpatialReference, self). \
__setattr__("_yul", float(value))
self.origin_loc = 'ul'
elif key == "xll":
super(SpatialReference, self). \
__setattr__("_xll", float(value))
self.origin_loc = 'll'
elif key == "yll":
super(SpatialReference, self). \
__setattr__("_yll", float(value))
self.origin_loc = 'll'
elif key == "length_multiplier":
super(SpatialReference, self). \
__setattr__("_length_multiplier", float(value))
#self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
# yll=self.yll)
elif key == "rotation":
super(SpatialReference, self). \
__setattr__("rotation", float(value))
#self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
# yll=self.yll)
elif key == "lenuni":
super(SpatialReference, self). \
__setattr__("_lenuni", int(value))
#self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
# yll=self.yll)
elif key == "units":
value = value.lower()
assert value in self.supported_units
super(SpatialReference, self). \
__setattr__("_units", value)
elif key == "proj4_str":
super(SpatialReference, self). \
__setattr__("_proj4_str", value)
# reset the units and epsg
units = self._parse_units_from_proj4()
if units is not None:
self._units = units
self._epsg = None
elif key == "epsg":
super(SpatialReference, self). \
__setattr__("_epsg", value)
# reset the units and proj4
self._units = None
self._proj4_str = getproj4(self._epsg)
else:
super(SpatialReference, self).__setattr__(key, value)
reset = False
if reset:
self._reset()
def reset(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
return
def _reset(self):
self._xgrid = None
self._ygrid = None
self._ycentergrid = None
self._xcentergrid = None
self._vertices = None
return
@property
def nrow(self):
return self.delc.shape[0]
@property
def ncol(self):
return self.delr.shape[0]
def __eq__(self, other):
if not isinstance(other, SpatialReference):
return False
if other.xul != self.xul:
return False
if other.yul != self.yul:
return False
if other.rotation != self.rotation:
return False
if other.proj4_str != self.proj4_str:
return False
return True
@classmethod
def from_namfile(cls, namefile):
attribs = SpatialReference.attribs_from_namfile_header(namefile)
try:
attribs.pop("start_datetime")
except:
pass
return SpatialReference(**attribs)
@classmethod
def from_gridspec(cls, gridspec_file, lenuni=0):
f = open(gridspec_file, 'r')
raw = f.readline().strip().split()
nrow = int(raw[0])
ncol = int(raw[1])
raw = f.readline().strip().split()
xul, yul, rot = float(raw[0]), float(raw[1]), float(raw[2])
delr = []
j = 0
while j < ncol:
raw = f.readline().strip().split()
for r in raw:
if '*' in r:
rraw = r.split('*')
for n in range(int(rraw[0])):
delr.append(float(rraw[1]))
j += 1
else:
delr.append(float(r))
j += 1
delc = []
i = 0
while i < nrow:
raw = f.readline().strip().split()
for r in raw:
if '*' in r:
rraw = r.split('*')
for n in range(int(rraw[0])):
delc.append(float(rraw[1]))
i += 1
else:
delc.append(float(r))
i += 1
f.close()
return cls(np.array(delr), np.array(delc),
lenuni, xul=xul, yul=yul, rotation=rot)
@property
def attribute_dict(self):
return {"xul": self.xul, "yul": self.yul, "rotation": self.rotation,
"proj4_str": self.proj4_str}
def set_spatialreference(self, xul=None, yul=None, xll=None, yll=None,
rotation=0.0):
"""
set spatial reference - can be called from model instance
"""
if xul is not None and xll is not None:
msg = ('Both xul and xll entered. Please enter either xul, yul or '
'xll, yll.')
raise ValueError(msg)
if yul is not None and yll is not None:
msg = ('Both yul and yll entered. Please enter either xul, yul or '
'xll, yll.')
raise ValueError(msg)
# set the origin priority based on the left corner specified
# (the other left corner will be calculated). If none are specified
# then default to upper left
if xul is None and yul is None and xll is None and yll is None:
self.origin_loc = 'ul'
xul = 0.
yul = self.delc.sum()
elif xll is not None:
self.origin_loc = 'll'
else:
self.origin_loc = 'ul'
self.rotation = rotation
self._xll = xll if xll is not None else 0.
self._yll = yll if yll is not None else 0.
self._xul = xul if xul is not None else 0.
self._yul = yul if yul is not None else 0.
#self.set_origin(xul, yul, xll, yll)
return
def __repr__(self):
s = "xul:{0:<.10G}; yul:{1:<.10G}; rotation:{2:<G}; ". \
format(self.xul, self.yul, self.rotation)
s += "proj4_str:{0}; ".format(self.proj4_str)
s += "units:{0}; ".format(self.units)
s += "lenuni:{0}; ".format(self.lenuni)
s += "length_multiplier:{}".format(self.length_multiplier)
return s
def set_origin(self, xul=None, yul=None, xll=None, yll=None):
if self.origin_loc == 'll':
# calculate coords for upper left corner
self._xll = xll if xll is not None else 0.
self.yll = yll if yll is not None else 0.
self.xul = self._xll + (np.sin(self.theta) * self.yedge[0] *
self.length_multiplier)
self.yul = self.yll + (np.cos(self.theta) * self.yedge[0] *
self.length_multiplier)
if self.origin_loc == 'ul':
# calculate coords for lower left corner
self.xul = xul if xul is not None else 0.
self.yul = yul if yul is not None else 0.
self._xll = self.xul - (np.sin(self.theta) * self.yedge[0] *
self.length_multiplier)
self.yll = self.yul - (np.cos(self.theta) * self.yedge[0] *
self.length_multiplier)
self._reset()
return
@property
def theta(self):
return -self.rotation * np.pi / 180.
@property
def xedge(self):
return self.get_xedge_array()
@property
def yedge(self):
return self.get_yedge_array()
@property
def xgrid(self):
if self._xgrid is None:
self._set_xygrid()
return self._xgrid
@property
def ygrid(self):
if self._ygrid is None:
self._set_xygrid()
return self._ygrid
@property
def xcenter(self):
return self.get_xcenter_array()
@property
def ycenter(self):
return self.get_ycenter_array()
@property
def ycentergrid(self):
if self._ycentergrid is None:
self._set_xycentergrid()
return self._ycentergrid
@property
def xcentergrid(self):
if self._xcentergrid is None:
self._set_xycentergrid()
return self._xcentergrid
def _set_xycentergrid(self):
self._xcentergrid, self._ycentergrid = np.meshgrid(self.xcenter,
self.ycenter)
self._xcentergrid, self._ycentergrid = self.transform(
self._xcentergrid,
self._ycentergrid)
def _set_xygrid(self):
self._xgrid, self._ygrid = np.meshgrid(self.xedge, self.yedge)
self._xgrid, self._ygrid = self.transform(self._xgrid, self._ygrid)
@staticmethod
def rotate(x, y, theta, xorigin=0., yorigin=0.):
"""
Given x and y array-like values calculate the rotation about an
arbitrary origin and then return the rotated coordinates. theta is in
degrees.
"""
# jwhite changed on Oct 11 2016 - rotation is now positive CCW
# theta = -theta * np.pi / 180.
theta = theta * np.pi / 180.
xrot = xorigin + np.cos(theta) * (x - xorigin) - np.sin(theta) * \
(y - yorigin)
yrot = yorigin + np.sin(theta) * (x - xorigin) + np.cos(theta) * \
(y - yorigin)
return xrot, yrot
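    # Example with hypothetical numbers: rotating the point (1, 0) by 90
    # degrees counter-clockwise about the origin gives approximately (0, 1):
    #   SpatialReference.rotate(1., 0., 90.)  ->  (~0.0, ~1.0)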
def transform(self, x, y, inverse=False):
"""
Given x and y array-like values, apply rotation, scale and offset,
to convert them from model coordinates to real-world coordinates.
"""
if isinstance(x, list):
x = np.array(x)
y = np.array(y)
if not np.isscalar(x):
x, y = x.copy(), y.copy()
if not inverse:
x *= self.length_multiplier
y *= self.length_multiplier
x += self.xll
y += self.yll
x, y = SpatialReference.rotate(x, y, theta=self.rotation,
xorigin=self.xll, yorigin=self.yll)
else:
x, y = SpatialReference.rotate(x, y, -self.rotation,
self.xll, self.yll)
x -= self.xll
y -= self.yll
x /= self.length_multiplier
y /= self.length_multiplier
return x, y
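    # Usage sketch: transform() maps model coordinates to real-world
    # coordinates, and inverse=True undoes the mapping, so
    #   x, y = sr.transform(xm, ym)
    #   xm2, ym2 = sr.transform(x, y, inverse=True)
    # recovers (approximately) the original model coordinates.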
def get_extent(self):
"""
Get the extent of the rotated and offset grid
Return (xmin, xmax, ymin, ymax)
"""
x0 = self.xedge[0]
x1 = self.xedge[-1]
y0 = self.yedge[0]
y1 = self.yedge[-1]
# upper left point
x0r, y0r = self.transform(x0, y0)
# upper right point
x1r, y1r = self.transform(x1, y0)
# lower right point
x2r, y2r = self.transform(x1, y1)
# lower left point
x3r, y3r = self.transform(x0, y1)
xmin = min(x0r, x1r, x2r, x3r)
xmax = max(x0r, x1r, x2r, x3r)
ymin = min(y0r, y1r, y2r, y3r)
ymax = max(y0r, y1r, y2r, y3r)
return (xmin, xmax, ymin, ymax)
def get_grid_lines(self):
"""
Get the grid lines as a list
"""
xmin = self.xedge[0]
xmax = self.xedge[-1]
ymin = self.yedge[-1]
ymax = self.yedge[0]
lines = []
# Vertical lines
for j in range(self.ncol + 1):
x0 = self.xedge[j]
x1 = x0
y0 = ymin
y1 = ymax
x0r, y0r = self.transform(x0, y0)
x1r, y1r = self.transform(x1, y1)
lines.append([(x0r, y0r), (x1r, y1r)])
# horizontal lines
for i in range(self.nrow + 1):
x0 = xmin
x1 = xmax
y0 = self.yedge[i]
y1 = y0
x0r, y0r = self.transform(x0, y0)
x1r, y1r = self.transform(x1, y1)
lines.append([(x0r, y0r), (x1r, y1r)])
return lines
def get_grid_line_collection(self, **kwargs):
"""
Get a LineCollection of the grid
"""
from matplotlib.collections import LineCollection
lc = LineCollection(self.get_grid_lines(), **kwargs)
return lc
def get_xcenter_array(self):
"""
Return a numpy one-dimensional float array that has the cell center x
coordinate for every column in the grid in model space - not offset or rotated.
"""
x = np.add.accumulate(self.delr) - 0.5 * self.delr
return x
def get_ycenter_array(self):
"""
        Return a numpy one-dimensional float array that has the cell center y
        coordinate for every row in the grid in model space - not offset or rotated.
"""
Ly = np.add.reduce(self.delc)
y = Ly - (np.add.accumulate(self.delc) - 0.5 *
self.delc)
return y
def get_xedge_array(self):
"""
Return a numpy one-dimensional float array that has the cell edge x
coordinates for every column in the grid in model space - not offset
or rotated. Array is of size (ncol + 1)
"""
xedge = np.concatenate(([0.], np.add.accumulate(self.delr)))
return xedge
def get_yedge_array(self):
"""
Return a numpy one-dimensional float array that has the cell edge y
coordinates for every row in the grid in model space - not offset or
rotated. Array is of size (nrow + 1)
"""
length_y = np.add.reduce(self.delc)
yedge = np.concatenate(([length_y], length_y -
np.add.accumulate(self.delc)))
return yedge
def write_gridSpec(self, filename):
""" write a PEST-style grid specification file
"""
f = open(filename, 'w')
f.write(
"{0:10d} {1:10d}\n".format(self.delc.shape[0], self.delr.shape[0]))
f.write("{0:15.6E} {1:15.6E} {2:15.6E}\n".format(self.xul, self.yul,
self.rotation))
for r in self.delr:
f.write("{0:15.6E} ".format(r))
f.write('\n')
for c in self.delc:
f.write("{0:15.6E} ".format(c))
f.write('\n')
        f.close()
        return
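    # The file written by write_gridSpec above has this layout (values
    # illustrative):
    #   <nrow> <ncol>
    #   <xul> <yul> <rotation>
    #   <all delr values on one line>
    #   <all delc values on one line>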
def write_shapefile(self, filename='grid.shp', epsg=None, prj=None):
"""Write a shapefile of the grid with just the row and column attributes"""
from ..export.shapefile_utils import write_grid_shapefile2
if epsg is None and prj is None:
epsg = self.epsg
write_grid_shapefile2(filename, self, array_dict={}, nan_val=-1.0e9,
epsg=epsg, prj=prj)
def get_vertices(self, i, j):
pts = []
xgrid, ygrid = self.xgrid, self.ygrid
pts.append([xgrid[i, j], ygrid[i, j]])
pts.append([xgrid[i + 1, j], ygrid[i + 1, j]])
pts.append([xgrid[i + 1, j + 1], ygrid[i + 1, j + 1]])
pts.append([xgrid[i, j + 1], ygrid[i, j + 1]])
pts.append([xgrid[i, j], ygrid[i, j]])
return pts
def get_rc(self, x, y):
"""Return the row and column of a point or sequence of points
in real-world coordinates.
Parameters
----------
x : scalar or sequence of x coordinates
y : scalar or sequence of y coordinates
Returns
-------
r : row or sequence of rows (zero-based)
c : column or sequence of columns (zero-based)
"""
if np.isscalar(x):
c = (np.abs(self.xcentergrid[0] - x)).argmin()
r = (np.abs(self.ycentergrid[:, 0] - y)).argmin()
else:
xcp = np.array([self.xcentergrid[0]] * (len(x)))
ycp = np.array([self.ycentergrid[:, 0]] * (len(x)))
c = (np.abs(xcp.transpose() - x)).argmin(axis=0)
r = (np.abs(ycp.transpose() - y)).argmin(axis=0)
return r, c
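    # Usage sketch (assumed coordinates): map one real-world point, or many
    # at once, back to zero-based row/column indices:
    #   r, c = sr.get_rc(551000., 3751000.)
    #   rows, cols = sr.get_rc([x0, x1], [y0, y1])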
def get_grid_map_plotter(self):
"""
Create a QuadMesh plotting object for this grid
Returns
-------
quadmesh : matplotlib.collections.QuadMesh
"""
from matplotlib.collections import QuadMesh
verts = np.vstack((self.xgrid.flatten(), self.ygrid.flatten())).T
qm = QuadMesh(self.ncol, self.nrow, verts)
return qm
def plot_array(self, a, ax=None, **kwargs):
"""
Create a QuadMesh plot of the specified array using pcolormesh
Parameters
----------
a : np.ndarray
Returns
-------
quadmesh : matplotlib.collections.QuadMesh
"""
import matplotlib.pyplot as plt
if ax is None:
ax = plt.gca()
qm = ax.pcolormesh(self.xgrid, self.ygrid, a, **kwargs)
return qm
def export_array(self, filename, a, nodata=-9999,
fieldname='value',
**kwargs):
"""Write a numpy array to Arc Ascii grid
or shapefile with the model reference.
Parameters
----------
filename : str
Path of output file. Export format is determined by
            file extension.
'.asc' Arc Ascii grid
'.shp' Shapefile
a : 2D numpy.ndarray
Array to export
nodata : scalar
Value to assign to np.nan entries (default -9999)
fieldname : str
Attribute field name for array values (shapefile export only).
            (default 'value')
kwargs:
keyword arguments to np.savetxt (ascii)
or flopy.export.shapefile_utils.write_grid_shapefile2
Notes
-----
Rotated grids will be unrotated prior to export to Arc Ascii format,
using scipy.ndimage.rotate. As a result, their pixels will no longer
coincide exactly with the model grid.
"""
if filename.lower().endswith(".asc"):
            if len(np.unique(self.delr)) != 1 or len(np.unique(self.delc)) != 1 \
                    or self.delr[0] != self.delc[0]:
raise ValueError('Arc ascii arrays require a uniform grid.')
xll, yll = self.xll, self.yll
cellsize = self.delr[0] * self.length_multiplier
a = a.copy()
a[np.isnan(a)] = nodata
if self.rotation != 0:
try:
from scipy.ndimage import rotate
a = rotate(a, self.rotation, cval=nodata)
height_rot, width_rot = a.shape
xmin, ymin, xmax, ymax = self.bounds
dx = (xmax - xmin) / width_rot
dy = (ymax - ymin) / height_rot
cellsize = dx
xll, yll = xmin, ymin
except ImportError:
print('scipy package required to export rotated grid.')
pass
filename = '.'.join(filename.split('.')[:-1]) + '.asc' # enforce .asc ending
nrow, ncol = a.shape
a[np.isnan(a)] = nodata
txt = 'ncols {:d}\n'.format(ncol)
txt += 'nrows {:d}\n'.format(nrow)
txt += 'xllcorner {:f}\n'.format(xll)
txt += 'yllcorner {:f}\n'.format(yll)
txt += 'cellsize {}\n'.format(cellsize)
txt += 'NODATA_value {:.0f}\n'.format(nodata)
with open(filename, 'w') as output:
output.write(txt)
with open(filename, 'ab') as output:
np.savetxt(output, a, **kwargs)
print('wrote {}'.format(filename))
elif filename.lower().endswith(".shp"):
from ..export.shapefile_utils import write_grid_shapefile2
epsg = kwargs.get('epsg', None)
prj = kwargs.get('prj', None)
if epsg is None and prj is None:
epsg = self.epsg
write_grid_shapefile2(filename, self, array_dict={fieldname: a}, nan_val=nodata,
epsg=epsg, prj=prj)
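    # For the '.asc' branch above, the written header looks like this
    # (values illustrative):
    #   ncols 100
    #   nrows 50
    #   xllcorner 0.000000
    #   yllcorner 0.000000
    #   cellsize 250.0
    #   NODATA_value -9999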
def contour_array(self, ax, a, **kwargs):
"""
        Create a contour plot of the specified array
Parameters
----------
ax : matplotlib.axes.Axes
ax to add the contours
a : np.ndarray
array to contour
Returns
-------
contour_set : ContourSet
"""
contour_set = ax.contour(self.xcentergrid, self.ycentergrid,
a, **kwargs)
return contour_set
@property
def vertices(self):
"""Returns a list of vertices for"""
if self._vertices is None:
self._set_vertices()
return self._vertices
def _set_vertices(self):
"""populate vertices for the whole grid"""
jj, ii = np.meshgrid(range(self.ncol), range(self.nrow))
jj, ii = jj.ravel(), ii.ravel()
vrts = np.array(self.get_vertices(ii, jj)).transpose([2, 0, 1])
self._vertices = [v.tolist() for v in vrts] # conversion to lists
"""
code above is 3x faster
xgrid, ygrid = self.xgrid, self.ygrid
ij = list(map(list, zip(xgrid[:-1, :-1].ravel(), ygrid[:-1, :-1].ravel())))
i1j = map(list, zip(xgrid[1:, :-1].ravel(), ygrid[1:, :-1].ravel()))
i1j1 = map(list, zip(xgrid[1:, 1:].ravel(), ygrid[1:, 1:].ravel()))
ij1 = map(list, zip(xgrid[:-1, 1:].ravel(), ygrid[:-1, 1:].ravel()))
self._vertices = np.array(map(list, zip(ij, i1j, i1j1, ij1, ij)))
"""
def interpolate(self, a, xi, method='nearest'):
"""
Use the griddata method to interpolate values from an array onto the
points defined in xi. For any values outside of the grid, use
'nearest' to find a value for them.
Parameters
----------
a : numpy.ndarray
array to interpolate from. It must be of size nrow, ncol
xi : numpy.ndarray
array containing x and y point coordinates of size (npts, 2). xi
also works with broadcasting so that if a is a 2d array, then
xi can be passed in as (xgrid, ygrid).
method : {'linear', 'nearest', 'cubic'}
method to use for interpolation (default is 'nearest')
Returns
-------
b : numpy.ndarray
array of size (npts)
"""
from scipy.interpolate import griddata
# Create a 2d array of points for the grid centers
points = np.empty((self.ncol * self.nrow, 2))
points[:, 0] = self.xcentergrid.flatten()
points[:, 1] = self.ycentergrid.flatten()
# Use the griddata function to interpolate to the xi points
b = griddata(points, a.flatten(), xi, method=method, fill_value=np.nan)
# if method is linear or cubic, then replace nan's with a value
# interpolated using nearest
if method != 'nearest':
bn = griddata(points, a.flatten(), xi, method='nearest')
idx = np.isnan(b)
b[idx] = bn[idx]
return b
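    # Usage sketch (assumed array `heads` of shape (nrow, ncol)): sample the
    # array at two arbitrary points, falling back to nearest-neighbour for
    # points outside the convex hull of the cell centers:
    #   pts = np.array([[x0, y0], [x1, y1]])
    #   vals = sr.interpolate(heads, pts, method='linear')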
def get_2d_vertex_connectivity(self):
"""
Create the cell 2d vertices array and the iverts index array. These
are the same form as the ones used to instantiate an unstructured
spatial reference.
Returns
-------
verts : ndarray
array of x and y coordinates for the grid vertices
iverts : list
a list with a list of vertex indices for each cell in clockwise
order starting with the upper left corner
"""
x = self.xgrid.flatten()
y = self.ygrid.flatten()
nrowvert = self.nrow + 1
ncolvert = self.ncol + 1
npoints = nrowvert * ncolvert
        verts = np.empty((npoints, 2), dtype=float)
verts[:, 0] = x
verts[:, 1] = y
iverts = []
for i in range(self.nrow):
for j in range(self.ncol):
iv1 = i * ncolvert + j # upper left point number
iv2 = iv1 + 1
iv4 = (i + 1) * ncolvert + j
iv3 = iv4 + 1
iverts.append([iv1, iv2, iv3, iv4])
return verts, iverts
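    # Worked example of the numbering above (nrow=1, ncol=2): the vertex grid
    # is 2 x 3, so ncolvert = 3 and cell (0, 0) gets iv1=0, iv2=1, iv4=3,
    # iv3=4, i.e. iverts[0] == [0, 1, 4, 3]; cell (0, 1) gives [1, 2, 5, 4].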
def get_3d_shared_vertex_connectivity(self, nlay, botm, ibound=None):
# get the x and y points for the grid
x = self.xgrid.flatten()
y = self.ygrid.flatten()
# set the size of the vertex grid
nrowvert = self.nrow + 1
ncolvert = self.ncol + 1
nlayvert = nlay + 1
nrvncv = nrowvert * ncolvert
npoints = nrvncv * nlayvert
# create and fill a 3d points array for the grid
        verts = np.empty((npoints, 3), dtype=float)
verts[:, 0] = np.tile(x, nlayvert)
verts[:, 1] = np.tile(y, nlayvert)
istart = 0
istop = nrvncv
for k in range(nlay + 1):
verts[istart:istop, 2] = self.interpolate(botm[k],
verts[istart:istop, :2],
method='linear')
istart = istop
istop = istart + nrvncv
# create the list of points comprising each cell. points must be
# listed a specific way according to vtk requirements.
iverts = []
for k in range(nlay):
koffset = k * nrvncv
for i in range(self.nrow):
for j in range(self.ncol):
if ibound is not None:
if ibound[k, i, j] == 0:
continue
iv1 = i * ncolvert + j + koffset
iv2 = iv1 + 1
iv4 = (i + 1) * ncolvert + j + koffset
iv3 = iv4 + 1
iverts.append([iv4 + nrvncv, iv3 + nrvncv,
iv1 + nrvncv, iv2 + nrvncv,
iv4, iv3, iv1, iv2])
return verts, iverts
def get_3d_vertex_connectivity(self, nlay, botm, ibound=None):
if ibound is None:
ncells = nlay * self.nrow * self.ncol
            ibound = np.ones((nlay, self.nrow, self.ncol), dtype=int)
else:
ncells = (ibound != 0).sum()
npoints = ncells * 8
        verts = np.empty((npoints, 3), dtype=float)
iverts = []
ipoint = 0
for k in range(nlay):
for i in range(self.nrow):
for j in range(self.ncol):
if ibound[k, i, j] == 0:
continue
ivert = []
pts = self.get_vertices(i, j)
pt0, pt1, pt2, pt3, pt0 = pts
z = botm[k + 1, i, j]
verts[ipoint, 0:2] = np.array(pt1)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt2)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt0)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt3)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
z = botm[k, i, j]
verts[ipoint, 0:2] = np.array(pt1)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt2)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt0)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt3)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
iverts.append(ivert)
return verts, iverts
class SpatialReferenceUnstructured(SpatialReference):
"""
a class to locate an unstructured model grid in x-y space
Parameters
----------
verts : ndarray
2d array of x and y points.
iverts : list of lists
should be of len(ncells) with a list of vertex numbers for each cell
ncpl : ndarray
array containing the number of cells per layer. ncpl.sum() must be
equal to the total number of cells in the grid.
layered : boolean
flag to indicated that the grid is layered. In this case, the vertices
define the grid for single layer, and all layers use this same grid.
In this case the ncpl value for each layer must equal len(iverts).
If not layered, then verts and iverts are specified for all cells and
all layers in the grid. In this case, npcl.sum() must equal
len(iverts).
lenuni : int
the length units flag from the discretization package
proj4_str: str
a PROJ4 string that identifies the grid in space. warning: case
sensitive!
units : string
Units for the grid. Must be either feet or meters
epsg : int
EPSG code that identifies the grid in space. Can be used in lieu of
proj4. PROJ4 attribute will auto-populate if there is an internet
connection(via get_proj4 method).
See https://www.epsg-registry.org/ or spatialreference.org
length_multiplier : float
multiplier to convert model units to spatial reference units.
        (default=1.)
Attributes
----------
xcenter : ndarray
array of x cell centers
ycenter : ndarray
array of y cell centers
Notes
-----
"""
def __init__(self, xc, yc, verts, iverts, ncpl, layered=True, lenuni=1,
proj4_str="EPSG:4326", epsg=None, units=None,
length_multiplier=1.):
self.xc = xc
self.yc = yc
self.verts = verts
self.iverts = iverts
self.ncpl = ncpl
self.layered = layered
self.lenuni = lenuni
self._proj4_str = proj4_str
self.epsg = epsg
if epsg is not None:
self._proj4_str = getproj4(epsg)
self.supported_units = ["feet", "meters"]
self._units = units
self.length_multiplier = length_multiplier
# set defaults
self.xul = 0.
self.yul = 0.
self.rotation = 0.
if self.layered:
assert all([n == len(iverts) for n in ncpl])
assert self.xc.shape[0] == self.ncpl[0]
assert self.yc.shape[0] == self.ncpl[0]
else:
msg = ('Length of iverts must equal ncpl.sum '
'({} {})'.format(len(iverts), ncpl))
assert len(iverts) == ncpl.sum(), msg
assert self.xc.shape[0] == self.ncpl.sum()
assert self.yc.shape[0] == self.ncpl.sum()
return
def write_shapefile(self, filename='grid.shp'):
"""
Write shapefile of the grid
Parameters
----------
filename : string
filename for shapefile
Returns
-------
"""
raise NotImplementedError()
return
def write_gridSpec(self, filename):
"""
Write a PEST-style grid specification file
Parameters
----------
filename : string
filename for grid specification file
Returns
-------
"""
raise NotImplementedError()
return
@classmethod
def from_gridspec(cls, fname):
"""
Create a new SpatialReferenceUnstructured grid from an PEST
grid specification file
Parameters
----------
fname : string
File name for grid specification file
Returns
-------
sru : flopy.utils.reference.SpatialReferenceUnstructured
"""
raise NotImplementedError()
return
@classmethod
def from_argus_export(cls, fname, nlay=1):
"""
Create a new SpatialReferenceUnstructured grid from an Argus One
Trimesh file
Parameters
----------
fname : string
File name
nlay : int
Number of layers to create
Returns
-------
sru : flopy.utils.reference.SpatialReferenceUnstructured
"""
from ..utils.geometry import get_polygon_centroid
f = open(fname, 'r')
line = f.readline()
ll = line.split()
ncells, nverts = ll[0:2]
ncells = int(ncells)
nverts = int(nverts)
        verts = np.empty((nverts, 2), dtype=float)
        xc = np.empty((ncells), dtype=float)
        yc = np.empty((ncells), dtype=float)
# read the vertices
f.readline()
for ivert in range(nverts):
line = f.readline()
ll = line.split()
c, iv, x, y = ll[0:4]
verts[ivert, 0] = x
verts[ivert, 1] = y
# read the cell information and create iverts, xc, and yc
iverts = []
for icell in range(ncells):
line = f.readline()
ll = line.split()
ivlist = []
for ic in ll[2:5]:
ivlist.append(int(ic) - 1)
if ivlist[0] != ivlist[-1]:
ivlist.append(ivlist[0])
iverts.append(ivlist)
xc[icell], yc[icell] = get_polygon_centroid(verts[ivlist, :])
# close file and return spatial reference
f.close()
return cls(xc, yc, verts, iverts, np.array(nlay * [len(iverts)]))
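    # Expected Trimesh layout for the reader above (sketch, abbreviated):
    #   <ncells> <nverts> ...          header line (only first two used)
    #   <one skipped line>
    #   <c> <iv> <x> <y>               one line per vertex
    #   <..> <..> <v1> <v2> <v3> ...   one line per cell, 1-based vertex ids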
def __setattr__(self, key, value):
super(SpatialReference, self).__setattr__(key, value)
return
def get_extent(self):
"""
Get the extent of the grid
Returns
-------
extent : tuple
min and max grid coordinates
"""
xmin = self.verts[:, 0].min()
xmax = self.verts[:, 0].max()
ymin = self.verts[:, 1].min()
ymax = self.verts[:, 1].max()
return (xmin, xmax, ymin, ymax)
def get_xcenter_array(self):
"""
Return a numpy one-dimensional float array that has the cell center x
coordinate for every cell in the grid in model space - not offset or
rotated.
"""
return self.xc
def get_ycenter_array(self):
"""
        Return a numpy one-dimensional float array that has the cell center y
        coordinate for every cell in the grid in model space - not offset or
        rotated.
"""
return self.yc
def plot_array(self, a, ax=None):
"""
        Create a plot of the specified array using a patch collection
Parameters
----------
a : np.ndarray
Returns
-------
        patch_collection : matplotlib.collections.PatchCollection
"""
        from ..plot import plotutil
        import matplotlib.pyplot as plt
        if ax is None:
            ax = plt.gca()
patch_collection = plotutil.plot_cvfd(self.verts, self.iverts, a=a,
ax=ax)
return patch_collection
def get_grid_line_collection(self, **kwargs):
"""
Get a patch collection of the grid
"""
from ..plot import plotutil
edgecolor = kwargs.pop('colors')
pc = plotutil.cvfd_to_patch_collection(self.verts, self.iverts)
pc.set(facecolor='none')
pc.set(edgecolor=edgecolor)
return pc
def contour_array(self, ax, a, **kwargs):
"""
        Create a contour plot of the specified array using tricontour
Parameters
----------
ax : matplotlib.axes.Axes
ax to add the contours
a : np.ndarray
array to contour
Returns
-------
contour_set : ContourSet
"""
contour_set = ax.tricontour(self.xcenter, self.ycenter,
a, **kwargs)
return contour_set
class epsgRef:
"""Sets up a local database of projection file text referenced by epsg code.
The database is located in the site packages folder in epsgref.py, which
contains a dictionary, prj, of projection file text keyed by epsg value.
"""
def __init__(self):
sp = [f for f in sys.path if f.endswith('site-packages')][0]
self.location = os.path.join(sp, 'epsgref.py')
def _remove_pyc(self):
try: # get rid of pyc file
os.remove(self.location + 'c')
except:
pass
def make(self):
if not os.path.exists(self.location):
newfile = open(self.location, 'w')
newfile.write('prj = {}\n')
newfile.close()
def reset(self, verbose=True):
if os.path.exists(self.location):
os.remove(self.location)
self._remove_pyc()
self.make()
if verbose:
print('Resetting {}'.format(self.location))
def add(self, epsg, prj):
"""add an epsg code to epsgref.py"""
with open(self.location, 'a') as epsgfile:
epsgfile.write("prj[{:d}] = '{}'\n".format(epsg, prj))
def remove(self, epsg):
"""removes an epsg entry from epsgref.py"""
from epsgref import prj
self.reset(verbose=False)
if epsg in prj.keys():
del prj[epsg]
        for epsg_code, prj_text in prj.items():
            self.add(epsg_code, prj_text)
@staticmethod
def show():
import importlib
import epsgref
importlib.reload(epsgref)
from epsgref import prj
for k, v in prj.items():
print('{}:\n{}\n'.format(k, v))
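# Typical epsgRef round trip (sketch; the projection text is illustrative):
#   ep = epsgRef()
#   ep.make()                    # ensure epsgref.py exists
#   ep.add(26715, 'PROJCS[...]') # cache a projection string
#   epsgRef.show()               # dump everything cached so far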
def getprj(epsg, addlocalreference=True, text='esriwkt'):
"""Gets projection file (.prj) text for given epsg code from spatialreference.org
See: https://www.epsg-registry.org/
Parameters
----------
epsg : int
epsg code for coordinate system
addlocalreference : boolean
adds the projection file text associated with epsg to a local
database, epsgref.py, located in site-packages.
Returns
-------
prj : str
text for a projection (*.prj) file.
"""
epsgfile = epsgRef()
prj = None
try:
from epsgref import prj
prj = prj.get(epsg)
except:
epsgfile.make()
if prj is None:
prj = get_spatialreference(epsg, text=text)
if addlocalreference:
epsgfile.add(epsg, prj)
return prj
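# Example (needs network access the first time; later calls hit the local
# epsgref.py cache):
#   wkt = getprj(4326)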
def get_spatialreference(epsg, text='esriwkt'):
"""Gets text for given epsg code and text format from spatialreference.org
Fetches the reference text using the url:
http://spatialreference.org/ref/epsg/<epsg code>/<text>/
See: https://www.epsg-registry.org/
Parameters
----------
epsg : int
epsg code for coordinate system
text : str
string added to url
Returns
-------
url : str
"""
from flopy.utils.flopy_io import get_url_text
url = "http://spatialreference.org/ref/epsg/{0}/{1}/".format(epsg, text)
text = get_url_text(url,
error_msg='No internet connection or epsg code {} '
'not found on spatialreference.org.'.format(epsg))
if text is None: # epsg code not listed on spatialreference.org may still work with pyproj
return '+init=epsg:{}'.format(epsg)
return text.replace("\n", "")
def getproj4(epsg):
"""Gets projection file (.prj) text for given epsg code from
spatialreference.org. See: https://www.epsg-registry.org/
Parameters
----------
epsg : int
epsg code for coordinate system
Returns
-------
prj : str
text for a projection (*.prj) file.
"""
return get_spatialreference(epsg, text='proj4') | bsd-3-clause | 7,134,044,055,945,005,000 | 32.212842 | 94 | 0.501373 | false |
privacyidea/privacyideaadm | privacyideautils/commands/audit.py | 1 | 2377 | # -*- coding: utf-8 -*-
#
# 2020-04-13 Cornelius Kölbel <[email protected]>
# migrate to click
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import click
import datetime
import logging
from privacyideautils.clientutils import (showresult,
dumpresult,
privacyideaclient,
__version__)
@click.group()
@click.pass_context
def audit(ctx):
"""
Manage the audit log. Basically fetch audit information.
"""
pass
@audit.command()
@click.pass_context
@click.option("--page", help="The page number to view", type=int)
@click.option("--rp", help="The number of entries per page", type=int)
@click.option("--sortname", help="The name of the column to sort by", default="number")
@click.option("--sortorder", help="The order to sort (desc, asc)",
type=click.Choice(["desc", "asc"]), default="desc")
@click.option("--query", help="A search tearm to search for")
@click.option("--qtype", help="The column to search for")
def list(ctx, page, rp, sortname, sortorder, query, qtype):
"""
List the audit log
"""
client = ctx.obj["pi_client"]
param = {}
if page:
param["page"] = page
if rp:
param["rp"] = rp
if sortname:
param["sortname"] = sortname
if sortorder:
param["sortorder"] = sortorder
if query:
param["query"] = query
if qtype:
param["qtype"] = qtype
resp = client.auditsearch(param)
r1 = resp.data
auditdata = r1.get("result").get("value").get("auditdata")
count = r1.get("result").get("value").get("count")
for row in auditdata:
print(row)
print("Total: {0!s}".format(count))
| agpl-3.0 | 7,420,498,847,606,918,000 | 32.942857 | 87 | 0.631313 | false |
arikpoz/deep-visualization-toolbox | run_webui.py | 1 | 1847 | #! /usr/bin/env python
import os
import thread
from live_vis import LiveVis
from bindings import bindings
try:
import settings
except:
print '\nError importing settings.py. Check the error message below for more information.'
print "If you haven't already, you'll want to open the settings_model_selector.py file"
print 'and edit it to point to your caffe checkout.\n'
raise
if not os.path.exists(settings.caffevis_caffe_root):
raise Exception('ERROR: Set caffevis_caffe_root in settings.py first.')
import cv2
from flask import Flask, render_template, Response
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
def gen():
while True:
frame = get_frame()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')
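# gen() above emits a multipart/x-mixed-replace (MJPEG) stream: every
# iteration pushes one JPEG frame delimited by the "--frame" boundary, which
# browsers render as live video. A minimal template snippet (assumed, not
# necessarily what templates/index.html contains) would embed it as:
#   <img src="{{ url_for('video_feed') }}">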
def get_frame():
# We are using Motion JPEG, but OpenCV defaults to capture raw images,
# so we must encode it into JPEG in order to correctly display the
# video stream.
global lv
ret, jpeg = cv2.imencode('.jpg', lv.window_buffer[:,:,::-1])
return jpeg.tobytes()
@app.route('/video_feed')
def video_feed():
return Response(gen(), mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
global lv
def someFunc():
print "someFunc was called"
lv.run_loop()
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
# The reloader has already run - do what you want to do here
lv = LiveVis(settings)
help_keys, _ = bindings.get_key_help('help_mode')
quit_keys, _ = bindings.get_key_help('quit')
print '\n\nRunning toolbox. Push %s for help or %s to quit.\n\n' % (help_keys[0], quit_keys[0])
thread.start_new_thread(someFunc, ())
app.run(host='127.0.0.1', debug=True)
| mit | 5,724,847,422,443,700,000 | 25.768116 | 103 | 0.644288 | false |
smtpinc/sendapi-python | lib/smtpcom/sendapi/api.py | 1 | 2905 | from smtpcom.sendapi.send import SendAPI
from smtpcom.sendapi.report import ReportAPI
from smtpcom.sendapi.campaign import CampaignAPI
from smtpcom.sendapi.template import TemplateAPI
class API(object):
def __init__(self, content_type='json'):
self.__report = ReportAPI(content_type)
self.__template = TemplateAPI(content_type)
self.__campaign = CampaignAPI(content_type)
self.__email = SendAPI(content_type)
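    # Minimal usage sketch (argument values are hypothetical; every wrapper
    # below forwards its arguments unchanged to the underlying sub-API):
    #   api = API(content_type='json')
    #   api.create_campaign('spring-sale')
    #   api.get_templates(count=10, page=1)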
def create_campaign(self, *args, **kwargs):
return self.__campaign.create_campaign(*args, **kwargs)
def get_campaigns(self, *args, **kwargs):
return self.__campaign.get_campaigns(*args, **kwargs)
def add_template(self, *args, **kwargs):
return self.__template.add_template(*args, **kwargs)
def update_template(self, *args, **kwargs):
return self.__template.update_template(*args, **kwargs)
def delete_campaign(self, campaign_id):
return self.__campaign.delete_campaign(campaign_id)
def update_campaign(self, campaign_id, campaign_name):
return self.__campaign.update_campaign(campaign_id, campaign_name)
def delete_template(self, template_id):
return self.__template.delete_template(template_id)
def get_template(self, template_id):
return self.__template.get_template(template_id)
def get_templates(self, count, page):
return self.__template.get_templates(count, page)
def export_clicks(self, *args, **kwargs):
return self.__report.export_clicks(*args, **kwargs)
def export_clicks_by_url(self, *args, **kwargs):
return self.__report.export_clicks_by_url(*args, **kwargs)
def get_clicks_by_url(self, *args, **kwargs):
return self.__report.get_clicks_by_url(*args, **kwargs)
def export_opens(self, *args, **kwargs):
return self.__report.export_opens(*args, **kwargs)
def export_summary_stats(self, *args, **kwargs):
return self.__report.export_summary_stats(*args, **kwargs)
def get_clicks(self, *args, **kwargs):
return self.__report.get_clicks(*args, **kwargs)
def get_failed_sends(self, *args, **kwargs):
return self.__report.get_failed_sends(*args, **kwargs)
def get_opens(self, *args, **kwargs):
return self.__report.get_opens(*args, **kwargs)
def update_realtime_reporting(self, *args, **kwargs):
return self.__report.update_realtime_reporting(*args, **kwargs)
def get_realtime_reporting(self):
return self.__report.get_realtime_reporting()
def get_senders(self):
return self.__report.get_senders()
def get_sends(self, count, page):
return self.__report.get_sends(count, page)
def get_summary_stats(self, *args, **kwargs):
return self.__report.get_summary_stats(*args, **kwargs)
def send(self, *args, **kwargs):
return self.__email.send(*args, **kwargs)
| mit | 8,842,121,755,625,203,000 | 34.864198 | 74 | 0.657143 | false |
markovmodel/thermotools | test/test_dtram.py | 1 | 6092 | # This file is part of thermotools.
#
# Copyright 2015 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# thermotools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import thermotools.dtram as dtram
import numpy as np
from numpy.testing import assert_allclose
def test_prior():
assert_allclose(np.log(dtram.get_prior()), dtram.get_log_prior(), atol=1.0E-16)
def test_lognu_zero_counts():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.zeros(shape=(nt, nm, nm), dtype=np.intc) # C_K_ii may have an internal prior
scratch_i = np.zeros(shape=(nm,), dtype=np.float64)
new_log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
ref_log_lagrangian_mult = np.log(dtram.get_prior() * np.ones(shape=(nt, nm), dtype=np.float64))
dtram.update_log_lagrangian_mult(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij,
scratch_i, new_log_lagrangian_mult)
assert_allclose(new_log_lagrangian_mult, ref_log_lagrangian_mult, atol=1.0E-16)
def test_lognu_all_factors_unity():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.ones(shape=(nt, nm, nm), dtype=np.intc)
scratch_i = np.zeros(shape=(nm,), dtype=np.float64)
new_log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
ref_log_lagrangian_mult = np.log(nm*np.ones(shape=(nt, nm), dtype=np.float64))
dtram.update_log_lagrangian_mult(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij,
scratch_i, new_log_lagrangian_mult)
assert_allclose(new_log_lagrangian_mult, ref_log_lagrangian_mult, atol=1.0E-16)
def test_lognu_K_range():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
for K in range(nt):
log_lagrangian_mult[K, :] = np.log(K + 1.0)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.ones(shape=(nt, nm, nm), dtype=np.intc)
scratch_i = np.zeros(shape=(nm,), dtype=np.float64)
new_log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
ref_log_lagrangian_mult = np.log(nm*np.ones(shape=(nt, nm), dtype=np.float64))
dtram.update_log_lagrangian_mult(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij,
scratch_i, new_log_lagrangian_mult)
assert_allclose(new_log_lagrangian_mult, ref_log_lagrangian_mult, atol=1.0E-16)
def test_fi_zero_counts():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.zeros(shape=(nt, nm, nm), dtype=np.intc)
scratch_TM = np.zeros(shape=(nt, nm), dtype=np.float64)
scratch_M = np.zeros(shape=(nm,), dtype=np.float64)
new_conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
dtram.update_conf_energies(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij,
scratch_TM, new_conf_energies)
assert_allclose(new_conf_energies, 0.0, atol=1.0E-16)
def test_fi_all_factors_unity():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.ones(shape=(nt, nm, nm), dtype=np.intc)
scratch_TM = np.zeros(shape=(nt, nm), dtype=np.float64)
scratch_M = np.zeros(shape=(nm,), dtype=np.float64)
new_conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
dtram.update_conf_energies(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij,
scratch_TM, new_conf_energies)
assert_allclose(new_conf_energies, 0.0, atol=1.0E-16)
def test_pij_zero_counts():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.zeros(shape=(nt, nm, nm), dtype=np.intc)
scratch_M = np.zeros(shape=(nm,), dtype=np.float64)
p_K_ij = dtram.estimate_transition_matrices(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij, scratch_M)
ref_p_ij = np.eye(nm, dtype=np.float64)
for K in range(nt):
assert_allclose(p_K_ij[K, :, :], ref_p_ij, atol=1.0E-16)
def test_pij_all_factors_unity():
nm = 200
nt = 100
log_lagrangian_mult = np.zeros(shape=(nt, nm), dtype=np.float64)
bias_energies = np.zeros(shape=(nt, nm), dtype=np.float64)
conf_energies = np.zeros(shape=(nm,), dtype=np.float64)
C_K_ij = np.ones(shape=(nt, nm, nm), dtype=np.intc)
scratch_M = np.zeros(shape=(nm,), dtype=np.float64)
p_K_ij = dtram.estimate_transition_matrices(
log_lagrangian_mult, bias_energies, conf_energies, C_K_ij, scratch_M)
ref_p_ij = np.ones(shape=(nm, nm), dtype=np.float64) + \
np.eye(nm, dtype=np.float64) * dtram.get_prior()
ref_p_ij /= ref_p_ij.sum(axis=1)[:, np.newaxis]
for K in range(nt):
assert_allclose(p_K_ij[K, :, :], ref_p_ij, atol=1.0E-16)
| lgpl-3.0 | 1,462,784,848,544,553,000 | 45.151515 | 99 | 0.66694 | false |
capitalone/cloud-custodian | tools/c7n_gcp/c7n_gcp/output.py | 1 | 6324 | # Copyright 2018-2019 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
"""
TODO: provider policy execution initialization for outputs
"""
import datetime
import logging
import time
try:
from google.cloud.storage import Bucket, Client as StorageClient
except ImportError:
Bucket, StorageClient = None, None
try:
from google.cloud.logging import Client as LogClient
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging.resource import Resource
except ImportError:
LogClient = None
from c7n.output import (
blob_outputs,
log_outputs,
metrics_outputs,
BlobOutput,
Metrics,
LogOutput)
from c7n.utils import local_session
@metrics_outputs.register('gcp')
class StackDriverMetrics(Metrics):
METRICS_PREFIX = 'custom.googleapis.com/custodian/policy'
DESCRIPTOR_COMMON = {
        'metricKind': 'GAUGE',
'labels': [{
'key': 'policy',
'valueType': 'STRING',
'description': 'Custodian Policy'}],
}
METRICS_DESCRIPTORS = {
'resourcecount': {
'type': '{}/{}'.format(METRICS_PREFIX, 'resourcecount'),
'valueType': 'INT64',
            'unit': 'items',
'description': 'Number of resources that matched the given policy',
'displayName': 'Resources',
},
'resourcetime': {
'type': '{}/{}'.format(METRICS_PREFIX, 'resourcetime'),
'valueType': 'DOUBLE',
            'unit': 's',
'description': 'Time to query the resources for a given policy',
'displayName': 'Query Time',
},
'actiontime': {
'type': '{}/{}'.format(METRICS_PREFIX, 'actiontime'),
'valueType': 'DOUBLE',
            'unit': 's',
'description': 'Time to perform actions for a given policy',
'displayName': 'Action Time',
},
}
# Custom metrics docs https://tinyurl.com/y8rrghwc
log = logging.getLogger('c7n_gcp.metrics')
def __init__(self, ctx, config=None):
super(StackDriverMetrics, self).__init__(ctx, config)
self.project_id = local_session(self.ctx.session_factory).get_default_project()
self.write_metrics_project_id = self.config.get('project_id', self.project_id)
def initialize(self):
"""One time initialization of metrics descriptors.
# tbd - unclear if this adding significant value.
"""
client = local_session(self.ctx.session_factory).client(
'monitoring', 'v3', 'projects.metricDescriptors')
descriptor_map = {
n['type'].rsplit('/', 1)[-1]: n for n in client.execute_command('list', {
'name': 'projects/%s' % self.project_id,
                'filter': 'metric.type = starts_with("{}")'.format(self.METRICS_PREFIX)}).get(
                'metricDescriptors', [])}
created = False
for name in self.METRICS_DESCRIPTORS:
if name in descriptor_map:
continue
created = True
md = self.METRICS_DESCRIPTORS[name]
md.update(self.DESCRIPTOR_COMMON)
client.execute_command(
'create', {'name': 'projects/%s' % self.project_id, 'body': md})
if created:
self.log.info("Initializing StackDriver Metrics Descriptors")
time.sleep(5)
def _format_metric(self, key, value, unit, dimensions):
# Resource is a Google controlled vocabulary with artificial
# limitations on resource type there's not much useful we can
# utilize.
now = datetime.datetime.utcnow()
metrics_series = {
'metric': {
'type': 'custom.googleapis.com/custodian/policy/%s' % key.lower(),
'labels': {
'policy': self.ctx.policy.name,
'project_id': self.project_id
},
},
'metricKind': 'GAUGE',
'valueType': 'INT64',
'resource': {
'type': 'global',
},
'points': [{
'interval': {
'endTime': now.isoformat('T') + 'Z',
'startTime': now.isoformat('T') + 'Z'},
'value': {'int64Value': int(value)}}]
}
return metrics_series
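    # For reference, one series built above looks roughly like this
    # (values illustrative):
    #   {"metric": {"type": ".../custodian/policy/resourcecount",
    #               "labels": {"policy": "...", "project_id": "..."}},
    #    "metricKind": "GAUGE", "valueType": "INT64",
    #    "resource": {"type": "global"},
    #    "points": [{"interval": {...}, "value": {"int64Value": 42}}]}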
def _put_metrics(self, ns, metrics):
session = local_session(self.ctx.session_factory)
client = session.client('monitoring', 'v3', 'projects.timeSeries')
params = {'name': "projects/{}".format(self.write_metrics_project_id),
'body': {'timeSeries': metrics}}
client.execute_command('create', params)
@log_outputs.register('gcp', condition=bool(LogClient))
class StackDriverLogging(LogOutput):
def get_log_group(self):
log_group = self.config.netloc
if log_group:
log_group = "custodian-%s-%s" % (log_group, self.ctx.policy.name)
else:
log_group = "custodian-%s" % self.ctx.policy.name
return log_group
def get_handler(self):
# TODO drop these grpc variants for the REST versions, and we can drop
# protobuf/grpc deps, and also so we can record tests.
log_group = self.get_log_group()
project_id = local_session(self.ctx.session_factory).get_default_project()
client = LogClient(project_id)
return CloudLoggingHandler(
client,
log_group,
labels={
'policy': self.ctx.policy.name,
'resource': self.ctx.policy.resource_type},
resource=Resource(type='project', labels={'project_id': project_id}))
def leave_log(self):
super(StackDriverLogging, self).leave_log()
# Flush and stop the background thread
self.handler.transport.flush()
self.handler.transport.worker.stop()
@blob_outputs.register('gs', condition=bool(StorageClient))
class GCPStorageOutput(BlobOutput):
def __init__(self, ctx, config=None):
super().__init__(ctx, config)
self.bucket = Bucket(StorageClient(), self.bucket)
def upload_file(self, path, key):
blob = self.bucket.blob(key)
blob.upload_from_filename(path)
| apache-2.0 | 3,785,309,899,641,609,700 | 33.369565 | 91 | 0.580645 | false |
mabotech/mabo.io | py/vision/vision24/vision_crop2.py | 1 | 6011 |
import time
import socket
import gevent
import numpy as np
import sys
import cv2
from load_config import LoadConfig
import cvlib
conf = LoadConfig("config.toml").config
"""
def match():
img = cv2.imread("box_in_scene2.png")#sys.argv[1])
temp = cv2.imread("box4.png")#sys.argv[2])
try:
dist = int(sys.argv[3])
except IndexError:
dist = 200
try:
num = int(sys.argv[4])
except IndexError:
num = -1
skp, tkp = findKeyPoints(img, temp, dist)
newimg = drawKeyPoints(img, temp, skp, tkp, num)
cv2.imshow("image", newimg)
cv2.waitKey(0)
"""
def supress(v, w):
#v[0],v[1],
print v
if v[2] < w/2 and v[2] > 20:# and v[0] - v[2] >0 and v[1] - v[2]>0 :
return True
def main():
print conf
target = cv2.imread(conf["app"]["target"])#sys.argv[2])
#target = cv2.cvtColor(target, cv2.COLOR_BGR2GRAY)
#print type(target)
#cv2.NamedWindow("camera", 1)
#capture = cv2.VideoCapture(0)
capture = cv2.VideoCapture(conf["app"]["camera_uri"])
i = 0
pt1 = (conf["app"]["crop_start"][0],conf["app"]["crop_start"][1])
w = conf["app"]["corp_width"]
pt2 = (pt1[0]+w,pt1[1]+w)
debug = 1# conf["app"]["debug"]
cp = [0,0]
while True:
#i = i +1
#if i > 200:
# i = 0
ret, img_read = capture.read() #cv.QueryFrame(capture)
#if i == 1:
# pass
if ret == False:
print ret,
time.sleep(0.1)
#raise(Exception("can't connect camera"))
#mat=cv2.GetMat(img)
#img_p = np.asarray(mat)
#img_p = cv.CreateImage(cv.GetSize(img),cv.IPL_DEPTH_8U,1)
#print dir(img)
"""
im_gray = cv.CreateImage(cv.GetSize(img),cv.IPL_DEPTH_8U,1)
cv.CvtColor(img,im_gray,cv.CV_RGB2GRAY)
# Sobel operator
dstSobel = cv.CreateMat(im_gray.height, im_gray.width, cv.CV_32FC1)
# Sobel(src, dst, xorder, yorder, apertureSize = 3)
cv.Sobel(im_gray,dstSobel,1,1,3)
"""
#print ret
try:
# skp: source key points, tkp: target key points
t1 = time.time()
#img[200:400, 100:300] # Crop from x, y, w, h -> 100, 200, 300, 400
#im[y1:y2, x1:x2]
#
crop_img = img_read[pt1[1]:pt2[1], pt1[0]:pt2[0]]
#print(len(crop_img))
distance = conf["app"]["distance"]
#skp, tkp = cvlib.findKeyPoints(crop_img , target, distance)
skp = 1
if skp == None:
print("skp is none")
img_read = cv2.medianBlur(img_read,5)
img_read = cv2.cvtColor(img_read, cv2.COLOR_BGR2GRAY)
cv2.imshow("camera", img_read)
#continue
else:
print "==" * 20
print "time:[%.3f]" %(time.time() - t1)
#print "skp", len(skp)#, skp
#print "tkp",len(tkp)#, tkp
if debug:
crop_img = cv2.medianBlur(crop_img,5)
gray = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY)
circles = cv2.HoughCircles(gray, cv2.cv.CV_HOUGH_GRADIENT,
30, ## dp
200, ## minDist
param1=100,
param2=100, ##
minRadius=70,
maxRadius=200)
print circles
circles = np.uint16(np.around(circles))
j = 0
cv2.rectangle(img_read, pt1, pt2, (0,255,0))
for i in circles[0,:]:
if supress(i, w):
j = j + 1
"""if i[0] - cp[0] > 30 or i[1] - cp[1] > 30 :
pass
else:
"""
cv2.circle(img_read,(pt1[0]+i[0],pt1[1]+i[1]),i[2],(0,255,0),2)
cv2.circle(img_read,(pt1[0]+i[0],pt1[1]+i[1]),2,(0,0,255),3)
cp = [ i[0], i[1] ]
#newimg = cvlib.drawKeyPoints(img_read, target, skp, tkp, pt1, pt2, -1)
cv2.imshow("camera", img_read)
#gevent.sleep(1)
except Exception as ex:
print(ex)
#gevent.sleep(3)
continue
#cv.ShowImage('camera', newimg)
# image smoothing and subtraction
# imageBlur = cv.CreateImage(cv.GetSize(im_gray), im_gray.depth, im_gray.nChannels)
# # filering the original image
# # Smooth(src, dst, smoothtype=CV_GAUSSIAN, param1=3, param2=0, param3=0, param4=0)
# cv.Smooth(im_gray, imageBlur, cv.CV_BLUR, 11, 11)
# diff = cv.CreateImage(cv.GetSize(im_gray), im_gray.depth, im_gray.nChannels)
# # subtraction (original - filtered)
# cv.AbsDiff(im_gray,imageBlur,diff)
# cv.ShowImage('camera', diff)
if cv2.waitKey(10) == 27:
break
#gevent.sleep(0.1)
# cv2.destroyWindow("camera")
if __name__ == "__main__":
main()
| mit | -8,267,754,926,154,588,000 | 25.955157 | 91 | 0.414241 | false |
copperleaftech/django-import-export | tests/core/tests/widgets_tests.py | 1 | 7800 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from decimal import Decimal
from datetime import date, datetime, time, timedelta
from unittest import SkipTest
from django.test.utils import override_settings
from django.test import TestCase
from django.utils import timezone
from import_export import widgets
from core.models import (
Author,
Category,
)
class BooleanWidgetTest(TestCase):
def setUp(self):
self.widget = widgets.BooleanWidget()
def test_clean(self):
self.assertTrue(self.widget.clean("1"))
self.assertTrue(self.widget.clean(1))
self.assertEqual(self.widget.clean(""), None)
def test_render(self):
self.assertEqual(self.widget.render(None), "")
class DateWidgetTest(TestCase):
def setUp(self):
self.date = date(2012, 8, 13)
self.widget = widgets.DateWidget('%d.%m.%Y')
def test_render(self):
self.assertEqual(self.widget.render(self.date), "13.08.2012")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.2012"), self.date)
@override_settings(USE_TZ=True)
def test_use_tz(self):
self.assertEqual(self.widget.render(self.date), "13.08.2012")
self.assertEqual(self.widget.clean("13.08.2012"), self.date)
class DateTimeWidgetTest(TestCase):
def setUp(self):
self.datetime = datetime(2012, 8, 13, 18, 0, 0)
self.widget = widgets.DateTimeWidget('%d.%m.%Y %H:%M:%S')
def test_render(self):
self.assertEqual(self.widget.render(self.datetime),
"13.08.2012 18:00:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.2012 18:00:00"),
self.datetime)
@override_settings(USE_TZ=True)
def test_use_tz(self):
self.assertEqual(self.widget.render(self.datetime),
"13.08.2012 18:00:00")
aware_dt = timezone.make_aware(self.datetime,
timezone.get_default_timezone())
self.assertEqual(self.widget.clean("13.08.2012 18:00:00"),
aware_dt)
class DateWidgetBefore1900Test(TestCase):
def setUp(self):
self.date = date(1868, 8, 13)
self.widget = widgets.DateWidget('%d.%m.%Y')
def test_render(self):
self.assertEqual(self.widget.render(self.date), "13.08.1868")
def test_clean(self):
self.assertEqual(self.widget.clean("13.08.1868"), self.date)
class TimeWidgetTest(TestCase):
def setUp(self):
self.time = time(20, 15, 0)
self.widget = widgets.TimeWidget('%H:%M:%S')
def test_render(self):
self.assertEqual(self.widget.render(self.time), "20:15:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("20:15:00"), self.time)
class DurationWidgetTest(TestCase):
def setUp(self):
try:
from django.utils.dateparse import parse_duration
except ImportError:
# Duration fields were added in Django 1.8
raise SkipTest
self.duration = timedelta(hours=1, minutes=57, seconds=0)
self.widget = widgets.DurationWidget()
def test_render(self):
self.assertEqual(self.widget.render(self.duration), "1:57:00")
def test_render_none(self):
self.assertEqual(self.widget.render(None), "")
def test_clean(self):
self.assertEqual(self.widget.clean("1:57:00"), self.duration)
class DecimalWidgetTest(TestCase):
def setUp(self):
self.value = Decimal("11.111")
self.widget = widgets.DecimalWidget()
def test_clean(self):
self.assertEqual(self.widget.clean("11.111"), self.value)
def test_render(self):
self.assertEqual(self.widget.render(self.value), self.value)
def test_clean_string_zero(self):
self.assertEqual(self.widget.clean("0"), Decimal("0"))
self.assertEqual(self.widget.clean("0.0"), Decimal("0"))
class IntegerWidgetTest(TestCase):
def setUp(self):
self.value = 0
self.widget = widgets.IntegerWidget()
def test_clean_integer_zero(self):
self.assertEqual(self.widget.clean(0), self.value)
def test_clean_string_zero(self):
self.assertEqual(self.widget.clean("0"), self.value)
self.assertEqual(self.widget.clean("0.0"), self.value)
class ForeignKeyWidgetTest(TestCase):
def setUp(self):
self.widget = widgets.ForeignKeyWidget(Author)
self.author = Author.objects.create(name='Foo')
def test_clean(self):
self.assertEqual(self.widget.clean(1), self.author)
def test_clean_empty(self):
self.assertEqual(self.widget.clean(""), None)
def test_render(self):
self.assertEqual(self.widget.render(self.author), self.author.pk)
def test_render_empty(self):
self.assertEqual(self.widget.render(None), "")
def test_clean_multi_column(self):
class BirthdayWidget(widgets.ForeignKeyWidget):
def get_queryset(self, value, row):
return self.model.objects.filter(
birthday=row['birthday']
)
author2 = Author.objects.create(name='Foo')
author2.birthday = "2016-01-01"
author2.save()
birthday_widget = BirthdayWidget(Author, 'name')
row = {'name': "Foo", 'birthday': author2.birthday}
self.assertEqual(birthday_widget.clean("Foo", row), author2)
class ManyToManyWidget(TestCase):
def setUp(self):
self.widget = widgets.ManyToManyWidget(Category)
self.widget_name = widgets.ManyToManyWidget(Category, field="name")
self.cat1 = Category.objects.create(name=u'Cat úňíčóďě')
self.cat2 = Category.objects.create(name='Cat 2')
def test_clean(self):
value = "%s,%s" % (self.cat1.pk, self.cat2.pk)
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_field(self):
value = "%s,%s" % (self.cat1.name, self.cat2.name)
cleaned_data = self.widget_name.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_field_spaces(self):
value = "%s, %s" % (self.cat1.name, self.cat2.name)
cleaned_data = self.widget_name.clean(value)
self.assertEqual(len(cleaned_data), 2)
self.assertIn(self.cat1, cleaned_data)
self.assertIn(self.cat2, cleaned_data)
def test_clean_typo(self):
value = "%s," % self.cat1.pk
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_int(self):
value = self.cat1.pk
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_float(self):
value = float(self.cat1.pk)
cleaned_data = self.widget.clean(value)
self.assertEqual(len(cleaned_data), 1)
self.assertIn(self.cat1, cleaned_data)
def test_render(self):
self.assertEqual(self.widget.render(Category.objects),
"%s,%s" % (self.cat1.pk, self.cat2.pk))
self.assertEqual(self.widget_name.render(Category.objects),
u"%s,%s" % (self.cat1.name, self.cat2.name))
| bsd-2-clause | -8,862,368,140,113,530,000 | 30.423387 | 75 | 0.629796 | false |
camilonova/sentry | src/sentry/utils/http.py | 1 | 3666 | """
sentry.utils.http
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import six
import urllib
from django.conf import settings
from urlparse import urlparse, urljoin
def absolute_uri(url=None):
if not url:
return settings.SENTRY_URL_PREFIX
return urljoin(settings.SENTRY_URL_PREFIX.rstrip('/') + '/', url.lstrip('/'))
def safe_urlencode(params, doseq=0):
"""
    UTF-8-safe version of urlencode
    The stdlib urlencode prior to Python 3.x chokes on UTF-8 values
    which can't be coerced down to ASCII.
"""
# Snippet originally from pysolr: https://github.com/toastdriven/pysolr
if hasattr(params, "items"):
params = params.items()
new_params = list()
for k, v in params:
k = k.encode("utf-8")
if isinstance(v, six.string_types):
new_params.append((k, v.encode("utf-8")))
elif isinstance(v, (list, tuple)):
new_params.append((k, [i.encode("utf-8") for i in v]))
else:
new_params.append((k, six.text_type(v)))
return urllib.urlencode(new_params, doseq)
def is_same_domain(url1, url2):
"""
Returns true if the two urls should be treated as if they're from the same
domain (trusted).
"""
url1 = urlparse(url1)
url2 = urlparse(url2)
return url1.netloc == url2.netloc
def get_origins(project=None):
if settings.SENTRY_ALLOW_ORIGIN == '*':
return frozenset(['*'])
if settings.SENTRY_ALLOW_ORIGIN:
result = settings.SENTRY_ALLOW_ORIGIN.split(' ')
else:
result = []
if project:
# TODO: we should cache this
from sentry.plugins.helpers import get_option
optval = get_option('sentry:origins', project)
if optval:
result.extend(optval)
# lowercase and strip the trailing slash from all origin values
# filter out empty values
return frozenset(filter(bool, map(lambda x: x.lower().rstrip('/'), result)))
def is_valid_origin(origin, project=None):
"""
Given an ``origin`` which matches a base URI (e.g. http://example.com)
determine if a valid origin is present in the project settings.
Origins may be defined in several ways:
- http://domain.com[:port]: exact match for base URI (must include port)
- *: allow any domain
- *.domain.com: matches domain.com and all subdomains, on any port
- domain.com: matches domain.com on any port
"""
allowed = get_origins(project)
if '*' in allowed:
return True
if not origin:
return False
# we always run a case insensitive check
origin = origin.lower()
# Fast check
if origin in allowed:
return True
# XXX: In some cases origin might be localhost (or something similar) which causes a string value
# of 'null' to be sent as the origin
if origin == 'null':
return False
parsed = urlparse(origin)
# There is no hostname, so the header is probably invalid
if parsed.hostname is None:
return False
for valid in allowed:
if '://' in valid:
# Support partial uri matches that may include path
if origin.startswith(valid):
return True
continue
if valid[:2] == '*.':
# check foo.domain.com and domain.com
if parsed.hostname.endswith(valid[1:]) or parsed.hostname == valid[2:]:
return True
continue
if parsed.hostname == valid:
return True
return False
| bsd-3-clause | 5,602,243,267,741,755,000 | 26.358209 | 101 | 0.619749 | false |
sauli6692/ibc-server | core/models/user.py | 1 | 2164 | from django.db import models
from django.utils import timezone
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.base_user import BaseUserManager, AbstractBaseUser
from django.utils.translation import ugettext_lazy as _
from .mixins.log_fields import LogFieldsMixin
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, username, password, is_superuser, **extra_fields):
"""Creates user with username and password."""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
user = self.model(
username=username,
is_active=True,
is_superuser=is_superuser,
last_login=now,
**extra_fields
)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, password=None, **extra_fields):
return self._create_user(username, password, False, **extra_fields)
def create_superuser(self, username, password, **extra_fields):
return self._create_user(username, password, True, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin, LogFieldsMixin):
class Meta:
verbose_name = _('User')
verbose_name_plural = _('Users')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
objects = UserManager()
username = models.CharField(_('User'), max_length=50, unique=True)
is_active = models.BooleanField(_('Active'), default=True,)
owner = models.OneToOneField(
'pmm.Member',
verbose_name=_('Owner'),
on_delete=models.CASCADE,
null=False
)
@property
def is_staff(self):
"""Needed to acces to admin."""
return self.is_superuser
def __str__(self):
return self.username
def get_full_name(self):
"""Get member full name."""
return '{0} {1}'.format(self.owner.first_name, self.owner.last_name).strip()
def get_short_name(self):
"""Get member first name."""
return self.owner.first_name
| mit | -6,175,051,724,267,287,000 | 29.055556 | 84 | 0.635397 | false |
relic7/prodimages | python/pm_update_photodate_andmultistyles.py | 1 | 8394 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys, re, csv
def update_pm_photodate(colorstyle):
import subprocess
update_url = 'http://dmzimage01.l3.bluefly.com:8080/photo/{0}'.format(colorstyle)
subprocess.call([
"curl",
'-d',
"sample_image=Y",
'-d',
"photographed_date=now",
"-X",
"PUT",
"-format",
update_url,
])
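# Roughly equivalent sketch using the requests library instead of shelling
# out to curl (assumes requests is available; same endpoint and form fields):
#   import requests
#   requests.put(update_url,
#                data={'sample_image': 'Y', 'photographed_date': 'now'})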
def found3digit_rename(filename):
import os
#print filename
fdir = os.path.dirname(filename)
destdir = fdir #.strip("['")
#print destdir
fname = os.path.basename(filename)
style = fname.split('_')[0]
ext = fname.split('.')[-1]
oldname = filename
incr = 1
newstyle = str(style + "_" + str(incr) + "." + ext)
newname = os.path.join(destdir, newstyle)
    # bump the numeric suffix until the target filename is unused
    while os.path.isfile(newname):
        incr += 1
        newstyle = str(style + "_" + str(incr) + "." + ext)
        newname = os.path.join(destdir, newstyle)
        print newname
else:
#print newname
os.rename(oldname,newname)
return
def splitswim_updatepm(file_path):
import re
regex_fullmultistyle = re.compile(r'^.+?/[1-9][0-9]{8}_[b-zB-Z][a-zA-Z]{1,10}[1-9][0-9]{8}_[1-6].+?\.CR2')
#regex_multistyle = re.compile(r'^.+?/[1-9][0-9]{8}_[1-6]\.jpg')
regex_split = re.compile(r'[b-zB-Z][a-zA-Z]{1,10}')
if re.findall(regex_fullmultistyle, file_path):
print "Multistyle".format(file_path)
try:
fname = file_path.split('/')[-1]
secondarycat = re.split(regex_split, fname)
primarystyle = secondarycat[0][:9]
secondarystyle = secondarycat[1][:9]
# m = re.match(r"(\d+)\.?(\d+)?", "24")
#m = re.match(regex_fullmultistyle,file_path)
# m.groups('0') # Now, the second group defaults to '0'.
# groupdict([m])
#primarystyle = m.groups('0')[0]
#secondarystyle = m.groups('0')[1]
# try:
# secondarycategory = fname.split('_')[2]
# print secondarycategory,"SECOND"
# except:
# pass
# print primarystyle,secondarystyle
try:
return primarystyle, secondarystyle
except:
pass
except OSError:
print "FailedSwimSplit {}".format(file_path)
pass
##############################RUN###########################
from PIL import Image
import os, sys, re, glob, datetime
todaysdate = str(datetime.date.today())
todaysfolder = "{0}{1}{2}_".format(todaysdate[5:7],todaysdate[8:10],todaysdate[2:4])
eFashion_root = '/mnt/Post_Ready/eFashionPush'
aPhoto_root = '/mnt/Post_Ready/aPhotoPush'
#rootdir = sys.argv[1]
#walkedout = recursive_dirlist(rootdir)
regex = re.compile(r'.*?/[0-9]{9}_[1].*?\.[jpgJPGCR2]{3}$')
regex_raw = re.compile(r'.*?/RAW/.+?/[0-9]{9}_[1].*?\.[jpgJPGCR2]{3}$')
#regex = re.compile(r'.+?\.[jpgJPG]{3}$')
basedir = os.path.join('/mnt/Production_Raw/PHOTO_STUDIO_OUTPUT/ON_FIGURE/*/', todaysfolder + '*')
basedirstill = os.path.join(aPhoto_root, todaysfolder + '*')
flagged = ''
try:
args = sys.argv
except:
args = ''
if args and len(args) == 1:
globalldirs = os.path.abspath(sys.argv[1])
#globexportdir = os.path.abspath(sys.argv[1])#glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
#globstilldir = os.path.abspath(sys.argv[1])#'.'
flagged = 'SET'# glob.glob(os.path.join(basedirstill, "*/*.jpg"))
elif args and len(args) > 1:
globalldirs = args[1:]
flagged = 'SET'
else:
#globrawdir = glob.glob(os.path.join(basedir, "*/*/*.CR2"))
#globexportdir = glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
globstilldir = glob.glob(os.path.join(basedirstill, "*/*.jpg"))
#curl -d sample_image=Y -d photographed_date=now -X PUT http://dmzimage01.l3.bluefly.com:8080/photo/"$outName"
globalldirs = globstilldir
colorstyles_unique = []
#stylestringsdict = {}
for line in globalldirs:
#stylestringsdict_tmp = {}
regex_fullmultistyle = re.compile(r'^.+?/[1-9][0-9]{8}_[b-zB-Z][a-zA-Z]{1,10}[1-9][0-9]{8}_[1-6].+?\.CR2')
try:
if re.findall(regex_fullmultistyle, line):
swimpair = splitswim_updatepm(line)
primarystyle = swimpair[0]
secondarystyle = swimpair[1]
#if primarystyle not in colorstyles_unique:
print "YAY_SWIMTOP-->{0}".format(primarystyle)
colorstyles_unique.append(primarystyle)
colorstyles_unique = sorted(colorstyles_unique)
#if secondarystyle not in colorstyles_unique:
print "YAY_SWIMBOTTOM-->{0}".format(secondarystyle)
colorstyles_unique.append(secondarystyle)
colorstyles_unique = sorted(colorstyles_unique)
elif re.findall(regex_raw,line):
try:
file_path = line
filename = file_path.split('/')[-1]
colorstyle = filename.split('_')[0]
alt = filename.split('_')[1]
shot_ext = file_path.split('_')[-1]
shot_number = shot_ext.split('.')[0]
ext = shot_ext.split('.')[-1]
## Unique Styles Only
if colorstyle not in colorstyles_unique:
print colorstyle
colorstyles_unique.append(colorstyle)
colorstyles_unique = sorted(colorstyles_unique)
else:
print "Already Accounted {0}".format(colorstyle)
except IOError:
print "IOError on {0}".format(line)
except AttributeError:
print "AttributeError on {0}".format(line)
## If file_path doesnt match the Regular 9digit_# format, checks for 2 styles in 1 shot
elif len(line) == 9 and line.isdigit():
colorstyle = line
colorstyles_unique.append(colorstyle)
colorstyles_unique = sorted(colorstyles_unique)
else:
try:
file_path = line
filename = file_path.split('/')[-1]
colorstyle = filename.split('_')[0]
alt = filename.split('_')[1]
#shot_ext = file_path.split('_')[-1]
#shot_number = shot_ext.split('.')[0]
#ext = shot_ext.split('.')[-1]
## Unique Styles Only
if colorstyle not in colorstyles_unique:
print colorstyle
colorstyles_unique.append(colorstyle)
colorstyles_unique = sorted(colorstyles_unique)
else:
print "Already Accounted {0}".format(colorstyle)
except IOError:
print "IOError on {0}".format(line)
except AttributeError:
print "AttributeError on {0}".format(line)
except:
print "Error appending {}".format(line)
pass
############ Send Shots to PM API to update photodate
colorstyles_unique = sorted(set(colorstyles_unique))
for colorstyle in colorstyles_unique:
try:
update_pm_photodate(colorstyle)
except:
print "FAILED UPDATE for {0}".format(colorstyle)
########### Check for Exports Remove Shot Number & and Move to eFashionPush ##########
if not flagged:
    try:
        import shutil
        globexportdir = glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
        if globexportdir:
            try:
                for f in globexportdir:
                    found3digit_rename(f)
            except:
                print 'Failed'
### Get ShootDir Name from last "f" in previous glob and rename ops, then create if not exist
## eFashionPush Dir to Create for Exports used below
eFashion_name = file_path.split('/')[6]
#eFashion_name = '121913'
eFashion_dir = os.path.join(eFashion_root, eFashion_name)
# if not os.path.isdir(eFashion_dir):
# os.makedirs(eFashion_dir, 16877)
## Refresh and Get Renamed files then copy to eFashion Dir
globexportdir = glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
if globexportdir:
for f in globexportdir:
shutil.copy2(f, eFashion_dir)
except:
pass | mit | 3,212,388,557,149,291,500 | 33.979167 | 114 | 0.546938 | false |
jonnybazookatone/adsws | adsws/tests/test_commands.py | 1 | 12919 | import time
import datetime
import random
from werkzeug.security import gen_salt
from flask.ext.testing import TestCase
from adsws.modules.oauth2server.models import OAuthClient, Scope, OAuthToken
from adsws.tests.test_accounts import AccountsSetup
from adsws.core.users import User
from adsws.core import db, user_manipulator
from adsws import factory
from adsws.testsuite import make_test_suite, run_test_suite
from adsws.accounts.manage import cleanup_tokens, cleanup_clients, \
cleanup_users, parse_timedelta, update_scopes
class TestManageScopes(AccountsSetup):
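    """Tests for the update_scopes management command."""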
def _create_client(self, client_id='test', user_id=0, scopes='adsws:internal'):
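        """Create, persist and return an OAuthClient row for the given user."""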
# create a client in the database
c1 = OAuthClient(
client_id=client_id,
client_secret='client secret %s' % random.random(),
name='bumblebee',
description='',
is_confidential=False,
user_id=user_id,
_redirect_uris='%s/client/authorized' % self.app.config.get('SITE_SECURE_URL'),
_default_scopes=scopes
)
db.session.add(c1)
db.session.commit()
return OAuthClient.query.filter_by(client_secret=c1.client_secret).one()
def _create_token(self, client_id='test', user_id=0, scopes='adsws:internal'):
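        """Create, persist and return an OAuthToken row for the given client/user."""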
token = OAuthToken(
client_id=client_id,
user_id=user_id,
access_token='access token %s' % random.random(),
refresh_token='refresh token %s' % random.random(),
expires=datetime.datetime(2500, 1, 1),
_scopes=scopes,
is_personal=False,
is_internal=True,
)
db.session.add(token)
db.session.commit()
return OAuthToken.query.filter_by(id=token.id).one()
def test_update_scopes_forced(self):
"""Verify that scopes are updated without clients"""
self._create_client('test0', 0, scopes='')
self._create_token('test0', 0, scopes='adsws:foo one')
self._create_token('test0', 1, scopes='adsws:foo one two')
update_scopes(self.app, 'adsws:foo one', 'foo bar', force_token_update=True)
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='bar foo').all()) == 0)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='bar foo').all()) == 1)
def test_update_scopes(self):
"""Verify that scopes are updated properly"""
self._create_client('test0', 0, scopes='adsws:foo')
self._create_client('test1', 1, scopes='adsws:foo one two')
self._create_client('test2', 2, scopes='adsws:foo two one')
self._create_client('test3', 3, scopes='adsws:foo')
self._create_client('test4', 4, scopes='adsws:foo')
self._create_token('test0', 0, scopes='adsws:foo one')
self._create_token('test1', 1, scopes='adsws:foo one two')
self._create_token('test1', 1, scopes='adsws:foo two one')
self._create_token('test1', 1, scopes='foo bar')
# normally, scopes will be sorted alphabetically (but we fake it here)
        self.assertIsNotNone(OAuthClient.query.filter_by(_default_scopes='adsws:foo one two').one())
        self.assertIsNotNone(OAuthClient.query.filter_by(_default_scopes='adsws:foo two one').one())
update_scopes(self.app, 'adsws:foo one two', 'foo bar')
        # the manager will save scopes alphabetically sorted
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='bar foo').all()) == 2)
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='adsws:foo').all()) == 3)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='bar foo').all()) == 2)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='adsws:foo one').all()) == 1)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='foo bar').all()) == 1)
update_scopes(self.app, 'foo bar', 'xxx')
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='bar foo').all()) == 0)
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='xxx').all()) == 2)
self.assertTrue(len(OAuthClient.query.filter_by(_default_scopes='adsws:foo').all()) == 3)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='bar foo').all()) == 0)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='adsws:foo one').all()) == 1)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='foo bar').all()) == 0)
self.assertTrue(len(OAuthToken.query.filter_by(_scopes='xxx').all()) == 3)
class TestManage_Accounts(TestCase):
"""
Tests for manage.py/flask.ext.script commands
"""
def tearDown(self):
db.drop_all(app=self.app)
def setUp(self):
"""
Sets up all of the users, clients, and tokens that management commands
will run against.
"""
db.create_all(app=self.app)
now = datetime.datetime.now()
delta = datetime.timedelta
times = [
now,
now-delta(seconds=3),
now+delta(seconds=3),
now+delta(hours=1),
]
self.times = times # Save for comparisons in the tests
# This is a user that has registered but not confirmed their account
u = user_manipulator.create(
email="unconfirmed@unittest",
registered_at=now+delta(seconds=1),
)
db.session.add(u)
# This is a user that has registered but not confirmed their account,
# and furthermore will not have a registered_at attribute set
u = user_manipulator.create(
email="blankuser@unittest",
)
db.session.add(u)
# This is a user that has registered and confirmed their account
u = user_manipulator.create(
email="user@unittest",
registered_at=now,
confirmed_at=now,
)
db.session.add(u)
for _time in times:
client = OAuthClient(
user_id=u.id,
client_id=gen_salt(20),
client_secret=gen_salt(20),
is_confidential=False,
is_internal=True,
_default_scopes="",
last_activity=_time,
)
db.session.add(client)
token = OAuthToken(
client_id=client.client_id,
user_id=u.id,
access_token=gen_salt(20),
refresh_token=gen_salt(20),
expires=_time,
_scopes="",
is_personal=False,
is_internal=True,
)
db.session.add(token)
# Add a client without a last_activity to verify that the cleanup
# scripts do not break under this condition
client = OAuthClient(
user_id=u.id,
client_id=gen_salt(20),
client_secret=gen_salt(20),
is_confidential=False,
is_internal=True,
_default_scopes="",
)
db.session.add(client)
# Add a token without an expiry to verify that the cleanup scripts
# do not break under this condition
token = OAuthToken(
client_id=client.client_id,
user_id=u.id,
access_token=gen_salt(20),
refresh_token=gen_salt(20),
_scopes="",
is_personal=False,
is_internal=True,
)
db.session.add(token)
db.session.commit()
def create_app(self):
app = factory.create_app(
SQLALCHEMY_BINDS=None,
SQLALCHEMY_DATABASE_URI='sqlite://',
EXTENSIONS=['adsws.ext.sqlalchemy'],
DEBUG=False,
)
return app
def test_parse_datetime(self):
"""
Tests that a string formatted datetime is correctly parsed
"""
td = parse_timedelta("days=31")
self.assertIsInstance(td, datetime.timedelta)
self.assertEqual(td.total_seconds(), 31*24*60*60)
td = parse_timedelta("hours=23")
self.assertIsInstance(td, datetime.timedelta)
self.assertEqual(td.total_seconds(), 23*60*60)
def test_cleanup_user(self):
"""
Tests that unconfirmed users are properly expunged from the database
as a result of the management:cleanup_users function
"""
original_users = [u.email for u in db.session.query(User).all()]
# This should not remove any users, since our one unconfirmed user
# has a registration time of 1 second into the future
        # Additionally, ensure that users with a null registered_at attribute
        # are not deleted
cleanup_users(app_override=self.app, timedelta="seconds=0.1")
users = [u.email for u in db.session.query(User).all()]
self.assertItemsEqual(original_users, users)
# After sleeping 1 second, registered_at should be now. Sleep for an
# additional 0.1 sec so that cleanup_clients with timedelta=0.1s
# should delete the "unconfirmed@unittest" user
time.sleep(1.1)
cleanup_users(app_override=self.app, timedelta="seconds=0.1")
users = [u.email for u in db.session.query(User).all()]
self.assertNotEqual(original_users, users)
self.assertNotIn("unconfirmed@unittest", users)
def test_cleanup_token(self):
"""
Tests that expired oauth2tokens are properly removed from the database
as a result of the cleanup_token procedure
"""
original_tokens = db.session.query(OAuthToken).all()
self.assertEqual(5, len(original_tokens))
self.assertItemsEqual(
filter(None, [i.expires for i in original_tokens]),
self.times,
)
# Only those tokens which have already expired should be removed
cleanup_tokens(app_override=self.app)
current_tokens = db.session.query(OAuthToken).all()
self.assertNotEqual(original_tokens, current_tokens)
self.assertEqual(3, len(current_tokens))
self.assertEqual(
filter(None, [i.expires for i in current_tokens]),
[i for i in self.times if i >= datetime.datetime.now()],
)
# Sleep to let one token expire
# and check that this token has been removed after calling
# the cleanup_tokens script again
time.sleep(3)
cleanup_tokens(app_override=self.app)
current_tokens = db.session.query(OAuthToken).all()
self.assertNotEqual(original_tokens, current_tokens)
self.assertEqual(2, len(current_tokens))
self.assertEqual(
filter(None, [i.expires for i in current_tokens]),
[i for i in self.times if i >= datetime.datetime.now()],
)
def test_cleanup_client(self):
"""
Tests that oauth2clients whose last_activity attribute are properly
removed from the database as a result of the cleanup_client procedure
"""
original_clients = db.session.query(OAuthClient).all()
self.assertEqual(5, len(original_clients))
self.assertItemsEqual(
filter(None, [i.last_activity for i in original_clients]),
self.times,
)
# No clients should be cleaned
cleanup_clients(app_override=self.app, timedelta="days=31")
current_clients = db.session.query(OAuthClient).all()
self.assertEqual(5, len(current_clients))
# Cleanup all clients that are older than 0 seconds from now()
cleanup_clients(app_override=self.app, timedelta="seconds=0")
current_clients = db.session.query(OAuthClient).all()
self.assertEqual(3, len(current_clients))
# Wait 3 seconds, then perform the same cleanup. Should have one less
# client after this operation.
time.sleep(3.1)
cleanup_clients(app_override=self.app, timedelta="seconds=0.1")
current_clients = db.session.query(OAuthClient).all()
self.assertEqual(2, len(current_clients))
# Cleanup the client whose last_activity was set to 1 hour
# into the future. This case should never happen in practice!
cleanup_clients(app_override=self.app, timedelta="hours=-1")
current_clients = db.session.query(OAuthClient).all()
self.assertEqual(1, len(current_clients))
# Only the client with last_activity=None should remain
current_clients = db.session.query(OAuthClient).all()
self.assertEqual(1, len(current_clients))
self.assertIsNone(current_clients[0].last_activity)
TEST_SUITE = make_test_suite(TestManage_Accounts, TestManageScopes)
if __name__ == "__main__":
run_test_suite(TEST_SUITE) | gpl-2.0 | -6,234,452,241,877,001,000 | 38.753846 | 101 | 0.606781 | false |
baklanovp/pystella | tests/test_band.py | 1 | 8215 | import unittest
import numpy as np
import pystella.rf.band as band
from pystella.rf.rad_func import Flux2MagAB, MagAB2Flux
from pystella.util.phys_var import phys
__author__ = 'bakl'
class BandTests(unittest.TestCase):
def test_load_names(self):
bands = band.band_load_names()
self.assertTrue(len(bands) > 0, "You have to see more bands. Not %d" % len(bands))
def test_band_colors_name(self):
bands = band.band_load_names()
for bname in bands:
self.assertTrue(bname in band.colors(), "You have not color for band: %s" % bname)
def test_band_by_name(self):
b = band.band_by_name("BesU")
self.assertTrue(b.is_load, "The band should be loaded and with data")
def test_aliases_load(self):
band.Band.load_settings()
aliases = band.band_get_aliases()
self.assertTrue(len(aliases), "Should be more aliases.")
def test_aliases(self):
bo = band.band_by_name("BesU")
ba = band.band_by_name("U")
self.assertTrue(ba.is_load, "The band should be loaded and with data")
self.assertCountEqual(bo.wl, ba.wl, msg="The alias wl should be the same as original")
        self.assertCountEqual(bo.resp_wl, ba.resp_wl, msg="The alias response should be the same as original")
def test_available_bands(self):
bands = ['U', 'B', 'V', 'R', "I"]
for n in bands:
b = band.band_by_name(n)
self.assertTrue(b is not None, "Band %s does not exist." % b)
bands = ['g', 'i', 'r', 'u', "z"]
for n in bands:
b = band.band_by_name(n)
self.assertTrue(b is not None, "Band %s does not exist." % b)
bands4 = dict(UVM2="photonUVM2.dat", UVW1="photonUVW1.dat", UVW2="photonUVW2.dat",
SwiftU="photonU_Swift.dat", SwiftB="photonB_Swift.dat", SwiftV="photonV_Swift.dat")
bands = ['SwiftU', 'SwiftB', 'SwiftV', 'UVM2', "UVW1", "UVW2"]
for n in bands:
b = band.band_by_name(n)
self.assertTrue(b is not None, "Band %s does not exist." % n)
def test_zero_point(self):
zp = 0.748 # See filters.ini
b = band.band_by_name('U')
self.assertAlmostEqual(b.zp, zp, msg="Zero points of band %s equals %f. Should be %f" % (b.Name, b.zp, zp))
def test_band_ubvri(self):
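        """Plot the combined UBVRI response curve (opens a window; visual check only)."""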
import pylab as plt
b = band.band_by_name(band.Band.NameUBVRI)
plt.plot(b.wl * phys.cm_to_angs, b.resp_wl, band.colors(b.Name), label=b.Name, linewidth=2)
plt.legend(loc=4)
plt.ylabel('Amplitude Response')
plt.xlabel('Wave [A]')
plt.grid(linestyle=':')
plt.show()
def test_wl_eff(self):
# wl_eff = {'U': 3650, 'B': 4450, 'V': 5510, 'R': 6580, 'I': 8060}
wl_eff = {'u': 3560, 'g': 4830, 'r': 6260, 'i': 7670, 'z': 8890,
'U': 3600, 'B': 4380, 'V': 5450, 'R': 6410, 'I': 7980, 'J': 12200, 'H': 16300,
'K': 21900}
for bname, wl in wl_eff.items():
b = band.band_by_name(bname)
res = b.wl_eff_angs
print('{} {:.0f} VS {:.0f}'.format(bname, res, wl))
self.assertAlmostEqual(res, wl, delta=wl * 0.03, msg="The effective wavelength of band %s equals %f. "
"Should be %f" % (b.Name, res, wl))
def test_fwhm(self):
# wl_eff = {'U': 3650, 'B': 4450, 'V': 5510, 'R': 6580, 'I': 8060}
wl_fwhm = {'U': 660., 'B': 940., 'V': 880, 'R': 1380., 'I': 1490.,
'J': 2130., 'H': 3070., 'K': 3900.} # AA
# convert to cm
wl_fwhm = {bn: wl * 1e-8 for bn, wl in wl_fwhm.items()}
for bname, wl in wl_fwhm.items():
b = band.band_by_name(bname)
res = b.fwhm
print('{} {:.3e} VS {:.3e}'.format(bname, res, wl))
self.assertAlmostEqual(res, wl, delta=wl * 0.1,
msg="The fwhm of band {} equals {:.3e}. Should be {:.3e}".format(b.Name, res, wl))
def test_band_uniform(self):
b = band.BandUni()
self.assertTrue(np.any(b.resp_wl == 1), "Response values is equal 1. band: %s" % b.name)
self.assertTrue(np.any(b.resp_fr == 1), "Response values is equal 1. band: %s" % b.name)
def test_band_zp_vs_Jy(self):
bands = band.band_load_names()
for bname in bands:
b = band.band_by_name(bname)
if b.is_zp and b.is_Jy:
m_ab = Flux2MagAB(b.Jy * phys.jy_to_erg)
self.assertAlmostEqual(m_ab, b.zp, msg="Band [%s] zp and Jy should be coincide each other. "
"zp=%f, m_zp(Jy) = %f, Jy = %f"
% (b.Name, b.zp, m_ab, b.Jy),
delta=0.01)
def test_zp_AB(self):
# see https://www.gemini.edu/sciops/instruments/magnitudes-and-fluxes
# see http://ssc.spitzer.caltech.edu/warmmission/propkit/pet/magtojy/
qq = 1.
qq1 = MagAB2Flux(Flux2MagAB(qq))
self.assertAlmostEqual(qq, qq1, msg="MagAB2Flux(Flux2MagAB(x)) %f. Should be %f" % (qq, qq1), delta=0.05)
# U
f, ab = 1823., 0.748
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
self.assertAlmostEqual(f_ab, f, msg="Zero points of band U equals %f. Should be %f" % (f_ab, f), delta=10.05)
self.assertAlmostEqual(m_ab, ab, msg="Zero points of band U equals %f. Should be %f" % (m_ab, ab), delta=0.003)
# B
f, ab = 4260., -0.174
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
self.assertAlmostEqual(f_ab, f, msg="Zero points of band B equals %f. Should be %f" % (f_ab, f), delta=10.05)
self.assertAlmostEqual(m_ab, ab, msg="Zero points of band B equals %f. Should be %f" % (m_ab, ab), delta=0.003)
# V
f, ab = 3640., -0.0028 # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
# f, ab = 3781., -0.044
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
self.assertAlmostEqual(f_ab, f, msg="Zero points of band V equals %f. Should be %f" % (f_ab, f), delta=10.05)
self.assertAlmostEqual(m_ab, ab, msg="Zero points of band V equals %f. Should be %f" % (m_ab, ab), delta=0.005)
# R
f, ab = 3080., 0.18 # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band R equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band R equals %f. Should be %f" % (m_ab, ab), delta=0.005)
# I
f, ab = 2550., 0.38 # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
# f, ab = 3781., -0.044
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band I equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band I equals %f. Should be %f" % (m_ab, ab), delta=0.005)
# J
f, ab = 1600., 0.88970004336
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
print("Flux of Zero points of band u equals %f. m_zp = %f" % (f, m_ab))
self.assertAlmostEqual(f_ab, f, msg="Zero points of band u equals %f. Should be %f" % (f_ab, f), delta=10.05)
self.assertAlmostEqual(m_ab, ab, msg="Zero points of band u equals %f. Should be %f" % (m_ab, ab), delta=0.003)
# g
f, ab = 3991., -0.103
m_ab = Flux2MagAB(f * phys.jy_to_erg)
f_ab = MagAB2Flux(ab) / phys.jy_to_erg
self.assertAlmostEqual(f_ab, f, msg="Zero points of band g equals %f. Should be %f" % (f_ab, f), delta=10.05)
self.assertAlmostEqual(m_ab, ab, msg="Zero points of band g equals %f. Should be %f" % (m_ab, ab), delta=0.005)
def main():
unittest.main()
if __name__ == '__main__':
main()
| mit | 4,692,399,836,042,024,000 | 45.676136 | 119 | 0.546683 | false |
inspirehep/json-merger | json_merger/config.py | 1 | 3179 | # -*- coding: utf-8 -*-
#
# This file is part of Inspirehep.
# Copyright (C) 2016 CERN.
#
# Inspirehep is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Inspirehep is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Inspirehep; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
class DictMergerOps(object):
"""Possible strategies for merging two base values.
Attributes:
FALLBACK_KEEP_HEAD: In case of conflict keep the `head` value.
FALLBACK_KEEP_UPDATE: In case of conflict keep the `update` value.
"""
allowed_ops = [
'FALLBACK_KEEP_HEAD',
'FALLBACK_KEEP_UPDATE'
]
@staticmethod
def keep_longest(head, update, down_path):
"""Keep longest field among `head` and `update`.
"""
        if update is None:
            return 'f'  # 'f': keep the first value (head)
        if head is None:
            return 's'  # 's': keep the second value (update)
        return 'f' if len(head) >= len(update) else 's'
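# Expose each allowed op as a class attribute, e.g.
# DictMergerOps.FALLBACK_KEEP_HEAD == 'FALLBACK_KEEP_HEAD'.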
for mode in DictMergerOps.allowed_ops:
setattr(DictMergerOps, mode, mode)
class UnifierOps(object):
"""
Attributes:
KEEP_ONLY_HEAD_ENTITIES: Merge entities in `update` with their match
in `head` having as a base the match in `root`.
KEEP_ONLY_UPDATE_ENTITIES: Merge entities in 'head' with their match
in `update` having as a base the match in `root`.
KEEP_UPDATE_AND_HEAD_ENTITIES_HEAD_FIRST: Perform an union of all
entities from `head` and `update` and merge the matching ones.
Also, preserve the order relations between the entities in both
lists. If two entities can have the same position first pick the
one that is present in the `head` object.
KEEP_UPDATE_AND_HEAD_ENTITIES_UPDATE_FIRST: Same behavior as
KEEP_UPDATE_AND_HEAD_ENTITIES_HEAD_FIRST but first pick the
`update` entities.
KEEP_UPDATE_ENTITIES_CONFLICT_ON_HEAD_DELETE: If an entity was added
in the diff between the `root` and `head` lists but it's not
present in the `update` list then raise a conflict.
"""
allowed_ops = [
'KEEP_ONLY_HEAD_ENTITIES',
'KEEP_ONLY_UPDATE_ENTITIES',
'KEEP_UPDATE_AND_HEAD_ENTITIES_HEAD_FIRST',
'KEEP_UPDATE_AND_HEAD_ENTITIES_UPDATE_FIRST',
'KEEP_UPDATE_ENTITIES_CONFLICT_ON_HEAD_DELETE',
'KEEP_UPDATE_AND_HEAD_ENTITIES_CONFLICT_ON_HEAD_DELETE',
]
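# Likewise expose each allowed op as a class attribute on UnifierOps.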
for mode in UnifierOps.allowed_ops:
setattr(UnifierOps, mode, mode)
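# Usage sketch (illustrative only; the Merger class is defined elsewhere in
# json_merger):
#     merger = Merger(root, head, update,
#                     DictMergerOps.FALLBACK_KEEP_HEAD,
#                     UnifierOps.KEEP_UPDATE_AND_HEAD_ENTITIES_HEAD_FIRST)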
| gpl-2.0 | -2,736,223,809,281,155,000 | 35.125 | 76 | 0.670337 | false |
brylie/qgis-plugin-builder-test | help/source/conf.py | 1 | 7041 | # -*- coding: utf-8 -*-
#
# buildertest documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 12 17:11:03 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'buildertest'
copyright = u'2013, Brylie'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'templateclassdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'buildertest.tex', u'buildertest Documentation',
u'Brylie', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'templateclass', u'buildertest Documentation',
[u'Brylie'], 1)
]
| gpl-2.0 | -8,881,389,148,746,164,000 | 31.597222 | 80 | 0.709274 | false |
dceresoli/ce-espresso | test-suite/testcode/bin/testcode.py | 1 | 32042 | #!/usr/bin/env python2
'''testcode [options] [action1 [action2...]]
testcode is a simple framework for comparing output from (principally numeric)
programs to previous output to reveal regression errors or miscompilation.
Run a set of actions on a set of tests.
Available actions:
compare compare set of test outputs from a previous testcode
run against the benchmark outputs.
diff diff set of test outputs from a previous testcode
run against the benchmark outputs.
make-benchmarks create a new set of benchmarks and update the userconfig
file with the new benchmark id. Also runs the 'run'
action unless the 'compare' action is also given.
recheck compare a set of test outputs and rerun failed tests.
run run a set of tests and compare against the benchmark
outputs. Default action.
tidy Remove files from previous testcode runs from the test
directories.
Requires two configuration files, jobconfig and userconfig. See testcode
documentation for further details.'''
# copyright: (c) 2012 James Spencer
# license: modified BSD; see LICENSE for more details
import glob
import optparse
import os
import re
import subprocess
import sys
import threading
import time
try:
import testcode2
except ImportError:
# try to find testcode2 assuming it is being run directly from the source
# layout.
SCRIPT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
TESTCODE2_LIB = os.path.join(SCRIPT_DIR, '../lib/')
sys.path.extend([TESTCODE2_LIB])
import testcode2
import testcode2.config
import testcode2.util
import testcode2.compatibility
import testcode2.exceptions
import testcode2.validation
#--- testcode initialisation ---
def init_tests(userconfig, jobconfig, test_id, reuse_id, executables=None,
categories=None, nprocs=-1, benchmark=None, userconfig_options=None,
jobconfig_options=None):
'''Initialise tests from the configuration files and command-line options.
userconfig, executables, test_id and userconfig_options are passed to
testcode2.config.userconfig.
jobconfig and jobconfig_options are passed to testcode2.config.parse_jobconfig.
categories is passed to testcode2.config.select_tests.
test_id is used to set the test identifier. If test_id is null and reuse_id
is true, then the identifier is set to that of the last tests run by testcode;
otherwise a unique identifier based upon the date is used.
nprocs is the number of processors each test is run on. If negative, the
defaults in the configuration files are used.
benchmark is the benchmark id labelling the set of benchmarks to compare the
tests too. If None, the default in userconfig is used.
Returns:
user_options: dictionary containing user options specified in userconfig.
test_programs: dict of the test programs defined in userconfig.
tests: list of selected tests.
'''
config_exists = os.path.exists(userconfig) and os.path.exists(jobconfig)
try:
(user_options, test_programs) = testcode2.config.parse_userconfig(
userconfig, executables, test_id, userconfig_options)
except testcode2.exceptions.TestCodeError:
err = str(sys.exc_info()[1])
if not config_exists:
err += (' Please run from a directory containing (or specify) the '
'userconfig file. Use ``--help`` to see available options.')
raise testcode2.exceptions.TestCodeError(err)
# Set benchmark if required.
if benchmark:
for key in test_programs:
test_programs[key].benchmark = [benchmark]
try:
(tests, test_categories) = testcode2.config.parse_jobconfig(
jobconfig, user_options, test_programs, jobconfig_options)
except testcode2.exceptions.TestCodeError:
err = str(sys.exc_info()[1])
if not config_exists:
err += (' Please run from a directory containing (or specify) the '
'jobconfig file. Use ``--help`` to see available options.')
raise testcode2.exceptions.TestCodeError(err)
# Set number of processors...
if nprocs >= 0:
for test in tests:
test.nprocs = nprocs
if test.nprocs < test.min_nprocs:
test.nprocs = test.min_nprocs
if test.nprocs > test.max_nprocs:
test.nprocs = test.max_nprocs
# parse selected job categories from command line
# Remove those tests which weren't run most recently if comparing.
if categories:
tests = testcode2.config.select_tests(tests, test_categories,
categories, os.path.abspath(os.path.dirname(userconfig)))
# Sort by path (as that's how they appear in the user's directory).
tests.sort(key=lambda test: test.path)
if not test_id:
test_id = testcode2.config.get_unique_test_id(tests, reuse_id,
user_options['date_fmt'])
for key in test_programs:
test_programs[key].test_id = test_id
return (user_options, test_programs, tests)
#--- create command line interface ---
def parse_cmdline_args(args):
'''Parse command line arguments.
args: list of supplied arguments.
Returns:
options: object returned by optparse containing the options.
actions: list of testcode2 actions to run.
'''
    # Curse not being able to use argparse in order to support python < 2.7!
parser = optparse.OptionParser(usage=__doc__)
allowed_actions = ['compare', 'run', 'diff', 'tidy', 'make-benchmarks',
'recheck']
parser.add_option('-b', '--benchmark', help='Set the file ID of the '
'benchmark files. Default: specified in the [user] section of the '
'userconfig file.')
parser.add_option('-c', '--category', action='append', default=[],
help='Select the category/group of tests. Can be specified '
'multiple times. Default: use the _default_ category if run is an '
'action unless make-benchmarks is an action. All other cases use '
'the _all_ category by default. The _default_ category contains '
'all tests unless otherwise set in the jobconfig file.')
parser.add_option('-e', '--executable', action='append', default=[],
help='Set the executable(s) to be used to run the tests. Can be'
' a path or name of an option in the userconfig file, in which'
' case all test programs are set to use that value, or in the'
' format program_name=value, which affects only the specified'
' program.')
parser.add_option('-i', '--insert', action='store_true', default=False,
help='Insert the new benchmark into the existing list of benchmarks'
' in userconfig rather than overwriting it. Only relevant to the'
' make-benchmarks action. Default: %default.')
parser.add_option('--jobconfig', default='jobconfig', help='Set path to the'
' job configuration file. Default: %default.')
parser.add_option('--job-option', action='append', dest='job_option',
default=[], nargs=3, help='Override/add setting to jobconfig. '
'Takes three arguments. Format: section_name option_name value. '
'Default: none.')
parser.add_option('--older-than', type='int', dest='older_than', default=14,
help='Set the age (in days) of files to remove. Only relevant to '
'the tidy action. Default: %default days.')
parser.add_option('-p', '--processors', type='int', default=-1,
dest='nprocs', help='Set the number of processors to run each test '
'on. Default: use settings in configuration files.')
parser.add_option('-q', '--quiet', action='store_const', const=0,
dest='verbose', default=1, help='Print only minimal output. '
'Default: False.')
parser.add_option('-s', '--submit', dest='queue_system', default=None,
help='Submit tests to a queueing system of the specified type. '
'Only PBS system is currently implemented. Default: %default.')
parser.add_option('-t', '--test-id', dest='test_id', help='Set the file ID '
'of the test outputs. Default: unique filename based upon date '
'if running tests and most recent test_id if comparing tests.')
parser.add_option('--total-processors', type='int', default=-1,
dest='tot_nprocs', help='Set the total number of processors to use '
'to run tests concurrently. Relevant only to the run option. '
'Default: run all tests concurrently run if --submit is used; run '
'tests sequentially otherwise.')
parser.add_option('--userconfig', default='userconfig', help='Set path to '
'the user configuration file. Default: %default.')
parser.add_option('--user-option', action='append', dest='user_option',
default=[], nargs=3, help='Override/add setting to userconfig. '
'Takes three arguments. Format: section_name option_name value. '
'Default: none.')
parser.add_option('-v', '--verbose', default=1, action="count",
dest='verbose', help='Increase verbosity of output. Can be '
'specified multiple times.')
(options, args) = parser.parse_args(args)
# Default action.
if not args or ('make-benchmarks' in args and 'compare' not in args
and 'run' not in args):
# Run tests by default if no action provided.
# Run tests before creating benchmark by default.
args.append('run')
# Default category.
if not options.category:
# We quietly filter out tests which weren't run last when diffing
# or comparing.
options.category = ['_all_']
if 'run' in args and 'make-benchmarks' not in args:
options.category = ['_default_']
test_args = (arg not in allowed_actions for arg in args)
if testcode2.compatibility.compat_any(test_args):
print('At least one action is not understood: %s.' % (' '.join(args)))
parser.print_usage()
sys.exit(1)
# Parse executable option to form dictionary in format expected by
# parse_userconfig.
exe = {}
for item in options.executable:
words = item.split('=')
if len(words) == 1:
# setting executable for all programs (unless otherwise specified)
exe['_tc_all'] = words[0]
else:
# format: program_name=executable
exe[words[0]] = words[1]
options.executable = exe
# Set FILESTEM if test_id refers to a benchmark file or the benchmark
# refers to a test_id.
filestem = testcode2.FILESTEM.copy()
if options.benchmark and options.benchmark[:2] == 't:':
filestem['benchmark'] = testcode2.FILESTEM['test']
options.benchmark = options.benchmark[2:]
if options.test_id and options.test_id[:2] == 'b:':
filestem['test'] = testcode2.FILESTEM['benchmark']
options.test_id = options.test_id[2:]
if filestem['test'] != testcode2.FILESTEM['test'] and 'run' in args:
print('Not allowed to set test filename to be a benchmark filename '
'when running calculations.')
sys.exit(1)
testcode2.FILESTEM = filestem.copy()
# Convert job-options and user-options to dict of dicsts format.
for item in ['user_option', 'job_option']:
uj_opt = getattr(options, item)
opt = dict( (section, {}) for section in
testcode2.compatibility.compat_set(opt[0] for opt in uj_opt) )
for (section, option, value) in uj_opt:
opt[section][option] = value
setattr(options, item, opt)
return (options, args)
#--- actions ---
def run_tests(tests, verbose=1, cluster_queue=None, tot_nprocs=0):
'''Run tests.
tests: list of tests.
verbose: level of verbosity in output.
cluster_queue: name of cluster system to use. If None, tests are run locally.
Currently only PBS is implemented.
tot_nprocs: total number of processors available to run tests on. As many
tests (in a LIFO fashion from the tests list) are run at the same time as
possible without using more processors than this value. If less than 1 and
cluster_queue is specified, then all tests are submitted to the cluster at
the same time. If less than one and cluster_queue is not set, then
tot_nprocs is ignored and the tests are run sequentially (default).
'''
def run_test_worker(semaphore, semaphore_lock, tests, *run_test_args):
'''Launch a test after waiting until resources are available to run it.
semaphore: threading.Semaphore object containing the number of cores/processors
which can be used concurrently to run tests.
semaphore.lock: threading.Lock object used to restrict acquiring the semaphore
to one thread at a time.
tests: list of (serialized) tests to run in this thread.
run_test_args: arguments to pass to test.run_test method.
'''
# Ensure that only one test attempts to register resources with the
# semaphore at a time. This restricts running the tests to a LIFO
# fashion which is not perfect (we don't attempt to backfill with
# smaller tests, for example) but is a reasonable and (most
# importantly) simple first-order approach.
for test in tests:
semaphore_lock.acquire()
# test.nprocs is <1 when program is run in serial.
nprocs_used = max(1, test.nprocs)
for i in range(nprocs_used):
semaphore.acquire()
semaphore_lock.release()
test.run_test(*run_test_args)
for i in range(nprocs_used):
semaphore.release()
# Check executables actually exist...
compat = testcode2.compatibility
executables = [test.test_program.exe for test in tests]
executables = compat.compat_set(executables)
for exe in executables:
mswin = sys.platform.startswith('win') or sys.platform.startswith('cyg')
# The test is not reliable if there's an unholy combination of windows
# and cygwin being used to run the program. We've already warned the
# user (in config.set_program_name) that we struggled to find the
# executable.
if not os.path.exists(exe) and not mswin:
err = 'Executable does not exist: %s.' % (exe)
raise testcode2.exceptions.TestCodeError(err)
if tot_nprocs <= 0 and cluster_queue:
# Running on cluster. Default to submitting all tests at once.
tot_nprocs = sum(test.nprocs for test in tests)
if tot_nprocs > 0:
# Allow at most tot_nprocs cores to be used at once by tests.
max_test_nprocs = max(test.nprocs for test in tests)
if max_test_nprocs > tot_nprocs:
err = ('Number of available cores less than the number required by '
'the largest test: at least %d needed, %d available.'
% (max_test_nprocs, tot_nprocs))
raise testcode2.exceptions.TestCodeError(err)
# Need to serialize tests that run in the same directory with wildcard
# patterns in the output file--otherwise we can't figure out which
# output file belongs to which test. We might be able to for some
# wildcards, but let's err on the side of caution.
        wildcards = re.compile(r'.*(\*|\?|\[.*\]).*')
serialized_tests = []
test_store = {}
for test in tests:
if test.output and wildcards.match(test.output):
if test.path in test_store:
test_store[test.path].append(test)
else:
test_store[test.path] = [test]
else:
serialized_tests.append([test])
for (key, stests) in test_store.items():
if (len(stests) > 1) and verbose > 2:
print('Warning: cannot run tests in %s concurrently.' % stests[0].path)
serialized_tests += test_store.values()
semaphore = threading.BoundedSemaphore(tot_nprocs)
slock = threading.Lock()
jobs = [threading.Thread(
target=run_test_worker,
args=(semaphore, slock, test, verbose, cluster_queue,
os.getcwd())
)
for test in serialized_tests]
for job in jobs:
# daemonise so thread terminates when master dies
try:
job.setDaemon(True)
except AttributeError:
job.daemon = True
job.start()
        # We avoid .join(), which blocks and would leave us unresponsive to TERM
while threading.activeCount() > 1:
time.sleep(0.5)
else:
# run straight through, one at a time
for test in tests:
test.run_test(verbose, cluster_queue, os.getcwd())
def compare_tests(tests, verbose=1):
'''Compare tests.
tests: list of tests.
verbose: level of verbosity in output.
Returns:
number of tests not checked due to test output file not existing.
'''
not_checked = 0
for test in tests:
for (inp, args) in test.inputs_args:
test_file = testcode2.util.testcode_filename(
testcode2.FILESTEM['test'],
test.test_program.test_id, inp, args
)
test_file = os.path.join(test.path, test_file)
if os.path.exists(test_file):
test.verify_job(inp, args, verbose, os.getcwd())
else:
if verbose > 0 and verbose <= 2:
info_line = testcode2.util.info_line(test.path, inp, args, os.getcwd())
print('%sNot checked.' % info_line)
if verbose > 1:
print('Skipping comparison. '
'Test file does not exist: %s.\n' % test_file)
not_checked += 1
return not_checked
def recheck_tests(tests, verbose=1, cluster_queue=None, tot_nprocs=0):
'''Check tests and re-run any failed/skipped tests.
tests: list of tests.
verbose: level of verbosity in output.
cluster_queue: name of cluster system to use. If None, tests are run locally.
Currently only PBS is implemented.
tot_nprocs: total number of processors available to run tests on. As many
tests (in a LIFO fashion from the tests list) are run at the same time as
possible without using more processors than this value. If less than 1 and
cluster_queue is specified, then all tests are submitted to the cluster at
the same time. If less than one and cluster_queue is not set, then
tot_nprocs is ignored and the tests are run sequentially (default).
Returns:
not_checked: number of tests not checked due to missing test output.
'''
if verbose == 0:
sep = ' '
else:
sep = '\n\n'
sys.stdout.write('Comparing tests to benchmarks:'+sep)
not_checked = compare_tests(tests, verbose)
end_status(tests, not_checked, verbose, False)
rerun_tests = []
skip = testcode2.validation.Status(name='skipped')
for test in tests:
stat = test.get_status()
if sum(stat[key] for key in ('failed', 'unknown')) != 0:
rerun_tests.append(test)
elif stat['ran'] != 0:
# mark tests as skipped using an internal API (naughty!)
for inp_arg in test.inputs_args:
test._update_status(skip, inp_arg)
if verbose > 0:
print('')
if rerun_tests:
sys.stdout.write('Rerunning failed tests:'+sep)
run_tests(rerun_tests, verbose, cluster_queue, tot_nprocs)
return not_checked
def diff_tests(tests, diff_program, verbose=1):
'''Diff tests.
tests: list of tests.
diff_program: diff program to use.
verbose: level of verbosity in output.
'''
for test in tests:
cwd = os.getcwd()
os.chdir(test.path)
for (inp, args) in test.inputs_args:
have_benchmark = True
try:
benchmark = test.test_program.select_benchmark_file(
test.path, inp, args
)
except testcode2.exceptions.TestCodeError:
err = sys.exc_info()[1]
have_benchmark = False
test_file = testcode2.util.testcode_filename(
testcode2.FILESTEM['test'],
test.test_program.test_id, inp, args
)
if not os.path.exists(test_file):
if verbose > 0:
print('Skipping diff with %s in %s: %s does not exist.'
% (benchmark, test.path, test_file))
elif not have_benchmark:
if verbose > 0:
print('Skipping diff with %s. %s' % (test.path, err))
else:
if verbose > 0:
print('Diffing %s and %s in %s.' %
(benchmark, test_file, test.path))
diff_cmd = '%s %s %s' % (diff_program, benchmark, test_file)
diff_popen = subprocess.Popen(diff_cmd, shell=True)
diff_popen.wait()
os.chdir(cwd)
def tidy_tests(tests, ndays):
'''Tidy up test directories.
tests: list of tests.
ndays: test files older than ndays are deleted.
'''
epoch_time = time.time() - 86400*ndays
test_globs = ['test.out*','test.err*']
print(
'Delete all %s files older than %s days from each job directory?'
% (' '.join(test_globs), ndays)
)
ans = ''
while ans != 'y' and ans != 'n':
ans = testcode2.compatibility.compat_input('Confirm [y/n]: ')
if ans == 'n':
print('No files deleted.')
else:
for test in tests:
cwd = os.getcwd()
os.chdir(test.path)
if test.submit_template:
file_globs = test_globs + [test.submit_template]
else:
file_globs = test_globs
for file_glob in file_globs:
for test_file in glob.glob(file_glob):
if os.stat(test_file)[-2] < epoch_time:
os.remove(test_file)
os.chdir(cwd)
def make_benchmarks(test_programs, tests, userconfig, copy_files_since,
insert_id=False):
'''Make a new set of benchmarks.
test_programs: dictionary of test programs.
tests: list of tests.
userconfig: path to the userconfig file. This is updated with the new benchmark id.
copy_files_since: files produced since the timestamp (in seconds since the
epoch) are copied to the testcode_data subdirectory in each test.
insert_id: insert the new benchmark id into the existing list of benchmark ids in
userconfig if True, otherwise overwrite the existing benchmark ids with the
new benchmark id (default).
'''
# All tests passed?
statuses = [test.get_status() for test in tests]
npassed = sum(status['passed'] for status in statuses)
nran = sum(status['ran'] for status in statuses)
if npassed != nran:
ans = ''
print('Not all tests passed.')
while ans != 'y' and ans != 'n':
ans = testcode2.compatibility.compat_input(
'Create new benchmarks? [y/n] ')
if ans != 'y':
return None
# Get vcs info.
# vcs = {}
# for (key, program) in test_programs.items():
# if program.vcs and program.vcs.vcs:
# vcs[key] = program.vcs.get_code_id()
# else:
# print('Program not under (known) version control system')
# vcs[key] = testcode2.compatibility.compat_input(
# 'Enter revision id for %s: ' % (key))
# HACK
code_id = testcode2.compatibility.compat_input(
'Enter new revision id : ')
# Benchmark label from vcs info.
# if len(vcs) == 1:
# benchmark = vcs.popitem()[1]
# else:
# benchmark = []
# for (key, code_id) in vcs.items():
# benchmark.append('%s-%s' % (key, code_id))
# benchmark = '.'.join(benchmark)
# HACK
benchmark = code_id
# Create benchmarks.
for test in tests:
test.create_new_benchmarks(benchmark, copy_files_since)
# update userconfig file.
if userconfig:
config = testcode2.compatibility.configparser.RawConfigParser()
config.optionxform = str # Case sensitive file.
config.read(userconfig)
if insert_id:
ids = config.get('user', 'benchmark').split()
if benchmark in ids:
ids.remove(benchmark)
ids.insert(0, benchmark)
benchmark = ' '.join(ids)
if len(benchmark.split()) > 1:
print('Setting new benchmarks in userconfig to be: %s.' %
(benchmark))
else:
print('Setting new benchmark in userconfig to be: %s.' %
(benchmark))
config.set('user', 'benchmark', benchmark)
userconfig = open(userconfig, 'w')
config.write(userconfig)
userconfig.close()
#--- info output ---
def start_status(tests, running, verbose=1):
'''Print a header containing useful information.
tests: list of tests.
running: true if tests are to be run.
verbose: level of verbosity in output (no output if <1).
'''
if verbose > 0:
exes = [test.test_program.exe for test in tests]
exes = testcode2.compatibility.compat_set(exes)
if running:
for exe in exes:
print('Using executable: %s.' % (exe))
# All tests use the same test_id and benchmark.
print('Test id: %s.' % (tests[0].test_program.test_id))
if len(tests[0].test_program.benchmark) > 1:
benchmark_ids = ', '.join(tests[0].test_program.benchmark)
print('Benchmarks: %s.' % (benchmark_ids))
else:
print('Benchmark: %s.' % (tests[0].test_program.benchmark[0]))
print('')
def end_status(tests, not_checked=0, verbose=1, final=True):
'''Print a footer containing useful information.
tests: list of tests.
not_checked: number of tests not checked (ie not run or compared).
verbose: level of verbosity in output. A summary footer is produced if greater
than 0; otherwise a minimal status line is printed out.
final: final call (so print a goodbye message).
'''
def pluralise(string, num):
'''Return plural form (just by adding s) to string if num > 1.'''
if num > 1:
string = string+'s'
return string
def select_tests(stat_key, tests, statuses):
'''Select a subset of tests.
(test.name, test.path) is included if the test object contains at least
one test of the desired status (stat_key).'''
test_subset = [(test.name, test.path) for (test, status)
in zip(tests, statuses) if status[stat_key] != 0]
return sorted(test_subset)
def format_test_subset(subset):
'''Format each entry in the list returned by select_tests.'''
subset_fmt = []
for (name, path) in subset:
if os.path.abspath(name) == os.path.abspath(path):
entry = name
else:
entry = '%s (test name: %s)' % (path, name)
if entry not in subset_fmt:
subset_fmt.append(entry)
return subset_fmt
statuses = [test.get_status() for test in tests]
npassed = sum(status['passed'] for status in statuses)
nwarning = sum(status['warning'] for status in statuses)
nfailed = sum(status['failed'] for status in statuses)
nunknown = sum(status['unknown'] for status in statuses)
nskipped = sum(status['skipped'] for status in statuses)
nran = sum(status['ran'] for status in statuses)
failures = format_test_subset(select_tests('failed', tests, statuses))
warnings = format_test_subset(select_tests('warning', tests, statuses))
skipped = format_test_subset(select_tests('skipped', tests, statuses))
# Treat warnings as passes but add a note about how many warnings.
npassed += nwarning
# Treat skipped tests as tests which weren't run.
nran -= nskipped
# Pedantic.
warning = pluralise('warning', nwarning)
ran_test = pluralise('test', nran)
failed_test = pluralise('test', nfailed)
skipped_test = pluralise('test', nskipped)
add_info_msg = []
if nwarning != 0:
add_info_msg.append('%s %s' % (nwarning, warning))
if nskipped != 0:
add_info_msg.append('%s skipped' % (nskipped,))
if nunknown != 0:
add_info_msg.append('%s unknown' % (nunknown,))
if not_checked != 0:
add_info_msg.append('%s not checked' % (not_checked,))
add_info_msg = ', '.join(add_info_msg)
if add_info_msg:
add_info_msg = ' (%s)' % (add_info_msg,)
if nran == 0:
print('No tests to run.')
elif verbose > 0:
if verbose < 2:
print('') # Obsessive formatting.
msg = '%s%s out of %s %s passed%s.'
if final:
msg = 'All done. %s' % (msg,)
if npassed == nran:
print(msg % ('', npassed, nran, ran_test, add_info_msg))
else:
print(msg % ('ERROR: only ', npassed, nran, ran_test, add_info_msg))
if failures:
print('Failed %s in:\n\t%s' % (failed_test, '\n\t'.join(failures)))
if warnings:
print('%s in:\n\t%s' % (warning.title(), '\n\t'.join(warnings)))
if skipped:
print('Skipped %s in:\n\t%s' % (skipped_test, '\n\t'.join(skipped)))
else:
print(' [%s/%s%s]'% (npassed, nran, add_info_msg))
# ternary operator not in python 2.4. :-(
ret_val = 0
if nran != npassed:
ret_val = 1
return ret_val
#--- main runner ---
def main(args):
'''main controller procedure.
args: command-line arguments passed to testcode2.
'''
start_time = time.time()
(options, actions) = parse_cmdline_args(args)
# Shortcut names to options used multiple times.
verbose = options.verbose
userconfig = options.userconfig
reuse_id = 'run' not in actions and testcode2.compatibility.compat_any(
[action in actions for action in ['compare', 'diff', 'recheck']]
)
(user_options, test_programs, tests) = init_tests(userconfig,
options.jobconfig, options.test_id, reuse_id,
options.executable, options.category, options.nprocs,
options.benchmark, options.user_option,
options.job_option)
ret_val = 0
if not (len(actions) == 1 and 'tidy' in actions):
start_status(tests, 'run' in actions, verbose)
if 'run' in actions:
run_tests(tests, verbose, options.queue_system, options.tot_nprocs)
ret_val = end_status(tests, 0, verbose)
if 'recheck' in actions:
not_checked = recheck_tests(tests, verbose,
options.queue_system,options.tot_nprocs)
ret_val = end_status(tests, not_checked, verbose)
if 'compare' in actions:
not_checked = compare_tests(tests, verbose)
ret_val = end_status(tests, not_checked, verbose)
if 'diff' in actions:
diff_tests(tests, user_options['diff'], verbose)
if 'tidy' in actions:
tidy_tests(tests, options.older_than)
if 'make-benchmarks' in actions:
make_benchmarks(test_programs, tests, userconfig, start_time,
options.insert)
return ret_val
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except testcode2.exceptions.TestCodeError:
err = sys.exc_info()[1]
print(err)
sys.exit(1)
| gpl-2.0 | 2,921,529,414,655,211,000 | 38.803727 | 91 | 0.610324 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/orca/scripts/toolkits/WebKitGtk/script.py | 1 | 18079 | # Orca
#
# Copyright (C) 2010-2011 The Orca Team
#
# Author: Joanmarie Diggs <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2010-2011 The Orca Team"
__license__ = "LGPL"
import pyatspi
import pyatspi.utils as utils
import orca.scripts.default as default
import orca.input_event as input_event
import orca.orca as orca
import orca.settings as settings
import orca.speechserver as speechserver
import orca.orca_state as orca_state
import orca.speech as speech
from orca.orca_i18n import _
import script_settings
from structural_navigation import StructuralNavigation
from braille_generator import BrailleGenerator
from speech_generator import SpeechGenerator
from script_utilities import Utilities
_settingsManager = getattr(orca, '_settingsManager')
########################################################################
# #
# The WebKitGtk script class. #
# #
########################################################################
class Script(default.Script):
CARET_NAVIGATION_KEYS = ['Left', 'Right', 'Up', 'Down', 'Home', 'End']
def __init__(self, app, isBrowser=False):
"""Creates a new script for WebKitGtk applications.
Arguments:
- app: the application to create a script for.
"""
default.Script.__init__(self, app)
self._loadingDocumentContent = False
self._isBrowser = isBrowser
self.sayAllOnLoadCheckButton = None
def getListeners(self):
"""Sets up the AT-SPI event listeners for this script."""
listeners = default.Script.getListeners(self)
listeners["document:reload"] = \
self.onDocumentReload
listeners["document:load-complete"] = \
self.onDocumentLoadComplete
listeners["document:load-stopped"] = \
self.onDocumentLoadStopped
listeners["object:state-changed:busy"] = \
self.onStateChanged
return listeners
def setupInputEventHandlers(self):
"""Defines InputEventHandler fields for this script that can be
called by the key and braille bindings."""
default.Script.setupInputEventHandlers(self)
self.inputEventHandlers.update(
self.structuralNavigation.inputEventHandlers)
self.inputEventHandlers["sayAllHandler"] = \
input_event.InputEventHandler(
Script.sayAll,
# Translators: the Orca "SayAll" command allows the
# user to press a key and have the entire document in
# a window be automatically spoken to the user. If
# the user presses any key during a SayAll operation,
# the speech will be interrupted and the cursor will
# be positioned at the point where the speech was
# interrupted.
#
_("Speaks entire document."))
def getKeyBindings(self):
"""Defines the key bindings for this script. Setup the default
key bindings, then add one in for reading the input line.
Returns an instance of keybindings.KeyBindings.
"""
keyBindings = default.Script.getKeyBindings(self)
bindings = self.structuralNavigation.keyBindings
for keyBinding in bindings.keyBindings:
keyBindings.add(keyBinding)
return keyBindings
def getAppPreferencesGUI(self):
"""Return a GtkGrid containing the application unique configuration
GUI items for the current application."""
from gi.repository import Gtk
grid = Gtk.Grid()
grid.set_border_width(12)
# Translators: when the user loads a new page in WebKit, they
# can optionally tell Orca to automatically start reading a
# page from beginning to end.
#
label = \
_("Automatically start speaking a page when it is first _loaded")
self.sayAllOnLoadCheckButton = \
Gtk.CheckButton.new_with_mnemonic(label)
self.sayAllOnLoadCheckButton.set_active(script_settings.sayAllOnLoad)
grid.attach(self.sayAllOnLoadCheckButton, 0, 0, 1, 1)
grid.show_all()
return grid
def setAppPreferences(self, prefs):
"""Write out the application specific preferences lines and set the
new values.
Arguments:
- prefs: file handle for application preferences.
"""
prefs.writelines("\n")
prefix = "orca.scripts.toolkits.WebKitGtk.script_settings"
prefs.writelines("import %s\n\n" % prefix)
value = self.sayAllOnLoadCheckButton.get_active()
prefs.writelines("%s.sayAllOnLoad = %s\n" % (prefix, value))
script_settings.sayAllOnLoad = value
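# The prefs file ends up containing lines of the form (value illustrative):
#   import orca.scripts.toolkits.WebKitGtk.script_settings
#   orca.scripts.toolkits.WebKitGtk.script_settings.sayAllOnLoad = True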
def getBrailleGenerator(self):
"""Returns the braille generator for this script."""
return BrailleGenerator(self)
def getSpeechGenerator(self):
"""Returns the speech generator for this script."""
return SpeechGenerator(self)
def getStructuralNavigation(self):
"""Returns the 'structural navigation' class for this script."""
types = self.getEnabledStructuralNavigationTypes()
return StructuralNavigation(self, types, True)
def getUtilities(self):
"""Returns the utilites for this script."""
return Utilities(self)
def onCaretMoved(self, event):
"""Called whenever the caret moves.
Arguments:
- event: the Event
"""
lastKey, mods = self.utilities.lastKeyAndModifiers()
if lastKey in ['Tab', 'ISO_Left_Tab']:
return
if lastKey == 'Down' \
and orca_state.locusOfFocus == event.source.parent \
and event.source.getIndexInParent() == 0 \
and orca_state.locusOfFocus.getRole() == pyatspi.ROLE_LINK:
self.updateBraille(event.source)
return
if self.utilities.isWebKitGtk(orca_state.locusOfFocus):
orca.setLocusOfFocus(event, event.source, False)
default.Script.onCaretMoved(self, event)
def onDocumentReload(self, event):
"""Called when the reload button is hit for a web page."""
if event.source.getRole() == pyatspi.ROLE_DOCUMENT_FRAME:
self._loadingDocumentContent = True
def onDocumentLoadComplete(self, event):
"""Called when a web page load is completed."""
if event.source.getRole() != pyatspi.ROLE_DOCUMENT_FRAME:
return
self._loadingDocumentContent = False
if not self._isBrowser:
return
# TODO: We need to see what happens in Epiphany on pages where focus
# is grabbed rather than set the caret at the start. But for simple
# content in both Yelp and Epiphany this is alright for now.
obj, offset = self.setCaretAtStart(event.source)
orca.setLocusOfFocus(event, obj, False)
self.updateBraille(obj)
if script_settings.sayAllOnLoad \
and _settingsManager.getSetting('enableSpeech'):
self.sayAll(None)
def onDocumentLoadStopped(self, event):
"""Called when a web page load is interrupted."""
if event.source.getRole() == pyatspi.ROLE_DOCUMENT_FRAME:
self._loadingDocumentContent = False
def onFocus(self, event):
"""Called whenever an object gets focus.
Arguments:
- event: the Event
"""
obj = event.source
role = obj.getRole()
if role == pyatspi.ROLE_LIST_ITEM and obj.childCount:
return
textRoles = [pyatspi.ROLE_HEADING,
pyatspi.ROLE_PANEL,
pyatspi.ROLE_PARAGRAPH,
pyatspi.ROLE_SECTION]
if role in textRoles:
return
if role == pyatspi.ROLE_LINK and obj.childCount:
try:
text = obj.queryText()
except NotImplementedError:
orca.setLocusOfFocus(event, obj[0])
default.Script.onFocus(self, event)
def onStateChanged(self, event):
"""Called whenever an object's state changes.
Arguments:
- event: the Event
"""
if not event.type.startswith("object:state-changed:busy"):
default.Script.onStateChanged(self, event)
return
if not event.source \
or event.source.getRole() != pyatspi.ROLE_DOCUMENT_FRAME \
or not self._isBrowser:
return
if event.detail1:
# Translators: this is in reference to loading a web page
# or some other content.
#
self.presentMessage(_("Loading. Please wait."))
elif event.source.name:
# Translators: this is in reference to loading a web page
# or some other content.
#
self.presentMessage(_("Finished loading %s.") % event.source.name)
else:
# Translators: this is in reference to loading a web page
# or some other content.
#
self.presentMessage(_("Finished loading."))
def onTextSelectionChanged(self, event):
"""Called when an object's text selection changes.
Arguments:
- event: the Event
"""
# The default script's method attempts to handle various and sundry
# complications that simply do not apply here.
#
spokenRange = self.pointOfReference.get("spokenTextRange") or [0, 0]
startOffset, endOffset = spokenRange
self.speakTextSelectionState(event.source, startOffset, endOffset)
def sayCharacter(self, obj):
"""Speak the character at the caret.
Arguments:
- obj: an Accessible object that implements the AccessibleText
interface
"""
if obj.getRole() == pyatspi.ROLE_SEPARATOR:
speech.speak(self.speechGenerator.generateSpeech(obj))
return
default.Script.sayCharacter(self, obj)
def sayLine(self, obj):
"""Speaks the line of an AccessibleText object that contains the
caret.
Arguments:
- obj: an Accessible object that implements the AccessibleText
interface
"""
default.Script.sayLine(self, obj)
rolesToSpeak = [pyatspi.ROLE_HEADING]
if obj.getRole() in rolesToSpeak:
speech.speak(self.speechGenerator.getRoleName(obj))
def skipObjectEvent(self, event):
"""Gives us, and scripts, the ability to decide an event isn't
worth taking the time to process under the current circumstances.
Arguments:
- event: the Event
Returns True if we shouldn't bother processing this object event.
"""
if event.type.startswith('object:state-changed:focused') \
and event.detail1:
if event.source.getRole() == pyatspi.ROLE_LINK:
return False
lastKey, mods = self.utilities.lastKeyAndModifiers()
if lastKey in self.CARET_NAVIGATION_KEYS:
return True
return default.Script.skipObjectEvent(self, event)
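# Rationale (inferred from the checks above): focus events on links are
# always processed, while focus flurries caused by caret-navigation keys
# on other roles are dropped to keep speech responsive.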
def useStructuralNavigationModel(self):
"""Returns True if we should do our own structural navigation.
This should return False if we're in a form field, or not in
document content.
"""
doNotHandleRoles = [pyatspi.ROLE_ENTRY,
pyatspi.ROLE_TEXT,
pyatspi.ROLE_PASSWORD_TEXT,
pyatspi.ROLE_LIST,
pyatspi.ROLE_LIST_ITEM,
pyatspi.ROLE_MENU_ITEM]
if not self.structuralNavigation.enabled:
return False
if not self.utilities.isWebKitGtk(orca_state.locusOfFocus):
return False
if orca_state.locusOfFocus.getRole() in doNotHandleRoles:
states = orca_state.locusOfFocus.getState()
if states.contains(pyatspi.STATE_FOCUSED):
return False
return True
def setCaretAtStart(self, obj):
"""Attempts to set the caret at the specified offset in obj. Because
this is not always possible, this method will attempt to locate the
first place inside of obj in which the caret can be positioned.
Arguments:
- obj: the accessible object in which the caret should be placed.
Returns the object and offset in which we were able to set the caret.
Otherwise, None if we could not find a text object, and -1 if we were
not able to set the caret.
"""
def implementsText(obj):
return 'Text' in utils.listInterfaces(obj)
child = obj
if not implementsText(obj):
child = utils.findDescendant(obj, implementsText)
if not child:
return None, -1
index = -1
text = child.queryText()
for i in xrange(text.characterCount):
if text.setCaretOffset(i):
index = i
break
return child, index
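# Return contract, spelled out: (None, -1) when no descendant implements
# AccessibleText; (child, -1) when a text object exists but no offset
# accepted the caret; (child, i) on success.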
def sayAll(self, inputEvent):
"""Speaks the contents of the document beginning with the present
location. Overridden in this script because the sayAll could have
been started on an object without text (such as an image).
"""
if not self.utilities.isWebKitGtk(orca_state.locusOfFocus):
return default.Script.sayAll(self, inputEvent)
speech.sayAll(self.textLines(orca_state.locusOfFocus),
self.__sayAllProgressCallback)
return True
def getTextSegments(self, obj, boundary, offset=0):
segments = []
text = obj.queryText()
length = text.characterCount
string, start, end = text.getTextAtOffset(offset, boundary)
while string and offset < length:
string = self.utilities.adjustForRepeats(string)
voice = self.speechGenerator.getVoiceForString(obj, string)
string = self.utilities.adjustForLinks(obj, string, start)
segments.append([string, start, end, voice])
offset = end
string, start, end = text.getTextAtOffset(offset, boundary)
return segments
def textLines(self, obj):
"""Creates a generator that can be used to iterate over each line
of a text object, starting at the caret offset.
Arguments:
- obj: an Accessible that has a text specialization
Returns an iterator that produces elements of the form:
[SayAllContext, acss], where SayAllContext has the text to be
spoken and acss is an ACSS instance for speaking the text.
"""
document = utils.findAncestor(
obj, lambda x: x.getRole() == pyatspi.ROLE_DOCUMENT_FRAME)
allTextObjs = utils.findAllDescendants(
document, lambda x: 'Text' in utils.listInterfaces(x))
allTextObjs = allTextObjs[allTextObjs.index(obj):len(allTextObjs)]
textObjs = filter(lambda x: x.parent not in allTextObjs, allTextObjs)
if not textObjs:
return
boundary = pyatspi.TEXT_BOUNDARY_LINE_START
sayAllStyle = _settingsManager.getSetting('sayAllStyle')
if sayAllStyle == settings.SAYALL_STYLE_SENTENCE:
boundary = pyatspi.TEXT_BOUNDARY_SENTENCE_START
offset = textObjs[0].queryText().caretOffset
for textObj in textObjs:
textSegments = self.getTextSegments(textObj, boundary, offset)
roleInfo = self.speechGenerator.getRoleName(textObj)
if roleInfo:
roleName, voice = roleInfo
textSegments.append([roleName, 0, -1, voice])
for (string, start, end, voice) in textSegments:
yield [speechserver.SayAllContext(textObj, string, start, end),
voice]
offset = 0
def __sayAllProgressCallback(self, context, progressType):
if progressType == speechserver.SayAllContext.PROGRESS:
return
obj = context.obj
orca.setLocusOfFocus(None, obj, notifyScript=False)
offset = context.currentOffset
text = obj.queryText()
if progressType == speechserver.SayAllContext.INTERRUPTED:
text.setCaretOffset(offset)
return
# SayAllContext.COMPLETED doesn't necessarily mean done with SayAll;
# just done with the current object. If we're still in SayAll, we do
# not want to set the caret (and hence set focus) in a link we just
# passed by.
try:
hypertext = obj.queryHypertext()
except NotImplementedError:
pass
else:
linkCount = hypertext.getNLinks()
links = [hypertext.getLink(x) for x in range(linkCount)]
if filter(lambda l: l.startIndex <= offset <= l.endIndex, links):
return
text.setCaretOffset(offset)
| gpl-3.0 | 4,101,551,143,108,722,000 | 33.969052 | 79 | 0.61176 | false |
googleads/google-ads-python | google/ads/googleads/v6/resources/types/search_term_view.py | 1 | 2117 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v6.enums.types import search_term_targeting_status
__protobuf__ = proto.module(
package="google.ads.googleads.v6.resources",
marshal="google.ads.googleads.v6",
manifest={"SearchTermView",},
)
class SearchTermView(proto.Message):
r"""A search term view with metrics aggregated by search term at
the ad group level.
Attributes:
resource_name (str):
Output only. The resource name of the search term view.
Search term view resource names have the form:
``customers/{customer_id}/searchTermViews/{campaign_id}~{ad_group_id}~{URL-base64_search_term}``
search_term (str):
Output only. The search term.
ad_group (str):
Output only. The ad group the search term
served in.
status (google.ads.googleads.v6.enums.types.SearchTermTargetingStatusEnum.SearchTermTargetingStatus):
Output only. Indicates whether the search
term is currently one of your targeted or
excluded keywords.
"""
resource_name = proto.Field(proto.STRING, number=1)
search_term = proto.Field(proto.STRING, number=5, optional=True)
ad_group = proto.Field(proto.STRING, number=6, optional=True)
status = proto.Field(
proto.ENUM,
number=4,
enum=search_term_targeting_status.SearchTermTargetingStatusEnum.SearchTermTargetingStatus,
)
__all__ = tuple(sorted(__protobuf__.manifest))
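# Illustrative GAQL query selecting this resource (the field names match the
# message above; the query itself is an example, not part of this module):
#   SELECT search_term_view.search_term, search_term_view.status
#   FROM search_term_view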
| apache-2.0 | 4,458,826,763,105,366,500 | 33.145161 | 109 | 0.68871 | false |
gr1d99/shopping-list | shopping_app/views.py | 1 | 12925 | """This module contains all necessary views to power up shopping list web application"""
import time
import main
from flask import flash, redirect, render_template, request, session, url_for
from flask.views import View
from .db.shopping_list.shopping import ShoppingList
from .forms import (CreateShoppingItemForm, CreateShoppingListForm, LoginForm, RegistrationForm)
from .utils.helpers import (check_name, get_shl, check_duplicate_item_name,
change_shl_name, check_item, get_item, check_username,
check_email, get_user)
class RegisterView(View):
"""A view class to handle """
methods = ['GET', 'POST']
def dispatch_request(self):
form = RegistrationForm(request.form)
if 'user' in session:
flash(u'you are already logged in!', 'info')
return redirect(url_for('index'))
if request.method == 'POST':
# get required data
form = RegistrationForm(request.form)
if form.validate():
username = form.username.data
email = form.email.data
password1 = form.password.data
errors = []
if not check_username(username): # check username is already taken
if not check_email(email): # check if email is taken
user = main.APP.user_manager.create_user(username, email, password1)
main.APP_USERS.insert(0, user)
flash(u'Success! you may now login using '
u'your username and password', 'success')
return redirect(url_for('index'))
else:
error = '%(email)s already taken' % dict(email=email)
errors.append(error)
else:
error = '%(username)s already taken' % dict(username=username)
errors.append(error)
flash(u'%(errors)s' % dict(errors=', '.join(errors)), 'warning')
return render_template('register.html', title='Register', form=form)
class LoginView(View):
"""Class that handles user login"""
methods = ['GET', 'POST']
def dispatch_request(self):
if 'user' in session:
flash(u'you are already logged in!', 'info')
return redirect(url_for('index'))
form = LoginForm()
if request.method == 'POST':
form = LoginForm(request.form)
if form.validate():
username = form.username.data
password = form.password.data
user = get_user(username)
if user is not False:
if user.verify_password(password):
session['user'] = username
flash(u'login successful', 'success')
return redirect(url_for('index'))
flash(u'incorrect username or password', 'info')
return render_template('login.html', form=form, title='Login')
class LogoutView(View):
"""A view to logout a user"""
methods = ['GET', ]
def dispatch_request(self):
if 'user' in session:
session.pop('user')
flash(u'successfully logged out!', 'success')
return redirect(url_for('index'))
flash(u'you are not logged in!', 'info')
return redirect(url_for('index'))
class IndexView(View):
"""User home page view"""
methods = ['GET', ]
def dispatch_request(self):
is_auth = False
if 'user' in session:
is_auth = True
return render_template('index.html', is_auth=is_auth, title='Home Page')
class DashboardView(View):
"""A view to display user dashboard"""
methods = ['GET', ]
def dispatch_request(self):
is_auth = False
username = None
if 'user' not in session: # check if user is logged in
flash("you must be logged in, or create an account if you don't have one", 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
username = session.get('user')
owner = session.get('user')
user_shopping_list = [ushl for ushl in main.APP.shopping_list
if owner == ushl.get('shl').added_by]
return render_template('dashboard.html', is_auth=is_auth,
shopping_lists=user_shopping_list, title='Dashboard',
username=username)
class CreateShoppingListView(View):
"""A view to create shopping list"""
methods = ['GET', 'POST']
def dispatch_request(self):
form = CreateShoppingListForm()
is_auth = False
if 'user' not in session:
flash(u'Warning!! you must be logged in', 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
if request.method == 'POST':
form = CreateShoppingListForm(request.form)
if form.validate():
name = form.name.data
# check if shopping list name exists
if not check_name(name):
user = session.get('user')
today = time.strftime("%x")
shl = ShoppingList()
shl.create(name, user, today)
main.APP.shopping_list.append({'name': name, 'shl': shl})
flash(u'Shopping list created', 'success')
return redirect(url_for('dashboard'))
flash(u'Shopping list with that name already exists, '
u'try another name', 'warning')
flash(u'Correct the errors', 'warning')
return render_template('shopping_list/create-shopping-list.html', is_auth=is_auth,
title='Create Shopping List', form=form)
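# Storage note (inferred from the code above): shopping lists live in process
# memory as {'name': <str>, 'shl': <ShoppingList>} dicts appended to
# main.APP.shopping_list; utils.helpers functions such as get_shl() and
# check_name() search that list by name.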
class ShoppingListDetailView(View):
"""
A View to handle retrieval of a specific shopping list and creation of
its shopping items
"""
methods = ['GET', 'POST']
def dispatch_request(self):
is_auth = False
if 'user' not in session: # check if user is logged in
flash("you must be logged in, or create an account if you don't have one", 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
form = CreateShoppingItemForm()
name = request.args.get('name')
if not check_name(name):
flash(u'The requested shopping list does not exist!', 'danger')
return redirect(url_for('dashboard'))
shl = get_shl(name)
if request.method == 'POST':
form = CreateShoppingItemForm(request.form)
if form.validate():
shl_item = main.APP.shopping_item()
item_name = form.item_name.data
if check_duplicate_item_name(name, item_name):
flash(u"item with that name already exists", 'warning')
else:
item_quantity = form.quantity.data
item_price = form.price.data
shl_item.create(item_name, float(item_quantity), float(item_price), False)
shl.get('shl').items.append(shl_item)
flash(u'Item successfully added', 'success')
return redirect(url_for('shopping-list-detail', name=name))
flash(u'Please correct the errors below', 'warning')
return render_template(
'shopping_list/shopping-list-detail.html',
obj=shl, form=form, is_auth=is_auth, title=name.capitalize())
class UpdateShoppingListView(View):
"""
A class to update shopping list
"""
methods = ['GET', 'POST']
def dispatch_request(self):
name = request.args.get('name')
form = CreateShoppingListForm(name=name)
if not check_name(name):
flash(u'The requested shopping list does not exist', 'danger')
return redirect(url_for('dashboard'))
if request.method == 'POST':
form = CreateShoppingListForm(request.form)
if form.validate():
new_name = form.name.data
shl = get_shl(name)
shl.get('shl').update('name', new_name)
change_shl_name(name, new_name)
flash(u'Shopping list name changed successfully', 'success')
return redirect(url_for('dashboard'))
return render_template('shopping_list/shopping-list-edit.html', form=form, name=name)
class UpdateShoppingItemView(View):
"""
A View to only update a single shopping item
"""
methods = ['GET', 'POST']
def dispatch_request(self):
is_auth = False
if 'user' not in session: # check if user is logged in
flash("you must be logged in, or create an account if you don't have one", 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
name = request.args.get('sname') # name of the shopping list
item_name = request.args.get('iname')
if not check_name(name):
flash(u'The requested shopping list does not exist', 'warning')
return redirect(url_for('dashboard'))
if not check_item(name, item_name):
flash(u'The requested shopping item does not exist', 'warning')
return redirect(url_for('dashboard'))
prev_data = {}
for item in get_shl(name).get('shl').items:
if item.name == item_name:
prev_data.update({'name': item.name})
prev_data.update({'quantity': item.quantity})
prev_data.update({'price': item.price})
prev_data.update({'checked': item.checked})
break
if not prev_data:
flash(u'The shopping item you are trying to update does not exist', 'danger')
return redirect(url_for('dashboard'))
form = CreateShoppingItemForm(
item_name=prev_data.pop('name'),
quantity=prev_data.pop('quantity'),
price=prev_data.pop('price'),
checked=prev_data.pop('checked')
)
if request.method == 'POST':
form = CreateShoppingItemForm(request.form)
if form.validate():
new_item_name = form.item_name.data
new_quantity = float(form.quantity.data)
new_price = float(form.price.data)
checked = form.checked.data
item = get_item(name, item_name)
if item:
item.update('name', new_item_name)
item.update('quantity', new_quantity)
item.update('price', new_price)
item.update('checked', checked)
flash(u'Item successfully updated', 'success')
return redirect(url_for('shopping-list-detail', name=name))
return render_template('shopping_list/shopping-item-edit.html', form=form,
item_name=item_name, is_auth=is_auth,
title='Update %(item)s' % dict(item=item_name))
class RemoveShoppingListView(View):
"""A view to remove a single shopping list"""
methods = ['GET', ]
def dispatch_request(self):
is_auth = False
if 'user' not in session: # check if user is logged in
flash("you must be logged in, or create an account if you don't have one", 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
name = request.args.get('name')
if not check_name(name):
flash(u'The requested shopping list does not exist', 'danger')
return redirect(url_for('dashboard'))
shl = get_shl(name)
main.APP.shopping_list.remove(shl)
flash(u'Success!! Shopping List removed', 'success')
return redirect(url_for('dashboard'))
class RemoveShoppingItemView(View):
"""A view to remove shopping item"""
methods = ['GET', 'POST']
def dispatch_request(self):
is_auth = False
if 'user' not in session: # check if user is logged in
flash("you must be logged in, or create an account if you don't have one", 'warning')
return redirect(url_for('login'))
if 'user' in session:
is_auth = True
name = request.args.get('name')
item_name = request.args.get('item_name')
shl_items = get_shl(name).get('shl').items
for item in shl_items:
if item.name == item_name:
shl_items.remove(item)
break
flash(u"Success!! Item succesfully removed", 'success')
return redirect(url_for('shopping-list-detail', name=name))
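# Hypothetical URL registration for the class-based views in this module; the
# endpoint names match the url_for() calls above, but the rule paths and the
# main.APP object's add_url_rule usage are assumptions:
#
#   main.APP.add_url_rule('/', view_func=IndexView.as_view('index'))
#   main.APP.add_url_rule('/login', view_func=LoginView.as_view('login'))
#   main.APP.add_url_rule('/dashboard', view_func=DashboardView.as_view('dashboard'))
#   main.APP.add_url_rule('/shopping-list/detail',
#                         view_func=ShoppingListDetailView.as_view('shopping-list-detail'))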
class AboutView(View):
"""About view"""
methods = ['GET']
def dispatch_request(self):
return render_template('flatpages/about.html', title='About')
| mit | 3,321,740,856,181,348,400 | 32.926509 | 96 | 0.554894 | false |