Dataset columns:

  code        string  (lengths 2 - 1.05M)
  repo_name   string  (lengths 5 - 104)
  path        string  (lengths 4 - 251)
  language    string  (1 class)
  license     string  (15 classes)
  size        int32   (values 2 - 1.05M)
# training.py
# This is the training script which will be presented to the participant
# before they sleep or remain awake
#
# TODO

# Libraries - these seem fine and should not need altering.
from psychopy import visual, event, core, misc, data, gui, sound


# Participant needs to press y to continue.
def ready_cont():
    stim_win.flip()
    user_response = None
    while user_response == None:
        allKeys = event.waitKeys()
        for thisKey in allKeys:
            if thisKey == 'y':
                user_response = 1
            if thisKey == 'q':
                core.quit()


# Metronome function - this plays the metronome; the timing can also be
# altered here. The timing required needs to be passed to the metronome
# function.
#music = pyglet.resource.media('klack.ogg', streaming=False)
music = sound.Sound(900, secs=0.01)  # Just temporary


def metronome(met_time):
    music.play()
    core.wait(met_time)
    music.play()
    core.wait(met_time)
    music.play()
    core.wait(met_time)
    music.play()
    core.wait(met_time)


# The metronome alone so the participant can become familiar with
# the speed (no stimuli).
def metronome_alone():
    stim_win.flip()
    metronome(cvc_slow_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)


# Variables
welcome_message = """Welcome to the training session! You will see four
syllables in a row. Please read the entire row out loud 4 times in time to
the metronome and try to say one syllable per beat. The first time will be
slow, and the next 3 repetitions will be a little faster. Try to read as
fluently as possible. Do not worry if you make mistakes, just keep in time
with the beat.

Press y now to hear the speed of the metronome."""

sample_welcome = """The following will be a practice session to familiarize
you with the sequences (press y to continue)"""

sample_goodbye = """The sample has ended, please press y if you are ready
for the real session"""

thank_you = """The training session is complete, please inform a researcher
you have finished. Thank you."""

metronome_alone_message = """Playing the metronome..."""

# cvc rates
cvc_slow_rate = 1.0
# A cvc every 395 ms as in Warker et al. (2008)
cvc_faster_rate = 0.395

# interval between each sequence
stim_interval = 1.0
between_tests_interval = 2.0

# Stimuli variables - these are the non-counterbalanced stimuli.
sample_stim = ['gas fak man hang', 'has kag mang fan', 'gak nas ham fang']

real_stim = ['han kas mag fang', 'sing kim hig nif', 'fan mak gas hang',
             'min hig kif sing', 'fan mag kas hang', 'hing sik mig nif',
             'fak nag mang has', 'hif sin ging kim', 'kag hang fas nam',
             'sin hing gim kif', 'kam nag fas hang', 'ning him sik gif',
             'mang fas hag kan', 'sif kig hin ming', 'kas mag fang han',
             'nim hik sif ging', 'hag mak fang nas', 'sin hik mif ging',
             'mak hang fan gas', 'mig sing hik nif', 'gas fang nam hak',
             'sing min gik hif', 'fan mak gang has', 'hin sif king gim',
             'gan fang has mak', 'ging hik sim nif', 'gang kam fas han',
             'gif king hin sim', 'mag nang has fak', 'gik sin mif hing',
             'nam has kag fang', 'mif sin hing gik', 'kas hang gam fan',
             'hing nim sif gik', 'mak han fas gang', 'mif hin sing kig',
             'fak nas ham gang', 'hin sif ging kim', 'kan fang has gam',
             'sig mif hin king', 'kam fas nang hag', 'sif ning him kig',
             'gang fak han mas', 'sif kim gin hing', 'fam kag hang nas',
             'hing nik sig mif', 'kas fan ham gang', 'king sin hif gim']

# Setting up the screen.
stim_win = visual.Window(monitor="testMonitor", units='norm', fullscr=True)
message = visual.TextStim(stim_win, text=welcome_message, font="Arial")
message.setAutoDraw(True)
ready_cont()
stim_win.flip()

# The metronome so participants know what it's like.
# Hmm, allow participants to repeat? - Not really fair if some participants
# run it more than others and pronounce cvc's better due to familiarity
# with the beat.
message.setText(metronome_alone_message)
metronome_alone()
core.wait(stim_interval)
stim_win.flip()

# Welcome the participant.
message.setText(sample_welcome)
ready_cont()

# The sample loop
stim_win.flip()
for i in range(len(sample_stim)):
    message.setText(sample_stim[i])
    stim_win.flip()
    core.wait(stim_interval)
    metronome(cvc_slow_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)
    core.wait(stim_interval)

# Ask participant if they are ready to continue
message.setText(sample_goodbye)
ready_cont()

# The real stimuli loop
stim_win.flip()
for i in range(len(real_stim)):
    message.setText(real_stim[i])
    stim_win.flip()
    core.wait(stim_interval)
    metronome(cvc_slow_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)
    metronome(cvc_faster_rate)
    core.wait(stim_interval)

# Saying goodbye
stim_win.flip()
message.setText(thank_you)
ready_cont()
core.wait(stim_interval)

# cleanup
stim_win.close()
core.quit()
vivithemage/constraints
training/fas-sif/training-fas-sif-5.py
Python
gpl-2.0
4,969
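A small aside on the script above: metronome() repeats the same play/wait pair four times by hand. A minimal sketch of a parameterized variant, assuming the same PsychoPy `music` and `core` globals as in the file:

def metronome(met_time, beats=4):
    # play `beats` clicks separated by met_time seconds
    for _ in range(beats):
        music.play()
        core.wait(met_time)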
# -*- coding: utf-8 -*-
#
# Copyright 2004-2020 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import logging
from collections import defaultdict

from six import text_type

from Cerebrum import Entity
from Cerebrum.modules.no.OrgLDIF import OrgLdifEntitlementsMixin
from Cerebrum.modules.LDIFutils import (
    attr_unique,
    normalize_string,
)
from Cerebrum.Utils import make_timer

logger = logging.getLogger(__name__)


class OrgLDIFHiAMixin(OrgLdifEntitlementsMixin):
    """Mixin class for norEduLDIFMixin(OrgLDIF) with HiA modifications."""

    def __init__(self, *args, **kwargs):
        super(OrgLDIFHiAMixin, self).__init__(*args, **kwargs)
        self.attr2syntax['mobile'] = self.attr2syntax['telephoneNumber']
        self.attr2syntax['roomNumber'] = (None, None, normalize_string)

    def init_attr2id2contacts(self):
        # Changes from the original:
        # - Get phone and fax from system_manual, others from system_sap.
        # - Add mobile and roomNumber.
        sap, manual = self.const.system_sap, self.const.system_manual
        contacts = [
            (attr, self.get_contacts(contact_type=contact_type,
                                     source_system=source_system,
                                     convert=self.attr2syntax[attr][0],
                                     verify=self.attr2syntax[attr][1],
                                     normalize=self.attr2syntax[attr][2]))
            for attr, source_system, contact_type in (
                ('telephoneNumber', manual, self.const.contact_phone),
                ('facsimileTelephoneNumber', manual, self.const.contact_fax),
                ('mobile', sap, self.const.contact_mobile_phone),
                ('labeledURI', None, self.const.contact_url))]
        self.id2labeledURI = contacts[-1][1]
        self.attr2id2contacts = [v for v in contacts if v[1]]

        # roomNumber
        # Some employees have registered their office addresses in SAP.
        # We store this as co.contact_office. The roomNumber is the alias.
        attr = 'roomNumber'
        syntax = self.attr2syntax[attr]
        contacts = self.get_contact_aliases(
            contact_type=self.const.contact_office,
            source_system=self.const.system_sap,
            convert=syntax[0],
            verify=syntax[1],
            normalize=syntax[2])
        if contacts:
            self.attr2id2contacts.append((attr, contacts))

    def get_contact_aliases(self, contact_type=None, source_system=None,
                            convert=None, verify=None, normalize=None):
        """Return a dict {entity_id: [list of contact aliases]}."""
        # The code mimics a reduced modules/OrgLDIF.py:get_contacts().
        entity = Entity.EntityContactInfo(self.db)
        cont_tab = defaultdict(list)
        if not convert:
            convert = text_type
        if not verify:
            verify = bool
        for row in entity.list_contact_info(source_system=source_system,
                                            contact_type=contact_type):
            alias = convert(text_type(row['contact_alias']))
            if alias and verify(alias):
                cont_tab[int(row['entity_id'])].append(alias)

        return dict((key, attr_unique(values, normalize=normalize))
                    for key, values in cont_tab.iteritems())

    def init_person_titles(self):
        """Extends the person_titles dict with employment titles available
        via the PersonEmployment module."""
        super(OrgLDIFHiAMixin, self).init_person_titles()

        timer = make_timer(logger, 'Fetching personal employment titles...')
        employments = self.person.search_employment(main_employment=True)
        for emp in employments:
            if emp['person_id'] not in self.person_titles:
                title = [(self.const.language_nb, emp['description'])]
                self.person_titles[emp['person_id']] = title
        timer("...personal employment titles done.")
unioslo/cerebrum
Cerebrum/modules/no/hia/OrgLDIF.py
Python
gpl-2.0
4,715
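A note on get_contact_aliases() above: it accumulates values per entity with a defaultdict and then deduplicates with attr_unique(). A standalone illustration of that accumulate-then-deduplicate pattern (group_unique is a hypothetical helper, not part of Cerebrum):

from collections import defaultdict

def group_unique(pairs, normalize=None):
    # group values per key
    grouped = defaultdict(list)
    for key, value in pairs:
        grouped[key].append(value)
    # keep the first occurrence of each (optionally normalized) value
    result = {}
    for key, values in grouped.items():
        seen, unique = set(), []
        for v in values:
            norm = normalize(v) if normalize else v
            if norm not in seen:
                seen.add(norm)
                unique.append(v)
        result[key] = unique
    return result

print(group_unique([(1, 'A'), (1, 'a'), (2, 'B')], normalize=str.lower))
# {1: ['A'], 2: ['B']}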
#!/usr/bin/python
import sys
sys.path.append('/usr/share/mandriva/')

from mcc2.backends.services.service import Services

if __name__ == '__main__':
    Services.main()
wiliamsouza/mandriva-control-center
bin/services-mechanism.py
Python
gpl-2.0
169
# -*- python -*-
# Copyright (C) 2009-2013 Free Software Foundation, Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import sys
import gdb
import os
import os.path

pythondir = '/usr/mips64-elf/share/gcc-4.8.4/python'
libdir = '/usr/mips64-elf/mips64-elf/lib/el'

# This file might be loaded when there is no current objfile.  This
# can happen if the user loads it manually.  In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
    # Update module path.  We want to find the relative path from libdir
    # to pythondir, and then we want to apply that relative path to the
    # directory holding the objfile with which this file is associated.
    # This preserves relocatability of the gcc tree.

    # Do a simple normalization that removes duplicate separators.
    pythondir = os.path.normpath (pythondir)
    libdir = os.path.normpath (libdir)

    prefix = os.path.commonprefix ([libdir, pythondir])
    # In some bizarre configuration we might have found a match in the
    # middle of a directory name.
    if prefix[-1] != '/':
        prefix = os.path.dirname (prefix) + '/'

    # Strip off the prefix.
    pythondir = pythondir[len (prefix):]
    libdir = libdir[len (prefix):]

    # Compute the ".."s needed to get from libdir to the prefix.
    dotdots = ('..' + os.sep) * len (libdir.split (os.sep))

    objfile = gdb.current_objfile ().filename
    dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)

    if not dir_ in sys.path:
        sys.path.insert(0, dir_)

# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
bryanperris/winN64dev
mips64-elf/mips64-elf/lib/el/libstdc++.a-gdb.py
Python
gpl-2.0
2,328
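The path arithmetic in the loader above (commonprefix, then "..`s", then rejoin) is what keeps the gcc tree relocatable. A standalone sketch of the same computation, with illustrative paths (relocate is a hypothetical helper, not part of the file):

import os

def relocate(objfile_dir, libdir, pythondir):
    libdir, pythondir = os.path.normpath(libdir), os.path.normpath(pythondir)
    prefix = os.path.commonprefix([libdir, pythondir])
    # guard against a match in the middle of a directory name
    if not prefix.endswith(os.sep):
        prefix = os.path.dirname(prefix) + os.sep
    dotdots = ('..' + os.sep) * len(libdir[len(prefix):].split(os.sep))
    return os.path.join(objfile_dir, dotdots, pythondir[len(prefix):])

print(relocate('/opt/gcc/mips64-elf/lib/el',
               '/usr/mips64-elf/mips64-elf/lib/el',
               '/usr/mips64-elf/share/gcc-4.8.4/python'))
# -> /opt/gcc/mips64-elf/lib/el/../../../share/gcc-4.8.4/python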
################################################################################
# This file is part of IMTAphy
# _____________________________________________________________________________
#
# Copyright (C) 2011
# Institute of Communication Networks (LKN)
# Department of Electrical Engineering and Information Technology (EE & IT)
# Technische Universitaet Muenchen
# Arcisstr. 21
# 80333 Muenchen - Germany
# http://www.lkn.ei.tum.de/~jan/imtaphy/index.html
#
# _____________________________________________________________________________
#
# IMTAphy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# IMTAphy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IMTAphy.  If not, see <http://www.gnu.org/licenses/>.
#
#################################################################################

import openwns
import openwns.node
import openwns.geometry.position
import imtaphy.Station
import imtaphy.Logger
import imtaphy.Channel
import imtaphy.Pathloss
import imtaphy.Scanner
import imtaphy.LinkManagement
import imtaphy.SCM
import imtaphy.ScenarioSupport
import imtaphy.Antenna
import imtaphy.Logger
import imtaphy.Receiver
import imtaphy.covarianceEstimation
import imtaphy.channelEstimation
import imtaphy.Feedback

import openwns.probebus
from openwns import dB, dBm, fromdB, fromdBm
from openwns.evaluation import *

import math
import random

import ltea.dll.schedulers.downlink
import ltea.dll.schedulers.uplink
import ltea.dll.linkAdaptation.downlink
import ltea.dll.linkAdaptation.uplink
import ltea.evaluation.default
import ltea.helper

simTime = 0.11        # total simulation duration in seconds; choose simTime slightly larger than settlingTime + N*windowSize
windowSize = 0.0750   # window size during which to measure, e.g., throughput
settlingTime = 0.0250 # time at the beginning during which no measurements are taken; windowing starts after settling time

# makes the UEs (see end of file) probe time/frequency samples of the channel gain
# and installs suitable probes
# visualize channels with, e.g. /testConfigs/plotChannel.py output/channelGain_UE3_scmLinkId0_antennaPair00_max.m
dumpChannel = False

# for plotting a scenario view (e.g. SINR / geometry over area)
# enables suitable probes and places mobiles on a uniform grid to sample the whole area
plotting = False

# define the resolution for the grid in x and y direction
class probeConfig:
    xBins = 25
    yBins = 25

# dumps a trace file of all received uplink and downlink transmissions to the output
# directory; can be viewed with IMTAphyViewer. Disabled by default. See bottom of
# config for further options (e.g. restricting to certain cells for speed/file size reasons)
phyTracing = False

# When running standalone, comment the "from openw..." import.
# When running a campaign, uncomment the import statement and comment the 2 lines
# class params:
#     pass
# For a campaign, comment the params definitions that are set in the campaign config.
#from openwns.wrowser.simdb.SimConfig import params
class params:
    pass

params.fdd = "DL"          # "DL", "DUPLEX"
params.scenario = "UMa"    # "InH", "UMa", "UMi", "RMa", "SMa"
params.scmLinks = "all"    # "serving", "all" or "no"
params.seed = 42
params.fullBuffer = True
if not params.fullBuffer:
    params.offeredDLtrafficBps = 1E7
    params.offeredULtrafficBps = 1E7
    packetSize = 500 # bytes
params.receiver = "MRC"    # "NoFilter" # "MMSE" # "MRC"
params.numBSAntennas = 2
params.numMSAntennas = 2
params.numMSperBS = 10
params.msSpeed = 0         # speed in km/h; negative values (msSpeed < 0) mean scenario-specific default speed
params.numULPRBs = 50
params.numDLPRBs = 50
params.feedbackDelay = 6
params.cqiUpdateFrequency = 5
params.dlScheduler = "ProportionalFair" # "ProportionalFair" # "ZF" # "PU2RC" # "RoundRobin"
params.pfAlpha = 0.001     # ProportionalFair scheduling fairness tuner with 0 => greedy, 1 => fair
params.laThreshold = 0     # positive value in dB => more conservative link adaptation
params.precodingMode = "ClosedLoopCodebookBased" # "SingleAntenna" # "NoPrecoding"
params.fixedPMIs = False   # True: assign fixed PMIs to each PRB, see below
params.outdoorOnlyUMiLoS = True # assign UMi LoS probability on outdoor part of distance only. 3GPP pathgain+geometry assumes False, otherwise True is used
params.powerControl = "calibration" # "calibration" or "3GPPdefault"
params.thresholdUL = 0     # uplink LA offset in dB
params.adaptiveUplinkLA = True
params.bsAntennaConfiguration = "C" # "BASESTATIONITU", or "A", "B", "C", "D", "E" for the corresponding 3GPP configs from 36.814
params.channelEstimation = "perfect" # "thermalNoiseBased", "IandNCovarianceBased" with further parameters, see below
params.covarianceEstimation = "perfect" # "Wishart32.829" "None" "equalDiagonal", "perfect", "gaussianError" and "distinguish" (with further parameters)
params.maxRank = 4         # affects MMSE only: 0 means determine from min(numRx, numTx) antennas; MRC is rank 1 by default
params.pmis = 5            # 1, 2, 3, 4, 5, or 15

numberOfCircles = 1 # tier of cell sites surrounding center site (0: 1 site, 1: 7 sites, 2: 19 sites)

random.seed(params.seed) # this fixes the seed for Python within this config.py

# simulator setup stuff
WNS = openwns.Simulator(simulationModel = openwns.node.NodeSimulationModel())
openwns.setSimulator(WNS)
WNS.maxSimTime = simTime
WNS.rng.seed = params.seed # this fixes the seed for the C++ simulator
#WNS.masterLogger.backtrace.enabled = False
WNS.masterLogger.enabled = True #False
WNS.outputStrategy = openwns.simulator.OutputStrategy.DELETE
WNS.statusWriteInterval = 30   # in seconds
WNS.probesWriteInterval = 3600 # in seconds

######## scenario params ########
wrapAround = True # allows evaluating all cells because it virtually surrounds all cells by all others
msHeight = 1.5    # meters

scenarioConfig = imtaphy.ScenarioSupport.Scenario(params.scenario, numberOfCircles, msHeight)

if plotting:
    scenarioConfig.extendBoundingBoxToMultiplesOf(probeConfig.xBins, probeConfig.yBins)

if params.msSpeed < 0:
    msSpeedKmh = scenarioConfig.msSpeedKmh
else:
    msSpeedKmh = params.msSpeed

if wrapAround and not (params.scenario == 'InH'):
    wrapAroundShiftVectors = imtaphy.LinkManagement.computeShiftVectors(
        scenarioConfig.getInterSiteDistance(), numberOfCircles)
else:
    wrapAroundShiftVectors = []

# "scenario" is the variable the wrowser looks for to display the scenario
scenario = scenarioConfig.getBoundingBox()

if params.receiver == "MMSE":
    filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
    # covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
elif params.receiver == "MMSE-IRC":
    filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
    # covarianceEstimation = imtaphy.covarianceEstimation.Perfect()
elif params.receiver == "MRC":
    filter = imtaphy.Receiver.MRCFilter()
    # actually, the MRC does not care about the covariance
    # covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
else:
    raise Exception("Bad receiver filter option")

#covarianceEstimation = imtaphy.covarianceEstimation.GaussianError(relativeError_dB = 0.0)
#channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(errorPowerRelativeToNoise_dB = 3)

if params.channelEstimation == "perfect":
    channelEstimation = None
elif params.channelEstimation == "thermalNoiseBased":
    channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(
        errorPowerRelativeToNoise_dB = 3)
elif params.channelEstimation == "IandNCovarianceBased":
    channelEstimation = imtaphy.channelEstimation.IandNCovarianceBasedGaussianError(
        gainOverIandN_dB = 10, coloredEstimationError = False)
else:
    raise Exception("Bad channel estimation option")

if params.covarianceEstimation == "diagonal":
    covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
elif params.covarianceEstimation == "equalDiagonal":
    covarianceEstimation = imtaphy.covarianceEstimation.EqualDiagonal()
elif params.covarianceEstimation == "perfect":
    covarianceEstimation = imtaphy.covarianceEstimation.Perfect()
elif params.covarianceEstimation == "gaussianError":
    # 0 means error as big as the I+N cov itself, negative values mean smaller error
    covarianceEstimation = imtaphy.covarianceEstimation.GaussianError(relativeError_dB = 0)
elif params.covarianceEstimation == "Wishart32.829":
    covarianceEstimation = imtaphy.covarianceEstimation.WishartModel36829(numberOfSamples = 16)
elif params.covarianceEstimation == "distinguish":
    covarianceEstimation = imtaphy.covarianceEstimation.IntraAndInterCellDistinguisher(
        interCellEstimation = imtaphy.covarianceEstimation.WishartModel36829(numberOfSamples = 16),
        intraCellEstimation = None)
else:
    raise Exception("Bad covariance estimation option")

ueReceiver = imtaphy.Receiver.LinearReceiver(imtaphy.Logger.Logger(params.receiver),
                                             filter = filter,
                                             noiseFigure = "7 dB",
                                             channelEstimation = channelEstimation,
                                             covarianceEstimation = covarianceEstimation)
eNBreceiver = imtaphy.Receiver.LinearReceiver(imtaphy.Logger.Logger(params.receiver),
                                              filter = filter,
                                              noiseFigure = "5 dB",
                                              channelEstimation = channelEstimation,
                                              covarianceEstimation = covarianceEstimation)

feederLoss = 0 # for wideband calibration, set to 2 dB
pathloss = imtaphy.Pathloss.M2135Pathloss(feederLoss = feederLoss)
classifier = imtaphy.LinkManagement.ITUClassifier(params.scenario,
                                                  onlyOutdoorDistanceUMi = params.outdoorOnlyUMiLoS)

if params.scmLinks == "no":
    scm = imtaphy.SCM.No()
    linkManager = imtaphy.LinkManagement.LinkManager(classifier = classifier,
                                                     scmLinkCriterion = "none",
                                                     handoverMargin = "1 dB",
                                                     shiftVectors = wrapAroundShiftVectors,
                                                     useSCMforRSRP = False)
else:
    scm = imtaphy.SCM.M2135SinglePrecision(logger = imtaphy.Logger.Logger("SCM.M2135"),
                                           computeEffectiveAntennaGains = False)
    linkManager = imtaphy.LinkManagement.LinkManager(classifier = classifier,
                                                     scmLinkCriterion = params.scmLinks,
                                                     handoverMargin = "1 dB",
                                                     shiftVectors = wrapAroundShiftVectors,
                                                     useSCMforRSRP = False)

# in case only DL or UL are used, make sure the other direction does not eat
# too many simulator resources
if (params.fdd == "DL"):
    params.numULPRBs = 0
    params.offeredULtrafficBps = 1E-10 # setting it to 0 does not work, triggers division by 0
if (params.fdd == "UL"):
    params.numDLPRBs = 0
    params.offeredDLtrafficBps = 1E-10 # setting it to 0 does not work, triggers division by 0

spectrum = imtaphy.Spectrum.Spectrum(centerFrequencyUplinkHz = scenarioConfig.centerFreqHz, # TODO: different frequencies for UL/DL?
                                     centerFrequencyDownlinkHz = scenarioConfig.centerFreqHz,
                                     numberOfULPRBs = params.numULPRBs,
                                     numberOfDLPRBs = params.numDLPRBs,
                                     prbBandwidthHz = 180000)

channelConfig = imtaphy.Channel.Channel(pathlossModel = pathloss,
                                        spatialChannelModel = scm,
                                        linkManager = linkManager,
                                        spectrum = spectrum)

if (params.fdd == "DL") or (params.fdd == "DUPLEX"):
    if params.dlScheduler == "PU2RC" or params.dlScheduler == "ZF":
        if params.pmis == 1:
            pmis = [0]
        elif params.pmis == 2:
            pmis = [0, 1]
        elif params.pmis == 3:
            pmis = [0, 1, 4]
        elif params.pmis == 4:
            pmis = [0, 1, 4, 5]
        elif params.pmis == 5:
            pmis = [0, 1, 4, 5, 12]
        else:
            pmis = range(16)
        openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.PU2RCFeedbackManager(
            enabled = True,
            pmis = pmis,
            precodingMode = params.precodingMode,
            numPrbsPerSubband = 1,
            cqiUpdateFrequency = params.cqiUpdateFrequency, #5
            rankUpdateFrequency = 10,
            feedbackTotalDelay = params.feedbackDelay) #6

        from openwns.evaluation import *
        node = openwns.evaluation.createSourceNode(WNS, "groupSize")
        settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
        settling.appendChildren(PDF(name = "Group Size",
                                    description = "Group Size",
                                    minXValue = 1, maxXValue = 4, resolution = 3))
        if params.dlScheduler == "PU2RC":
            node = openwns.evaluation.createSourceNode(WNS, "imperfectTransmissionRatio")
            settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
            settling.appendChildren(PDF(name = "Ratio of imperfect transmission resources",
                                        description = "Ratio of imperfect transmission resources",
                                        minXValue = 0, maxXValue = 1, resolution = 200))
            node = openwns.evaluation.createSourceNode(WNS, "imperfectRetransmissionRatio")
            settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
            settling.appendChildren(PDF(name = "Ratio of imperfect retransmission resources",
                                        description = "Ratio of imperfect retransmission resources",
                                        minXValue = 0, maxXValue = 1, resolution = 200))
            node = openwns.evaluation.createSourceNode(WNS, "initialFillLevel")
            settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
            settling.appendChildren(PDF(name = "Percentage of resources allocated after initial scheduling",
                                        description = "Percentage of resources allocated after initial scheduling",
                                        minXValue = 0, maxXValue = 1, resolution = 200))
    else:
        if params.fixedPMIs:
            openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.FixedPMIPRBFeedbackManager(
                enabled = True,
                pmis = range(8),
                randomize = False,
                fixedRank = 1,
                numPrbsPerSubband = 2,
                cqiUpdateFrequency = params.cqiUpdateFrequency, #5
                rankUpdateFrequency = 10,
                feedbackTotalDelay = params.feedbackDelay) #6
        else:
            openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.LTERel8DownlinkFeedbackManager(
                enabled = True,
                precodingMode = params.precodingMode,
                numPrbsPerSubband = 2,
                cqiUpdateFrequency = params.cqiUpdateFrequency, #5
                rankUpdateFrequency = 10,
                feedbackTotalDelay = params.feedbackDelay) #6

if (params.fdd == "UL") or (params.fdd == "DUPLEX"):
    openwns.simulator.OpenWNS.modules.imtaphy.uplinkStatusManager = imtaphy.Feedback.LTERel10UplinkChannelStatusManager(
        enabled = True,
        precodingMode = "NoPrecoding", # TODO: make it SingleAntenna
        srsUpdateFrequency = 5,
        statusDelay = 3) # together with 4 TTIs scheduling delay gives 7 TTIs total delay (cf. 36.814)

# Channel and (currently also) the feedback manager are singletons, so we put
# their configs into the IMTAphy module itself
openwns.simulator.OpenWNS.modules.imtaphy.channelConfig = channelConfig

bsPositions = scenarioConfig.bsPlacer.getPositions()

# The queues should not be too big to avoid using too much memory for storing
# outgoing packets. 75376 is the biggest TB size for a single layer with 110
# PRBs (20 MHz spectrum) so this should be enough:
maxQueueSize = 75376 * min([params.numMSAntennas, params.numBSAntennas]) * params.numDLPRBs / 100

azimuths = scenarioConfig.getAzimuths()

for pos in bsPositions:
    for azimuth in azimuths:
        # see 3GPP TR 36.814 for downtilt examples
        # 12 degrees for UMa / UMi
        downtilt = scenarioConfig.downtilt
        pos.z = scenarioConfig.bsHeight
        if params.scenario == "InH":
            antenna = imtaphy.Antenna.Omnidirectional(type = params.bsAntennaConfiguration,
                                                      antennaGain = "0 dB",
                                                      azimuth = math.radians(azimuth),
                                                      numElements = params.numBSAntennas,
                                                      wavelengthMeters = scenarioConfig.wavelengthMeters)
        else:
            antenna = imtaphy.Antenna.IMTAdvancedAntenna(type = params.bsAntennaConfiguration,
                                                         azimuth = math.radians(azimuth),
                                                         downtilt = downtilt,
                                                         antennaGain = "17 dB",
                                                         numElements = params.numBSAntennas,
                                                         wavelengthMeters = scenarioConfig.wavelengthMeters)
        if params.dlScheduler != "RoundRobin":
            # This is an outer-loop link-adaptation module that dynamically adapts an
            # individual threshold per user based on HARQ ACK/NACK feedback. In the
            # default config it aims at a first-attempt BLER of 10%. For very slow
            # speeds, smaller BLER targets and for higher speeds higher BLER targets
            # might perform better. Adaptive LA usually performs better than static
            # for most schedulers (e.g., PF), but for TD-RR (e.g., in IMT-A
            # calibration) at higher speeds, static is better.
            linkAdaptation = ltea.dll.linkAdaptation.downlink.BLERadaptive(threshold_dB = params.laThreshold,
                                                                           offsetDelta = 0.03,
                                                                           updateInterval = 5,
                                                                           targetBLER = 0.1,
                                                                           rel8Ports = 2,
                                                                           rel10Ports = 0)
        else:
            # no dynamic outer-loop link-adaptation, just add a static threshold
            linkAdaptation = ltea.dll.linkAdaptation.downlink.SINRThreshold(threshold_dB = params.laThreshold,
                                                                            rel8Ports = 2,
                                                                            rel10Ports = 0)
        if params.dlScheduler == "ProportionalFair":
            dlScheduler = ltea.dll.schedulers.downlink.ProportionalFair(linkAdaptation,
                                                                        txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm,
                                                                        throughputSmoothing = params.pfAlpha,
                                                                        queueSize = maxQueueSize,
                                                                        syncHARQ = True)
        elif params.dlScheduler == "PU2RC":
            dlScheduler = ltea.dll.schedulers.downlink.PU2RCScheduler(linkAdaptation,
                                                                      txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm,
                                                                      throughputSmoothing = params.pfAlpha,
                                                                      estimateOther = "PERFECT",
                                                                      queueSize = maxQueueSize,
                                                                      syncHARQ = False)
        elif params.dlScheduler == "ZF":
            dlScheduler = ltea.dll.schedulers.downlink.ZFScheduler(linkAdaptation,
                                                                   txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm,
                                                                   throughputSmoothing = params.pfAlpha,
                                                                   queueSize = maxQueueSize,
                                                                   syncHARQ = False)
        else:
            # prbsPerUser <= 0 means allocating all PRBs to the user, otherwise only
            # the indicated number per TTI
            dlScheduler = ltea.dll.schedulers.downlink.RoundRobin(linkAdaptation,
                                                                  txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm,
                                                                  queueSize = maxQueueSize,
                                                                  prbsPerUser = 0,
                                                                  syncHARQ = False)
        #dlScheduler = ltea.dll.schedulers.downlink.MultiUserScheduler(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, throughputSmoothing = params.pfAlpha, queueSize = maxQueueSize, syncHARQ = True)

        if params.adaptiveUplinkLA:
            linkAdaptationUL = ltea.dll.linkAdaptation.uplink.Adaptive(fastCrossingWeight = 0.01,
                                                                       longTimeWeight = 0.005,
                                                                       crossingThreshold = 45,
                                                                       threshold_dB = params.thresholdUL)
        else:
            linkAdaptationUL = ltea.dll.linkAdaptation.uplink.SINRThreshold(threshold_dB = params.thresholdUL)

        if params.powerControl == "calibration":
            alpha = 1.0
            P0dBmPerPRB = -106
        if params.powerControl == "3GPPdefault":
            # see 3GPP Self-evaluation methodology and results / assumptions by Tetsushi Abe, slide 27
            alpha = 0.8
            if params.scenario == "InH":
                P0dBmPerPRB = -80.0
            if params.scenario == "UMi":
                P0dBmPerPRB = -85.0
            if params.scenario == "UMa":
                P0dBmPerPRB = -83.0
            if params.scenario == "RMa":
                P0dBmPerPRB = -84.0

        ulScheduler = ltea.dll.schedulers.uplink.RoundRobin(linkAdaptation = linkAdaptationUL,
                                                            alpha = alpha,
                                                            P0dBmPerPRB = P0dBmPerPRB,
                                                            threegppCalibration = True,
                                                            Ks = 0,
                                                            prachPeriod = 99999999999, # no MCS-based power control
                                                            pathlossEstimationMethod = "WBL")
        WNS.simulationModel.nodes.append(ltea.nodes.eNB(pos, antenna, dlScheduler, ulScheduler,
                                                        eNBreceiver, windowSize, settlingTime,
                                                        None, fullBuffer = params.fullBuffer))

if params.scenario == "InH":
    msPositions = imtaphy.ScenarioSupport.placeMobilesUniformlyRandomlyInRectangle(
        params.numMSperBS * len(bsPositions), bsPositions, scenarioConfig)
else:
    if plotting:
        msPositions = imtaphy.ScenarioSupport.placeMobilesEquallyInCells(bsPositions,
                                                                         scenarioConfig,
                                                                         probeConfig)
    else:
        msPositions = imtaphy.ScenarioSupport.placeMobilesUniformlyRandomlyInCells(
            params.numMSperBS * len(bsPositions) * len(azimuths), bsPositions, scenarioConfig)

UEs = []
for pos in msPositions:
    pos.z = msHeight
    directionOfTravel = random.uniform(-math.pi, math.pi)
    arrayBroadsideAzimuth = directionOfTravel + math.pi / 2
    if arrayBroadsideAzimuth > math.pi:
        arrayBroadsideAzimuth -= 2 * math.pi
    antenna = imtaphy.Antenna.Omnidirectional(type = "MobileStationITU",
                                              antennaGain = "0 dB",
                                              azimuth = arrayBroadsideAzimuth,
                                              numElements = params.numMSAntennas,
                                              wavelengthMeters = scenarioConfig.wavelengthMeters)
    ulScheduler = None
    ulScheduler = ltea.dll.schedulers.uplink.UE(totalTxPowerdBm = scenarioConfig.msTotalTxPowerdBm)
    ue = ltea.nodes.UE(pos, msSpeedKmh, directionOfTravel, antenna, ulScheduler, ueReceiver,
                       windowSize, settlingTime, None, fullBuffer = params.fullBuffer)
    UEs.append(ue)
    WNS.simulationModel.nodes.append(ue)

if not params.fullBuffer:
    ltea.helper.createEPCandTraffic(simulator = WNS,
                                    offeredDLtrafficBps = params.offeredDLtrafficBps,
                                    offeredULtrafficBps = params.offeredULtrafficBps,
                                    packetSize = packetSize,
                                    probeWindow = windowSize,
                                    settlingTime = settlingTime,
                                    useTCP = False,
                                    enableLogger = False)

# see 3GPP TS 36.104 Section 5.6 Channel bandwidth
if params.numDLPRBs == 6:
    bandwidthDLHz = 1.4e6
elif params.numDLPRBs == 15:
    bandwidthDLHz = 3e6
elif params.numDLPRBs == 25:
    bandwidthDLHz = 5e6
elif params.numDLPRBs == 50:
    bandwidthDLHz = 1e7
elif params.numDLPRBs == 75:
    bandwidthDLHz = 1.5e7
elif params.numDLPRBs == 100:
    bandwidthDLHz = 2e7
else:
    bandwidthDLHz = 1 # won't make sense but...

if params.numULPRBs == 6:
    bandwidthULHz = 1.4e6
elif params.numULPRBs == 15:
    bandwidthULHz = 3e6
elif params.numULPRBs == 25:
    bandwidthULHz = 5e6
elif params.numULPRBs == 50:
    bandwidthULHz = 1e7
elif params.numULPRBs == 75:
    bandwidthULHz = 1.5e7
elif params.numULPRBs == 100:
    bandwidthULHz = 2e7
else:
    bandwidthULHz = 1 # won't make sense but...

if plotting:
    ltea.evaluation.default.installProbes(WNS,
                                          settlingTime = settlingTime,
                                          numBSs = len(bsPositions) * len(azimuths),
                                          users = params.numMSperBS,
                                          bandwidthDL = bandwidthDLHz,
                                          bandwidthUL = bandwidthULHz,
                                          restrictToBSIds = None,
                                          scenarioConfig = scenarioConfig,
                                          probeConfig = probeConfig)
else:
    ltea.evaluation.default.installProbes(WNS,
                                          settlingTime = settlingTime,
                                          numBSs = len(bsPositions) * len(azimuths),
                                          users = params.numMSperBS,
                                          bandwidthDL = bandwidthDLHz,
                                          bandwidthUL = bandwidthULHz,
                                          restrictToBSIds = None)

if dumpChannel:
    dumpUEsNodeIDs = []
    for i in range(0, min(9, len(UEs))):
        ue = UEs[i]
        ue.dll.enableChannelGainProbing()
        dumpUEsNodeIDs.append(ue.nodeID)
    ltea.evaluation.default.installChannelPlottingProbes(WNS,
                                                         params.numMSAntennas * params.numBSAntennas,
                                                         params.numDLPRBs,
                                                         500, # TTIs
                                                         dumpUEsNodeIDs) # list of UE node ids to dump the channel for

if phyTracing:
    import openwns.evaluation
    node = openwns.evaluation.createSourceNode(WNS, "phyRxTracing")
    json = openwns.evaluation.JSONTrace(key = "__json__", description = "PhyInterfaceRx Tracing Test")
    # only trace the inner site (otherwise the trace gets big/slow to open)
    centralSite = node.appendChildren(Accept(by = 'BSID', ifIn = [1, 2, 3], suffix = ""))
    ues = centralSite.appendChildren(Accept(by = 'NodeType', ifIn = [1], suffix = "DL"))
    ues.appendChildren(json)
    eNBs = centralSite.appendChildren(Accept(by = 'NodeType', ifIn = [2], suffix = "UL"))
    eNBs.appendChildren(json)
creasyw/IMTAphy
modules/phy/imtaphy/testConfigs/config.py
Python
gpl-2.0
29,929
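A note on the config above: the two PRB-count-to-bandwidth ladders near the end could equally be a table lookup. A sketch with the same 3GPP TS 36.104 Section 5.6 mapping and the same 1 Hz fallback:

PRB_TO_HZ = {6: 1.4e6, 15: 3e6, 25: 5e6, 50: 1e7, 75: 1.5e7, 100: 2e7}
bandwidthDLHz = PRB_TO_HZ.get(params.numDLPRBs, 1)  # 1 won't make sense but...
bandwidthULHz = PRB_TO_HZ.get(params.numULPRBs, 1)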
#!/usr/bin/env python

## Copyright (C) 2008 Red Hat, Inc.
## Copyright (C) 2008 Tim Waugh <[email protected]>

## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.

## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.

## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

import sys
import traceback

_debug = False

def debugprint (x):
    if _debug:
        try:
            print x
        except:
            pass

def get_debugging ():
    return _debug

def set_debugging (d):
    global _debug
    _debug = d

def fatalException (exitcode=1):
    nonfatalException (type="fatal", end="Exiting")
    sys.exit (exitcode)

def nonfatalException (type="non-fatal", end="Continuing anyway.."):
    d = get_debugging ()
    set_debugging (True)
    debugprint ("Caught %s exception.  Traceback:" % type)
    (type, value, tb) = sys.exc_info ()
    tblast = traceback.extract_tb (tb, limit=None)
    if len (tblast):
        tblast = tblast[:len (tblast) - 1]
    extxt = traceback.format_exception_only (type, value)
    for line in traceback.format_tb(tb):
        debugprint (line.strip ())
    debugprint (extxt[0].strip ())
    debugprint (end)
    set_debugging (d)
KDE/printer-applet
debug.py
Python
gpl-2.0
1,703
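A quick usage sketch for the helpers above (Python 2, matching the file; risky_operation is a hypothetical callable that raises):

set_debugging(True)
try:
    risky_operation()   # hypothetical; any exception will do
except:
    nonfatalException() # prints the traceback via debugprint and carries on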
#If we list all the natural numbers below 10 that are multiples of 3 or 5,
#we get 3, 5, 6 and 9. The sum of these multiples is 23.
#Find the sum of all the multiples of 3 or 5 below 1000.
print sum([x for x in xrange(1,1000) if (x % 3 == 0) or (x % 5 == 0)])
ecolitan/projecteuler-answers
Multiples_of_3_and_5.py
Python
gpl-2.0
260
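The one-liner above scans all 999 numbers. The same answer also follows in constant time from inclusion-exclusion over arithmetic series: the sum of multiples of k below n is k*m*(m+1)/2 with m = (n-1)//k. A sketch (Python 3 syntax):

def sum_multiples(k, n):
    m = (n - 1) // k
    return k * m * (m + 1) // 2

print(sum_multiples(3, 1000) + sum_multiples(5, 1000) - sum_multiples(15, 1000))
# 233168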
import logging
import re
import socket
import binascii
import sys
import os
import time

import gevent
import subprocess
import atexit

from Config import config
from Crypt import CryptRsa
from Site import SiteManager
from lib.PySocks import socks
from gevent.coros import RLock
from util import helper
from Debug import Debug


class TorManager:
    def __init__(self, fileserver_ip=None, fileserver_port=None):
        self.privatekeys = {}  # Onion: Privatekey
        self.site_onions = {}  # Site address: Onion
        self.tor_exe = "tools/tor/tor.exe"
        self.tor_process = None
        self.log = logging.getLogger("TorManager")
        self.start_onions = None
        self.conn = None
        self.lock = RLock()

        if config.tor == "disable":
            self.enabled = False
            self.start_onions = False
            self.status = "Disabled"
        else:
            self.enabled = True
            self.status = "Waiting"

        if fileserver_port:
            self.fileserver_port = fileserver_port
        else:
            self.fileserver_port = config.fileserver_port

        self.ip, self.port = config.tor_controller.split(":")
        self.port = int(self.port)

        self.proxy_ip, self.proxy_port = config.tor_proxy.split(":")
        self.proxy_port = int(self.proxy_port)

        # Test proxy port
        if config.tor != "disable":
            try:
                assert self.connect(), "No connection"
                self.log.debug("Tor proxy port %s check ok" % config.tor_proxy)
            except Exception, err:
                self.log.debug("Tor proxy port %s check error: %s" % (config.tor_proxy, err))
                self.enabled = False
                # Change to self-bundled Tor ports
                from lib.PySocks import socks
                self.port = 49051
                self.proxy_port = 49050
                socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port)
                if os.path.isfile(self.tor_exe):  # Already downloaded: sync mode
                    self.startTor()
                else:  # Not downloaded yet: async mode
                    gevent.spawn(self.startTor)

    def startTor(self):
        if sys.platform.startswith("win"):
            try:
                if not os.path.isfile(self.tor_exe):
                    self.downloadTor()

                self.log.info("Starting Tor client %s..." % self.tor_exe)
                tor_dir = os.path.dirname(self.tor_exe)
                self.tor_process = subprocess.Popen(r"%s -f torrc" % self.tor_exe, cwd=tor_dir, close_fds=True)
                for wait in range(1, 10):  # Wait for startup
                    time.sleep(wait * 0.5)
                    self.enabled = True
                    if self.connect():
                        break
                # Terminate on exit
                atexit.register(self.stopTor)
            except Exception, err:
                self.log.error("Error starting Tor client: %s" % Debug.formatException(err))
                self.enabled = False
        return False

    def stopTor(self):
        self.log.debug("Stopping...")
        self.tor_process.terminate()

    def downloadTor(self):
        self.log.info("Downloading Tor...")
        # Check the Tor webpage for the download link
        download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read()
        download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1)
        if not download_url.startswith("http"):
            download_url = "https://www.torproject.org/download/" + download_url

        # Download Tor client
        self.log.info("Downloading %s" % download_url)
        data = helper.httpRequest(download_url, as_file=True)
        data_size = data.tell()

        # Handle redirect
        if data_size < 1024 and "The document has moved" in data.getvalue():
            download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1)
            data = helper.httpRequest(download_url, as_file=True)
            data_size = data.tell()

        if data_size > 1024:
            import zipfile
            zip = zipfile.ZipFile(data)
            self.log.info("Unpacking Tor")
            for inner_path in zip.namelist():
                if ".." in inner_path:
                    continue
                dest_path = inner_path
                dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path)
                dest_path = re.sub("^Data/", "tools/tor/data/", dest_path)
                dest_path = re.sub("^Tor/", "tools/tor/", dest_path)
                dest_dir = os.path.dirname(dest_path)
                if dest_dir and not os.path.isdir(dest_dir):
                    os.makedirs(dest_dir)
                if dest_dir != dest_path.strip("/"):
                    data = zip.read(inner_path)
                    if not os.path.isfile(dest_path):
                        open(dest_path, 'wb').write(data)
        else:
            self.log.error("Bad response from server: %s" % data.getvalue())
            return False

    def connect(self):
        if not self.enabled:
            return False
        self.site_onions = {}
        self.privatekeys = {}

        if "socket_noproxy" in dir(socket):  # Socket proxy-patched, use the non-proxy one
            conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
        else:
            conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        self.log.debug("Connecting to %s:%s" % (self.ip, self.port))
        try:
            with self.lock:
                conn.connect((self.ip, self.port))
                res_protocol = self.send("PROTOCOLINFO", conn)

                version = re.search('Tor="([0-9\.]+)"', res_protocol).group(1)
                # Version 0.2.7.5 required because of ADD_ONION support
                assert int(version.replace(".", "0")) >= 20705, "Tor version >=0.2.7.5 required"

                # Auth cookie file
                cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol)
                if cookie_match:
                    cookie_file = cookie_match.group(1)
                    auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read())
                    res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn)
                else:
                    res_auth = self.send("AUTHENTICATE", conn)

                assert "250 OK" in res_auth, "Authenticate error %s" % res_auth
                self.status = "Connected (%s)" % res_auth
                self.conn = conn
        except Exception, err:
            self.conn = None
            self.status = "Error (%s)" % err
            self.log.error("Tor controller connect error: %s" % err)
            self.enabled = False
        return self.conn

    def disconnect(self):
        self.conn.close()
        self.conn = None

    def startOnions(self):
        self.log.debug("Start onions")
        self.start_onions = True

    # Get new exit node ip
    def resetCircuits(self):
        res = self.request("SIGNAL NEWNYM")
        if "250 OK" not in res:
            self.status = "Reset circuits error (%s)" % res
            self.log.error("Tor reset circuits error: %s" % res)

    def addOnion(self):
        res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port)
        match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL)
        if match:
            onion_address, onion_privatekey = match.groups()
            self.privatekeys[onion_address] = onion_privatekey
            self.status = "OK (%s onion running)" % len(self.privatekeys)
            SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
            return onion_address
        else:
            self.status = "AddOnion error (%s)" % res
            self.log.error("Tor addOnion error: %s" % res)
            return False

    def delOnion(self, address):
        res = self.request("DEL_ONION %s" % address)
        if "250 OK" in res:
            del self.privatekeys[address]
            self.status = "OK (%s onion running)" % len(self.privatekeys)
            return True
        else:
            self.status = "DelOnion error (%s)" % res
            self.log.error("Tor delOnion error: %s" % res)
            self.disconnect()
            return False

    def request(self, cmd):
        with self.lock:
            if not self.enabled:
                return False
            if not self.conn:
                if not self.connect():
                    return ""
            return self.send(cmd)

    def send(self, cmd, conn=None):
        if not conn:
            conn = self.conn
        self.log.debug("> %s" % cmd)
        conn.send("%s\r\n" % cmd)
        back = conn.recv(1024 * 64)
        self.log.debug("< %s" % back.strip())
        return back

    def getPrivatekey(self, address):
        return self.privatekeys[address]

    def getPublickey(self, address):
        return CryptRsa.privatekeyToPublickey(self.privatekeys[address])

    def getOnion(self, site_address):
        with self.lock:
            if not self.enabled:
                return None
            if self.start_onions:  # Different onion for every site
                onion = self.site_onions.get(site_address)
            else:  # Same onion for every site
                onion = self.site_onions.get("global")
                site_address = "global"
            if not onion:
                self.site_onions[site_address] = self.addOnion()
                onion = self.site_onions[site_address]
                self.log.debug("Created new hidden service for %s: %s" % (site_address, onion))
            return onion

    def createSocket(self, onion, port):
        if not self.enabled:
            return False
        self.log.debug("Creating new socket to %s:%s" % (onion, port))
        if config.tor == "always":  # Every socket is proxied by default
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect((onion, int(port)))
        else:
            sock = socks.socksocket()
            sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port)
            sock.connect((onion, int(port)))
        return sock
bashrc/zeronet-debian
src/src/Tor/TorManager.py
Python
gpl-2.0
10,295
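An aside on the version gate in connect() above: replacing each "." with "0" packs a dotted version into a comparable integer, e.g. "0.2.7.5" -> "0020705" -> 20705. A quick illustration (note this trick only stays order-preserving while every component is a single digit):

version = "0.2.9.1"
print(int(version.replace(".", "0")) >= 20705)  # True: 20901 >= 20705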
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, PasswordField, SelectField, DateTimeField, TextAreaField
MansoorMajeed/encrypted-notes
app/forms.py
Python
gpl-2.0
138
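This record is just the form imports; a hypothetical example of how such fields are typically combined into a Flask-WTF form (the class and field names below are illustrative, not taken from the repository):

class NoteForm(Form):
    # a minimal sketch of a form built from the imported field types
    title = StringField('Title')
    body = TextAreaField('Body')
    encrypted = BooleanField('Encrypt this note?')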
import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse

import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers


class FTPServer:
    def __init__(self, port, root, report_size):
        class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
            proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}

        authorizer = pyftpdlib.authorizers.DummyAuthorizer()
        authorizer.add_anonymous(root)
        handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
        handler.authorizer = authorizer
        self.server = pyftpdlib.servers.FTPServer(('', port), handler)

    def serve(self):
        self.server.serve_forever()


class HTTPServer:
    def __init__(self, port, cert, root, report_size):
        class RequestHandler(http.server.BaseHTTPRequestHandler):
            def do_GET(self):
                path = self.path.split('?', 1)[0].split('#', 1)[0]
                path = urllib.parse.unquote(path)
                path = posixpath.normpath(path)
                path = os.path.join(root, path.lstrip('/'))
                try:
                    with open(path, 'rb') as f:
                        data = f.read()
                    self.send_response(200)
                    content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
                    self.send_header('Content-Type', content_type)
                    self.send_header('Content-Transfer-Encoding', 'binary')
                    if report_size:
                        self.send_header('Content-Length', len(data))
                    self.end_headers()
                    self.wfile.write(data)
                except FileNotFoundError:
                    self.send_error(404)

            def do_POST(self):
                def dechunk(f):
                    bio = io.BytesIO()
                    while True:
                        chunksize = bytearray()
                        while not chunksize.endswith(b'\r\n'):
                            chunksize += f.read(1)
                        chunksize = chunksize.decode().split(':')[0]
                        chunksize = int(chunksize, 16)
                        if chunksize == 0:
                            break
                        chunk = f.read(chunksize)
                        assert(f.read(2) == b'\r\n')
                        bio.write(chunk)
                    bio.seek(0)
                    return bio

                def verify_hash(f, hashtype, hsh):
                    try:
                        chksum = hashlib.new(hashtype)
                    except ValueError:
                        return False
                    chksum.update(f.read())
                    return chksum.hexdigest() == hsh

                if self.headers.get('Transfer-Encoding') == 'chunked':
                    fp = dechunk(self.rfile)
                else:
                    fp = self.rfile
                data = cgi.FieldStorage(fp=fp,
                                        headers=self.headers,
                                        environ={'REQUEST_METHOD': 'POST'},
                                        # accept maximum of 10MB of data
                                        limit=10 * 1024 * 1024)
                try:
                    if 'filename' in data:
                        resp = b'Missing'
                        self.send_response(200)
                        self.send_header('Content-Type', 'text/plain')
                        self.send_header('Content-Length', len(resp))
                        self.end_headers()
                        self.wfile.write(resp)
                    else:
                        hashtype = [k for k in data.keys() if k.endswith('sum')][0]
                        hsh = data[hashtype].value
                        hashtype = hashtype.split('sum')[0]
                        if verify_hash(data['file'].file, hashtype, hsh):
                            self.send_response(204)
                            self.end_headers()
                        else:
                            self.send_error(500)
                except (KeyError, IndexError):
                    self.send_error(400)

        self.server = http.server.HTTPServer(('', port), RequestHandler)
        if cert:
            self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)

    def serve(self):
        self.server.serve_forever()


def main():
    servers = [
        FTPServer(2100, '/srv', True),
        FTPServer(2101, '/srv', False),
        HTTPServer(8000, None, '/srv', True),
        HTTPServer(8001, None, '/srv', False),
        HTTPServer(4430, '/cert.pem', '/srv', True),
        HTTPServer(4431, '/cert.pem', '/srv', False),
    ]
    threads = [threading.Thread(target=s.serve) for s in servers[1:]]
    for t in threads:
        t.setDaemon(True)
        t.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()
rebase-helper/rebase-helper
containers/integration.py
Python
gpl-2.0
5,136
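A note on dechunk() in the server above: it is defined locally inside do_POST(), but if hoisted to module scope it could be sanity-checked with a hand-built chunked body (a sketch; two chunks, "hello" and " world", then the terminating zero chunk):

import io

body = io.BytesIO(b'5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n')
assert dechunk(body).read() == b'hello world'  # assumes dechunk is importable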
''' Created on Sep 15, 2010 @author: duncantait ''' from SimPy.Simulation import * import numpy as np import random import math class G(): #Settings for HF Stations num_channels = 18 num_stations = 10 class Network(): stations = [] class Medium(): def __init__(self): self.channels = [] for i in range(G.num_channels): S = Store(name=i,capacity=1) self.channels.append(S) class StationContainer(): def __init__(self,ID): self.ID = ID self.Operator = Operator(ID) self.StationSettings = StationSettings(ID) self.Scanning = Scanning(ID) self.Tx = Tx(ID) def initComponents(self): self.Operator.initCounterparts() self.StationSettings.initCounterparts() self.Scanning.initCounterparts() self.Tx.initCounterparts() def activate(self): activate(self.Operator,self.Operator.sendMessage(),at=0.0) activate(self.StationSettings,self.StationSettings.sounding(),at=0.0) activate(self.Scanning,self.Scanning.scan(),at=0.0) activate(self.Tx,self.Tx.sending(),at=0.0) class Operator(Process): def __init__(self, ID): Process.__init__(self) self.ID = ID def initComponents(self): self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] def sendMessage(self): while True: #every so often operator wants to send a message: adds to queue. yield hold, self, random.uniform(0,1200) #Create a Message of type 'CALL' frameInfo = frameDetails(self.ID,self.decideDestination(),0,fType.CALL,False,-1,-1) frameInfo.channels = self.ChannelOrder(frameInfo.destination) yield put,self,self.Tx.sendQ,[frameInfo] yield hold, self, random.uniform(0,1200) def decideDestination(self): while True: dest = random.randint(0,G.num_channels-1) if dest != self.ID: return dest def ChannelOrder(self,channel=-1,station=-1): #sorts best channels best-worst if channel==-1: ordered = self.StationSettings.channelBER[station,:].argsort() return ordered[::-1] #reverse order of array if station==-1: ordered = self.StationSettings.channelBER[:,channel].argsort() return ordered[::-1] class StationSettings(Process): def __init__(self, ID): Process.__init__(self) self.ID = ID self.state = sState.SCANNING #can be scanning, linking or linked. self.sending = False self.channelBER = np.zeros((G.num_channels,G.num_stations)) #LQA: Link Quality Analysis self.timeout = 2 #current timeout counter for linking/linked mode, if this hits zero, go back to scanning self.Td = 2 #dwell time per channel self.Twce = 2 #wait for calling cycle to end self.Twr = 2 self.minLQA = 0.2 self.bitrate = 392 self.hardwareTime = 20 #e.g. power up/down time, modulation/demodulation, encoding/decoding, crypto in ms. #tune up/down time. Included in Twrt (wait for response and tune time) def Sounding(self): while True: yield hold, self, random.uniform(0,120) #Sound yield hold, self, 1800 class Scanning(Process): #Is HF ALWAYS scanning? No, either scanning, linking or linked def __init__(self, ID): self.ID = ID Process.__init__(self) self.currentChannel = 0 def initComponents(self): self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] self.Tx = [N.Tx for N in Network.stations if N.ID==self.ID][0] def scan(self): while True: #Different responses depending on mode. #Rules: cannot receive while sending <----------------- #Otherwise, packets will be interpreted as to the mode the station is in. 
            channel = Medium.channels[self.currentChannel]
            yield (get,self,channel,1),(hold,self,self.StationSettings.timeout)
            if self.acquired(channel):
                signal = self.got
                yield put,self,channel,signal
                frameInfo = self.decode(signal) #This implies picking up the signal frame by frame from the channel
                if (frameInfo.LQA > self.StationSettings.minLQA) and (frameInfo.destination==self.ID):
                    yield (put,self,channel,['PH:'+str(self.ID)]),(hold,self,self.StationSettings.Twce)
                    if self.stored(channel):
                        yield get,self,channel,1 #Yank sniffer packet back off channel.
                        if frameInfo.type==fType.CALL:
                            if self.StationSettings.state==sState.SCANNING:
                                yield put,self,self.Tx.sendQ,[frameInfo]
                                self.StationSettings.state=sState.LINKING
                                yield waitevent,self,self.Tx.sE
                        elif frameInfo.type==fType.RESPONSE:
                            if self.StationSettings.state==sState.LINKING:
                                yield put,self,self.Tx.sendQ,[frameInfo]
                                yield waitevent,self,self.Tx.sE
                        elif frameInfo.type==fType.ACK:
                            if self.StationSettings.state==sState.LINKING:
                                yield put,self,self.Tx.sendQ,[frameInfo]
                                self.StationSettings.state=sState.LINKED
                                yield waitevent,self,self.Tx.sE
                        elif frameInfo.type==fType.QUICK_ID:
                            if self.StationSettings.state in (sState.SCANNING, sState.LINKED) and frameInfo.terminate==False:
                                pass #I dont think you can have a QUICK_ID out of the blue, and it doesnt need a reply...
                                #yield put,self,self.Tx.sendQ,[frameInfo]
                                #yield waitevent,self,self.Tx.sE
                            elif frameInfo.terminate==True:
                                self.StationSettings.state=sState.SCANNING
                        elif frameInfo.type==fType.MSG:
                            if self.StationSettings.state==sState.LINKED and frameInfo.terminate==False:
                                pass #again, why the reply? just keep channel open...
                            elif frameInfo.terminate==True:
                                self.StationSettings.state=sState.SCANNING
                            #yield put,self,self.Tx.sendQ,[frameInfo]
                            #yield waitevent,self,self.Tx.sE
                        else:
                            print 'Invalid Packet'
                            self.StationSettings.state=sState.SCANNING
                    else:
                        print 'Timed out'
                        self.StationSettings.state=sState.SCANNING
                else:
                    #Frame unsuitable: Continue Scan
                    self.StationSettings.state=sState.SCANNING
            else:
                #Channel Empty: Continue Scan
                self.StationSettings.state=sState.SCANNING
            if self.StationSettings.state==sState.SCANNING:
                if self.currentChannel==G.num_channels-1:
                    self.currentChannel = 0
                else:
                    self.currentChannel += 1

    def decode(self,frameInfo):
        #Return a packet useable to send straightaway. All data is known to achieve this.
        returnInfo = self.convertReply(frameInfo)
        returnInfo = self.responseSize(returnInfo)
        returnInfo = self.calculate_LQA(returnInfo)
        #Messages and Acks/Responses always have to be on the same channel as before,
        #which is all dealt with in 'Scanning'. Wrapped in a list so that
        #Tx.sending can iterate over the candidate channels.
        returnInfo.channels = [self.currentChannel]
        returnInfo.terminate = False #This needs to be somewhat randomised, but for now this will do.
        return returnInfo
        #If LQA below certain amount, reject in PEM above

    def convertReply(self, frameInfo):
        #Convert incoming packet into its appropriate output type.
        #Note: returnInfo aliases frameInfo, so origin/destination are swapped
        #with tuple assignment to avoid clobbering one side before it is read.
        returnInfo = frameInfo
        if frameInfo.type==fType.OUT:
            returnInfo.type = fType.CALL
        elif frameInfo.type==fType.CALL:
            returnInfo.origin, returnInfo.destination = frameInfo.destination, frameInfo.origin
            returnInfo.type = fType.RESPONSE
        elif frameInfo.type==fType.RESPONSE:
            returnInfo.type = fType.ACK
            returnInfo.origin, returnInfo.destination = frameInfo.destination, frameInfo.origin
        elif frameInfo.type==fType.ACK:
            returnInfo.type = fType.MSG
            returnInfo.origin, returnInfo.destination = frameInfo.destination, frameInfo.origin
            returnInfo = self.decidePayload(returnInfo) #Messages get a payload.
        return returnInfo

    def responseSize(self,frameInfo):
        returnInfo = frameInfo
        destination = self.get_address(frameInfo.destination)
        origin = self.get_address(frameInfo.origin)
        if returnInfo.type in (fType.RESPONSE, fType.ACK):
            returnInfo.size += len(destination)*2*49 + len(origin)*49 #each word is 49bits after encoding
        return returnInfo

    def decidePayload(self, frameInfo):
        #Data Block Mode: Basic mode 0-572 bits, Extended 572-262820 bits (+18 each for cyclic redundancy check),
        #Extended data blocks are 588 bits (49*12 + 16 FCS) Basic are 49 bits. note 572 bits = 81 ASCII chars.
        #Other modes are AMD (auto msg display) and DTM (data text msg), but less efficient for larger data
        #Upper bound performance = 375 * (588/1176) = 187.5bps
        #Also, many many CMD words that do many things. (Important one being LQA transfer)
        #See pages around 231, need to add CMD and extra necessary words to these data_blocks etc.
        returnInfo = frameInfo
        mode = random.randint(0,10)
        if mode==0 or mode==1: #basic data block mode
            returnInfo.size += random.randint(1,81)*7 + 16
        elif mode==2: #extended data block mode (least likely)
            returnInfo.size += random.randint(82,37260)*7 + 16
        elif mode in (3,4,5,6): #CMD message
            returnInfo.size += 24 #1 extra word
        elif mode in (7,8,9,10): #null
            returnInfo.size += 0
        return returnInfo

    def get_address(self, address):
        words = []
        div = int(math.floor(len(address)/3))
        rem = len(address)%3
        i = 0
        rep = True
        for word in range(div):
            words.append(address[i:i+3])
            if rep==False and i >= 3:
                words.append('DATA')
            else:
                words.append('REP')
            rep = not rep
            i += 3
        if rem>0:
            final_word = address[i:i+rem] + '@'*(3-rem)
            words.append(final_word)
        return words

#
#Instead of 'crafting messages' and spending ages about it. Merely use the functions written already
#(for making the words etc.) to calculate the SIZE of the signal, and make this a parameter of the
#frameInfo that sits on the channel. This can then be used to say HOW LONG it stays on the channel, and
#how long the receiver must receive for (although this doesn't matter too much as the receiver is effectively
#locked in one state once it enters linking/linked mode (line 101). This solves Response/Ack problem too
#

class Tx(Process):
    def __init__(self,ID):
        self.ID = ID
        Process.__init__(self)
        self.sendQ = Store(name=ID,capacity='unbounded')
        self.sE = SimEvent(name='TxSent')

    def initComponents(self):
        self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0]

    def sending(self):
        while True:
            yield get,self,self.sendQ,1
            frameInfo = self.got[0] #data in form frameDetails()
            signal_time = frameInfo.size*self.StationSettings.bitrate + self.StationSettings.hardwareTime
            frameInfo.LQA = self.calculate_LQA(frameInfo.destination)
            unSent = True
            for chanNum in frameInfo.channels:
                if unSent:
                    channel = Medium.channels[chanNum]
                    if channel.nrBuffered==0:
                        print 'Channel', chanNum, 'free, occupying..'
                        yield put,self,channel,[frameInfo]
                        unSent = False
                        if frameInfo.type == fType.CALL: #call cycle
                            #THIS NEEDS ATTENTION AS IT IS DIFFERENT FROM THE REST - This could actually
                            #be ok... just needs some additions for propagation time.
                            #could use 'signal_time' from 'size' but kind of backwards...
                            yield hold,self,2*self.StationSettings.Td*G.num_stations
                            if self.interrupted():
                                print 'Collision occurred, station:', self.ID
                        else:
                            yield hold,self,signal_time #How long does it take to get there?!
                            if self.interrupted():
                                print 'Collision occurred, station:', self.ID
                        yield get,self,channel,1 #Message delivered.
                        #UNPASSIVATE SCANNING PEM
                        self.sE.signal(frameInfo)
                        self.StationSettings.timeout = self.StationSettings.Twr
                        #INVESTIGATE THIS TIMEOUT VARIABLE, WHAT DOES IT ACTUALLY DO? SEEM TO REMEMBER IT BEING A GOOD IDEA.

    def calculate_LQA(self, destination):
        #This algorithm has potential to be highly detailed
        #Parameters needed: positions of 2 stations --> distance
        #Ionospheric conditions
        #Time of day, sunspot cycle.
        #For now, stations closer in numbers are better connected.
        #This should be in Rx as it needs to eventually interface with an Environment process
        distance = abs(self.ID - destination)/float(G.num_stations)
        LQA = random.normalvariate(100-(distance*100),4)
        if LQA > 1: LQA=1
        if LQA < 0: LQA=0
        return LQA #Tx.sending assigns frameInfo.LQA from this value.

##CATER FOR IF OUTGOING FRAME FAILS AND NEEDS TO REPEAT USING A DIFFERENT CHANNEL! (extra parameter?)
#class OutgoingFrame(Process):
#    def __init__(self,ID,frameInfo,frame):
#        #channels is a list of channels, for a response or single channel call, it will only contain 1 entry
#        Process.__init__(self)
#        self.ID = ID
#        self.destination = frameInfo.destination
#        self.channelOrder = frameInfo.channels
#        self.type = frameInfo.type
#        self.frame = frame
#    def initComponents(self):
#        self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0]
#        self.Tx = [N.Tx for N in Network.stations if N.ID==self.ID][0]
#    def go(self):
#        unSent = True
#        for chanNum in self.channelOrder:
#            if unSent:
#                channel = Medium.channels(chanNum)
#                if channel.nrBuffered==0:
#                    print 'Channel', chanNum, 'free, occupying..'
#                    yield put,self,channel,[self.frame]
#                    unSent = False
#                    if self.type == fType.OUT: #call cycle
#                        yield hold,self,2*self.StationSettings.Td*G.num_stations
#                        if self.interrupted():
#                            print 'Collision occurred, station:', self.ID
#                    if self.type == fType.RESPONSE:
#                        yield hold,self,self.StationSettings.Twr #How long does it take to get there?!
#                        if self.interrupted():
#                            print 'Collision occurred, station:', self.ID
#                    yield get,self,channel,1 #Message delivered.
#                    #UNPASSIVATE SCANNING PEM
#                    self.StationSettings.timeout = self.StationSettings.Twr

class frameDetails():
    def __init__(self,origin,destination,size,type,terminate,channels,LQA):
        self.origin = origin
        self.destination = destination
        self.size = size
        self.type = type
        self.terminate = terminate
        self.channels = channels
        self.LQA = LQA

class fType():
    MSG = 1
    QUICK_ID = 2
    CALL = 3
    RESPONSE = 4
    ACK = 5
    OUT = 6

class sState():
    SCANNING = 1
    LINKING = 2
    LINKED = 3

initialize()
Medium = Medium()
Network.stations = [StationContainer(i) for i in range(G.num_stations)]
for N in Network.stations:
    N.initComponents()
    N.activate()
simulate(until=G.max_time)
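
# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original simulation): the reply chain that
# convertReply() implements, reduced to a plain lookup table so the handshake
# can be checked without running SimPy. The helper name next_frame_type is
# hypothetical; the OUT -> CALL -> RESPONSE -> ACK -> MSG transitions are
# exactly the ones coded in convertReply() above.
def next_frame_type(current):
    #Each received frame type maps to the type of frame the station should
    #transmit next; OUT marks a locally queued outgoing call.
    table = {fType.OUT: fType.CALL,
             fType.CALL: fType.RESPONSE,
             fType.RESPONSE: fType.ACK,
             fType.ACK: fType.MSG}
    return table.get(current)

#A full link establishment walks the whole chain:
assert next_frame_type(fType.OUT) == fType.CALL
assert next_frame_type(fType.CALL) == fType.RESPONSE
assert next_frame_type(fType.RESPONSE) == fType.ACK
assert next_frame_type(fType.ACK) == fType.MSG
#QUICK_ID and MSG frames generate no reply (their reply yields are commented
#out in the scanning PEM, and convertReply() has no branch for them):
assert next_frame_type(fType.QUICK_ID) is None
assert next_frame_type(fType.MSG) is None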
IncidentNormal/TestApps
ALE/HF_Sim_Book.py
Python
gpl-2.0
17,239
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright © 2011 Thomas Schreiber
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# by Thomas Schreiber <[email protected]>
#

from PyQt4.QtGui import *
from PyQt4.QtCore import *

from view.changesUi import Ui_changeSummary

import string


class ChangeWin(QDialog):
    """A QDialog that lists changes before they are committed.

    :param QDialog: Parent class.
    """
    def __init__(self, parent):
        """Initialize ChangeWin.

        :param parent: Caller.
        """
        QDialog.__init__(self, parent)
        self.ui = Ui_changeSummary()
        self.ui.setupUi(self)

    def setChanges(self, changeDict):
        """Add changes to ChangeWin.

        :param changeDict: Dictionary of changes.
        """
        installString = ''
        upgradeString = ''
        removeString = ''
        for app in changeDict['repoInstalls']:
            installString += app + ' '
        for app in changeDict['aurInstalls']:
            installString += app + ' '
        for app in changeDict['aurBuildDeps']:
            installString += app + ' '
        for app in changeDict['aurDeps']:
            installString += app + ' '
        for app in changeDict['repoUpgrades']:
            upgradeString += app + ' '
        for app in changeDict['aurUpgrades']:
            upgradeString += app + ' '
        for app in changeDict['removes']:
            removeString += app + ' '
        self.ui.toInstallEdit.setText(installString)
        self.ui.toUpgradeEdit.setText(upgradeString)
        self.ui.toRemoveEdit.setText(removeString)

# vim: set ts=4 sw=4 noet:
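
# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): minimal usage of ChangeWin,
# assuming the generated view.changesUi module is importable. setChanges()
# iterates over all seven keys shown below, so each must be present and map to
# an iterable of package names; the package names here are hypothetical.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    example_changes = {
        'repoInstalls': ['vim'],
        'aurInstalls':  ['yaourt'],
        'aurBuildDeps': ['package-query'],
        'aurDeps':      [],
        'repoUpgrades': ['linux'],
        'aurUpgrades':  [],
        'removes':      ['nano'],
    }
    win = ChangeWin(None)
    win.setChanges(example_changes)
    win.show()
    sys.exit(app.exec_())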
ubiquill/Potluck
src/view/Changes.py
Python
gpl-2.0
2,296
# # kickstart.py: kickstart install support # # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 # Red Hat, Inc. All rights reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from pyanaconda.errors import ScriptError, errorHandler from blivet.deviceaction import ActionCreateFormat, ActionDestroyFormat, ActionResizeDevice, ActionResizeFormat from blivet.devices import LUKSDevice from blivet.devices.lvm import LVMVolumeGroupDevice, LVMCacheRequest from blivet.devicelibs.lvm import LVM_PE_SIZE, KNOWN_THPOOL_PROFILES from blivet.devicelibs.crypto import MIN_CREATE_ENTROPY from blivet.formats import getFormat from blivet.partitioning import doPartitioning from blivet.partitioning import growLVM from blivet.errors import PartitioningError, StorageError, BTRFSValueError from blivet.size import Size, KiB from blivet import udev from blivet import autopart from blivet.platform import platform import blivet.iscsi import blivet.fcoe import blivet.zfcp import blivet.arch import glob from pyanaconda import iutil from pyanaconda.iutil import open # pylint: disable=redefined-builtin import os import os.path import tempfile from pyanaconda.flags import flags, can_touch_runtime_system from pyanaconda.constants import ADDON_PATHS, IPMI_ABORTED import shlex import requests import sys import pykickstart.commands as commands from pyanaconda import keyboard from pyanaconda import ntp from pyanaconda import timezone from pyanaconda.timezone import NTP_PACKAGE, NTP_SERVICE from pyanaconda import localization from pyanaconda import network from pyanaconda import nm from pyanaconda.simpleconfig import SimpleConfigFile from pyanaconda.users import getPassAlgo from pyanaconda.desktop import Desktop from pyanaconda.i18n import _ from pyanaconda.ui.common import collect from pyanaconda.addons import AddonSection, AddonData, AddonRegistry, collect_addon_paths from pyanaconda.bootloader import GRUB2, get_bootloader from pyanaconda.pwpolicy import F22_PwPolicy, F22_PwPolicyData from pykickstart.constants import CLEARPART_TYPE_NONE, FIRSTBOOT_SKIP, FIRSTBOOT_RECONFIG, KS_SCRIPT_POST, KS_SCRIPT_PRE, \ KS_SCRIPT_TRACEBACK, KS_SCRIPT_PREINSTALL, SELINUX_DISABLED, SELINUX_ENFORCING, SELINUX_PERMISSIVE from pykickstart.base import BaseHandler from pykickstart.errors import formatErrorMsg, KickstartError, KickstartValueError from pykickstart.parser import KickstartParser from pykickstart.parser import Script as KSScript from pykickstart.sections import Section from pykickstart.sections import NullSection, PackageSection, PostScriptSection, PreScriptSection, PreInstallScriptSection, TracebackScriptSection from pykickstart.version import returnClassForVersion import logging log = logging.getLogger("anaconda") stderrLog = logging.getLogger("anaconda.stderr") storage_log = logging.getLogger("blivet") stdoutLog = logging.getLogger("anaconda.stdout") from pyanaconda.anaconda_log import logger, logLevelMap, setHandlersLevel, 
DEFAULT_LEVEL class AnacondaKSScript(KSScript): """ Execute a kickstart script This will write the script to a file named /tmp/ks-script- before execution. Output is logged by the program logger, the path specified by --log or to /tmp/ks-script-\\*.log """ def run(self, chroot): """ Run the kickstart script @param chroot directory path to chroot into before execution """ if self.inChroot: scriptRoot = chroot else: scriptRoot = "/" # Environment variables that cause problems for %post scripts env_prune = ["LIBUSER_CONF"] (fd, path) = tempfile.mkstemp("", "ks-script-", scriptRoot + "/tmp") iutil.eintr_retry_call(os.write, fd, self.script.encode("utf-8")) iutil.eintr_ignore(os.close, fd) iutil.eintr_retry_call(os.chmod, path, 0o700) # Always log stdout/stderr from scripts. Using --log just lets you # pick where it goes. The script will also be logged to program.log # because of execWithRedirect. if self.logfile: if self.inChroot: messages = "%s/%s" % (scriptRoot, self.logfile) else: messages = self.logfile d = os.path.dirname(messages) if not os.path.exists(d): os.makedirs(d) else: # Always log outside the chroot, we copy those logs into the # chroot later. messages = "/tmp/%s.log" % os.path.basename(path) with open(messages, "w") as fp: rc = iutil.execWithRedirect(self.interp, ["/tmp/%s" % os.path.basename(path)], stdout=fp, root=scriptRoot, env_prune=env_prune) if rc != 0: log.error("Error code %s running the kickstart script at line %s", rc, self.lineno) if self.errorOnFail: err = "" with open(messages, "r") as fp: err = "".join(fp.readlines()) errorHandler.cb(ScriptError(self.lineno, err)) iutil.ipmi_report(IPMI_ABORTED) sys.exit(0) class AnacondaInternalScript(AnacondaKSScript): def __init__(self, *args, **kwargs): AnacondaKSScript.__init__(self, *args, **kwargs) self._hidden = True def __str__(self): # Scripts that implement portions of anaconda (copying screenshots and # log files, setfilecons, etc.) should not be written to the output # kickstart file. return "" def getEscrowCertificate(escrowCerts, url): if not url: return None if url in escrowCerts: return escrowCerts[url] needs_net = not url.startswith("/") and not url.startswith("file:") if needs_net and not nm.nm_is_connected(): msg = _("Escrow certificate %s requires the network.") % url raise KickstartError(msg) log.info("escrow: downloading %s", url) try: request = iutil.requests_session().get(url, verify=True) except requests.exceptions.SSLError as e: msg = _("SSL error while downloading the escrow certificate:\n\n%s") % e raise KickstartError(msg) except requests.exceptions.RequestException as e: msg = _("The following error was encountered while downloading the escrow certificate:\n\n%s") % e raise KickstartError(msg) try: escrowCerts[url] = request.content finally: request.close() return escrowCerts[url] def deviceMatches(spec, devicetree=None): """ Return names of block devices matching the provided specification. :param str spec: a device identifier (name, UUID=<uuid>, &c) :keyword devicetree: device tree to look up devices in (optional) :type devicetree: :class:`blivet.DeviceTree` :returns: names of matching devices :rtype: list of str parse methods will not have access to a devicetree, while execute methods will. The devicetree is superior in that it can resolve md array names and in that it reflects scheduled device removals, but for normal local disks udev.resolve_devspec should suffice. 
""" full_spec = spec if not full_spec.startswith("/dev/"): full_spec = os.path.normpath("/dev/" + full_spec) # the regular case matches = udev.resolve_glob(full_spec) # Use spec here instead of full_spec to preserve the spec and let the # called code decide whether to treat the spec as a path instead of a name. if devicetree is None: dev = udev.resolve_devspec(spec) else: dev = getattr(devicetree.resolveDevice(spec), "name", None) # udev.resolve_devspec returns None if there's no match, but we don't # want that ending up in the list. if dev and dev not in matches: matches.append(dev) return matches def lookupAlias(devicetree, alias): for dev in devicetree.devices: if getattr(dev, "req_name", None) == alias: return dev return None # Remove any existing formatting on a device, but do not remove the partition # itself. This sets up an existing device to be used in a --onpart option. def removeExistingFormat(device, storage): deps = storage.deviceDeps(device) while deps: leaves = [d for d in deps if d.isleaf] for leaf in leaves: storage.destroyDevice(leaf) deps.remove(leaf) storage.devicetree.registerAction(ActionDestroyFormat(device)) def getAvailableDiskSpace(storage): """ Get overall disk space available on disks we may use. :param storage: blivet.Blivet instance :return: overall disk space available :rtype: :class:`blivet.size.Size` """ free_space = storage.freeSpaceSnapshot # blivet creates a new free space dict to instead of modifying the old one, # so there is no worry about the dictionary changing during iteration. return sum(disk_free for disk_free, fs_free in free_space.values()) def refreshAutoSwapSize(storage): """ Refresh size of the auto partitioning request for swap device according to the current state of the storage configuration. :param storage: blivet.Blivet instance """ for request in storage.autoPartitionRequests: if request.fstype == "swap": disk_space = getAvailableDiskSpace(storage) request.size = autopart.swapSuggestion(disk_space=disk_space) break ### ### SUBCLASSES OF PYKICKSTART COMMAND HANDLERS ### class Authconfig(commands.authconfig.FC3_Authconfig): def __init__(self, *args, **kwargs): commands.authconfig.FC3_Authconfig.__init__(self, *args, **kwargs) self.packages = [] def setup(self): if self.seen: self.packages = ["authconfig"] def execute(self, *args): cmd = "/usr/sbin/authconfig" if not os.path.lexists(iutil.getSysroot()+cmd): if flags.automatedInstall and self.seen: msg = _("%s is missing. 
Cannot setup authentication.") % cmd raise KickstartError(msg) else: return args = ["--update", "--nostart"] + shlex.split(self.authconfig) if not flags.automatedInstall and \ (os.path.exists(iutil.getSysroot() + "/lib64/security/pam_fprintd.so") or \ os.path.exists(iutil.getSysroot() + "/lib/security/pam_fprintd.so")): args += ["--enablefingerprint"] try: iutil.execInSysroot(cmd, args) except RuntimeError as msg: log.error("Error running %s %s: %s", cmd, args, msg) class AutoPart(commands.autopart.F21_AutoPart): def parse(self, args): retval = commands.autopart.F21_AutoPart.parse(self, args) if self.fstype: fmt = blivet.formats.getFormat(self.fstype) if not fmt or fmt.type is None: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("autopart fstype of %s is invalid.") % self.fstype)) return retval def execute(self, storage, ksdata, instClass): from blivet.autopart import doAutoPartition from pyanaconda.storage_utils import sanity_check if not self.autopart: return if self.fstype: try: storage.setDefaultFSType(self.fstype) storage.setDefaultBootFSType(self.fstype) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Settings default fstype to %s failed.") % self.fstype)) # sets up default autopartitioning. use clearpart separately # if you want it instClass.setDefaultPartitioning(storage) storage.doAutoPart = True if self.encrypted: storage.encryptedAutoPart = True storage.encryptionPassphrase = self.passphrase storage.encryptionCipher = self.cipher storage.autoPartEscrowCert = getEscrowCertificate(storage.escrowCertificates, self.escrowcert) storage.autoPartAddBackupPassphrase = self.backuppassphrase if self.type is not None: storage.autoPartType = self.type doAutoPartition(storage, ksdata, min_luks_entropy=MIN_CREATE_ENTROPY) errors = sanity_check(storage) if errors: raise PartitioningError("autopart failed:\n" + "\n".join(str(error) for error in errors)) class Bootloader(commands.bootloader.F21_Bootloader): def __init__(self, *args, **kwargs): commands.bootloader.F21_Bootloader.__init__(self, *args, **kwargs) self.location = "mbr" def parse(self, args): commands.bootloader.F21_Bootloader.parse(self, args) if self.location == "partition" and isinstance(get_bootloader(), GRUB2): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("GRUB2 does not support installation to a partition."))) if self.isCrypted and isinstance(get_bootloader(), GRUB2): if not self.password.startswith("grub.pbkdf2."): raise KickstartValueError(formatErrorMsg(self.lineno, msg="GRUB2 encrypted password must be in grub.pbkdf2 format.")) return self def execute(self, storage, ksdata, instClass): if flags.imageInstall and blivet.arch.isS390(): self.location = "none" if self.location == "none": location = None elif self.location == "partition": location = "boot" else: location = self.location if not location: storage.bootloader.skip_bootloader = True return if self.appendLine: args = self.appendLine.split() storage.bootloader.boot_args.update(args) if self.password: if self.isCrypted: storage.bootloader.encrypted_password = self.password else: storage.bootloader.password = self.password if location: storage.bootloader.set_preferred_stage1_type(location) if self.timeout is not None: storage.bootloader.timeout = self.timeout # Throw out drives specified that don't exist or cannot be used (iSCSI # device on an s390 machine) disk_names = [d.name for d in storage.disks if not d.format.hidden and not d.protected and (not blivet.arch.isS390() or not isinstance(d, 
blivet.devices.iScsiDiskDevice))] diskSet = set(disk_names) for drive in self.driveorder[:]: matches = set(deviceMatches(drive, devicetree=storage.devicetree)) if matches.isdisjoint(diskSet): log.warning("requested drive %s in boot drive order doesn't exist or cannot be used", drive) self.driveorder.remove(drive) storage.bootloader.disk_order = self.driveorder if self.bootDrive: matches = set(deviceMatches(self.bootDrive, devicetree=storage.devicetree)) if len(matches) > 1: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("More than one match found for given boot drive \"%s\".") % self.bootDrive)) elif matches.isdisjoint(diskSet): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Requested boot drive \"%s\" doesn't exist or cannot be used.") % self.bootDrive)) else: self.bootDrive = disk_names[0] drive = storage.devicetree.resolveDevice(self.bootDrive) storage.bootloader.stage1_disk = drive if self.leavebootorder: flags.leavebootorder = True if self.nombr: flags.nombr = True class BTRFS(commands.btrfs.F23_BTRFS): def execute(self, storage, ksdata, instClass): for b in self.btrfsList: b.execute(storage, ksdata, instClass) class BTRFSData(commands.btrfs.F23_BTRFSData): def execute(self, storage, ksdata, instClass): devicetree = storage.devicetree storage.doAutoPart = False members = [] # Get a list of all the devices that make up this volume. for member in self.devices: dev = devicetree.resolveDevice(member) if not dev: # if using --onpart, use original device member_name = ksdata.onPart.get(member, member) dev = devicetree.resolveDevice(member_name) or lookupAlias(devicetree, member) if dev and dev.format.type == "luks": try: dev = devicetree.getChildren(dev)[0] except IndexError: dev = None if dev and dev.format.type != "btrfs": raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Btrfs partition \"%(device)s\" has a format of \"%(format)s\", but should have a format of \"btrfs\".") % {"device": member, "format": dev.format.type})) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Tried to use undefined partition \"%s\" in Btrfs volume specification.") % member)) members.append(dev) if self.subvol: name = self.name elif self.label: name = self.label else: name = None if len(members) == 0 and not self.preexist: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Btrfs volume defined without any member devices. Either specify member devices or use --useexisting."))) # allow creating btrfs vols/subvols without specifying mountpoint if self.mountpoint in ("none", "None"): self.mountpoint = "" # Sanity check mountpoint if self.mountpoint != "" and self.mountpoint[0] != '/': raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The mount point \"%s\" is not valid. It must start with a /.") % self.mountpoint)) # If a previous device has claimed this mount point, delete the # old one. 
try: if self.mountpoint: device = storage.mountpoints[self.mountpoint] storage.destroyDevice(device) except KeyError: pass if self.preexist: device = devicetree.resolveDevice(self.name) if not device: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Btrfs volume \"%s\" specified with --useexisting does not exist.") % self.name)) device.format.mountpoint = self.mountpoint else: try: request = storage.newBTRFS(name=name, subvol=self.subvol, mountpoint=self.mountpoint, metaDataLevel=self.metaDataLevel, dataLevel=self.dataLevel, parents=members, createOptions=self.mkfsopts) except BTRFSValueError as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) class Realm(commands.realm.F19_Realm): def __init__(self, *args): commands.realm.F19_Realm.__init__(self, *args) self.packages = [] self.discovered = "" def setup(self): if not self.join_realm: return try: argv = ["discover", "--verbose"] + \ self.discover_options + [self.join_realm] output = iutil.execWithCapture("realm", argv, filter_stderr=True) except OSError: # TODO: A lousy way of propagating what will usually be # 'no such realm' # The error message is logged by iutil return # Now parse the output for the required software. First line is the # realm name, and following lines are information as "name: value" self.packages = ["realmd"] self.discovered = "" lines = output.split("\n") if not lines: return self.discovered = lines.pop(0).strip() log.info("Realm discovered: %s", self.discovered) for line in lines: parts = line.split(":", 1) if len(parts) == 2 and parts[0].strip() == "required-package": self.packages.append(parts[1].strip()) log.info("Realm %s needs packages %s", self.discovered, ", ".join(self.packages)) def execute(self, *args): if not self.discovered: return for arg in self.join_args: if arg.startswith("--no-password") or arg.startswith("--one-time-password"): pw_args = [] break else: # no explicit password arg using implicit --no-password pw_args = ["--no-password"] argv = ["join", "--install", iutil.getSysroot(), "--verbose"] + \ pw_args + self.join_args rc = -1 try: rc = iutil.execWithRedirect("realm", argv) except OSError: pass if rc == 0: log.info("Joined realm %s", self.join_realm) class ClearPart(commands.clearpart.F21_ClearPart): def parse(self, args): retval = commands.clearpart.F21_ClearPart.parse(self, args) if self.type is None: self.type = CLEARPART_TYPE_NONE if self.disklabel and self.disklabel not in platform.diskLabelTypes: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disklabel \"%s\" given in clearpart command is not " "supported on this platform.") % self.disklabel)) # Do any glob expansion now, since we need to have the real list of # disks available before the execute methods run. drives = [] for spec in self.drives: matched = deviceMatches(spec) if matched: drives.extend(matched) else: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" given in clearpart command does not exist.") % spec)) self.drives = drives # Do any glob expansion now, since we need to have the real list of # devices available before the execute methods run. 
devices = [] for spec in self.devices: matched = deviceMatches(spec) if matched: devices.extend(matched) else: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Device \"%s\" given in clearpart device list does not exist.") % spec)) self.devices = devices return retval def execute(self, storage, ksdata, instClass): storage.config.clearPartType = self.type storage.config.clearPartDisks = self.drives storage.config.clearPartDevices = self.devices if self.initAll: storage.config.initializeDisks = self.initAll if self.disklabel: if not platform.setDefaultDiskLabelType(self.disklabel): log.warn("%s is not a supported disklabel type on this platform. " "Using default disklabel %s instead.", self.disklabel, platform.defaultDiskLabelType) storage.clearPartitions() class Fcoe(commands.fcoe.F13_Fcoe): def parse(self, args): fc = commands.fcoe.F13_Fcoe.parse(self, args) if fc.nic not in nm.nm_devices(): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("NIC \"%s\" given in fcoe command does not exist.") % fc.nic)) if fc.nic in (info[0] for info in blivet.fcoe.fcoe().nics): log.info("Kickstart fcoe device %s already added from EDD, ignoring", fc.nic) else: msg = blivet.fcoe.fcoe().addSan(nic=fc.nic, dcb=fc.dcb, auto_vlan=True) if not msg: msg = "Succeeded." blivet.fcoe.fcoe().added_nics.append(fc.nic) log.info("adding FCoE SAN on %s: %s", fc.nic, msg) return fc class Firewall(commands.firewall.F20_Firewall): def __init__(self, *args, **kwargs): commands.firewall.F20_Firewall.__init__(self, *args, **kwargs) self.packages = [] def setup(self): if self.seen: self.packages = ["firewalld"] def execute(self, storage, ksdata, instClass): args = [] # enabled is None if neither --enable or --disable is passed # default to enabled if nothing has been set. if self.enabled == False: args += ["--disabled"] else: args += ["--enabled"] if "ssh" not in self.services and "ssh" not in self.remove_services \ and "22:tcp" not in self.ports: args += ["--service=ssh"] for dev in self.trusts: args += ["--trust=%s" % (dev,)] for port in self.ports: args += ["--port=%s" % (port,)] for remove_service in self.remove_services: args += ["--remove-service=%s" % (remove_service,)] for service in self.services: args += ["--service=%s" % (service,)] cmd = "/usr/bin/firewall-offline-cmd" if not os.path.exists(iutil.getSysroot()+cmd): if self.enabled: msg = _("%s is missing. 
Cannot setup firewall.") % (cmd,) raise KickstartError(msg) else: iutil.execInSysroot(cmd, args) class Firstboot(commands.firstboot.FC3_Firstboot): def setup(self, *args): # firstboot should be disabled by default after kickstart installations if flags.automatedInstall and not self.seen: self.firstboot = FIRSTBOOT_SKIP def execute(self, *args): action = "enable" services = ["initial-setup-graphical.service", "initial-setup-text.service"] if not any(os.path.exists(iutil.getSysroot() + "/lib/systemd/system/" + path) for path in services): # none of the first boot utilities installed, nothing to do here return if self.firstboot == FIRSTBOOT_SKIP: action = "disable" elif self.firstboot == FIRSTBOOT_RECONFIG: f = open(iutil.getSysroot() + "/etc/reconfigSys", "w+") f.close() iutil.execInSysroot("systemctl", [action] + services) class Group(commands.group.F12_Group): def execute(self, storage, ksdata, instClass, users): for grp in self.groupList: kwargs = grp.__dict__ kwargs.update({"root": iutil.getSysroot()}) users.createGroup(grp.name, **kwargs) class IgnoreDisk(commands.ignoredisk.RHEL6_IgnoreDisk): def parse(self, args): retval = commands.ignoredisk.RHEL6_IgnoreDisk.parse(self, args) # See comment in ClearPart.parse drives = [] for spec in self.ignoredisk: matched = deviceMatches(spec) if matched: drives.extend(matched) else: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" given in ignoredisk command does not exist.") % spec)) self.ignoredisk = drives drives = [] for spec in self.onlyuse: matched = deviceMatches(spec) if matched: drives.extend(matched) else: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" given in ignoredisk command does not exist.") % spec)) self.onlyuse = drives return retval class Iscsi(commands.iscsi.F17_Iscsi): def parse(self, args): tg = commands.iscsi.F17_Iscsi.parse(self, args) if tg.iface: if not network.wait_for_network_devices([tg.iface]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Network interface \"%(nic)s\" required by iSCSI \"%(iscsiTarget)s\" target is not up.") % {"nic": tg.iface, "iscsiTarget": tg.target})) mode = blivet.iscsi.iscsi().mode if mode == "none": if tg.iface: blivet.iscsi.iscsi().create_interfaces(nm.nm_activated_devices()) elif ((mode == "bind" and not tg.iface) or (mode == "default" and tg.iface)): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("iscsi --iface must be specified (binding used) either for all targets or for none"))) try: blivet.iscsi.iscsi().addTarget(tg.ipaddr, tg.port, tg.user, tg.password, tg.user_in, tg.password_in, target=tg.target, iface=tg.iface) log.info("added iscsi target %s at %s via %s", tg.target, tg.ipaddr, tg.iface) except (IOError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) return tg class IscsiName(commands.iscsiname.FC6_IscsiName): def parse(self, args): retval = commands.iscsiname.FC6_IscsiName.parse(self, args) blivet.iscsi.iscsi().initiator = self.iscsiname return retval class Lang(commands.lang.F19_Lang): def execute(self, *args, **kwargs): localization.write_language_configuration(self, iutil.getSysroot()) # no overrides needed here Eula = commands.eula.F20_Eula class LogVol(commands.logvol.F23_LogVol): def execute(self, storage, ksdata, instClass): for l in self.lvList: l.execute(storage, ksdata, instClass) if self.lvList: growLVM(storage) class LogVolData(commands.logvol.F23_LogVolData): def execute(self, storage, ksdata, instClass): devicetree = storage.devicetree storage.doAutoPart 
= False # FIXME: we should be running sanityCheck on partitioning that is not ks # autopart, but that's likely too invasive for #873135 at this moment if self.mountpoint == "/boot" and blivet.arch.isS390(): raise KickstartValueError(formatErrorMsg(self.lineno, msg="/boot can not be of type 'lvmlv' on s390x")) # we might have truncated or otherwise changed the specified vg name vgname = ksdata.onPart.get(self.vgname, self.vgname) size = None if self.percent: size = Size(0) if self.mountpoint == "swap": ty = "swap" self.mountpoint = "" if self.recommended or self.hibernation: disk_space = getAvailableDiskSpace(storage) size = autopart.swapSuggestion(hibernation=self.hibernation, disk_space=disk_space) self.grow = False else: if self.fstype != "": ty = self.fstype else: ty = storage.defaultFSType if size is None and not self.preexist: if not self.size: raise KickstartValueError(formatErrorMsg(self.lineno, msg="Size can not be decided on from kickstart nor obtained from device.")) try: size = Size("%d MiB" % self.size) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg="The size \"%s\" is invalid." % self.size)) if self.thin_pool: self.mountpoint = "" ty = None # Sanity check mountpoint if self.mountpoint != "" and self.mountpoint[0] != '/': raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The mount point \"%s\" is not valid. It must start with a /.") % self.mountpoint)) # Check that the VG this LV is a member of has already been specified. vg = devicetree.getDeviceByName(vgname) if not vg: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("No volume group exists with the name \"%s\". Specify volume groups before logical volumes.") % self.vgname)) pool = None if self.thin_volume: pool = devicetree.getDeviceByName("%s-%s" % (vg.name, self.pool_name)) if not pool: err = formatErrorMsg(self.lineno, msg=_("No thin pool exists with the name \"%s\". Specify thin pools before thin volumes.") % self.pool_name) raise KickstartValueError(err) # If this specifies an existing request that we should not format, # quit here after setting up enough information to mount it later. if not self.format: if not self.name: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("logvol --noformat must also use the --name= option."))) dev = devicetree.getDeviceByName("%s-%s" % (vg.name, self.name)) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Logical volume \"%s\" given in logvol command does not exist.") % self.name)) if self.resize: size = dev.raw_device.alignTargetSize(size) if size < dev.currentSize: # shrink try: devicetree.registerAction(ActionResizeFormat(dev, size)) devicetree.registerAction(ActionResizeDevice(dev, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": dev.name})) else: # grow try: devicetree.registerAction(ActionResizeDevice(dev, size)) devicetree.registerAction(ActionResizeFormat(dev, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": dev.name})) dev.format.mountpoint = self.mountpoint dev.format.mountopts = self.fsopts if ty == "swap": storage.addFstabSwap(dev) return # Make sure this LV name is not already used in the requested VG. 
if not self.preexist: tmp = devicetree.getDeviceByName("%s-%s" % (vg.name, self.name)) if tmp: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Logical volume name \"%(logvol)s\" is already in use in volume group \"%(volgroup)s\".") % {"logvol": self.name, "volgroup": vg.name})) if not self.percent and size and not self.grow and size < vg.peSize: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Logical volume size \"%(logvolSize)s\" must be larger than the volume group extent size of \"%(extentSize)s\".") % {"logvolSize": size, "extentSize": vg.peSize})) # Now get a format to hold a lot of these extra values. fmt = getFormat(ty, mountpoint=self.mountpoint, label=self.label, fsprofile=self.fsprofile, createOptions=self.mkfsopts, mountopts=self.fsopts) if not fmt.type and not self.thin_pool: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The \"%s\" file system type is not supported.") % ty)) add_fstab_swap = None # If we were given a pre-existing LV to create a filesystem on, we need # to verify it and its VG exists and then schedule a new format action # to take place there. Also, we only support a subset of all the # options on pre-existing LVs. if self.preexist: device = devicetree.getDeviceByName("%s-%s" % (vg.name, self.name)) if not device: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Logical volume \"%s\" given in logvol command does not exist.") % self.name)) removeExistingFormat(device, storage) if self.resize: size = device.raw_device.alignTargetSize(size) try: devicetree.registerAction(ActionResizeDevice(device, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": device.name})) devicetree.registerAction(ActionCreateFormat(device, fmt)) if ty == "swap": add_fstab_swap = device else: # If a previous device has claimed this mount point, delete the # old one. try: if self.mountpoint: device = storage.mountpoints[self.mountpoint] storage.destroyDevice(device) except KeyError: pass if self.thin_volume: parents = [pool] else: parents = [vg] pool_args = {} if self.thin_pool: if self.profile: matching = (p for p in KNOWN_THPOOL_PROFILES if p.name == self.profile) profile = next(matching, None) if profile: pool_args["profile"] = profile else: log.warning("No matching profile for %s found in LVM configuration", self.profile) if self.metadata_size: pool_args["metadatasize"] = Size("%d MiB" % self.metadata_size) if self.chunk_size: pool_args["chunksize"] = Size("%d KiB" % self.chunk_size) if self.maxSizeMB: try: maxsize = Size("%d MiB" % self.maxSizeMB) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg="The maximum size \"%s\" is invalid." 
% self.maxSizeMB)) else: maxsize = None if self.cache_size and self.cache_pvs: pv_devices = [lookupAlias(devicetree, pv) for pv in self.cache_pvs] cache_size = Size("%d MiB" % self.cache_size) cache_mode = self.cache_mode or None cache_request = LVMCacheRequest(cache_size, pv_devices, cache_mode) else: cache_request = None try: request = storage.newLV(fmt=fmt, name=self.name, parents=parents, size=size, thin_pool=self.thin_pool, thin_volume=self.thin_volume, grow=self.grow, maxsize=maxsize, percent=self.percent, cacheRequest=cache_request, **pool_args) except (StorageError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) if ty == "swap": add_fstab_swap = request if self.encrypted: if self.passphrase and not storage.encryptionPassphrase: storage.encryptionPassphrase = self.passphrase # try to use the global passphrase if available # XXX: we require the LV/part with --passphrase to be processed # before this one to setup the storage.encryptionPassphrase self.passphrase = self.passphrase or storage.encryptionPassphrase cert = getEscrowCertificate(storage.escrowCertificates, self.escrowcert) if self.preexist: luksformat = fmt device.format = getFormat("luks", passphrase=self.passphrase, device=device.path, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=device) else: luksformat = request.format request.format = getFormat("luks", passphrase=self.passphrase, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase, min_luks_entropy=MIN_CREATE_ENTROPY) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=request) if ty == "swap": # swap is on the LUKS device not on the LUKS' parent device, # override the info here add_fstab_swap = luksdev storage.createDevice(luksdev) if add_fstab_swap: storage.addFstabSwap(add_fstab_swap) class Logging(commands.logging.FC6_Logging): def execute(self, *args): if logger.loglevel == DEFAULT_LEVEL: # not set from the command line level = logLevelMap[self.level] logger.loglevel = level setHandlersLevel(log, level) setHandlersLevel(storage_log, level) if logger.remote_syslog == None and len(self.host) > 0: # not set from the command line, ok to use kickstart remote_server = self.host if self.port: remote_server = "%s:%s" %(self.host, self.port) logger.updateRemote(remote_server) class Network(commands.network.F22_Network): def __init__(self, *args, **kwargs): commands.network.F22_Network.__init__(self, *args, **kwargs) self.packages = [] def setup(self): if network.is_using_team_device(): self.packages = ["teamd"] def execute(self, storage, ksdata, instClass): network.write_network_config(storage, ksdata, instClass, iutil.getSysroot()) class MultiPath(commands.multipath.FC6_MultiPath): def parse(self, args): raise NotImplementedError(_("The %s kickstart command is not currently supported.") % "multipath") class DmRaid(commands.dmraid.FC6_DmRaid): def parse(self, args): raise NotImplementedError(_("The %s kickstart command is not currently supported.") % "dmraid") class Partition(commands.partition.F23_Partition): def execute(self, storage, ksdata, instClass): for p in self.partitions: p.execute(storage, ksdata, instClass) if self.partitions: doPartitioning(storage) class PartitionData(commands.partition.F23_PartData): def execute(self, storage, ksdata, instClass): devicetree = storage.devicetree kwargs = {} storage.doAutoPart = False if self.onbiosdisk 
!= "": # eddDict is only modified during storage.reset(), so don't do that # while executing storage. for (disk, biosdisk) in storage.eddDict.items(): if "%x" % biosdisk == self.onbiosdisk: self.disk = disk break if not self.disk: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("No disk found for specified BIOS disk \"%s\".") % self.onbiosdisk)) size = None if self.mountpoint == "swap": ty = "swap" self.mountpoint = "" if self.recommended or self.hibernation: disk_space = getAvailableDiskSpace(storage) size = autopart.swapSuggestion(hibernation=self.hibernation, disk_space=disk_space) self.grow = False # if people want to specify no mountpoint for some reason, let them # this is really needed for pSeries boot partitions :( elif self.mountpoint == "None": self.mountpoint = "" if self.fstype: ty = self.fstype else: ty = storage.defaultFSType elif self.mountpoint == 'appleboot': ty = "appleboot" self.mountpoint = "" elif self.mountpoint == 'prepboot': ty = "prepboot" self.mountpoint = "" elif self.mountpoint == 'biosboot': ty = "biosboot" self.mountpoint = "" elif self.mountpoint.startswith("raid."): ty = "mdmember" kwargs["name"] = self.mountpoint self.mountpoint = "" if devicetree.getDeviceByName(kwargs["name"]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("RAID partition \"%s\" is defined multiple times.") % kwargs["name"])) if self.onPart: ksdata.onPart[kwargs["name"]] = self.onPart elif self.mountpoint.startswith("pv."): ty = "lvmpv" kwargs["name"] = self.mountpoint self.mountpoint = "" if devicetree.getDeviceByName(kwargs["name"]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("PV partition \"%s\" is defined multiple times.") % kwargs["name"])) if self.onPart: ksdata.onPart[kwargs["name"]] = self.onPart elif self.mountpoint.startswith("btrfs."): ty = "btrfs" kwargs["name"] = self.mountpoint self.mountpoint = "" if devicetree.getDeviceByName(kwargs["name"]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Btrfs partition \"%s\" is defined multiple times.") % kwargs["name"])) if self.onPart: ksdata.onPart[kwargs["name"]] = self.onPart elif self.mountpoint == "/boot/efi": if blivet.arch.isMactel(): ty = "macefi" else: ty = "EFI System Partition" self.fsopts = "defaults,uid=0,gid=0,umask=077,shortname=winnt" else: if self.fstype != "": ty = self.fstype elif self.mountpoint == "/boot": ty = storage.defaultBootFSType else: ty = storage.defaultFSType if not size and self.size: try: size = Size("%d MiB" % self.size) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The size \"%s\" is invalid.") % self.size)) # If this specified an existing request that we should not format, # quit here after setting up enough information to mount it later. 
if not self.format: if not self.onPart: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("part --noformat must also use the --onpart option."))) dev = devicetree.resolveDevice(self.onPart) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Partition \"%s\" given in part command does not exist.") % self.onPart)) if self.resize: size = dev.raw_device.alignTargetSize(size) if size < dev.currentSize: # shrink try: devicetree.registerAction(ActionResizeFormat(dev, size)) devicetree.registerAction(ActionResizeDevice(dev, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": dev.name})) else: # grow try: devicetree.registerAction(ActionResizeDevice(dev, size)) devicetree.registerAction(ActionResizeFormat(dev, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": dev.name})) dev.format.mountpoint = self.mountpoint dev.format.mountopts = self.fsopts if ty == "swap": storage.addFstabSwap(dev) return # Now get a format to hold a lot of these extra values. kwargs["fmt"] = getFormat(ty, mountpoint=self.mountpoint, label=self.label, fsprofile=self.fsprofile, mountopts=self.fsopts, createOptions=self.mkfsopts, size=size) if not kwargs["fmt"].type: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The \"%s\" file system type is not supported.") % ty)) # If we were given a specific disk to create the partition on, verify # that it exists first. If it doesn't exist, see if it exists with # mapper/ on the front. If that doesn't exist either, it's an error. if self.disk: disk = devicetree.resolveDevice(self.disk) # if this is a multipath member promote it to the real mpath if disk and disk.format.type == "multipath_member": mpath_device = storage.devicetree.getChildren(disk)[0] storage_log.info("kickstart: part: promoting %s to %s", disk.name, mpath_device.name) disk = mpath_device if not disk: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" given in part command does not exist.") % self.disk)) if not disk.partitionable: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Cannot install to unpartitionable device \"%s\".") % self.disk)) should_clear = storage.shouldClear(disk) if disk and (disk.partitioned or should_clear): kwargs["parents"] = [disk] elif disk: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" in part command is not partitioned.") % self.disk)) if not kwargs["parents"]: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Disk \"%s\" given in part command does not exist.") % self.disk)) kwargs["grow"] = self.grow kwargs["size"] = size if self.maxSizeMB: try: maxsize = Size("%d MiB" % self.maxSizeMB) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The maximum size \"%s\" is invalid.") % self.maxSizeMB)) else: maxsize = None kwargs["maxsize"] = maxsize kwargs["primary"] = self.primOnly add_fstab_swap = None # If we were given a pre-existing partition to create a filesystem on, # we need to verify it exists and then schedule a new format action to # take place there. Also, we only support a subset of all the options # on pre-existing partitions. 
if self.onPart: device = devicetree.resolveDevice(self.onPart) if not device: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Partition \"%s\" given in part command does not exist.") % self.onPart)) removeExistingFormat(device, storage) if self.resize: size = device.raw_device.alignTargetSize(size) try: devicetree.registerAction(ActionResizeDevice(device, size)) except ValueError: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Target size \"%(size)s\" for device \"%(device)s\" is invalid.") % {"size": self.size, "device": device.name})) devicetree.registerAction(ActionCreateFormat(device, kwargs["fmt"])) if ty == "swap": add_fstab_swap = device # tmpfs mounts are not disks and don't occupy a disk partition, # so handle them here elif self.fstype == "tmpfs": try: request = storage.newTmpFS(**kwargs) except (StorageError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) else: # If a previous device has claimed this mount point, delete the # old one. try: if self.mountpoint: device = storage.mountpoints[self.mountpoint] storage.destroyDevice(device) except KeyError: pass try: request = storage.newPartition(**kwargs) except (StorageError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) if ty == "swap": add_fstab_swap = request if self.encrypted: if self.passphrase and not storage.encryptionPassphrase: storage.encryptionPassphrase = self.passphrase # try to use the global passphrase if available # XXX: we require the LV/part with --passphrase to be processed # before this one to setup the storage.encryptionPassphrase self.passphrase = self.passphrase or storage.encryptionPassphrase cert = getEscrowCertificate(storage.escrowCertificates, self.escrowcert) if self.onPart: luksformat = kwargs["fmt"] device.format = getFormat("luks", passphrase=self.passphrase, device=device.path, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase, min_luks_entropy=MIN_CREATE_ENTROPY) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=device) else: luksformat = request.format request.format = getFormat("luks", passphrase=self.passphrase, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase, min_luks_entropy=MIN_CREATE_ENTROPY) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=request) if ty == "swap": # swap is on the LUKS device not on the LUKS' parent device, # override the info here add_fstab_swap = luksdev storage.createDevice(luksdev) if add_fstab_swap: storage.addFstabSwap(add_fstab_swap) class Raid(commands.raid.F23_Raid): def execute(self, storage, ksdata, instClass): for r in self.raidList: r.execute(storage, ksdata, instClass) class RaidData(commands.raid.F23_RaidData): def execute(self, storage, ksdata, instClass): raidmems = [] devicetree = storage.devicetree devicename = self.device if self.preexist: device = devicetree.resolveDevice(devicename) if device: devicename = device.name kwargs = {} storage.doAutoPart = False if self.mountpoint == "swap": ty = "swap" self.mountpoint = "" elif self.mountpoint.startswith("pv."): ty = "lvmpv" kwargs["name"] = self.mountpoint ksdata.onPart[kwargs["name"]] = devicename if devicetree.getDeviceByName(kwargs["name"]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("PV partition \"%s\" is defined multiple times.") % kwargs["name"])) self.mountpoint = "" elif self.mountpoint.startswith("btrfs."): ty 
= "btrfs" kwargs["name"] = self.mountpoint ksdata.onPart[kwargs["name"]] = devicename if devicetree.getDeviceByName(kwargs["name"]): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Btrfs partition \"%s\" is defined multiple times.") % kwargs["name"])) self.mountpoint = "" else: if self.fstype != "": ty = self.fstype elif self.mountpoint == "/boot" and \ "mdarray" in storage.bootloader.stage2_device_types: ty = storage.defaultBootFSType else: ty = storage.defaultFSType # Sanity check mountpoint if self.mountpoint != "" and self.mountpoint[0] != '/': raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The mount point \"%s\" is not valid. It must start with a /.") % self.mountpoint)) # If this specifies an existing request that we should not format, # quit here after setting up enough information to mount it later. if not self.format: if not devicename: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("raid --noformat must also use the --device option."))) dev = devicetree.getDeviceByName(devicename) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("RAID device \"%s\" given in raid command does not exist.") % devicename)) dev.format.mountpoint = self.mountpoint dev.format.mountopts = self.fsopts if ty == "swap": storage.addFstabSwap(dev) return # Get a list of all the RAID members. for member in self.members: dev = devicetree.resolveDevice(member) if not dev: # if member is using --onpart, use original device mem = ksdata.onPart.get(member, member) dev = devicetree.resolveDevice(mem) or lookupAlias(devicetree, member) if dev and dev.format.type == "luks": try: dev = devicetree.getChildren(dev)[0] except IndexError: dev = None if dev and dev.format.type != "mdmember": raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("RAID device \"%(device)s\" has a format of \"%(format)s\", but should have a format of \"mdmember\".") % {"device": member, "format": dev.format.type})) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Tried to use undefined partition \"%s\" in RAID specification.") % member)) raidmems.append(dev) # Now get a format to hold a lot of these extra values. kwargs["fmt"] = getFormat(ty, label=self.label, fsprofile=self.fsprofile, mountpoint=self.mountpoint, mountopts=self.fsopts, createOptions=self.mkfsopts) if not kwargs["fmt"].type: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The \"%s\" file system type is not supported.") % ty)) kwargs["name"] = devicename kwargs["level"] = self.level kwargs["parents"] = raidmems kwargs["memberDevices"] = len(raidmems) - self.spares kwargs["totalDevices"] = len(raidmems) # If we were given a pre-existing RAID to create a filesystem on, # we need to verify it exists and then schedule a new format action # to take place there. Also, we only support a subset of all the # options on pre-existing RAIDs. if self.preexist: device = devicetree.getDeviceByName(devicename) if not device: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("RAID volume \"%s\" specified with --useexisting does not exist.") % devicename)) removeExistingFormat(device, storage) devicetree.registerAction(ActionCreateFormat(device, kwargs["fmt"])) if ty == "swap": storage.addFstabSwap(device) else: if devicename and devicename in (a.name for a in storage.mdarrays): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The RAID volume name \"%s\" is already in use.") % devicename)) # If a previous device has claimed this mount point, delete the # old one. 
try: if self.mountpoint: device = storage.mountpoints[self.mountpoint] storage.destroyDevice(device) except KeyError: pass try: request = storage.newMDArray(**kwargs) except (StorageError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) if ty == "swap": storage.addFstabSwap(request) if self.encrypted: if self.passphrase and not storage.encryptionPassphrase: storage.encryptionPassphrase = self.passphrase cert = getEscrowCertificate(storage.escrowCertificates, self.escrowcert) if self.preexist: luksformat = kwargs["fmt"] device.format = getFormat("luks", passphrase=self.passphrase, device=device.path, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=device) else: luksformat = request.format request.format = getFormat("luks", passphrase=self.passphrase, cipher=self.cipher, escrow_cert=cert, add_backup_passphrase=self.backuppassphrase) luksdev = LUKSDevice("luks%d" % storage.nextID, fmt=luksformat, parents=request) storage.createDevice(luksdev) class RepoData(commands.repo.F21_RepoData): def __init__(self, *args, **kwargs): """ Add enabled kwarg :param enabled: The repo has been enabled :type enabled: bool """ self.enabled = kwargs.pop("enabled", True) self.repo_id = kwargs.pop("repo_id", None) commands.repo.F21_RepoData.__init__(self, *args, **kwargs) class ReqPart(commands.reqpart.F23_ReqPart): def execute(self, storage, ksdata, instClass): from blivet.autopart import doReqPartition if not self.reqpart: return reqs = platform.setPlatformBootloaderReqs() if self.addBoot: bootPartitions = platform.setPlatformBootPartition() # blivet doesn't know this - anaconda sets up the default boot fstype # in various places in this file, as well as in setDefaultPartitioning # in the install classes. We need to duplicate that here. 
for part in bootPartitions: if part.mountpoint == "/boot": part.fstype = storage.defaultBootFSType reqs += bootPartitions doReqPartition(storage, reqs) class RootPw(commands.rootpw.F18_RootPw): def execute(self, storage, ksdata, instClass, users): if not self.password and not flags.automatedInstall: self.lock = True algo = getPassAlgo(ksdata.authconfig.authconfig) users.setRootPassword(self.password, self.isCrypted, self.lock, algo) class SELinux(commands.selinux.FC3_SELinux): def execute(self, *args): selinux_states = {SELINUX_DISABLED: "disabled", SELINUX_ENFORCING: "enforcing", SELINUX_PERMISSIVE: "permissive"} if self.selinux is None: # Use the defaults set by the installed (or not) selinux package return elif self.selinux not in selinux_states: log.error("unknown selinux state: %s", self.selinux) return try: selinux_cfg = SimpleConfigFile(iutil.getSysroot()+"/etc/selinux/config") selinux_cfg.read() selinux_cfg.set(("SELINUX", selinux_states[self.selinux])) selinux_cfg.write() except IOError as msg: log.error("Error setting selinux mode: %s", msg) class Services(commands.services.FC6_Services): def execute(self, storage, ksdata, instClass): for svc in self.disabled: if not svc.endswith(".service"): svc += ".service" iutil.execInSysroot("systemctl", ["disable", svc]) for svc in self.enabled: if not svc.endswith(".service"): svc += ".service" iutil.execInSysroot("systemctl", ["enable", svc]) class SshKey(commands.sshkey.F22_SshKey): def execute(self, storage, ksdata, instClass, users): for usr in self.sshUserList: users.setUserSshKey(usr.username, usr.key) class Timezone(commands.timezone.F23_Timezone): def __init__(self, *args): commands.timezone.F23_Timezone.__init__(self, *args) self._added_chrony = False self._enabled_chrony = False self._disabled_chrony = False def setup(self, ksdata): if self.nontp: if iutil.service_running(NTP_SERVICE) and \ can_touch_runtime_system("stop NTP service"): ret = iutil.stop_service(NTP_SERVICE) if ret != 0: log.error("Failed to stop NTP service") if self._added_chrony and NTP_PACKAGE in ksdata.packages.packageList: ksdata.packages.packageList.remove(NTP_PACKAGE) self._added_chrony = False # Both un-enable and disable chrony, because sometimes it's installed # off by default (packages) and sometimes not (liveimg). 
if self._enabled_chrony and NTP_SERVICE in ksdata.services.enabled: ksdata.services.enabled.remove(NTP_SERVICE) self._enabled_chrony = False if NTP_SERVICE not in ksdata.services.disabled: ksdata.services.disabled.append(NTP_SERVICE) self._disabled_chrony = True else: if not iutil.service_running(NTP_SERVICE) and \ can_touch_runtime_system("start NTP service"): ret = iutil.start_service(NTP_SERVICE) if ret != 0: log.error("Failed to start NTP service") if not NTP_PACKAGE in ksdata.packages.packageList: ksdata.packages.packageList.append(NTP_PACKAGE) self._added_chrony = True if self._disabled_chrony and NTP_SERVICE in ksdata.services.disabled: ksdata.services.disabled.remove(NTP_SERVICE) self._disabled_chrony = False if not NTP_SERVICE in ksdata.services.enabled and \ not NTP_SERVICE in ksdata.services.disabled: ksdata.services.enabled.append(NTP_SERVICE) self._enabled_chrony = True def execute(self, *args): # write out timezone configuration if not timezone.is_valid_timezone(self.timezone): # this should never happen, but for pity's sake log.warning("Timezone %s set in kickstart is not valid, falling "\ "back to default (America/New_York).", self.timezone) self.timezone = "America/New_York" timezone.write_timezone_config(self, iutil.getSysroot()) # write out NTP configuration (if set) chronyd_conf_path = os.path.normpath(iutil.getSysroot() + ntp.NTP_CONFIG_FILE) if self.ntpservers and os.path.exists(chronyd_conf_path): pools, servers = ntp.internal_to_pools_and_servers(self.ntpservers) try: ntp.save_servers_to_config(pools, servers, conf_file_path=chronyd_conf_path) except ntp.NTPconfigError as ntperr: log.warning("Failed to save NTP configuration: %s", ntperr) class User(commands.user.F19_User): def execute(self, storage, ksdata, instClass, users): algo = getPassAlgo(ksdata.authconfig.authconfig) for usr in self.userList: kwargs = usr.__dict__ kwargs.update({"algo": algo, "root": iutil.getSysroot()}) # If the user password came from a kickstart and it is blank we # need to make sure the account is locked, not created with an # empty password. if ksdata.user.seen and kwargs.get("password", "") == "": kwargs["password"] = None if not users.createUser(usr.name, **kwargs): log.error("User %s already exists, not creating.", usr.name) class VolGroup(commands.volgroup.F21_VolGroup): def execute(self, storage, ksdata, instClass): for v in self.vgList: v.execute(storage, ksdata, instClass) class VolGroupData(commands.volgroup.F21_VolGroupData): def execute(self, storage, ksdata, instClass): pvs = [] devicetree = storage.devicetree storage.doAutoPart = False # Get a list of all the physical volume devices that make up this VG. 
for pv in self.physvols: dev = devicetree.resolveDevice(pv) if not dev: # if pv is using --onpart, use original device pv_name = ksdata.onPart.get(pv, pv) dev = devicetree.resolveDevice(pv_name) or lookupAlias(devicetree, pv) if dev and dev.format.type == "luks": try: dev = devicetree.getChildren(dev)[0] except IndexError: dev = None if dev and dev.format.type != "lvmpv": raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Physical volume \"%(device)s\" has a format of \"%(format)s\", but should have a format of \"lvmpv\".") % {"device": pv, "format": dev.format.type})) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Tried to use undefined partition \"%s\" in Volume Group specification") % pv)) pvs.append(dev) if len(pvs) == 0 and not self.preexist: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Volume group \"%s\" defined without any physical volumes. Either specify physical volumes or use --useexisting.") % self.vgname)) if self.pesize == 0: # default PE size requested -- we use blivet's default in KiB self.pesize = LVM_PE_SIZE.convertTo(KiB) pesize = Size("%d KiB" % self.pesize) possible_extents = LVMVolumeGroupDevice.get_supported_pe_sizes() if pesize not in possible_extents: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Volume group given physical extent size of \"%(extentSize)s\", but must be one of:\n%(validExtentSizes)s.") % {"extentSize": pesize, "validExtentSizes": ", ".join(str(e) for e in possible_extents)})) # If --noformat or --useexisting was given, there's really nothing to do. if not self.format or self.preexist: if not self.vgname: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("volgroup --noformat and volgroup --useexisting must also use the --name= option."))) dev = devicetree.getDeviceByName(self.vgname) if not dev: raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("Volume group \"%s\" given in volgroup command does not exist.") % self.vgname)) elif self.vgname in (vg.name for vg in storage.vgs): raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("The volume group name \"%s\" is already in use.") % self.vgname)) else: try: request = storage.newVG(parents=pvs, name=self.vgname, peSize=pesize) except (StorageError, ValueError) as e: raise KickstartValueError(formatErrorMsg(self.lineno, msg=str(e))) storage.createDevice(request) if self.reserved_space: request.reserved_space = self.reserved_space elif self.reserved_percent: request.reserved_percent = self.reserved_percent # in case we had to truncate or otherwise adjust the specified name ksdata.onPart[self.vgname] = request.name class XConfig(commands.xconfig.F14_XConfig): def execute(self, *args): desktop = Desktop() if self.startX: desktop.runlevel = 5 if self.defaultdesktop: desktop.desktop = self.defaultdesktop # now write it out desktop.write() class SkipX(commands.skipx.FC3_SkipX): def execute(self, *args): if self.skipx: desktop = Desktop() desktop.runlevel = 3 desktop.write() class ZFCP(commands.zfcp.F14_ZFCP): def parse(self, args): fcp = commands.zfcp.F14_ZFCP.parse(self, args) try: blivet.zfcp.ZFCP().addFCP(fcp.devnum, fcp.wwpn, fcp.fcplun) except ValueError as e: log.warning(str(e)) return fcp class Keyboard(commands.keyboard.F18_Keyboard): def execute(self, *args): keyboard.write_keyboard_config(self, iutil.getSysroot()) class Upgrade(commands.upgrade.F20_Upgrade): # Upgrade is no longer supported. If an upgrade command was included in # a kickstart, warn the user and exit. 
    def parse(self, *args):
        log.error("The upgrade kickstart command is no longer supported. Upgrade functionality is provided through fedup.")
        sys.stderr.write(_("The upgrade kickstart command is no longer supported. Upgrade functionality is provided through fedup."))
        iutil.ipmi_report(IPMI_ABORTED)
        sys.exit(1)

###
### %anaconda Section
###

class AnacondaSectionHandler(BaseHandler):
    """A handler for only the anaconda section's commands."""
    commandMap = {
        "pwpolicy": F22_PwPolicy
    }

    dataMap = {
        "PwPolicyData": F22_PwPolicyData
    }

    def __init__(self):
        BaseHandler.__init__(self, mapping=self.commandMap, dataMapping=self.dataMap)

    def __str__(self):
        """Return the %anaconda section"""
        retval = ""
        # This dictionary should only be modified during __init__, so if it
        # changes during iteration something has gone horribly wrong.
        lst = sorted(self._writeOrder.keys())
        for prio in lst:
            for obj in self._writeOrder[prio]:
                retval += str(obj)

        if retval:
            retval = "\n%anaconda\n" + retval + "%end\n"
        return retval

class AnacondaSection(Section):
    """A section for anaconda-specific commands."""
    sectionOpen = "%anaconda"

    def __init__(self, *args, **kwargs):
        Section.__init__(self, *args, **kwargs)
        self.cmdno = 0

    def handleLine(self, line):
        if not self.handler:
            return

        self.cmdno += 1
        args = shlex.split(line, comments=True)
        self.handler.currentCmd = args[0]
        self.handler.currentLine = self.cmdno
        return self.handler.dispatcher(args, self.cmdno)

    def handleHeader(self, lineno, args):
        """Process the arguments to the %anaconda header."""
        Section.handleHeader(self, lineno, args)

    def finalize(self):
        """Let %anaconda know no additional data will come."""
        Section.finalize(self)

###
### HANDLERS
###

# This is just the latest entry from pykickstart.handlers.control with all the
# classes we're overriding in place of the defaults.
commandMap = {
    "auth": Authconfig,
    "authconfig": Authconfig,
    "autopart": AutoPart,
    "btrfs": BTRFS,
    "bootloader": Bootloader,
    "clearpart": ClearPart,
    "dmraid": DmRaid,
    "eula": Eula,
    "fcoe": Fcoe,
    "firewall": Firewall,
    "firstboot": Firstboot,
    "group": Group,
    "ignoredisk": IgnoreDisk,
    "iscsi": Iscsi,
    "iscsiname": IscsiName,
    "keyboard": Keyboard,
    "lang": Lang,
    "logging": Logging,
    "logvol": LogVol,
    "multipath": MultiPath,
    "network": Network,
    "part": Partition,
    "partition": Partition,
    "raid": Raid,
    "realm": Realm,
    "reqpart": ReqPart,
    "rootpw": RootPw,
    "selinux": SELinux,
    "services": Services,
    "sshkey": SshKey,
    "skipx": SkipX,
    "timezone": Timezone,
    "upgrade": Upgrade,
    "user": User,
    "volgroup": VolGroup,
    "xconfig": XConfig,
    "zfcp": ZFCP,
}

dataMap = {
    "BTRFSData": BTRFSData,
    "LogVolData": LogVolData,
    "PartData": PartitionData,
    "RaidData": RaidData,
    "RepoData": RepoData,
    "VolGroupData": VolGroupData,
}

superclass = returnClassForVersion()

class AnacondaKSHandler(superclass):
    AddonClassType = AddonData

    def __init__(self, addon_paths=None, commandUpdates=None, dataUpdates=None):
        if addon_paths is None:
            addon_paths = []

        if commandUpdates is None:
            commandUpdates = commandMap

        if dataUpdates is None:
            dataUpdates = dataMap

        superclass.__init__(self, commandUpdates=commandUpdates, dataUpdates=dataUpdates)
        self.onPart = {}

        # Collect all kickstart addons for anaconda into the addons dictionary,
        # which maps addon_id to its own data structure based on BaseData
        # with an execute method.
        addons = {}

        # Collect all AddonData subclasses from
        # <p>/<plugin id>/ks/*.(py|so) for each p in addon_paths,
        # and register them under the <plugin id> name.
        for module_name, path in addon_paths:
            addon_id = os.path.basename(os.path.dirname(os.path.abspath(path)))
            if not os.path.isdir(path):
                continue

            classes = collect(module_name, path, lambda cls: issubclass(cls, self.AddonClassType))
            if classes:
                addons[addon_id] = classes[0](name=addon_id)

        # Prepare the final structures for 3rd party addons
        self.addons = AddonRegistry(addons)

        # The %anaconda section uses its own handler for a limited set of commands
        self.anaconda = AnacondaSectionHandler()

    def __str__(self):
        return superclass.__str__(self) + "\n" + str(self.addons) + str(self.anaconda)

class AnacondaPreParser(KickstartParser):
    # A subclass of KickstartParser that only looks for %pre scripts and
    # sets them up to be run. All other scripts and commands are ignored.
def __init__(self, handler, followIncludes=True, errorsAreFatal=True, missingIncludeIsFatal=True): KickstartParser.__init__(self, handler, missingIncludeIsFatal=False) def handleCommand(self, lineno, args): pass def setupSections(self): self.registerSection(PreScriptSection(self.handler, dataObj=AnacondaKSScript)) self.registerSection(NullSection(self.handler, sectionOpen="%pre-install")) self.registerSection(NullSection(self.handler, sectionOpen="%post")) self.registerSection(NullSection(self.handler, sectionOpen="%traceback")) self.registerSection(NullSection(self.handler, sectionOpen="%packages")) self.registerSection(NullSection(self.handler, sectionOpen="%addon")) self.registerSection(NullSection(self.handler.anaconda, sectionOpen="%anaconda")) class AnacondaKSParser(KickstartParser): def __init__(self, handler, followIncludes=True, errorsAreFatal=True, missingIncludeIsFatal=True, scriptClass=AnacondaKSScript): self.scriptClass = scriptClass KickstartParser.__init__(self, handler) def handleCommand(self, lineno, args): if not self.handler: return return KickstartParser.handleCommand(self, lineno, args) def setupSections(self): self.registerSection(PreScriptSection(self.handler, dataObj=self.scriptClass)) self.registerSection(PreInstallScriptSection(self.handler, dataObj=self.scriptClass)) self.registerSection(PostScriptSection(self.handler, dataObj=self.scriptClass)) self.registerSection(TracebackScriptSection(self.handler, dataObj=self.scriptClass)) self.registerSection(PackageSection(self.handler)) self.registerSection(AddonSection(self.handler)) self.registerSection(AnacondaSection(self.handler.anaconda)) def preScriptPass(f): # The first pass through kickstart file processing - look for %pre scripts # and run them. This must come in a separate pass in case a script # generates an included file that has commands for later. ksparser = AnacondaPreParser(AnacondaKSHandler()) try: ksparser.readKickstart(f) except KickstartError as e: # We do not have an interface here yet, so we cannot use our error # handling callback. print(e) iutil.ipmi_report(IPMI_ABORTED) sys.exit(1) # run %pre scripts runPreScripts(ksparser.handler.scripts) def parseKickstart(f): # preprocessing the kickstart file has already been handled in initramfs. addon_paths = collect_addon_paths(ADDON_PATHS) handler = AnacondaKSHandler(addon_paths["ks"]) ksparser = AnacondaKSParser(handler) # We need this so all the /dev/disk/* stuff is set up before parsing. udev.trigger(subsystem="block", action="change") # So that drives onlined by these can be used in the ks file blivet.iscsi.iscsi().startup() blivet.fcoe.fcoe().startup() blivet.zfcp.ZFCP().startup() # Note we do NOT call dasd.startup() here, that does not online drives, but # only checks if they need formatting, which requires zerombr to be known try: ksparser.readKickstart(f) except KickstartError as e: # We do not have an interface here yet, so we cannot use our error # handling callback. print(e) iutil.ipmi_report(IPMI_ABORTED) sys.exit(1) return handler def appendPostScripts(ksdata): scripts = "" # Read in all the post script snippets to a single big string. for fn in glob.glob("/usr/share/anaconda/post-scripts/*ks"): f = open(fn, "r") scripts += f.read() f.close() # Then parse the snippets against the existing ksdata. We can do this # because pykickstart allows multiple parses to save their data into a # single data object. Errors parsing the scripts are a bug in anaconda, # so just raise an exception. 
ksparser = AnacondaKSParser(ksdata, scriptClass=AnacondaInternalScript) ksparser.readKickstartFromString(scripts, reset=False) def runPostScripts(scripts): postScripts = [s for s in scripts if s.type == KS_SCRIPT_POST] if len(postScripts) == 0: return log.info("Running kickstart %%post script(s)") for script in postScripts: script.run(iutil.getSysroot()) log.info("All kickstart %%post script(s) have been run") def runPreScripts(scripts): preScripts = [s for s in scripts if s.type == KS_SCRIPT_PRE] if len(preScripts) == 0: return log.info("Running kickstart %%pre script(s)") stdoutLog.info(_("Running pre-installation scripts")) for script in preScripts: script.run("/") log.info("All kickstart %%pre script(s) have been run") def runPreInstallScripts(scripts): preInstallScripts = [s for s in scripts if s.type == KS_SCRIPT_PREINSTALL] if len(preInstallScripts) == 0: return log.info("Running kickstart %%pre-install script(s)") for script in preInstallScripts: script.run("/") log.info("All kickstart %%pre-install script(s) have been run") def runTracebackScripts(scripts): log.info("Running kickstart %%traceback script(s)") for script in filter(lambda s: s.type == KS_SCRIPT_TRACEBACK, scripts): script.run("/") log.info("All kickstart %%traceback script(s) have been run") def resetCustomStorageData(ksdata): for command in ["partition", "raid", "volgroup", "logvol", "btrfs"]: ksdata.resetCommand(command) ksdata.clearpart.type = CLEARPART_TYPE_NONE def doKickstartStorage(storage, ksdata, instClass): """ Setup storage state from the kickstart data """ ksdata.clearpart.execute(storage, ksdata, instClass) if not any(d for d in storage.disks if not d.format.hidden and not d.protected): return # snapshot free space now so that we know how much we had available storage.createFreeSpaceSnapshot() ksdata.bootloader.execute(storage, ksdata, instClass) ksdata.autopart.execute(storage, ksdata, instClass) ksdata.reqpart.execute(storage, ksdata, instClass) ksdata.partition.execute(storage, ksdata, instClass) ksdata.raid.execute(storage, ksdata, instClass) ksdata.volgroup.execute(storage, ksdata, instClass) ksdata.logvol.execute(storage, ksdata, instClass) ksdata.btrfs.execute(storage, ksdata, instClass) # also calls ksdata.bootloader.execute storage.setUpBootLoader()
kparal/anaconda
pyanaconda/kickstart.py
Python
gpl-2.0
89,420
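# --- Usage sketch (not part of the original file) ---
# A hedged illustration of how the classes above fit together. In a real
# install, parseKickstart() (defined above) does this plus udev triggering,
# iscsi/fcoe/zfcp startup and addon collection; the kickstart path below is
# made up, and the import assumes this file's location (pyanaconda/kickstart.py).
from pyanaconda.kickstart import AnacondaKSHandler, AnacondaKSParser
from pykickstart.errors import KickstartError

handler = AnacondaKSHandler()
parser = AnacondaKSParser(handler)
try:
    parser.readKickstart("/run/install/ks.cfg")   # hypothetical path
except KickstartError as e:
    print(e)
# str(handler) also serializes the collected addon and %anaconda sections
print(handler)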
import warnings

from . import pedrpc
from .base_monitor import BaseMonitor

# Important: BaseMonitor needs to come *before* pedrpc.Client in the
# inheritance list for the method resolution order to produce
# correct results.


class ProcessMonitor(BaseMonitor, pedrpc.Client):
    """
    Proxy class for the process monitor interface.

    In versions < 0.2.0, boofuzz had network and process monitors
    that communicated over RPC. The RPC client was directly passed
    to the session class, and resolved all method calls dynamically
    on the RPC partner.

    Since 0.2.0, every monitor class must implement the abstract class
    BaseMonitor, which defines a common interface among all monitors.

    To aid future typehinting efforts and to disambiguate network and
    process monitors, this explicit proxy class has been introduced
    that fast-forwards all calls to the RPC partner.

    .. versionadded:: 0.2.0
    """

    def __init__(self, host, port):
        BaseMonitor.__init__(self)
        pedrpc.Client.__init__(self, host, port)

        self.server_options = {}
        self.host = host
        self.port = port

    def alive(self):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("alive")

    def pre_send(self, target=None, fuzz_data_logger=None, session=None):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("pre_send", session.total_mutant_index)

    def post_send(self, target=None, fuzz_data_logger=None, session=None):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("post_send")

    def set_options(self, *args, **kwargs):
        """
        The old RPC interfaces specified set_foobar methods to set options.
        As these vary by RPC implementation, this trampoline method
        translates arguments that have been passed as keyword arguments
        to set_foobar calls.

        If you call ``set_options(foobar="barbaz")``, it will result in a
        call to ``set_foobar("barbaz")`` on the RPC partner.
        """
        # args will be ignored, kwargs will be translated
        for arg, value in kwargs.items():
            eval("self.__method_missing('set_{0}', kwargs['{0}'])".format(arg))

        self.server_options.update(**kwargs)

    def get_crash_synopsis(self):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("get_crash_synopsis")

    def start_target(self):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("start_target")

    def stop_target(self):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("stop_target")

    def restart_target(self, target=None, fuzz_data_logger=None, session=None):
        """This method is forwarded to the RPC daemon."""
        return self.__method_missing("restart_target")

    def on_new_server(self, new_uuid):
        """Restores all set options to the RPC daemon if it has restarted since the last call."""
        for key, val in self.server_options.items():
            self.__hot_transmit(("set_{}".format(key), ((val,), {})))

    def set_proc_name(self, new_proc_name):
        """.. deprecated :: 0.2.0

        This option should be set via ``set_options``.
        """
        warnings.warn(
            "This method is deprecated and will be removed in a future version of boofuzz."
            " Please use set_options(proc_name=...) instead.",
            FutureWarning,
        )

        return self.set_options(proc_name=new_proc_name)

    def set_start_commands(self, new_start_commands):
        """.. deprecated :: 0.2.0

        This option should be set via ``set_options``.
        """
        warnings.warn(
            "This method is deprecated and will be removed in a future version of boofuzz."
            " Please use set_options(start_commands=...) instead.",
            FutureWarning,
        )

        return self.set_options(start_commands=new_start_commands)

    def set_stop_commands(self, new_stop_commands):
        """.. deprecated :: 0.2.0

        This option should be set via ``set_options``.
        """
        warnings.warn(
            "This method is deprecated and will be removed in a future version of boofuzz."
            " Please use set_options(stop_commands=...) instead.",
            FutureWarning,
        )

        return self.set_options(stop_commands=new_stop_commands)

    def set_crash_filename(self, new_crash_filename):
        """.. deprecated :: 0.2.0

        This option should be set via ``set_options``.
        """
        warnings.warn(
            "This method is deprecated and will be removed in a future version of boofuzz."
            " Please use set_options(crash_filename=...) instead.",
            FutureWarning,
        )

        return self.set_options(crash_filename=new_crash_filename)

    def __repr__(self):
        return "ProcessMonitor#{}[{}:{}]".format(id(self), self.host, self.port)
jtpereyda/boofuzz
boofuzz/monitors/process_monitor.py
Python
gpl-2.0
5,053
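# --- Usage sketch (not part of the original file) ---
# Hedged example of attaching to a running process_monitor daemon. Host,
# option values and the target command are illustrative; 26002 is the port
# boofuzz's process monitor conventionally listens on. The keyword names
# mirror the deprecated set_* helpers above.
from boofuzz.monitors.process_monitor import ProcessMonitor

monitor = ProcessMonitor("127.0.0.1", 26002)
monitor.set_options(
    proc_name="target.exe",
    start_commands=["target.exe --serve"],
    crash_filename="boofuzz-crash-bin",
)
if not monitor.alive():
    monitor.start_target()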
# Copyright (C) 2004 Jeremy S. Sanders # Email: Jeremy Sanders <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. ############################################################################## """Widget that represents a page in the document.""" import collections import textwrap import numpy as N from .. import qtall as qt from .. import document from .. import setting from .. import utils from . import widget from . import controlgraph def _(text, disambiguation=None, context='Page'): """Translate text.""" return qt.QCoreApplication.translate(context, text, disambiguation) defaultrange = [1e99, -1e99] def _resolveLinkedAxis(axis): """Follow a chain of axis function dependencies.""" loopcheck = set() while axis is not None and axis.isLinked(): loopcheck.add(axis) axis = axis.getLinkedAxis() if axis in loopcheck: # fail if loop return None return axis class AxisDependHelper: """A class to work out the dependency of widgets on axes and vice versa, in terms of ranges of the axes. Note: Here a widget is really (widget, depname), as each widget can have a different dependency (e.g. sx and sy dependencies for plotters). It then works out the ranges for each of the axes from the plotters. connection types: plotter->axis : axis needs to know data range axis->plotter : plotter needs to know axis range axis<->axis : axes are mutually dependent aim: calculate ranges of axes given plotters problem: cycles in the graph f1<-x: function f1 depends on axis x f2<-y: function f2 depends on axis y y<-f1: axis y depends on function f1 x<-f2: axis x depends on function f2 solution: break dependency cycle: choose somewhere - probably better to choose where widget depends on axis however, axis<->axis cycle can't be broken additional solution: convert all dependencies on axis1 or axis2 to axiscomb x <-> axis1 <-> axis2 For linked axes (e.g. AxisFunction): * Don't keep track of range separately -> propagate to real axis * For dependency order resolution, use real axis * In self.deps, use axisfunction axis so we know which axis to use """ def __init__(self): # map widgets to widgets it depends on self.deps = collections.defaultdict(list) # list of axes self.axes = [] # list of plotters associated with each axis self.axis_plotter_map = collections.defaultdict(list) # ranges for each axis self.ranges = {} # pairs of dependent widgets self.pairs = [] # track axes which map from one axis to another self.axis_to_axislinked = {} self.axislinked_to_axis = {} def recursivePlotterSearch(self, widget): """Find a list of plotters below widget. Builds up a dict of "nodes" representing each widget: plotter/axis Each node is a list of tuples saying which widgets need evaling first The tuples are (widget, depname), where depname is a name for the part of the plotter, e.g. "sx" or "sy" for x or y. 
""" if widget.isplotter: # keep track of which widgets depend on which axes widgetaxes = {} for axname in widget.getAxesNames(): axis = widget.lookupAxis(axname) widgetaxes[axname] = axis self.axis_plotter_map[axis].append(widget) # if the widget is a plotter, find which axes the widget # can provide range information about for axname, depname in widget.affectsAxisRange(): origaxis = widgetaxes[axname] resolvedaxis = _resolveLinkedAxis(origaxis) if resolvedaxis is not None and resolvedaxis.usesAutoRange(): # only add dependency if axis has an automatic range self.deps[(origaxis, None)].append((widget, depname)) self.pairs.append( ( (widget, depname), (resolvedaxis, None)) ) # find which axes the plotter needs information from for depname, axname in widget.requiresAxisRange(): origaxis = widgetaxes[axname] resolvedaxis = _resolveLinkedAxis(origaxis) if resolvedaxis is not None and resolvedaxis.usesAutoRange(): self.deps[(widget, depname)].append((origaxis, None)) self.pairs.append( ( (resolvedaxis, None), (widget, depname)) ) elif widget.isaxis: if widget.isaxis and widget.isLinked(): # function of another axis linked = widget.getLinkedAxis() if linked is not None: self.axis_to_axislinked[linked] = widget self.axislinked_to_axis[widget] = linked else: # make a range for a normal axis self.axes.append(widget) self.ranges[widget] = list(defaultrange) for c in widget.children: self.recursivePlotterSearch(c) def breakCycles(self, origcyclic): """Remove cycles if possible.""" numcyclic = len(origcyclic) best = -1 for i in range(len(self.pairs)): if not self.pairs[i][0][0].isaxis: p = self.pairs[:i] + self.pairs[i+1:] ordered, cyclic = utils.topological_sort(p) if len(cyclic) <= numcyclic: numcyclic = len(cyclic) best = i # delete best, or last one if none better found p = self.pairs[best] del self.pairs[best] try: idx = self.deps[p[1]].index(p[0]) del self.deps[p[1]][idx] except ValueError: pass def _updateAxisAutoRange(self, axis): """Update auto range for axis.""" # set actual range on axis, as axis no longer has a # dependency axrange = self.ranges[axis] if axrange == defaultrange: axrange = None axis.setAutoRange(axrange) del self.ranges[axis] def _updateRangeFromPlotter(self, axis, plotter, plotterdep): """Update the range for axis from the plotter.""" if axis.isLinked(): # take range and map back to real axis therange = list(defaultrange) plotter.getRange(axis, plotterdep, therange) if therange != defaultrange: # follow up chain loopcheck = set() while axis.isLinked(): loopcheck.add(axis) therange = axis.invertFunctionVals(therange) axis = axis.getLinkedAxis() if axis in loopcheck: axis = None if axis is not None and therange is not None: self.ranges[axis] = [ N.nanmin((self.ranges[axis][0], therange[0])), N.nanmax((self.ranges[axis][1], therange[1])) ] else: plotter.getRange(axis, plotterdep, self.ranges[axis]) def processWidgetDeps(self, dep): """Process dependencies for a single widget.""" widget, widget_dep = dep # iterate over dependent widgets for widgetd, widgetd_dep in self.deps[dep]: if ( widgetd.isplotter and (not widgetd.settings.isSetting('hide') or not widgetd.settings.hide) ): self._updateRangeFromPlotter(widget, widgetd, widgetd_dep) elif widgetd.isaxis: axis = _resolveLinkedAxis(widgetd) if axis in self.ranges: self._updateAxisAutoRange(axis) def processDepends(self): """Go through dependencies of widget. If the dependency has no dependency itself, then update the axis with the widget or vice versa Algorithm: Iterate over dependencies for widget. 
        If the widget has a dependency on a widget which doesn't have a
        dependency itself, update the range from that widget. Then delete
        that dependency from the dependency list.
        """

        # get ordered list, breaking cycles
        while True:
            ordered, cyclic = utils.topological_sort(self.pairs)
            if not cyclic:
                break
            self.breakCycles(cyclic)

        # iterate over widgets in order
        for dep in ordered:
            self.processWidgetDeps(dep)

            # process deps for any axis functions
            while dep[0] in self.axis_to_axislinked:
                dep = (self.axis_to_axislinked[dep[0]], None)
                self.processWidgetDeps(dep)

    def findAxisRanges(self):
        """Find the ranges from the plotters and set the axis ranges.

        Follows the dependencies calculated above.
        """

        self.processDepends()

        # set any remaining ranges
        for axis in list(self.ranges.keys()):
            self._updateAxisAutoRange(axis)

class Page(widget.Widget):
    """A class for representing a page of plotting."""

    typename = 'page'
    allowusercreation = True
    description = _('Blank page')

    @classmethod
    def addSettings(klass, s):
        widget.Widget.addSettings(s)

        # page sizes are initially linked to the document page size
        s.add( setting.DistancePhysical(
            'width',
            setting.Reference('/width'),
            descr=_('Width of page'),
            usertext=_('Page width'),
            formatting=True) )
        s.add( setting.DistancePhysical(
            'height',
            setting.Reference('/height'),
            descr=_('Height of page'),
            usertext=_('Page height'),
            formatting=True) )

        s.add( setting.Notes(
            'notes', '',
            descr=_('User-defined notes'),
            usertext=_('Notes')) )

        s.add( setting.PageBrush(
            'Background',
            descr = _('Background page fill'),
            usertext=_('Background')),
            pixmap='settings_bgfill' )

    @classmethod
    def allowedParentTypes(klass):
        from . import root
        return (root.Root,)

    @property
    def userdescription(self):
        """Return user-friendly description."""
        return textwrap.fill(self.settings.notes, 60)

    def draw(self, parentposn, painthelper, outerbounds=None):
        """Draw the plotter. Clip graph inside bounds."""

        s = self.settings

        # document should pass us the page bounds
        x1, y1, x2, y2 = parentposn

        # find ranges of axes
        axisdependhelper = AxisDependHelper()
        axisdependhelper.recursivePlotterSearch(self)
        axisdependhelper.findAxisRanges()

        # store axis->plotter mappings in painthelper
        painthelper.axisplottermap.update(axisdependhelper.axis_plotter_map)
        # reverse mapping
        pamap = collections.defaultdict(list)
        for axis, plotters in painthelper.axisplottermap.items():
            for plot in plotters:
                pamap[plot].append(axis)
        painthelper.plotteraxismap.update(pamap)

        if s.hide:
            bounds = self.computeBounds(parentposn, painthelper)
            return bounds

        # clip to page
        painter = painthelper.painter(self, parentposn)
        with painter:
            # w and h are non-integer
            w = self.settings.get('width').convert(painter)
            h = self.settings.get('height').convert(painter)
            if not s.Background.hide:
                path = qt.QPainterPath()
                path.addRect(qt.QRectF(0, 0, w, h))
                utils.brushExtFillPath(painter, s.Background, path)

        painthelper.setControlGraph(self, [
            controlgraph.ControlMarginBox(
                self, [0, 0, w, h],
                [-10000, -10000, 10000, 10000],
                painthelper,
                ismovable=False)
        ])

        bounds = widget.Widget.draw(self, parentposn, painthelper, parentposn)
        return bounds

    def updateControlItem(self, cgi):
        """Call helper to set page size."""
        cgi.setPageSize()

# allow the factory to instantiate this
document.thefactory.register(Page)
veusz/veusz
veusz/widgets/page.py
Python
gpl-2.0
13,301
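# --- Illustrative sketch (not part of the original file) ---
# AxisDependHelper leans on veusz's own utils.topological_sort(pairs), which
# returns (ordered, cyclic). Below is a minimal stand-in with the same
# contract, written as Kahn's algorithm; the real implementation may differ.
from collections import defaultdict, deque

def topological_sort_sketch(pairs):
    """pairs is a list of (before, after) tuples; returns (ordered, cyclic)."""
    succ = defaultdict(list)
    indeg = defaultdict(int)
    nodes = set()
    for before, after in pairs:
        succ[before].append(after)
        indeg[after] += 1
        nodes.update((before, after))
    queue = deque(n for n in nodes if indeg[n] == 0)
    ordered = []
    while queue:
        node = queue.popleft()
        ordered.append(node)
        for nxt in succ[node]:
            indeg[nxt] -= 1
            if indeg[nxt] == 0:
                queue.append(nxt)
    ordered_set = set(ordered)
    cyclic = [n for n in nodes if n not in ordered_set]
    return ordered, cyclic

# The f1/f2 cycle from the AxisDependHelper docstring:
pairs = [("f1", "y"), ("y", "f2"), ("f2", "x"), ("x", "f1")]
ordered, cyclic = topological_sort_sketch(pairs)
# cyclic is non-empty here; breakCycles() drops one pair and retries.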
import time

import numpy

from rga_telnet import *


# Connecting to RGA - RGA(HOST, PORT)
class RGA:
    scan = True                        # Set to False to stop a running peak scan
    status = [0 for col in range(4)]   # Status of the device, see the rga_status method
    showReadout = True                 # Controls the text output from the RGA

    # Class constructor
    def __init__(self, host, port):
        print("Starting connection with RGA: ")
        self.rga_id03 = RgaTelnet(host, port)
        out = self.rga_readout(1, True)
        if out.find("MKSRGA Single") > -1:
            self.status[0] = 1

        print("List of available sensors: ")
        self.rga_id03.write("Sensors\n")
        out = self.rga_readout(1, True)
        out = out.replace("\r", "")               # Removing \r, \n from the output
        out = out.replace("\n", "")
        out = out.split(' ')
        out[:] = (i for i in out if i != '')      # Removing empty fields

        print("Status of sensors with RGA: ")
        self.rga_id03.write("SensorState\n")
        self.rga_readout(1, True)

        print("Selecting sensor: ")
        sensor = "Select " + str(out[7]) + "\n"
        self.rga_id03.write(sensor)
        self.rga_readout(1, True)

        print("Taking control over the sensor: ")
        self.rga_id03.write("Control \"RGA python server\" \"1.0\" \n")
        out = self.rga_readout(1, True)
        if out.find("Control OK") > -1:
            self.status[1] = 1

    # Read output. The reply is always consumed so the telnet buffer stays in
    # sync; it is printed only when both showReadout and show are set. (The
    # original version had an unreachable elif branch and skipped the read
    # entirely when not printing.)
    def rga_readout(self, timeout, show):
        out = self.rga_id03.read("\r\r", timeout)
        if self.showReadout and show:
            print(out)
        return out

    # Release
    def rga_release(self):
        print("Release of the sensor: ")
        self.rga_id03.write("Release\n")
        self.rga_readout(1, True)
        self.status[1] = 0
        self.status[0] = 0

    # Filament control
    def rga_filament(self, state):
        if state == "On":
            self.rga_id03.write("FilamentControl On\n")
            time.sleep(5)
            for i in range(3):
                self.rga_readout(1, True)   # A bit crude, but it works
            self.status[2] = 1
        elif state == "Off":
            self.rga_id03.write("FilamentControl Off\n")
            time.sleep(5)
            for i in range(3):
                self.rga_readout(1, True)
            self.status[2] = 0
        else:
            print("Wrong filament input")

    # Single peaks scan
    def rga_peakscan(self, mass_selected):
        global mass_read
        mass_read = numpy.array([0, 0, 0])

        # Convert the input strings to numbers - these are the masses to scan
        mass_selected = [int(i) for i in mass_selected]
        print("Masses selected for scan :", mass_selected, "\n")

        # Defining peak jump scan
        print("Add peak jump measurement: ")
        self.rga_id03.write("AddPeakJump Peak1 PeakCenter 2 0 0 0\n")
        self.rga_readout(1, True)

        # Adding masses to scan
        for i in range(len(mass_selected)):
            # Convert the number back to a string for the command
            self.rga_id03.write("MeasurementAddMass " + str(mass_selected[i]) + "\n")
            self.rga_readout(1, True)
            time.sleep(1)

        # Adding scan to scan list
        self.rga_id03.write("ScanAdd Peak1\n")
        self.rga_readout(1, True)

        # Starting scan
        self.rga_id03.write("ScanStart 1\n")
        self.status[3] = 1

        while self.scan:
            # Processing output string
            # out = self.rga_id03.read_until("\r\r", 1)
            out = self.rga_readout(1, True)
            out = out.split(' ')
            out[:] = (i for i in out if i != '')

            # If the list length is 3, it corresponds to one of the measured masses
            if len(out) == 3 and out[0] == "MassReading":
                new_row = [time.time(), float(out[1]), float(out[2])]   # Row: time, mass number, mass pressure
                mass_read = numpy.vstack([mass_read, new_row])          # Adding measured value to the array
                if float(out[1]) == mass_selected[-1]:                  # When the last mass is read, restart the scan
                    self.rga_id03.write("ScanResume 1\n")

        # Stop scan
        self.rga_id03.write("ScanStop\n")
        print(self.rga_id03.read("never", 1))   # Collect all remaining garbage output
        print("Mass read stop...")
        self.status[3] = 0
        self.scan = True

    # Stop scan
    def rga_peakscan_stop(self):
        if self.scan:
            self.scan = False
        else:
            print("Rga is not scanning, nothing to stop")

    # Read one mass
    def rga_onemass(self, one_mass):
        find_mass = numpy.nonzero(mass_read == one_mass)
        mass_found = mass_read[find_mass[0], :]
        out = [int(mass_found[-1, 0]), int(mass_found[-1, 1]), mass_found[-1, 2]]
        return out

    def rga_status(self):
        status_str = []
        status_str.append([["not connected"], ["connected"], ["RGA connection : "]])
        status_str.append([["not controlled"], ["controlled"], ["RGA control : "]])
        status_str.append([["off"], ["on"], ["Filament status :"]])
        status_str.append([["idle"], ["running"], ["Scan status: "]])

        for i in range(4):
            print("".join(map(str, (status_str[i][2]))) + "".join(map(str, (status_str[i][self.status[i]]))))


if __name__ == "__main__":
    rga_eh1 = RGA("rga-id03-eh1", 10014)
    rga_eh1.rga_release()
mjjank/mks_rga
RGA.py
Python
gpl-2.0
4,840
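# --- Usage sketch (not part of the original file) ---
# Hedged example of driving the class above. rga_peakscan() blocks in a read
# loop, so in practice it runs in a thread and is stopped with
# rga_peakscan_stop(); host/port are the ones from the __main__ block, the
# masses are illustrative.
import threading

from RGA import RGA

rga = RGA("rga-id03-eh1", 10014)
rga.rga_status()
rga.rga_filament("On")

# Masses may be strings; rga_peakscan() casts them to int itself.
scan = threading.Thread(target=rga.rga_peakscan, args=(["18", "28", "44"],))
scan.start()
# ... let it collect data for a while ...
rga.rga_peakscan_stop()
scan.join()

print(rga.rga_onemass(28))   # last [time, mass, pressure] row for mass 28
rga.rga_filament("Off")
rga.rga_release()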
# -*- coding: utf-8 -*- # # Copyright 2013 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. from katello.client.api.base import KatelloAPI class PackageAPI(KatelloAPI): """ Connection class to access package calls """ def package(self, packageId, repoId): path = "/api/repositories/%s/packages/%s" % (repoId, packageId) pack = self.server.GET(path)[1] return pack def packages_by_repo(self, repoId): path = "/api/repositories/%s/packages" % repoId pack_list = self.server.GET(path)[1] return pack_list def search(self, query, repoId): path = "/api/repositories/%s/packages/search" % repoId pack_list = self.server.GET(path, {"search": query})[1] return pack_list
Katello/katello-cli
src/katello/client/api/package.py
Python
gpl-2.0
1,296
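# --- Usage sketch (not part of the original file) ---
# Hedged: KatelloAPI (the base class) is not shown here, so the no-argument
# constructor and server wiring below are assumptions; the three calls match
# the methods defined above, and the ids/query are illustrative.
from katello.client.api.package import PackageAPI

api = PackageAPI()
for pkg in api.packages_by_repo("42"):        # repoId
    print(pkg)
info = api.package("101", "42")               # (packageId, repoId)
hits = api.search("name:kernel*", "42")       # server-side search query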
# -*- coding: utf-8 -*- import fauxfactory import pytest from cfme.services.catalogs.catalog_item import CatalogItem from cfme.automate.service_dialogs import ServiceDialog from cfme.services.catalogs.catalog import Catalog from cfme.services.catalogs.service_catalogs import ServiceCatalogs from cfme.services.catalogs.catalog_item import CatalogBundle from cfme.services import requests from cfme.exceptions import CandidateNotFound from cfme.web_ui import flash from utils import error from utils.log import logger from utils.wait import wait_for pytestmark = [ pytest.mark.meta(server_roles="+automate"), pytest.mark.usefixtures('logged_in', 'uses_infra_providers'), pytest.mark.ignore_stream("5.2") ] @pytest.yield_fixture(scope="function") def dialog(): dialog = "dialog_" + fauxfactory.gen_alphanumeric() element_data = dict( choose_type="Text Box", ele_label="ele_" + fauxfactory.gen_alphanumeric(), ele_name=fauxfactory.gen_alphanumeric(), ele_desc="my ele desc", default_text_box="default value" ) service_dialog = ServiceDialog(label=dialog, description="my dialog", submit=True, cancel=True, tab_label="tab_" + fauxfactory.gen_alphanumeric(), tab_desc="my tab desc", box_label="box_" + fauxfactory.gen_alphanumeric(), box_desc="my box desc") service_dialog.create(element_data) yield dialog @pytest.yield_fixture(scope="function") def catalog(): cat_name = "cat_" + fauxfactory.gen_alphanumeric() catalog = Catalog(name=cat_name, description="my catalog") catalog.create() yield catalog @pytest.yield_fixture(scope="function") def catalog_item(dialog, catalog): item_name = fauxfactory.gen_alphanumeric() catalog_item = CatalogItem(item_type="Generic", name=item_name, description="my catalog", display_in=True, catalog=catalog.name, dialog=dialog) catalog_item.create() yield catalog_item def test_delete_catalog_deletes_service(dialog, catalog): item_name = fauxfactory.gen_alphanumeric() catalog_item = CatalogItem(item_type="Generic", name=item_name, description="my catalog", display_in=True, catalog=catalog.name, dialog=dialog) catalog_item.create() catalog.delete() service_catalogs = ServiceCatalogs("service_name") with error.expected(CandidateNotFound): service_catalogs.order(catalog.name, catalog_item) def test_delete_catalog_item_deletes_service(catalog_item): catalog_item.delete() service_catalogs = ServiceCatalogs("service_name") with error.expected(CandidateNotFound): service_catalogs.order(catalog_item.catalog, catalog_item) def test_service_circular_reference(catalog_item): bundle_name = "first_" + fauxfactory.gen_alphanumeric() catalog_bundle = CatalogBundle(name=bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) catalog_bundle.create([catalog_item.name]) sec_bundle_name = "sec_" + fauxfactory.gen_alphanumeric() sec_catalog_bundle = CatalogBundle(name=sec_bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) sec_catalog_bundle.create([bundle_name]) with error.expected("Error during 'Resource Add': Adding resource <%s> to Service <%s> " "will create a circular reference" % (sec_bundle_name, bundle_name)): catalog_bundle.update({'description': "edit_desc", 'cat_item': sec_catalog_bundle.name}) def test_service_generic_catalog_bundle(catalog_item): bundle_name = "generic_" + fauxfactory.gen_alphanumeric() catalog_bundle = CatalogBundle(name=bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) 
catalog_bundle.create([catalog_item.name]) service_catalogs = ServiceCatalogs("service_name") service_catalogs.order(catalog_item.catalog, catalog_bundle) flash.assert_no_errors() logger.info('Waiting for cfme provision request for service %s' % bundle_name) row_description = bundle_name cells = {'Description': row_description} row, __ = wait_for(requests.wait_for_request, [cells, True], fail_func=requests.reload, num_sec=900, delay=20) assert row.last_message.text == 'Request complete' def test_bundles_in_bundle(catalog_item): bundle_name = "first_" + fauxfactory.gen_alphanumeric() catalog_bundle = CatalogBundle(name=bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) catalog_bundle.create([catalog_item.name]) sec_bundle_name = "sec_" + fauxfactory.gen_alphanumeric() sec_catalog_bundle = CatalogBundle(name=sec_bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) sec_catalog_bundle.create([bundle_name]) third_bundle_name = "third_" + fauxfactory.gen_alphanumeric() third_catalog_bundle = CatalogBundle(name=third_bundle_name, description="catalog_bundle", display_in=True, catalog=catalog_item.catalog, dialog=catalog_item.dialog) third_catalog_bundle.create([bundle_name, sec_bundle_name]) service_catalogs = ServiceCatalogs("service_name") service_catalogs.order(catalog_item.catalog, third_catalog_bundle) flash.assert_no_errors() logger.info('Waiting for cfme provision request for service %s' % bundle_name) row_description = third_bundle_name cells = {'Description': row_description} row, __ = wait_for(requests.wait_for_request, [cells, True], fail_func=requests.reload, num_sec=900, delay=20) assert row.last_message.text == 'Request complete' def test_delete_dialog_before_parent_item(catalog_item): service_dialog = ServiceDialog(label=catalog_item.dialog) service_dialog.delete() flash.assert_message_match(("Dialog \"{}\": Error during 'destroy': Dialog cannot be deleted " + "because it is connected to other components.").format(catalog_item.dialog))
thom-at-redhat/cfme_tests
cfme/tests/services/test_generic_service_catalogs.py
Python
gpl-2.0
6,331
#!/usr/bin/env python ################################################## ## DEPENDENCIES import sys import os import os.path try: import builtins as builtin except ImportError: import __builtin__ as builtin from os.path import getmtime, exists import time import types from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple from Cheetah.Template import Template from Cheetah.DummyTransaction import * from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList from Cheetah.CacheRegion import CacheRegion import Cheetah.Filters as Filters import Cheetah.ErrorCatchers as ErrorCatchers from Plugins.Extensions.OpenWebif.local import tstrings ################################################## ## MODULE CONSTANTS VFFSL=valueFromFrameOrSearchList VFSL=valueFromSearchList VFN=valueForName currentTime=time.time __CHEETAH_version__ = '2.4.4' __CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0) __CHEETAH_genTime__ = 1447321436.394491 __CHEETAH_genTimestamp__ = 'Thu Nov 12 18:43:56 2015' __CHEETAH_src__ = '/home/knuth/openpli-oe-core/build/tmp/work/fusionhd-oe-linux/enigma2-plugin-extensions-openwebif/1+gitAUTOINC+5837c87afc-r0/git/plugin/controllers/views/mobile/channels.tmpl' __CHEETAH_srcLastModified__ = 'Thu Nov 12 18:43:41 2015' __CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine' if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple: raise AssertionError( 'This template was compiled with Cheetah version' ' %s. Templates compiled before version %s must be recompiled.'%( __CHEETAH_version__, RequiredCheetahVersion)) ################################################## ## CLASSES class channels(Template): ################################################## ## CHEETAH GENERATED METHODS def __init__(self, *args, **KWs): super(channels, self).__init__(*args, **KWs) if not self._CHEETAH__instanceInitialized: cheetahKWArgs = {} allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split() for k,v in KWs.items(): if k in allowedKWs: cheetahKWArgs[k] = v self._initCheetahInstance(**cheetahKWArgs) def respond(self, trans=None): ## CHEETAH: main method generated for this template if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)): trans = self.transaction # is None unless self.awake() was called if not trans: trans = DummyTransaction() _dummyTrans = True else: _dummyTrans = False write = trans.response().write SL = self._CHEETAH__searchList _filter = self._CHEETAH__currentFilter ######################################## ## START - generated method body write(u'''<html>\r <head>\r \t<title>OpenWebif</title>\r \t<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />\r \t<meta name="viewport" content="user-scalable=no, width=device-width"/>\r \t<meta name="apple-mobile-web-app-capable" content="yes" />\r \t<link rel="stylesheet" type="text/css" href="/css/jquery.mobile-1.0.min.css" media="screen"/>\r \t<link rel="stylesheet" type="text/css" href="/css/iphone.css" media="screen"/>\r \t<script src="/js/jquery-1.6.2.min.js"></script>\r \t<script src="/js/jquery.mobile-1.0.min.js"></script>\r </head>\r <body> \r \t<div data-role="page">\r \r \t\t<div id="header">\r \t\t\t<div class="button" onClick="history.back()">''') _v = VFFSL(SL,"tstrings",True)['back'] # u"$tstrings['back']" on line 17, col 49 if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['back']")) # from 
line 17, col 49. write(u'''</div>\r \t\t\t<h1><a style="color:#FFF;text-decoration:none;" href=\'/mobile\'>OpenWebif</a></h1> \t\t</div>\r \r \t\t<div id="contentContainer">\r \t\t\t<ul data-role="listview" data-inset="true" data-theme="d">\r \t\t\t\t<li data-role="list-divider" role="heading" data-theme="b">''') _v = VFFSL(SL,"tstrings",True)['channels'] # u"$tstrings['channels']" on line 23, col 64 if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['channels']")) # from line 23, col 64. write(u'''</li>\r ''') for channel in VFFSL(SL,"channels",True): # generated from line 24, col 5 write(u'''\t\t\t\t<li>\r \t\t\t\t<a href="/mobile/channelinfo?sref=''') _v = VFFSL(SL,"channel.ref",True) # u'$channel.ref' on line 26, col 39 if _v is not None: write(_filter(_v, rawExpr=u'$channel.ref')) # from line 26, col 39. write(u'''" style="padding: 3px;">\r \t\t\t\t<span class="ui-li-heading" style="margin-top: 0px; margin-bottom: 3px;">''') _v = VFFSL(SL,"channel.name",True) # u'$channel.name' on line 27, col 78 if _v is not None: write(_filter(_v, rawExpr=u'$channel.name')) # from line 27, col 78. write(u'''</span>\r ''') if VFN(VFFSL(SL,"channel",True),"has_key",False)('now_title'): # generated from line 28, col 5 write(u'''\t\t\t\t<span class="ui-li-desc" style="margin-bottom: 0px;">''') _v = VFFSL(SL,"channel.now_title",True) # u'$channel.now_title' on line 29, col 58 if _v is not None: write(_filter(_v, rawExpr=u'$channel.now_title')) # from line 29, col 58. write(u'''</span>\r ''') write(u'''\t\t\t\t</a>\r \t\t\t\t</li>\r ''') write(u'''\t\t\t</ul>\r \t\t</div>\r \r \t\t<div id="footer">\r \t\t\t<p>OpenWebif Mobile</p>\r \t\t\t<a onclick="document.location.href=\'/index?mode=fullpage\';return false;" href="#">''') _v = VFFSL(SL,"tstrings",True)['show_full_openwebif'] # u"$tstrings['show_full_openwebif']" on line 39, col 86 if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['show_full_openwebif']")) # from line 39, col 86. write(u'''</a>\r \t\t</div>\r \t\t\r \t</div>\r </body>\r </html>\r ''') ######################################## ## END - generated method body return _dummyTrans and trans.response().getvalue() or "" ################################################## ## CHEETAH GENERATED ATTRIBUTES _CHEETAH__instanceInitialized = False _CHEETAH_version = __CHEETAH_version__ _CHEETAH_versionTuple = __CHEETAH_versionTuple__ _CHEETAH_genTime = __CHEETAH_genTime__ _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__ _CHEETAH_src = __CHEETAH_src__ _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__ _mainCheetahMethod_for_channels= 'respond' ## END CLASS DEFINITION if not hasattr(channels, '_initCheetahAttributes'): templateAPIClass = getattr(channels, '_CHEETAH_templateClass', Template) templateAPIClass._addCheetahPlumbingCodeToClass(channels) # CHEETAH was developed by Tavis Rudd and Mike Orr # with code, advice and input from many other volunteers. # For more information visit http://www.CheetahTemplate.org/ ################################################## ## if run from command line: if __name__ == '__main__': from Cheetah.TemplateCmdLineIface import CmdLineIface CmdLineIface(templateObj=channels()).run()
pli3/e2-openwbif
plugin/controllers/views/mobile/channels.py
Python
gpl-2.0
7,404
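# --- Usage sketch (not part of the original file) ---
# Hedged: rendering the generated template by hand (Cheetah 2.4, Python 2
# era; the template calls channel.has_key(), which is Python 2 dict API).
# This assumes the module imports resolve (it pulls in OpenWebif's tstrings
# at import time) and that Cheetah's NameMapper resolves $channel.ref and
# friends against plain dicts, as it does for search-list entries.
ctx = {
    "tstrings": {
        "back": "Back",
        "channels": "Channels",
        "show_full_openwebif": "Show full OpenWebif",
    },
    "channels": [
        {"ref": "1:0:19:283D:3FB:1:C00000:0:0:0:",
         "name": "Example HD",
         "now_title": "Now playing"},
    ],
}
page = channels(searchList=[ctx])
html = page.respond()   # returns the markup when no transaction is passed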
from django.contrib.auth import get_user_model from django.db import transaction from django.db.models.signals import pre_delete from django.dispatch import Signal, receiver from misago.categories.models import Category from misago.categories.signals import delete_category_content, move_category_content from misago.core.pgutils import batch_delete, batch_update from misago.users.signals import delete_user_content, username_changed from .models import Attachment, Post, PostEdit, PostLike, Thread, Poll, PollVote delete_post = Signal() delete_thread = Signal() merge_post = Signal(providing_args=["other_post"]) merge_thread = Signal(providing_args=["other_thread"]) move_post = Signal() move_thread = Signal() remove_thread_participant = Signal(providing_args=["user"]) """ Signal handlers """ @receiver(merge_thread) def merge_threads_posts(sender, **kwargs): other_thread = kwargs['other_thread'] other_thread.post_set.update(category=sender.category, thread=sender) @receiver(merge_post) def merge_posts(sender, **kwargs): other_post = kwargs['other_post'] for user in sender.mentions.iterator(): other_post.mentions.add(user) @receiver(move_thread) def move_thread_content(sender, **kwargs): Post.objects.filter(thread=sender).update(category=sender.category) PostEdit.objects.filter(thread=sender).update(category=sender.category) PostLike.objects.filter(thread=sender).update(category=sender.category) Poll.objects.filter(thread=sender).update(category=sender.category) PollVote.objects.filter(thread=sender).update(category=sender.category) @receiver(delete_category_content) def delete_category_threads(sender, **kwargs): sender.thread_set.all().delete() sender.post_set.all().delete() @receiver(move_category_content) def move_category_threads(sender, **kwargs): new_category = kwargs['new_category'] Thread.objects.filter(category=sender).update(category=new_category) Post.objects.filter(category=sender).update(category=new_category) PostEdit.objects.filter(category=sender).update(category=new_category) PostLike.objects.filter(category=sender).update(category=new_category) Poll.objects.filter(category=sender).update(category=new_category) PollVote.objects.filter(category=sender).update(category=new_category) @receiver(delete_user_content) def delete_user_threads(sender, **kwargs): recount_categories = set() recount_threads = set() for thread in batch_delete(sender.thread_set.all(), 50): recount_categories.add(thread.category_id) with transaction.atomic(): thread.delete() for post in batch_delete(sender.post_set.all(), 50): recount_categories.add(post.category_id) recount_threads.add(post.thread_id) with transaction.atomic(): post.delete() if recount_threads: changed_threads_qs = Thread.objects.filter(id__in=recount_threads) for thread in batch_update(changed_threads_qs, 50): thread.synchronize() thread.save() if recount_categories: for category in Category.objects.filter(id__in=recount_categories): category.synchronize() category.save() @receiver(username_changed) def update_usernames(sender, **kwargs): Thread.objects.filter(starter=sender).update( starter_name=sender.username, starter_slug=sender.slug ) Thread.objects.filter(last_poster=sender).update( last_poster_name=sender.username, last_poster_slug=sender.slug ) Post.objects.filter(poster=sender).update(poster_name=sender.username) Post.objects.filter(last_editor=sender).update( last_editor_name=sender.username, last_editor_slug=sender.slug ) PostEdit.objects.filter(editor=sender).update( editor_name=sender.username, editor_slug=sender.slug ) 
PostLike.objects.filter(user=sender).update( user_name=sender.username, user_slug=sender.slug ) Attachment.objects.filter(uploader=sender).update( uploader_name=sender.username, uploader_slug=sender.slug ) Poll.objects.filter(poster=sender).update( poster_name=sender.username, poster_slug=sender.slug ) PollVote.objects.filter(voter=sender).update( voter_name=sender.username, voter_slug=sender.slug ) @receiver(pre_delete, sender=get_user_model()) def remove_unparticipated_private_threads(sender, **kwargs): threads_qs = kwargs['instance'].private_thread_set.all() for thread in batch_update(threads_qs, 50): if thread.participants.count() == 1: with transaction.atomic(): thread.delete()
1905410/Misago
misago/threads/signals.py
Python
gpl-2.0
4,750
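# --- Usage sketch (not part of the original file) ---
# Hedged: the module-level signals above are fired by misago's view and
# moderation code roughly like this; `thread`, `other_thread` and `user`
# stand for real model instances.
from misago.threads.signals import merge_thread
from misago.users.signals import delete_user_content

def merge_into(thread, other_thread):
    """Repoint other_thread's posts at thread via merge_threads_posts()."""
    merge_thread.send(sender=thread, other_thread=other_thread)

def prune_user(user):
    """Batch-delete the user's threads/posts and resync category counters."""
    delete_user_content.send(sender=user)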
from model import Model from helpers.candidate import Candidate from helpers.decision import Decision class Osyczka2(Model): def __init__(self): Model.__init__(self) self.initialize_decs() def initialize_decs(self): dec = Decision('x1', 0, 10) self.decs.append(dec) dec = Decision('x2', 0, 10) self.decs.append(dec) dec = Decision('x3', 1, 5) self.decs.append(dec) dec = Decision('x4', 0, 6) self.decs.append(dec) dec = Decision('x5', 1, 5) self.decs.append(dec) dec = Decision('x6', 0, 10) self.decs.append(dec) def f1(self, candidate): vec = candidate.dec_vals part1 = 25 * ((vec[0] - 2) ** 2) part2 = (vec[1] - 2) ** 2 part3 = (((vec[2] - 1) ** 2) * ((vec[3] - 4) ** 2)) part4 = (vec[4] - 1) ** 2 return (-(part1 + part2 + part3 + part4)) def f2(self, candidate): vec = candidate.dec_vals val = 0 for x in vec: val += x ** 2 return val def objectives(self): return [self.f1, self.f2] def aggregate(self, candidate): aggr = 0 self.eval(candidate) for score in candidate.scores: aggr += score return aggr def gen_candidate(self): for i in range(0, self.patience): decs = [dec.generate_valid_val() for dec in self.decs] can = Candidate(dec_vals=list(decs)) if self.ok(can): return can def ok(self, candidate, debug=False): if len(candidate.dec_vals) != 6: return False x1 = candidate.dec_vals[0] x2 = candidate.dec_vals[1] x3 = candidate.dec_vals[2] x4 = candidate.dec_vals[3] x5 = candidate.dec_vals[4] x6 = candidate.dec_vals[5] if not ((x1 + x2) >= 2): if debug: print "Failed 1" return False if not ((x1 + x2) <= 6): if debug: print "Failed 2" return False if not ((x2 - x1) <= 2): if debug: print "Failed 3" return False if not ((x1 - (3 * x2)) <= 2): if debug: print "Failed 4" return False if not ((((x3 - 3) ** 2) + x4) <= 4): if debug: print "Failed 5" return False if not ((((x5 - 3) ** 3) + x6) >= 4): if debug: print "Failed 6" return False return True
rchakra3/x9115rc3
hw/code/6/model/osyczka2.py
Python
gpl-2.0
2,597
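# --- Usage sketch (not part of the original file) ---
# Hedged: Model (the base class) is not shown; this assumes it provides the
# `patience` retry budget, the `eval()` method that fills candidate.scores,
# and the `decs` list initialized above. The module path follows the repo
# layout (model/osyczka2.py).
from model.osyczka2 import Osyczka2

m = Osyczka2()
can = m.gen_candidate()            # retries up to m.patience times
if can is not None:
    print(m.ok(can, debug=True))   # re-check the six constraints
    print(m.aggregate(can))        # eval() fills can.scores, then sums them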
# gazetteer.fields # Copyright 2016, James Humphry # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # '''Descriptions of the fields that can make up gazetteer data files, along with information on their SQL equivalents''' import abc class GazetteerField(metaclass=abc.ABCMeta): '''An abstract class that defines a field/column in a gazetteer data table.''' sql_type_name = 'NONE' def __init__(self, field_name, sql_name='', nullable=True): self.field_name = field_name if sql_name == '': self.sql_name = field_name.lower().replace(' ', '_') else: self.sql_name = sql_name self.nullable = nullable def generate_sql(self): '''Return the SQL describing a field of this sort, suitable for inclusion in a CREATE TABLE statement''' if self.nullable: return self.sql_name + ' ' + self.sql_type_name else: return self.sql_name + ' ' + self.sql_type_name + ' NOT NULL' class BigIntField(GazetteerField): '''A gazetteer field corresponding to the SQL type BIGINT.''' sql_type_name = 'BIGINT' class IntegerField(GazetteerField): '''A gazetteer field corresponding to the SQL type INTEGER.''' sql_type_name = 'INTEGER' class SmallIntField(GazetteerField): '''A gazetteer field corresponding to the SQL type SMALLINT.''' sql_type_name = 'SMALLINT' class DoubleField(GazetteerField): '''A gazetteer field corresponding to the SQL type DOUBLE PRECISION.''' sql_type_name = 'DOUBLE PRECISION' class TextField(GazetteerField): '''A gazetteer field corresponding to the SQL type TEXT.''' sql_type_name = 'TEXT' class FixedTextField(GazetteerField): '''A gazetteer field corresponding to the SQL type CHARACTER VARYING() with a defined width.''' def __init__(self, field_name, width, sql_name='', nullable=True): super().__init__(field_name, sql_name, nullable) self.width = width def generate_sql(self): if self.nullable: return self.sql_name + ' CHARACTER VARYING({})'.format(self.width) else: return self.sql_name + ' CHARACTER VARYING({})'.format(self.width)\ + ' NOT NULL' class DateField(GazetteerField): '''A gazetteer field corresponding to the SQL type DATE.''' sql_type_name = 'DATE' class TimeStampField(GazetteerField): '''A gazetteer field corresponding to the SQL type TIMESTAMP.''' sql_type_name = 'TIMESTAMP' class FlagField(GazetteerField): '''This is intended for gazetteer single character fields that are sometimes used as a form of Boolean or basic enumeration type. It may be more efficient to switch these to the "char" type (with the quotations) which is an internal PostgreSQL type which has a fixed width and only takes up one byte.''' sql_type_name = 'CHARACTER VARYING(1)'
jhumphry/gazetteer_etl
gazetteer/fields.py
Python
gpl-2.0
3,587
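To illustrate how these classes compose, a short hypothetical table definition (the field names are invented for the example) and the column SQL it emits via generate_sql():

fields = [
    TextField('Feature Name'),
    DoubleField('Latitude', nullable=False),
    FixedTextField('Country Code', width=2, sql_name='cc'),
]
for f in fields:
    print(f.generate_sql())
# feature_name TEXT
# latitude DOUBLE PRECISION NOT NULL
# cc CHARACTER VARYING(2)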
import time

from PyQt4 import QtGui, QtCore, QtOpenGL
from PyQt4.QtOpenGL import QGLWidget
import OpenGL.GL as gl
import OpenGL.arrays.vbo as glvbo
import numpy as np

import raster
import slider
import draw_texture
import qt_helpers

raster_width = 1024
raster_height = 64
raster_n_neurons = 64
spikes_per_frame = 5


class GLPlotWidget(QGLWidget):
    # default window size
    width, height = 600, 600
    t_last_msg = time.time()
    spike_count = 0
    last_time = None

    def initializeGL(self):
        # program for drawing spikes
        self.raster = raster.RasterProgram(raster_width, raster_height,
                                           raster_n_neurons)
        self.raster.link()

        # program for fading sparkleplot
        self.slider = slider.SlideProgram(raster_width, raster_height)
        self.slider.link()

        # program for rendering a texture on the screen
        self.draw_texture = draw_texture.DrawTextureProgram()
        self.draw_texture.link()

    def paintGL(self):
        now = time.time()
        if self.last_time is None:
            decay = 0.0
            self.dt = None
        else:
            dt = now - self.last_time
            if self.dt is None:
                self.dt = dt
            else:
                #self.dt = dt
                self.dt = (0.9) * self.dt + (0.1) * dt
        self.last_time = now

        if self.dt is not None:
            self.slider.swap_frame_buffer(int(self.dt/0.001))
            self.slider.paint_slid()

        #data = self.data
        data = np.random.randint(raster_n_neurons,
                                 size=spikes_per_frame).astype('int32')  # generate spike data
        self.spike_count += len(data)

        # paint the spikes onto the sparkle plot
        self.slider.swap_frame_buffer(0, False)
        self.raster.paint_spikes(data)

        # switch to rendering on the screen
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)
        gl.glViewport(0, 0, self.width, self.height)

        # draw the sparkle plot on the screen
        self.draw_texture.paint(self.slider.get_current_texture())

        # print out spike rate
        now = time.time()
        if now > self.t_last_msg + 1:
            dt = now - self.t_last_msg
            rate = self.spike_count * 0.000001 / dt
            print 'Mspikes per second = %g' % rate
            self.spike_count = 0
            self.t_last_msg = now

        # flag a redraw
        self.update()

    def resizeGL(self, width, height):
        """Called upon window resizing: reinitialize the viewport."""
        # update the window size
        self.width, self.height = width, height
        # paint within the whole window
        gl.glViewport(0, 0, width, height)


if __name__ == '__main__':
    # define a Qt window with an OpenGL widget inside it
    class TestWindow(QtGui.QMainWindow):
        def __init__(self):
            super(TestWindow, self).__init__()
            # initialize the GL widget
            self.widget = GLPlotWidget()
            # put the window at the screen position (100, 100)
            self.setGeometry(100, 100, self.widget.width, self.widget.height)
            self.setCentralWidget(self.widget)
            self.show()

    # show the window
    win = qt_helpers.create_window(TestWindow)
tcstewar/opengl_texture_rendering
sparkle/main_raster.py
Python
gpl-2.0
3,315
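A hedged sketch of embedding the widget outside the module's own TestWindow; it assumes PyQt4 and the sibling shader modules (raster, slider, draw_texture) are importable from the same directory:

import sys
from PyQt4 import QtGui
from main_raster import GLPlotWidget

app = QtGui.QApplication(sys.argv)
widget = GLPlotWidget()
widget.show()          # showing the widget triggers initializeGL/paintGL
sys.exit(app.exec_())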
#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2002-2006  Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import LOCALE as glocale
_ = glocale.translation.gettext

#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from .._hasnotesubstrbase import HasNoteSubstrBase

#-------------------------------------------------------------------------
# "Media having notes that contain a substring"
#-------------------------------------------------------------------------
class HasNoteMatchingSubstringOf(HasNoteSubstrBase):
    """Media having notes containing <substring>"""

    name = _('Media objects having notes containing <substring>')
    description = _("Matches media objects whose notes contain text "
                    "matching a substring")
sam-m888/gprime
gprime/filters/rules/media/_hasnotematchingsubstringof.py
Python
gpl-2.0
1,769
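A hedged sketch of how such a rule is typically driven; the per-object apply call is an assumption based on the base-class pattern and is not shown in this file:

rule = HasNoteMatchingSubstringOf(["baptism"])   # the substring is the rule argument
# the filter framework then applies the rule per media object, e.g.:
# matches = [obj for obj in media_objects if rule.apply(db, obj)]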
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, TextAreaField, SelectField, FileField, IntegerField
from wtforms.validators import DataRequired, regexp, NumberRange
from wtforms.ext.sqlalchemy.fields import QuerySelectField

from picture_hunt.models import Team, Task


class TeamForm(Form):
    name = StringField('Team Name', validators=[DataRequired()])
    note = TextAreaField('Note', validators=[DataRequired()])


class TaskForm(Form):
    name = StringField('Task', validators=[DataRequired()])
    points = IntegerField('Points', validators=[DataRequired()])
    note = TextAreaField('Note', validators=[DataRequired()])


class UploadForm(Form):
    team = SelectField('Team', coerce=int,
                       choices=[(None, 'Make a Team first'), ],
                       validators=[NumberRange(min=0, message="Please choose a team first")])
    task = SelectField('Task', coerce=int,
                       choices=[(None, 'Make a Task first'), ],
                       validators=[NumberRange(min=0, message="Please choose a team first")])
    media = FileField('Media File', validators=[DataRequired()])


class SearchForm(Form):
    team = QuerySelectField(
        query_factory=Team.query.all,
        get_pk=lambda a: a.id,
        get_label=lambda a: a.name,
        allow_blank=True,
        blank_text="All"
    )
    task = QuerySelectField(
        query_factory=Task.query.all,
        get_pk=lambda a: a.id,
        get_label=lambda a: a.name,
        allow_blank=True,
        blank_text="All"
    )
jkelley05/picture-scavenger-hunt
picture_hunt/forms.py
Python
gpl-2.0
1,679
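Illustrative only: wiring TeamForm into a Flask view. The route, template name and persistence step are invented for the sketch:

from flask import Flask, render_template, request
from picture_hunt.forms import TeamForm

app = Flask(__name__)

@app.route('/teams/new', methods=['GET', 'POST'])
def new_team():
    form = TeamForm(request.form)
    if request.method == 'POST' and form.validate():
        pass  # e.g. create a Team from form.name.data / form.note.data
    return render_template('team_form.html', form=form)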
'''
from https://docs.djangoproject.com/en/1.7/topics/auth/customizing/#specifying-a-custom-user-model
'''

from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import gettext_lazy as _

from custom_user.models import User


class UserCreationForm(forms.ModelForm):
    """
    A form for creating new users. Includes all the required
    fields, plus a repeated password.
    """
    password1 = forms.CharField(label=_('Password'), widget=forms.PasswordInput)
    password2 = forms.CharField(label=_('Password confirmation'), widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ('email',)

    def clean_password2(self):
        # Check that the two password entries match
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(_("Passwords don't match"))
        return password2

    def save(self, commit=True):
        # Save the provided password in hashed format
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user


class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    password = ReadOnlyPasswordHashField()

    class Meta:
        model = User
        fields = ('email', 'password', 'is_active', 'is_superuser')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]


class MyUserAdmin(UserAdmin):
    # The forms to add and change user instances
    form = UserChangeForm
    add_form = UserCreationForm

    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = ('email', 'is_superuser')
    list_filter = ('is_superuser',)
    fieldsets = (
        (None, {'fields': ('email', 'name', 'password', 'family')}),
        ('Permissions', {'fields': ('is_superuser', 'is_active',)}),
        ('Settings', {'fields': ('language', 'receive_update_emails', 'receive_photo_update_emails')}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2')}
         ),
    )
    search_fields = ('email',)
    ordering = ('email',)
    filter_horizontal = ()
    raw_id_fields = ('family',)

# Now register the new UserAdmin...
admin.site.register(User, MyUserAdmin)
JustinWingChungHui/okKindred
custom_user/admin.py
Python
gpl-2.0
3,150
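For this admin to be used, the project must also point Django at the custom model; the standard settings.py line (app label inferred from the import path above) is:

AUTH_USER_MODEL = 'custom_user.User'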
from PyQRNative import *
from PIL.Image import BILINEAR, BICUBIC, ANTIALIAS, NEAREST
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import portrait, A4
from reportlab.lib.units import cm, mm
from StringIO import StringIO
from plant.tag import create_tag
import time
from datetime import datetime

QR_TYPE = 4
QR_ECC = QRErrorCorrectLevel.H

TAG_FONT = 'Courier-Bold'
TAG_FONT_PT = 8
FOOT_FONT = 'Helvetica'
FOOT_FONT_PT = 8
TOP_YMARGIN = 0.75*cm

LAYOUTS = {
    'Long sticks':
        {'qr_size': 2*cm, 'qr_lxmargin': 1*cm, 'qr_rxmargin': 1*cm,
         'qr_ymargin': 5.0*cm, 'created': True, 'paired': False},
    'Sticky labels 70x37mm':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm,
         'qr_ymargin': 1.2*cm, 'created': False, 'paired': False},
    'Sticky labels 70x37mm (paired)':
        {'qr_size': 2.5*cm, 'qr_lxmargin': 0.50*cm, 'qr_rxmargin': 0.50*cm,
         'qr_ymargin': 1.2*cm, 'created': False, 'paired': True},
#    'Verbose labels ?x?mm':
#        {'qr_size': 0, 'qr_lxmargin': 0, 'qr_ymargin': 0},
}
LAYOUT_LIST = LAYOUTS.keys()

DUPLEX_LIST = ['No', 'Short side']

# About three cm seems to be a reasonable size, both in terms of what fits
# in practice and the analyses done at
# http://www.qrstuff.com/blog/2011/01/18/what-size-should-a-qr-code-be
# A suitable font size for the tags seems to be 8pt Helvetica


def validate_params(layout, duplex):
    if (layout is None) or (layout not in LAYOUT_LIST):
        return False
    if (duplex is None) or (duplex not in DUPLEX_LIST):
        return False
    if (layout == 'Verbose labels ?x?mm'):
        raise NotImplementedError
    return True


def generate_new_qrimage():
    tag = create_tag()
    qr = QRCode(QR_TYPE, QR_ECC)
    qr.addData('https://YOUR_DOMAIN/' + str(tag.tag))
    qr.make()
    return (qr.makeImage(), tag.tag)


def generate_qr_from_layout(layout, duplex, pagesize=A4):
    if duplex == 'Long side':
        raise NotImplementedError('only short page duplex implemented')

    now = datetime.now()

    qr_size = LAYOUTS[layout]['qr_size']
    qr_lxmargin = LAYOUTS[layout]['qr_lxmargin']
    qr_rxmargin = LAYOUTS[layout]['qr_rxmargin']
    qr_ymargin = LAYOUTS[layout]['qr_ymargin']
    created = LAYOUTS[layout]['created']
    paired = LAYOUTS[layout]['paired']

    x = pagesize[0] - (qr_size + qr_lxmargin)
    y = pagesize[1] - (qr_size + TOP_YMARGIN)

    # Validate parameters; this is mostly for debugging
    if (qr_size < 1) or (qr_lxmargin < 1) or (qr_rxmargin < 1) or (qr_ymargin < 1):
        raise ValueError(u'Internal error: One of qr size, qr x margin or qr y margin is zero.')

    # Generate QR codes with positions
    qrimgs = []
    while y >= 0:
        xnum = 0
        while x > 0:
            xnum += 1
            if (not paired) or (xnum % 2 != 0):
                (qrimg, tag) = generate_new_qrimage()
                qrimgs.append({'image': qrimg, 'tag': tag, 'x': x, 'y': y})
            x -= (qr_size + qr_rxmargin + qr_lxmargin)
        x = pagesize[0] - (qr_size + qr_lxmargin)
        y -= (qr_size + qr_ymargin)

    f = StringIO()
    pdf = canvas.Canvas(f, pagesize=portrait(pagesize), pageCompression=0)

    # Plot QR codes on first side
    pdf.setFont(TAG_FONT, TAG_FONT_PT)
    for qrimg in qrimgs:
        x = qrimg['x']
        y = qrimg['y']
        # drawImage() seems to crash on PIL objects so we use drawInlineImage()
        # instead, even though it's deprecated.
        # PyQRNative draws a white margin around the QR code, making it about
        # one eighth smaller than the required size.
        pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625),
                            width=qr_size, height=qr_size,
                            preserveAspectRatio=True)
        pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])

    if created:
        pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
        pdf.drawString(cm, cm, 'Created on %s' % str(now))

    pdf.showPage()

    if duplex != 'No':
        pdf.setFont(TAG_FONT, TAG_FONT_PT)
        pdf.setPageRotation(180)
        for qrimg in qrimgs:
            x = portrait(pagesize)[0] - qrimg['x'] - qr_size
            y = qrimg['y']
            pdf.drawInlineImage(qrimg['image'], x, y+(qr_size*0.0625),
                                width=qr_size, height=qr_size,
                                preserveAspectRatio=True)
            pdf.drawCentredString(x + (qr_size/2), y + 0.05*cm, qrimg['tag'])
        if created:
            pdf.setFont(FOOT_FONT, FOOT_FONT_PT)
            pdf.drawRightString(portrait(pagesize)[0] - cm, cm,
                                'Created on %s' % str(now))
        pdf.showPage()

    pdf.save()
    return f
andbof/plantdb
qr/functions.py
Python
gpl-2.0
4,614
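A hedged usage sketch: validate a layout/duplex pair from the module's own lists, build the PDF in memory and write it out (the output filename is invented):

if validate_params('Sticky labels 70x37mm (paired)', 'No'):
    buf = generate_qr_from_layout('Sticky labels 70x37mm (paired)', 'No')
    with open('labels.pdf', 'wb') as out:
        out.write(buf.getvalue())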
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2003-2018 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import os

from Cerebrum.default_config import *

CEREBRUM_DATABASE_NAME = os.getenv('DB_NAME')
CEREBRUM_DATABASE_CONNECT_DATA['user'] = os.getenv('DB_USER')
CEREBRUM_DATABASE_CONNECT_DATA['table_owner'] = os.getenv('DB_USER')
CEREBRUM_DATABASE_CONNECT_DATA['host'] = os.getenv('DB_HOST')
CEREBRUM_DATABASE_CONNECT_DATA['table_owner'] = os.getenv('DB_USER')

CEREBRUM_DDL_DIR = '/src/design'
DB_AUTH_DIR = '/db-auth'

LOGGING_CONFIGFILE = os.path.join(os.getenv('TEST_CONFIG_DIR'), 'logging.ini')
unioslo/cerebrum
testsuite/docker/test-config/cereconf_local.py
Python
gpl-2.0
1,350
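Since every value is read from the environment at import time, a test harness has to export the variables first; a hypothetical setup (all values invented):

import os
os.environ.update({
    'DB_NAME': 'cerebrum_test',
    'DB_USER': 'cerebrum',
    'DB_HOST': 'localhost',
    'TEST_CONFIG_DIR': '/src/testsuite/docker/test-config',
})
import cereconf_local  # picks the variables up on import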
#! /usr/bin/env python
#
# @refer
#   http://svn.python.org/projects/python/trunk/Tools/scripts/ftpmirror.py
# @note
#
"""Mirror a remote ftp subtree into a local directory tree.

usage: ftpmirror [-v] [-q] [-i] [-m] [-n] [-r] [-s pat]
                 [-l username [-p passwd [-a account]]]
                 hostname[:port] [remotedir [localdir]]
-v: verbose
-q: quiet
-i: interactive mode
-m: macintosh server (NCSA telnet 2.4) (implies -n -s '*.o')
-n: don't log in
-r: remove local files/directories no longer pertinent
-l username [-p passwd [-a account]]: login info (default .netrc or anonymous)
-s pat: skip files matching pattern
hostname: remote host w/ optional port separated by ':'
remotedir: remote directory (default initial)
localdir: local directory (default current)
"""

import os
import sys
import time
import getopt
import ftplib
import netrc
from fnmatch import fnmatch

# Print usage message and exit
def usage(*args):
    sys.stdout = sys.stderr
    for msg in args:
        print(msg)
    print(__doc__)
    sys.exit(2)

verbose = 1  # 0 for -q, 2 for -v
interactive = 0
mac = 0
rmok = 0
nologin = 0
skippats = ['.', '..', '.mirrorinfo']

# Main program: parse command line and start processing
def main():
    global verbose, interactive, mac, rmok, nologin
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'a:bil:mnp:qrs:v')
    except getopt.error as msg:
        usage(msg)
    login = ''
    passwd = ''
    account = ''
    if not args: usage('hostname missing')
    host = args[0]
    port = 0
    if ':' in host:
        host, port = host.split(':', 1)
        port = int(port)
    try:
        auth = netrc.netrc().authenticators(host)
        if auth is not None:
            login, account, passwd = auth
    except (netrc.NetrcParseError, IOError):
        pass
    for o, a in opts:
        if o == '-l': login = a
        if o == '-p': passwd = a
        if o == '-a': account = a
        if o == '-v': verbose = verbose + 1
        if o == '-q': verbose = 0
        if o == '-i': interactive = 1
        if o == '-m': mac = 1; nologin = 1; skippats.append('*.o')
        if o == '-n': nologin = 1
        if o == '-r': rmok = 1
        if o == '-s': skippats.append(a)
    remotedir = ''
    localdir = ''
    if args[1:]: remotedir = args[1]
    if args[2:]: localdir = args[2]
    if args[3:]: usage('too many arguments')
    #
    f = ftplib.FTP()
    if verbose:
        print("Connecting to '%s%s'..." % (host, (port and ":%d" % port or "")))
    f.connect(host, port)
    if not nologin:
        if verbose:
            print('Logging in as %r...' % (login or 'anonymous'))
        f.login(login, passwd, account)
    if verbose: print('OK.')
    pwd = f.pwd()
    if verbose > 1: print('PWD =', repr(pwd))
    if remotedir:
        if verbose > 1: print('cwd(%s)' % repr(remotedir))
        f.cwd(remotedir)
        if verbose > 1: print('OK.')
        pwd = f.pwd()
        if verbose > 1: print('PWD =', repr(pwd))
    #
    mirrorsubdir(f, localdir)

# Core logic: mirror one subdirectory (recursively)
def mirrorsubdir(f, localdir):
    pwd = f.pwd()
    if localdir and not os.path.isdir(localdir):
        if verbose: print('Creating local directory', repr(localdir))
        try:
            makedir(localdir)
        except os.error as msg:
            print("Failed to establish local directory", repr(localdir))
            return
    infofilename = os.path.join(localdir, '.mirrorinfo')
    try:
        text = open(infofilename, 'r').read()
    except IOError as msg:
        text = '{}'
    try:
        info = eval(text)
    except (SyntaxError, NameError):
        print('Bad mirror info in', repr(infofilename))
        info = {}
    subdirs = []
    listing = []
    if verbose: print('Listing remote directory %r...' % (pwd,))
    f.retrlines('LIST', listing.append)
    filesfound = []
    for line in listing:
        if verbose > 1: print('-->', repr(line))
        if mac:
            # Mac listing has just filenames;
            # trailing / means subdirectory
            filename = line.strip()
            mode = '-'
            if filename[-1:] == '/':
                filename = filename[:-1]
                mode = 'd'
            infostuff = ''
        else:
            # Parse, assuming a UNIX listing
            words = line.split(None, 8)
            if len(words) < 6:
                if verbose > 1: print('Skipping short line')
                continue
            filename = words[-1].lstrip()
            i = filename.find(" -> ")
            if i >= 0:
                # words[0] had better start with 'l'...
                if verbose > 1:
                    print('Found symbolic link %r' % (filename,))
                linkto = filename[i+4:]
                filename = filename[:i]
            infostuff = words[-5:-1]
            mode = words[0]
        skip = 0
        for pat in skippats:
            if fnmatch(filename, pat):
                if verbose > 1:
                    print('Skip pattern', repr(pat), end=' ')
                    print('matches', repr(filename))
                skip = 1
                break
        if skip:
            continue
        if mode[0] == 'd':
            if verbose > 1:
                print('Remembering subdirectory', repr(filename))
            subdirs.append(filename)
            continue
        filesfound.append(filename)
        if filename in info and info[filename] == infostuff:
            if verbose > 1:
                print('Already have this version of', repr(filename))
            continue
        fullname = os.path.join(localdir, filename)
        tempname = os.path.join(localdir, '@'+filename)
        if interactive:
            doit = askabout('file', filename, pwd)
            if not doit:
                if filename not in info:
                    info[filename] = 'Not retrieved'
                continue
        try:
            os.unlink(tempname)
        except os.error:
            pass
        if mode[0] == 'l':
            if verbose:
                print("Creating symlink %r -> %r" % (filename, linkto))
            try:
                os.symlink(linkto, tempname)
            except IOError as msg:
                print("Can't create %r: %s" % (tempname, msg))
                continue
        else:
            try:
                fp = open(tempname, 'wb')
            except IOError as msg:
                print("Can't create %r: %s" % (tempname, msg))
                continue
            if verbose:
                print('Retrieving %r from %r as %r...' % (filename, pwd, fullname))
            if verbose:
                fp1 = LoggingFile(fp, 1024, sys.stdout)
            else:
                fp1 = fp
            t0 = time.time()
            try:
                f.retrbinary('RETR ' + filename, fp1.write, 8*1024)
            except ftplib.error_perm as msg:
                print(msg)
            t1 = time.time()
            bytes = fp.tell()
            fp.close()
            if fp1 != fp:
                fp1.close()
        try:
            os.unlink(fullname)
        except os.error:
            pass  # Ignore the error
        try:
            os.rename(tempname, fullname)
        except os.error as msg:
            print("Can't rename %r to %r: %s" % (tempname, fullname, msg))
            continue
        info[filename] = infostuff
        writedict(info, infofilename)
        if verbose and mode[0] != 'l':
            dt = t1 - t0
            kbytes = bytes / 1024.0
            print(int(round(kbytes)), end=' ')
            print('Kbytes in', end=' ')
            print(int(round(dt)), end=' ')
            print('seconds', end=' ')
            if t1 > t0:
                print('(~%d Kbytes/sec)' %
                      int(round(kbytes/dt),))
            print()
    #
    # Remove files from info that are no longer remote
    deletions = 0
    for filename in list(info.keys()):
        if filename not in filesfound:
            if verbose:
                print("Removing obsolete info entry for", end=' ')
                print(repr(filename), "in", repr(localdir or "."))
            del info[filename]
            deletions = deletions + 1
    if deletions:
        writedict(info, infofilename)
    #
    # Remove local files that are no longer in the remote directory
    try:
        if not localdir:
            names = os.listdir(os.curdir)
        else:
            names = os.listdir(localdir)
    except os.error:
        names = []
    for name in names:
        if name[0] == '.' or name in info or name in subdirs:
            continue
        skip = 0
        for pat in skippats:
            if fnmatch(name, pat):
                if verbose > 1:
                    print('Skip pattern', repr(pat), end=' ')
                    print('matches', repr(name))
                skip = 1
                break
        if skip:
            continue
        fullname = os.path.join(localdir, name)
        if not rmok:
            if verbose:
                print('Local file', repr(fullname), end=' ')
                print('is no longer pertinent')
            continue
        if verbose: print('Removing local file/dir', repr(fullname))
        remove(fullname)
    #
    # Recursively mirror subdirectories
    for subdir in subdirs:
        if interactive:
            doit = askabout('subdirectory', subdir, pwd)
            if not doit: continue
        if verbose: print('Processing subdirectory', repr(subdir))
        localsubdir = os.path.join(localdir, subdir)
        pwd = f.pwd()
        if verbose > 1:
            print('Remote directory now:', repr(pwd))
            print('Remote cwd', repr(subdir))
        try:
            f.cwd(subdir)
        except ftplib.error_perm as msg:
            print("Can't chdir to", repr(subdir), ":", repr(msg))
        else:
            if verbose: print('Mirroring as', repr(localsubdir))
            mirrorsubdir(f, localsubdir)
            if verbose > 1: print('Remote cwd ..')
            f.cwd('..')
            newpwd = f.pwd()
            if newpwd != pwd:
                print('Ended up in wrong directory after cd + cd ..')
                print('Giving up now.')
                break
            else:
                if verbose > 1: print('OK.')

# Helper to remove a file or directory tree
def remove(fullname):
    if os.path.isdir(fullname) and not os.path.islink(fullname):
        try:
            names = os.listdir(fullname)
        except os.error:
            names = []
        ok = 1
        for name in names:
            if not remove(os.path.join(fullname, name)):
                ok = 0
        if not ok:
            return 0
        try:
            os.rmdir(fullname)
        except os.error as msg:
            print("Can't remove local directory %r: %s" % (fullname, msg))
            return 0
    else:
        try:
            os.unlink(fullname)
        except os.error as msg:
            print("Can't remove local file %r: %s" % (fullname, msg))
            return 0
    return 1

# Wrapper around a file for writing to write a hash sign every block.
class LoggingFile:
    def __init__(self, fp, blocksize, outfp):
        self.fp = fp
        self.bytes = 0
        self.hashes = 0
        self.blocksize = blocksize
        self.outfp = outfp

    def write(self, data):
        self.bytes = self.bytes + len(data)
        # integer division so the hash count stays an int under Python 3
        hashes = self.bytes // self.blocksize
        while hashes > self.hashes:
            self.outfp.write('#')
            self.outfp.flush()
            self.hashes = self.hashes + 1
        self.fp.write(data)

    def close(self):
        self.outfp.write('\n')

# Ask permission to download a file.
def askabout(filetype, filename, pwd):
    prompt = 'Retrieve %s %s from %s ? [ny] ' % (filetype, filename, pwd)
    while 1:
        reply = input(prompt).strip().lower()
        if reply in ['y', 'ye', 'yes']:
            return 1
        if reply in ['', 'n', 'no', 'nop', 'nope']:
            return 0
        print('Please answer yes or no.')

# Create a directory if it doesn't exist. Recursively create the
# parent directory as well if needed.
def makedir(pathname):
    if os.path.isdir(pathname):
        return
    dirname = os.path.dirname(pathname)
    if dirname:
        makedir(dirname)
    os.mkdir(pathname, 0o777)

# Write a dictionary to a file in a way that can be read back using
# rval() but is still somewhat readable (i.e. not a single long line).
# Also creates a backup file.
def writedict(dict, filename):
    dir, fname = os.path.split(filename)
    tempname = os.path.join(dir, '@' + fname)
    backup = os.path.join(dir, fname + '~')
    try:
        os.unlink(backup)
    except os.error:
        pass
    fp = open(tempname, 'w')
    fp.write('{\n')
    for key, value in list(dict.items()):
        fp.write('%r: %r,\n' % (key, value))
    fp.write('}\n')
    fp.close()
    try:
        os.rename(filename, backup)
    except os.error:
        pass
    os.rename(tempname, filename)

if __name__ == '__main__':
    main()

#...
###___END___
zhengfish/examples
python3/ftp/ftpmirror.py
Python
gpl-2.0
13,096
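Typical invocations, following the docstring's usage block (host and paths are invented for the sketch):

#   python ftpmirror.py -v ftp.example.org /pub/some/dir ./mirror
#   python ftpmirror.py -q -r -s '*.tmp' ftp.example.org:2121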
# -*- coding: utf-8 -*-

"""
***************************************************************************
    ogr2ogrtabletopostgislist.py
    ---------------------
    Date                 : November 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive

__revision__ = '$Format:%H$'

from qgis.PyQt.QtCore import QSettings

from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterTable
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterTableField
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName


class Ogr2OgrTableToPostGisList(GdalAlgorithm):

    DATABASE = 'DATABASE'
    INPUT_LAYER = 'INPUT_LAYER'
    HOST = 'HOST'
    PORT = 'PORT'
    USER = 'USER'
    DBNAME = 'DBNAME'
    PASSWORD = 'PASSWORD'
    SCHEMA = 'SCHEMA'
    TABLE = 'TABLE'
    PK = 'PK'
    PRIMARY_KEY = 'PRIMARY_KEY'
    WHERE = 'WHERE'
    GT = 'GT'
    OVERWRITE = 'OVERWRITE'
    APPEND = 'APPEND'
    ADDFIELDS = 'ADDFIELDS'
    LAUNDER = 'LAUNDER'
    SKIPFAILURES = 'SKIPFAILURES'
    PRECISION = 'PRECISION'
    OPTIONS = 'OPTIONS'

    def dbConnectionNames(self):
        settings = QSettings()
        settings.beginGroup('/PostgreSQL/connections/')
        return settings.childGroups()

    def defineCharacteristics(self):
        self.name, self.i18n_name = self.trAlgorithm(
            'Import layer/table as geometryless table into PostgreSQL database')
        self.group, self.i18n_group = self.trAlgorithm('[OGR] Miscellaneous')
        self.DB_CONNECTIONS = self.dbConnectionNames()
        self.addParameter(ParameterSelection(self.DATABASE,
                                             self.tr('Database (connection name)'),
                                             self.DB_CONNECTIONS))
        self.addParameter(ParameterTable(self.INPUT_LAYER,
                                         self.tr('Input layer')))
        self.addParameter(ParameterString(self.SCHEMA,
                                          self.tr('Schema name'), 'public', optional=True))
        self.addParameter(ParameterString(self.TABLE,
                                          self.tr('Table name, leave blank to use input name'),
                                          '', optional=True))
        self.addParameter(ParameterString(self.PK,
                                          self.tr('Primary key'), 'id', optional=True))
        self.addParameter(ParameterTableField(self.PRIMARY_KEY,
                                              self.tr('Primary key (existing field, used if the above option is left empty)'),
                                              self.INPUT_LAYER, optional=True))
        self.addParameter(ParameterString(self.WHERE,
                                          self.tr('Select features using a SQL "WHERE" statement (Ex: column=\'value\')'),
                                          '', optional=True))
        self.addParameter(ParameterString(self.GT,
                                          self.tr('Group N features per transaction (Default: 20000)'),
                                          '', optional=True))
        self.addParameter(ParameterBoolean(self.OVERWRITE,
                                           self.tr('Overwrite existing table'), True))
        self.addParameter(ParameterBoolean(self.APPEND,
                                           self.tr('Append to existing table'), False))
        self.addParameter(ParameterBoolean(self.ADDFIELDS,
                                           self.tr('Append and add new fields to existing table'), False))
        self.addParameter(ParameterBoolean(self.LAUNDER,
                                           self.tr('Do not launder columns/table names'), False))
        self.addParameter(ParameterBoolean(self.SKIPFAILURES,
                                           self.tr('Continue after a failure, skipping the failed record'), False))
        self.addParameter(ParameterBoolean(self.PRECISION,
                                           self.tr('Keep width and precision of input attributes'), True))
        self.addParameter(ParameterString(self.OPTIONS,
                                          self.tr('Additional creation options'), '', optional=True))

    def getConsoleCommands(self):
        connection = self.DB_CONNECTIONS[self.getParameterValue(self.DATABASE)]
        settings = QSettings()
        mySettings = '/PostgreSQL/connections/' + connection
        dbname = settings.value(mySettings + '/database')
        user = settings.value(mySettings + '/username')
        host = settings.value(mySettings + '/host')
        port = settings.value(mySettings + '/port')
        password = settings.value(mySettings + '/password')
        inLayer = self.getParameterValue(self.INPUT_LAYER)
        ogrLayer = ogrConnectionString(inLayer)[1:-1]
        schema = unicode(self.getParameterValue(self.SCHEMA))
        table = unicode(self.getParameterValue(self.TABLE))
        pk = unicode(self.getParameterValue(self.PK))
        pkstring = "-lco FID=" + pk
        primary_key = self.getParameterValue(self.PRIMARY_KEY)
        where = unicode(self.getParameterValue(self.WHERE))
        wherestring = '-where "' + where + '"'
        gt = unicode(self.getParameterValue(self.GT))
        overwrite = self.getParameterValue(self.OVERWRITE)
        append = self.getParameterValue(self.APPEND)
        addfields = self.getParameterValue(self.ADDFIELDS)
        launder = self.getParameterValue(self.LAUNDER)
        launderstring = "-lco LAUNDER=NO"
        skipfailures = self.getParameterValue(self.SKIPFAILURES)
        precision = self.getParameterValue(self.PRECISION)
        options = unicode(self.getParameterValue(self.OPTIONS))

        arguments = []
        arguments.append('-progress')
        arguments.append('--config PG_USE_COPY YES')
        arguments.append('-f')
        arguments.append('PostgreSQL')
        arguments.append('PG:"host=')
        arguments.append(host)
        arguments.append('port=')
        arguments.append(port)
        if len(dbname) > 0:
            arguments.append('dbname=' + dbname)
        if len(password) > 0:
            arguments.append('password=' + password)
        if len(schema) > 0:
            arguments.append('active_schema=' + schema)
        else:
            arguments.append('active_schema=public')
        arguments.append('user=' + user + '"')
        arguments.append(ogrLayer)
        arguments.append('-nlt NONE')
        arguments.append(ogrLayerName(inLayer))
        if launder:
            arguments.append(launderstring)
        if append:
            arguments.append('-append')
        if addfields:
            arguments.append('-addfields')
        if overwrite:
            arguments.append('-overwrite')
        if len(pk) > 0:
            arguments.append(pkstring)
        elif primary_key is not None:
            arguments.append("-lco FID=" + primary_key)
        if len(table) == 0:
            table = ogrLayerName(inLayer).lower()
        if schema:
            table = '{}.{}'.format(schema, table)
        arguments.append('-nln')
        arguments.append(table)
        if skipfailures:
            arguments.append('-skipfailures')
        if where:
            arguments.append(wherestring)
        if len(gt) > 0:
            arguments.append('-gt')
            arguments.append(gt)
        if not precision:
            arguments.append('-lco PRECISION=NO')
        if len(options) > 0:
            arguments.append(options)

        commands = []
        if isWindows():
            commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
                        GdalUtils.escapeAndJoin(arguments)]
        else:
            commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]

        return commands

    def commandName(self):
        return "ogr2ogr"
AsgerPetersen/QGIS
python/plugins/processing/algs/gdal/ogr2ogrtabletopostgislist.py
Python
gpl-2.0
8,963
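For reference, a hedged sketch of the kind of command line getConsoleCommands() assembles (connection values and input layer invented; flag order follows the code above):

#   ogr2ogr -progress --config PG_USE_COPY YES -f PostgreSQL \
#       PG:"host=localhost port=5432 dbname=gis active_schema=public user=qgis" \
#       input.shp -nlt NONE input -lco FID=id -nln public.input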
#!/usr/bin/python

import socket


def server_test():
    s = socket.socket()
    host = socket.gethostname()
    port = 12345
    s.bind((host, port))

    s.listen(5)
    while True:
        c, addr = s.accept()
        print c
        print 'connect addr: ', addr
        c.send('Welcome to CaiNiao!')
        if cmp(c.recv(1024), "GoodBye") == 0:
            break
        c.close()
    s.close()
jianwei1216/my-scripts
mytest/python/MyInternet/myserver.py
Python
gpl-2.0
400
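A matching client sketch (hypothetical, not part of the repository) for exercising server_test() from another shell on the same host:

import socket

s = socket.socket()
s.connect((socket.gethostname(), 12345))
print s.recv(1024)   # 'Welcome to CaiNiao!'
s.send("GoodBye")    # makes the server loop break
s.close()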
from common import Constant
from common import utils
from main.logger_helper import L

__author__ = 'Dan Cristian <[email protected]>'


# saves record to cloud database
def save_to_history_cloud(obj):
    try:
        L.l.debug('Trying to save historical record to cloud {}'.format(obj))
        if Constant.JSON_PUBLISH_GRAPH_X in obj:
            # name of x field
            axis_x_field = obj[Constant.JSON_PUBLISH_GRAPH_X]
            graph_id_field = obj[Constant.JSON_PUBLISH_GRAPH_ID]
            graph_legend_field = obj[Constant.JSON_PUBLISH_GRAPH_LEGEND]
            graph_shape_fields = obj[Constant.JSON_PUBLISH_GRAPH_SHAPE]
            graph_y_fields = obj[Constant.JSON_PUBLISH_GRAPH_Y]
            # names of fields that have value changed to record smallest amount of data
            changed_fields = obj[Constant.JSON_PUBLISH_FIELDS_CHANGED]
            # intersect lists and get only graphable fields that had values changed
            list_axis_y = list(set(graph_y_fields) & set(changed_fields))
            if len(list_axis_y) == 0:
                L.l.info('Ignoring record save graph={} changed fields={} obj={}'.format(
                    graph_y_fields, changed_fields, obj))
            else:
                L.l.debug('Trying to save y axis {}'.format(list_axis_y))
                if axis_x_field in obj and graph_id_field in obj:
                    table = obj[Constant.JSON_PUBLISH_TABLE]
                    trace_unique_id = obj[graph_id_field]  # unique record/trace identifier
                    x_val = obj[axis_x_field]
                    graph_legend_item_name = obj[graph_legend_field]  # unique key for legend
                    x_val = utils.parse_to_date(x_val)
                    x = x_val
                    index = 0
                    field_pairs = [[axis_x_field, x],
                                   [graph_legend_field, graph_legend_item_name],
                                   [Constant.JSON_PUBLISH_RECORD_UUID, obj[Constant.JSON_PUBLISH_RECORD_UUID]],
                                   [Constant.JSON_PUBLISH_SOURCE_HOST, obj[Constant.JSON_PUBLISH_SOURCE_HOST]]]
                    for axis_y in list_axis_y:
                        if axis_y in obj:
                            trace_list = []
                            y = obj[axis_y]
                            # add multiple y values for later save in db as a single record
                            field_pairs.append([axis_y, y])
                            # upload to cloud if plotly is initialised
                            #from cloud import graph_plotly
                            #if graph_plotly.initialised:
                            #    from cloud.graph_plotly import graph_plotly_run
                            #    Log.logger.info('Uploading to cloud field {}'.format(graph_legend_field))
                            #    # shape visual type for this trace
                            #    shape = graph_shape_fields[index]
                            #    # unique name used for grid on upload
                            #    grid_base_name = str(table)
                            #    graph_plotly_run.add_grid_data(grid_unique_name=grid_base_name, x=x, y=y,
                            #                                   axis_x_name=axis_x_field, axis_y_name=axis_y,
                            #                                   record_unique_id_name=graph_legend_field,
                            #                                   record_unique_id_value=graph_legend_item_name)
                            #Log.logger.debug('Skip upload to cloud, plotly not init')
                            index += 1
                else:
                    L.l.critical('Missing history axis_x [{}], graph_id [{}], in obj {}'.format(
                        axis_x_field, graph_id_field, obj))
        else:
            L.l.critical('Missing history axis X field {}'.format(Constant.JSON_PUBLISH_GRAPH_X))
    except Exception as ex:
        L.l.exception('General error saving historical cloud record, err {} obj={}'.format(ex, obj))


# saves record to the local history database
def save_to_history_db(obj):
    try:
        table = obj[Constant.JSON_PUBLISH_TABLE]
        # L.l.debug('Trying to save historical record to db={}'.format(table))
        # save to local history DB, append history to source table name
        dest_table = str(table) + 'History'
        # L.l.debug('Saving to local db table {} obj={}'.format(dest_table, obj))
        from storage.sqalc import models
        # http://stackoverflow.com/questions/4030982/initialise-class-object-by-name
        try:
            class_table = getattr(models, dest_table)
            new_record = class_table()
            for field in obj:
                if hasattr(new_record, field) and field != "id":
                    setattr(new_record, field, obj[field])
            if new_record.add_commit_record_to_db():
                # L.l.debug('Saved OK to local db table {} obj={}'.format(dest_table, new_record))
                pass
            else:
                L.l.critical("Cannot save history db record={}".format(obj))
        except Exception as ex:
            L.l.critical("Cannot save history db err={} record={}".format(ex, obj))
    except Exception as ex:
        L.l.exception('General error saving historical db record, err {} obj={}'.format(ex, obj))
dan-cristian/haiot
main/persistence/__init__.py
Python
gpl-2.0
5,411
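Hedged sketch of the dict contract save_to_history_db() expects: a table name under Constant.JSON_PUBLISH_TABLE plus attribute/value pairs matching a <Table>History model; the table and field names below are invented:

record = {
    Constant.JSON_PUBLISH_TABLE: 'Sensor',   # resolved to models.SensorHistory
    'temperature': 21.5,
    'updated_on': '2016-01-01 12:00:00',
}
save_to_history_db(record)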
#!/usr/bin/python3
# vim: ai ts=4 sts=4 et sw=4
#
# Copyright (c) 2009 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

import unittest

import test_specify


def main():
    suite = unittest.TestSuite((
        test_specify.suite(),
    ))
    unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == '__main__':
    main()
mer-tools/spectacle
tests/alltest.py
Python
gpl-2.0
1,003
import networkx as nx


class BaseTestAttributeMixing(object):

    def setUp(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1], fish='one')
        G.add_nodes_from([2, 3], fish='two')
        G.add_nodes_from([4], fish='red')
        G.add_nodes_from([5], fish='blue')
        G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
        self.G = G

        D = nx.DiGraph()
        D.add_nodes_from([0, 1], fish='one')
        D.add_nodes_from([2, 3], fish='two')
        D.add_nodes_from([4], fish='red')
        D.add_nodes_from([5], fish='blue')
        D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
        self.D = D

        M = nx.MultiGraph()
        M.add_nodes_from([0, 1], fish='one')
        M.add_nodes_from([2, 3], fish='two')
        M.add_nodes_from([4], fish='red')
        M.add_nodes_from([5], fish='blue')
        M.add_edges_from([(0, 1), (0, 1), (2, 3)])
        self.M = M

        S = nx.Graph()
        S.add_nodes_from([0, 1], fish='one')
        S.add_nodes_from([2, 3], fish='two')
        S.add_nodes_from([4], fish='red')
        S.add_nodes_from([5], fish='blue')
        S.add_edge(0, 0)
        S.add_edge(2, 2)
        self.S = S


class BaseTestDegreeMixing(object):

    def setUp(self):
        self.P4 = nx.path_graph(4)
        self.D = nx.DiGraph()
        self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
        self.M = nx.MultiGraph()
        self.M.add_path(list(range(4)))
        self.M.add_edge(0, 1)
        self.S = nx.Graph()
        self.S.add_edges_from([(0, 0), (1, 1)])
azlanismail/prismgames
examples/games/car/networkx/algorithms/assortativity/tests/base_test.py
Python
gpl-2.0
1,539
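Sketch of how these fixtures are meant to be combined with real assertions in a concrete test class (the assertion shown only inspects the result rather than asserting exact counts):

import networkx as nx

class TestAttributeMixing(BaseTestAttributeMixing):
    def test_fish_mixing_dict(self):
        d = nx.attribute_mixing_dict(self.G, 'fish')
        # e.g. inspect how often 'one' nodes attach to 'one' nodes
        print(d['one'])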
""" SQLAlchemy support. """ from __future__ import absolute_import import datetime from types import GeneratorType import decimal from sqlalchemy import func # from sqlalchemy.orm.interfaces import MANYTOONE from sqlalchemy.orm.collections import InstrumentedList from sqlalchemy.sql.type_api import TypeDecorator try: from sqlalchemy.orm.relationships import RelationshipProperty except ImportError: from sqlalchemy.orm.properties import RelationshipProperty from sqlalchemy.types import ( BIGINT, BOOLEAN, BigInteger, Boolean, CHAR, DATE, DATETIME, DECIMAL, Date, DateTime, FLOAT, Float, INT, INTEGER, Integer, NCHAR, NVARCHAR, NUMERIC, Numeric, SMALLINT, SmallInteger, String, TEXT, TIME, Text, Time, Unicode, UnicodeText, VARCHAR, Enum) from .. import mix_types as t from ..main import ( SKIP_VALUE, LOGGER, TypeMixer as BaseTypeMixer, GenFactory as BaseFactory, Mixer as BaseMixer, partial, faker) class GenFactory(BaseFactory): """ Map a sqlalchemy classes to simple types. """ types = { (String, VARCHAR, Unicode, NVARCHAR, NCHAR, CHAR): str, (Text, UnicodeText, TEXT): t.Text, (Boolean, BOOLEAN): bool, (Date, DATE): datetime.date, (DateTime, DATETIME): datetime.datetime, (Time, TIME): datetime.time, (DECIMAL, Numeric, NUMERIC): decimal.Decimal, (Float, FLOAT): float, (Integer, INTEGER, INT): int, (BigInteger, BIGINT): t.BigInteger, (SmallInteger, SMALLINT): t.SmallInteger, } class TypeMixer(BaseTypeMixer): """ TypeMixer for SQLAlchemy. """ factory = GenFactory def __init__(self, cls, **params): """ Init TypeMixer and save the mapper. """ super(TypeMixer, self).__init__(cls, **params) self.mapper = self.__scheme._sa_class_manager.mapper def postprocess(self, target, postprocess_values): """ Fill postprocess values. """ mixed = [] for name, deffered in postprocess_values: value = deffered.value if isinstance(value, GeneratorType): value = next(value) if isinstance(value, t.Mix): mixed.append((name, value)) continue if isinstance(getattr(target, name), InstrumentedList) and not isinstance(value, list): value = [value] setattr(target, name, value) for name, mix in mixed: setattr(target, name, mix & target) if self.__mixer: target = self.__mixer.postprocess(target) return target @staticmethod def get_default(field): """ Get default value from field. :return value: A default value or NO_VALUE """ column = field.scheme if isinstance(column, RelationshipProperty): column = column.local_remote_pairs[0][0] if not column.default: return SKIP_VALUE if column.default.is_callable: return column.default.arg(None) return getattr(column.default, 'arg', SKIP_VALUE) def gen_select(self, field_name, select): """ Select exists value from database. :param field_name: Name of field for generation. :return : None or (name, value) for later use """ if not self.__mixer or not self.__mixer.params.get('session'): return field_name, SKIP_VALUE relation = self.mapper.get_property(field_name) session = self.__mixer.params.get('session') value = session.query( relation.mapper.class_ ).filter(*select.choices).order_by(func.random()).first() return self.get_value(field_name, value) @staticmethod def is_unique(field): """ Return True is field's value should be a unique. :return bool: """ scheme = field.scheme if isinstance(scheme, RelationshipProperty): scheme = scheme.local_remote_pairs[0][0] return scheme.unique @staticmethod def is_required(field): """ Return True is field's value should be defined. 
:return bool: """ column = field.scheme if isinstance(column, RelationshipProperty): column = column.local_remote_pairs[0][0] if field.params: return True # According to the SQLAlchemy docs, autoincrement "only has an effect for columns which are # Integer derived (i.e. INT, SMALLINT, BIGINT) [and] Part of the primary key [...]". return not column.nullable and not (column.autoincrement and column.primary_key and isinstance(column.type, Integer)) def get_value(self, field_name, field_value): """ Get `value` as `field_name`. :return : None or (name, value) for later use """ field = self.__fields.get(field_name) if field and isinstance(field.scheme, RelationshipProperty): return field_name, t._Deffered(field_value, field.scheme) return super(TypeMixer, self).get_value(field_name, field_value) def make_fabric(self, column, field_name=None, fake=False, kwargs=None): # noqa """ Make values fabric for column. :param column: SqlAlchemy column :param field_name: Field name :param fake: Force fake data :return function: """ kwargs = {} if kwargs is None else kwargs if isinstance(column, RelationshipProperty): return partial(type(self)( column.mapper.class_, mixer=self.__mixer, fake=self.__fake, factory=self.__factory ).blend, **kwargs) ftype = type(column.type) # augmented types created with TypeDecorator # don't directly inherit from the base types if TypeDecorator in ftype.__bases__: ftype = ftype.impl stype = self.__factory.cls_to_simple(ftype) if stype is str: fab = super(TypeMixer, self).make_fabric( stype, field_name=field_name, fake=fake, kwargs=kwargs) return lambda: fab()[:column.type.length] if ftype is Enum: return partial(faker.random_element, column.type.enums) return super(TypeMixer, self).make_fabric( stype, field_name=field_name, fake=fake, kwargs=kwargs) def guard(self, *args, **kwargs): """ Look objects in database. :returns: A finded object or False """ try: session = self.__mixer.params.get('session') assert session except (AttributeError, AssertionError): raise ValueError('Cannot make request to DB.') qs = session.query(self.mapper).filter(*args, **kwargs) count = qs.count() if count == 1: return qs.first() if count: return qs.all() return False def reload(self, obj): """ Reload object from database. """ try: session = self.__mixer.params.get('session') session.expire(obj) session.refresh(obj) return obj except (AttributeError, AssertionError): raise ValueError('Cannot make request to DB.') def __load_fields(self): """ Prepare SQLALchemyTypeMixer. Select columns and relations for data generation. """ mapper = self.__scheme._sa_class_manager.mapper relations = set() if hasattr(mapper, 'relationships'): for rel in mapper.relationships: relations |= rel.local_columns yield rel.key, t.Field(rel, rel.key) for key, column in mapper.columns.items(): if column not in relations: yield key, t.Field(column, key) class Mixer(BaseMixer): """ Integration with SQLAlchemy. """ type_mixer_cls = TypeMixer def __init__(self, session=None, commit=True, **params): """Initialize the SQLAlchemy Mixer. :param fake: (True) Generate fake data instead of random data. :param session: SQLAlchemy session. Using for commits. :param commit: (True) Commit instance to session after creation. """ super(Mixer, self).__init__(**params) self.params['session'] = session self.params['commit'] = bool(session) and commit def postprocess(self, target): """ Save objects in db. 
:return value: A generated value """ if self.params.get('commit'): session = self.params.get('session') if not session: LOGGER.warn("'commit' set true but session not initialized.") else: session.add(target) session.commit() return target # Default mixer mixer = Mixer() # pylama:ignore=E1120,E0611
Nebucatnetzer/tamagotchi
pygame/lib/python3.4/site-packages/mixer/backend/sqlalchemy.py
Python
gpl-2.0
8,887
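Hedged usage sketch for this backend, following the mixer documentation's pattern; the session object and the User model are assumptions for the example:

from mixer.backend.sqlalchemy import Mixer

mixer = Mixer(session=session, commit=True)
user = mixer.blend(User, username='demo')  # unspecified columns get generated values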
#!/usr/bin/python
# -*- coding: utf-8 -*-

import csv
import codecs
import re
import argparse
import os
from prettytable import PrettyTable

report08_schools = {}
report08_employees = {}
report08_school_employees = {}
report16_employee = None
# The following (combined with report16_absence_reasons) is used when an employee
# is absent, has multiple assignments and not all schools have input this absence
report16_absents = {}

# we will store employee school exclusion in the employee_school_exclusions dict
# format: key -> employee afm
employee_school_exclusions = {}

# school exclusions
excluced_schools = list()

# employee exclusions
excluced_employees = dict()


def filterAFM(rawAFM):
    return re.search('=\"(\d*)\"', rawAFM).group(1)


def csv_unireader(f, encoding="utf-8"):
    for row in csv.reader(codecs.iterencode(codecs.iterdecode(f, encoding), "utf-8"),
                          delimiter=';', quotechar='"'):
        yield [e.decode("utf-8") for e in row]


def parseEmployeeExclusionList(reportPath):
    """
    Parses a CSV which in the first column contains the IDs of all employees
    that need to be excluded from processing
    :param reportPath:
    :return: a list of schools ids to exclude
    """
    result = dict()
    with open(reportPath, 'rb') as report_csvfile:
        reader = csv_unireader(report_csvfile, encoding='iso8859-7')
        for row in reader:
            afm = str(row[0])
            afm = afm if len(afm) == 9 else '0' + afm
            result[afm] = (row[1] if len(row) > 1 and row[1] != u'' else u'Άγνωστος λόγος εξαίρεσεις')
    return result


def parseSchoolExclusionList(reportPath):
    """
    Parses a CSV which in the first column contains the IDs of all schools
    that need to be excluded from processing
    :param reportPath:
    :return: a list of schools ids to exclude
    """
    result = list()
    with open(reportPath, 'rb') as report_csvfile:
        reader = csv_unireader(report_csvfile, encoding='iso8859-7')
        for row in reader:
            result.append(row[0])
    return result


def parseReport16(reportPath='/Users/slavikos/Downloads/CSV_2015-06-03-100905.csv'):
    """
    Parse report 16 (Κατάλογος Εκπαιδευτικών που Απουσιάζουν από Σχολικές Μονάδες)
    :param reportPath:
    :return:
    """
    report16_absence_reasons = [u'ΜΑΚΡΟΧΡΟΝΙΑ ΑΔΕΙΑ (>10 ημέρες)',
                                u'ΑΠΟΣΠΑΣΗ ΣΤΟ ΕΞΩΤΕΡΙΚΟ',
                                u'ΑΠΟΣΠΑΣΗ ΣΕ ΦΟΡΕΑ ΥΠ. ΠΑΙΔΕΙΑΣ',
                                u'ΑΠΟΣΠΑΣΗ ΣΕ ΑΛΛΟ ΠΥΣΠΕ / ΠΥΣΔΕ',
                                u'ΑΠΟΣΠΑΣΗ ΣΕ ΦΟΡΕΑ ΕΚΤΟΣ ΥΠ. ΠΑΙΔΕΙΑΣ',
                                u'ΟΛΙΚΗ ΔΙΑΘΕΣΗ ΣΕ ΑΠΟΚΕΝΤΡΩΜΕΝΕΣ ΥΠΗΡΕΣΙΕΣ ΥΠ. ΠΑΙΔΕΙΑΣ']
    result = {}
    with open(reportPath, 'rb') as report_csvfile:
        reader = csv_unireader(report_csvfile, encoding='iso8859-7')
        firstRow = True
        for row in reader:
            if firstRow:
                # first row contains the headers
                firstRow = False
                continue
            # note that employee with employeeAfm is missing from school schoolId
            result[filterAFM(row[12])] = {
                "schoolId": row[6],
                "reason": "%s (%s)" % (row[22], row[23])
            }
            # check if generally absent (in case of multiple assignments) and insert in report16_absents
            if row[24] in report16_absence_reasons or unicode(row[24]).startswith(u'ΜΑΚΡΟΧΡΟΝΙΑ ΑΔΕΙΑ (>10 ημέρες)'):
                report16_absents[filterAFM(row[12])] = row[24]
    return result


def parseReport08(reportPath='/Users/slavikos/Downloads/CSV_2015-06-02-130003.csv'):
    excluded_school_types = [u'Νηπιαγωγεία']
    with open(reportPath, 'rb') as report08_csvfile:
        spamreader = csv_unireader(report08_csvfile, encoding='iso8859-7')
        firstRow = True
        for row in spamreader:
            if firstRow:
                firstRow = False
                continue
            # exclude some school types
            if row[4] in excluded_school_types:
                continue
            # check if the school id is excluded
            if row[6] in excluced_schools:
                continue
            # get school object
            schoolObj = report08_schools.get(row[6], None)
            if not schoolObj:
                # first time we see that school
                schoolObj = {
                    'id': row[6],
                    'title': row[7],
                    'email': row[10],
                    'employees': list()
                }
                # add school to dict
                report08_schools[row[6]] = schoolObj
            # fetch employee from cache
            employeeAfm = filterAFM(row[16])
            employeeObj = report08_employees.get(employeeAfm, None)
            if not employeeObj:
                # first time we see that employee
                employeeObj = {
                    'id': row[15] if row[15] else '',
                    'afm': employeeAfm,
                    'name': row[19],
                    'surname': row[18],
                    'fatherName': row[20],
                    'specialization': row[28],
                    'assigments': list()
                }
                # add the employee in the dict
                report08_employees[employeeObj.get('afm')] = employeeObj
                # add to the school as dict as well
                schoolObj['employees'].append(employeeObj)
            else:
                # employee exists in the report08_employee dict, so add it
                # (if he does not exist) in the schools dict as well
                if employeeObj not in schoolObj['employees']:
                    schoolObj['employees'].append(employeeObj)
            assigmentObj = {
                'schoolId': schoolObj['id'],
                'type': row[33],
                'assigment': row[34],
                'isMaster': True if row[35] == u'Ναι' else False,
                'hours': int(row[44]) if row[44] else 0,  # Ώρες Υποχ. Διδακτικού Ωραρίου Υπηρέτησης στο Φορέα
                'teachingHours': (int(row[46]) if row[46] else 0) + (int(row[47]) if row[47] else 0),
            }
            employeeObj['assigments'].append(assigmentObj)
            # report08_school_employees[schoolObj['id']].append(assigmentObj)


def isExcluded(employeeAfm, schoolId):
    """
    Determines if an employee is excluded from school unit id. If the schoolId
    is None, then the operation will check the general exclusion list. The
    operation will return None if the employee is not excluded or a description
    if the employee should be excluded

    :param employeeAfm: The employee's AFM
    :type employeeAfm: str
    :param schoolId: The school ID to check for exclusion
    :type schoolId: str
    :return: None if the employee is not excluded or a description if the
        employee should be excluded
    """
    if schoolId is None:
        return excluced_employees.get(employeeAfm, None)

    if len(employee_school_exclusions) > 0:
        exclusion = employee_school_exclusions.get(employeeAfm, None)
        if exclusion:
            # employee is probably excluded
            if exclusion.get('schoolId', '') == schoolId:
                return exclusion.get('reason', u"Άγνωστος λόγος εξαίρεσεις")
            else:
                return None
        else:
            return None
    else:
        return None


def processSchool(id, filter0=False):
    schoolObj = report08_schools.get(id, None)
    acceptedList = list()
    rejectedList = list()
    # fetch school employees, if school is not excluded
    schoolEmployees = schoolObj.get('employees', list()) if id not in excluced_schools else list()
    for employee in schoolEmployees:
        # check if the employee is in the general exclusion list
        excludedReason = isExcluded(employeeAfm=employee['afm'], schoolId=None)
        # check if the employee is in the exclusion list (for the given school)
        if excludedReason is None:
            excludedReason = isExcluded(employeeAfm=employee['afm'], schoolId=schoolObj['id'])
        if excludedReason:
            # employee has been excluded
            rejectedList.append(
                {
                    'employee': employee,
                    'excludedReason': excludedReason,
                }
            )
            continue
        if report16_absents and employee['afm'] in report16_absents:
            # exclude report16_absents from all schools (if they have more than one assignments)
            continue

        # some (in our case pe05, pe07) employees may have multiple secondary assignments
        # with equal, more than the main, hours. if this happens, select and enroll them
        # in their main assignment school (as instructed by the ministry of education)
        foundAssigment = None
        mainAssigment = None
        mainAssigmentHours = None
        assigmentHours = list()
        if len(employee['assigments']) > 2:
            for assigment in employee['assigments']:
                if assigment['assigment'] == u'Από Διάθεση ΠΥΣΠΕ/ΠΥΣΔΕ':
                    mainAssigment = assigment
                    mainAssigmentHours = assigment['hours']
                    continue
                else:
                    assigmentHours.append(assigment['hours'])
                    continue
            maxHours = max(assigmentHours)
            if assigmentHours.count(maxHours) > 1:
                foundAssigment = mainAssigment
        # end of multi max assignments

        primaryAssignemtns = [
            u'Από Διάθεση ΠΥΣΠΕ/ΠΥΣΔΕ',
            u'Απόσπαση (με αίτηση - κύριος φορέας)',
            u'Οργανικά',
            u'Οργανικά από Άρση Υπεραριθμίας'
        ]
        selectedAssigment = None
        for assigment in employee['assigments']:
            if foundAssigment:
                selectedAssigment = foundAssigment
                break
            if not selectedAssigment:
                selectedAssigment = employee['assigments'][0]
                continue
            if assigment['hours'] > selectedAssigment['hours']:
                # found an assigment with more hours, check the
                # new assigment
                selectedAssigment = assigment
            elif assigment['hours'] == selectedAssigment['hours']:
                # deal with same hour assignments
                # selected assigment will be accepted if the type is a primary assignment
                if assigment['assigment'] in primaryAssignemtns:
                    selectedAssigment = assigment
                else:
                    pass

        # we've checked all assignments and we have the selected assignment
        # in the selectedAssigment variable. Check if the assignment references
        # the current school and the hours attribute is > 0
        if selectedAssigment['schoolId'] == id and selectedAssigment['hours'] > 0:
            if filter0 and selectedAssigment['teachingHours'] == 0:
                # we've been asked to filter out employees with assignments
                # in the current school but without teaching hours
                rejectedList.append({
                    'employee': employee,
                    'excludedReason': u"Αποκλεισμός λόγο μη ανάθεσης διδακτικού έργου στην μονάδα",
                })
                continue
            # woooo! we have a winner !
            acceptedList.append(
                {
                    'employee': employee,
                    'assigment': selectedAssigment,
                }
            )
        else:
            # ok, employee is rejected
            schName = report08_schools.get(selectedAssigment['schoolId'], None)['title']
            rejectedList.append(
                {
                    'employee': employee,
                    'excludedReason': u"Τοποθετημένος για '%s' ώρες στην μονάδα '%s' (%s)\n με σχέση '%s'(Σχ.Έργ.: '%s')" % (
                        selectedAssigment['hours'], selectedAssigment['schoolId'], schName,
                        selectedAssigment['assigment'], selectedAssigment['type']),
                }
            )

    return {
        'school': schoolObj,
        'accepted': sorted(acceptedList, key=lambda employee: employee['employee']['surname']),
        'rejected': sorted(rejectedList, key=lambda employee: employee['employee']['surname']),
    }


def writeReportToFile(reportName, resultStr, basePath='/tmp', encoding="utf-8"):
    filePath = os.path.join(basePath, reportName)
    with codecs.open(filePath, mode="w", encoding=encoding) as textFile:
        textFile.write(resultStr)
    return filePath


def replace_all(text, dic):
    for i, j in dic.iteritems():
        text = text.replace(i, j)
    return text


def shortenTitle(schName):
    shortenDic = {u'ΟΛΟΗΜΕΡΟ': u'ΟΛ',
                  u'ΔΗΜΟΤΙΚΟ': u'Δ.',
                  u'ΣΧΟΛΕΙΟ': u'Σ.',
                  u'/': ''}
    return replace_all(schName, shortenDic)


def printTabularResults(result, includeRejected=False):
    schoolObj = result.get('school', dict())

    resultString = "\n"
    resultString = resultString + "::::::::::::::::::::::::::::::::::::::::::::::::\n"
    resultString = resultString + ":: %s - (%s) ::\n" % (schoolObj['title'], schoolObj['id'])
    resultString = resultString + "::::::::::::::::::::::::::::::::::::::::::::::::\n"
    resultString = resultString + "\n\n"

    x = PrettyTable(["#", "ΑΜ", "ΑΦΜ", u"ΕΠΩΝΥΜΟ", u"ΟΝΟΜΑ", u"ΠΑΤΡΩΝΥΜΟ", u"ΕΙΔΙΚΟΤΗΤΑ",
                     u"ΣΧΕΣΗ ΕΡΓΑΣΙΑΣ", u"ΤΟΠΟΘΕΤΗΣΗ ΣΤΗΝ ΜΟΝΑΔΑ", u"ΩΡΑΡΙΟ", u"ΑΝΑΘΕΣΕΙΣ"])
    x.align[u"#"] = "l"
    x.align[u"ΕΠΩΝΥΜΟ"] = "r"
    x.align[u"ΟΝΟΜΑ"] = "r"
    x.align[u"ΠΑΤΡΩΝΥΜΟ"] = "r"
    x.align[u"ΕΙΔΙΚΟΤΗΤΑ"] = "r"
    x.align[u"ΣΧΕΣΗ ΕΡΓΑΣΙΑΣ"] = "r"
    x.align[u"ΤΟΠΟΘΕΤΗΣΗ ΣΤΗΝ ΜΟΝΑΔΑ"] = "r"
    x.align[u"ΩΡΑΡΙΟ"] = "r"
    x.align[u"ΑΝΑΘΕΣΕΙΣ"] = "r"

    counter = 1
    for r in result.get('accepted', list()):
        e = r['employee']
        a = r['assigment']
        x.add_row([counter, e['id'], e['afm'], e['surname'], e['name'], e['fatherName'],
                   e['specialization'], a['type'], a['assigment'], a['hours'], a['teachingHours']])
        counter = counter + 1
    resultString = resultString + x.get_string()

    if includeRejected:
        x = PrettyTable(["#", "ΑΜ", "ΑΦΜ", u"ΕΠΩΝΥΜΟ", u"ΟΝΟΜΑ", u"ΠΑΤΡΩΝΥΜΟ", u"ΕΙΔΙΚΟΤΗΤΑ",
                         u"ΑΠΟΚΛΕΙΣΜΟΣ ΑΠΟ ΨΗΦΟΦΟΡΙΑ"])
        x.align[u"#"] = "l"
        x.align[u"ΕΠΩΝΥΜΟ"] = "r"
        x.align[u"ΟΝΟΜΑ"] = "r"
        x.align[u"ΠΑΤΡΩΝΥΜΟ"] = "r"
        x.align[u"ΕΙΔΙΚΟΤΗΤΑ"] = "r"
        x.align[u"ΑΠΟΚΛΕΙΣΜΟΣ ΑΠΟ ΨΗΦΟΦΟΡΙΑ"] = "l"

        counter = 1
        for r in result.get('rejected', list()):
            e = r['employee']
            x.add_row([counter, e['id'], e['afm'], e['surname'], e['name'], e['fatherName'],
                       e['specialization'], r['excludedReason']])
            counter = counter + 1

        resultString = resultString + "\n\n"
        resultString = resultString + u"###############################\n"
        resultString = resultString + u"##### Λίστα Αποκλεισμένων #####\n"
        resultString = resultString + u"###############################\n"
        resultString = resultString + "\n\n"
        resultString = resultString + x.get_string()

    return resultString


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-r8', "--report8", help="path to myschool report 8", required=True, type=str)
    parser.add_argument('-r16', "--report16", help="path to myschool report 16", type=str)
    parser.add_argument('-se', "--schoolExclusion", help="path to school exclusion list", type=str)
    parser.add_argument('-ee', "--employeeExclusion", help="path to employee exclusion list", type=str)
    parser.add_argument('--schoolId', type=str, help='generate report for the given school id')
    parser.add_argument('--filter0', action='store_true', default=False,
                        help='filter employees without teaching hour(s)')
    parser.add_argument('--rejected', action='store_true', default=False,
                        help='print rejected employees in results')
    parser.add_argument('--outputDir', type=str, help='the base path where output files should be placed')
    parser.add_argument('--titleFiles', action='store_true', default=False,
                        help='output school titles as filenames')
    parser.add_argument('--outputEncoding', default='utf-8', help='set output encoding')
    args = parser.parse_args()

    if args.schoolExclusion:
        # path to school exclusion has been specified, so go and parse
        excluced_schools = parseSchoolExclusionList(reportPath=args.schoolExclusion)

    if args.employeeExclusion:
        excluced_employees = parseEmployeeExclusionList(reportPath=args.employeeExclusion)

    # parse report 08 as it is mandatory !
    parseReport08(reportPath=args.report8)

    if args.report16:
        # path to report 16 has been specified, so parse!
        employee_school_exclusions.update(parseReport16(reportPath=args.report16))

    if args.schoolId:
        schoolObj = report08_schools[args.schoolId]
        result = processSchool(id=args.schoolId, filter0=args.filter0)
        r = printTabularResults(result, includeRejected=args.rejected)
        if args.outputDir:
            outputFileName = shortenTitle(schoolObj['title']) if args.titleFiles else args.schoolId
            path = writeReportToFile(reportName=("%s.txt" % outputFileName), resultStr=r,
                                     basePath=args.outputDir, encoding=args.outputEncoding)
            print "[*] School '%s' (%s) report has been written to file '%s'" % (args.schoolId, schoolObj['title'], path)
        else:
            print r
        exit()

    for school in report08_schools:
        schoolObj = report08_schools[school]
        result = processSchool(id=school, filter0=args.filter0)
        r = printTabularResults(result, includeRejected=args.rejected)
        if args.outputDir:
            outputFileName = shortenTitle(schoolObj['title']) if args.titleFiles else school
            path = writeReportToFile(reportName=("%s.txt" % outputFileName), resultStr=r,
                                     basePath=args.outputDir, encoding=args.outputEncoding)
            print "[*] School '%s' (%s) report has been written to file '%s'" % (school, schoolObj['title'], path)
        else:
            print r
dideher/ekloges_dieuthinton
ekloges.py
Python
gpl-2.0
18,757
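The argparse block above is the entire CLI surface of ekloges.py. A minimal invocation sketch follows; only the flag names come from the parser above, while the paths and the school id are placeholders I made up:

import subprocess

# Hypothetical run: produce one <schoolId>.txt report that also includes
# the rejected-employees table. All paths and ids here are placeholders.
subprocess.call([
    "python", "ekloges.py",
    "-r8", "/tmp/report8.xlsx",     # mandatory myschool report 8
    "--schoolId", "0000000",        # report for a single school
    "--filter0",                    # drop placements without teaching hours
    "--rejected",                   # include the exclusion table
    "--outputDir", "/tmp/reports",
])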
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

import bpy
from mathutils import Matrix


# ---------------------------QUICK PARENT------------------

def DefQuickParent(inf, out):
    ob = bpy.context.object
    if ob.type == "ARMATURE":
        target = [object for object in bpy.context.selected_objects if object != ob][0]
        ob = bpy.context.active_pose_bone if bpy.context.object.type == 'ARMATURE' else bpy.context.object
        target.select = False
        bpy.context.scene.frame_set(frame=bpy.context.scene.quick_animation_in)
        a = Matrix(target.matrix_world)
        a.invert()
        i = Matrix(ob.matrix)
        for frame in range(inf, out):
            bpy.context.scene.frame_set(frame=frame)
            ob.matrix = target.matrix_world * a * i
            bpy.ops.anim.keyframe_insert(type="LocRotScale")
    else:
        target = [object for object in bpy.context.selected_objects if object != ob][0]
        ob = bpy.context.active_pose_bone if bpy.context.object.type == 'ARMATURE' else bpy.context.object
        target.select = False
        bpy.context.scene.frame_set(frame=bpy.context.scene.quick_animation_in)
        a = Matrix(target.matrix_world)
        a.invert()
        i = Matrix(ob.matrix_world)
        for frame in range(inf, out):
            bpy.context.scene.frame_set(frame=frame)
            ob.matrix_world = target.matrix_world * a * i
            bpy.ops.anim.keyframe_insert(type="LocRotScale")


class QuickParent(bpy.types.Operator):
    """Creates a parent from one object to another over a selected frame range"""
    bl_idname = "anim.quick_parent_osc"
    bl_label = "Quick Parent"
    bl_options = {"REGISTER", "UNDO"}

    def execute(self, context):
        DefQuickParent(
            bpy.context.scene.quick_animation_in,
            bpy.context.scene.quick_animation_out,
        )
        return {'FINISHED'}
oscurart/BlenderAddons
oscurart_tools/oscurart_animation.py
Python
gpl-2.0
2,647
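DefQuickParent's core trick deserves spelling out: at the first frame it captures the child's transform relative to the target (a = T(t0)^-1, i = O(t0)), then reapplies T(t) * a * i on every following frame, so the child follows the target while keeping its original offset. A standalone sketch of that math outside Blender, assuming the pip-installable mathutils package (all names below are mine; the `@` operator mirrors Blender 2.8+, where this 2.7x-era addon used `*`):

from mathutils import Matrix, Vector

target_at_start = Matrix.Translation(Vector((1.0, 0.0, 0.0)))  # T(t0)
child_at_start = Matrix.Translation(Vector((3.0, 0.0, 0.0)))   # O(t0)

# Captured once at quick_animation_in, exactly like `a` and `i` above:
a = target_at_start.inverted()
i = child_at_start

# At a later frame the target has moved; reapplying T(t) * T(t0)^-1 * O(t0)
# carries the child along while preserving its (+2, 0, 0) offset.
target_now = Matrix.Translation(Vector((5.0, 2.0, 0.0)))
child_now = target_now @ a @ i

print(child_now.to_translation())  # Vector((7.0, 2.0, 0.0))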
from __future__ import division, print_function
import unittest
import inspect

import sympy
from sympy import symbols
import numpy as np

from symfit.api import Variable, Parameter, Fit, FitResults, Maximize, Minimize, exp, Likelihood, ln, log, variables, parameters
from symfit.functions import Gaussian, Exp
import scipy.stats
from scipy.optimize import curve_fit
from symfit.core.support import sympy_to_scipy, sympy_to_py

import matplotlib.pyplot as plt
import seaborn


class TddInPythonExample(unittest.TestCase):
    def test_gaussian(self):
        x0, sig = parameters('x0, sig')
        x = Variable()

        new = sympy.exp(-(x - x0)**2/(2*sig**2))
        self.assertIsInstance(new, sympy.exp)

        g = Gaussian(x, x0, sig)
        self.assertTrue(issubclass(g.__class__, sympy.exp))

    def test_callable(self):
        a, b = parameters('a, b')
        x, y = variables('x, y')
        func = a*x**2 + b*y**2
        result = func(x=2, y=3, a=3, b=9)
        self.assertEqual(result, 3*2**2 + 9*3**2)

        xdata = np.arange(1,10)
        ydata = np.arange(1,10)
        result = func(x=ydata, y=ydata, a=3, b=9)
        self.assertTrue(np.array_equal(result, 3*xdata**2 + 9*ydata**2))

    def test_read_only_results(self):
        """
        Fit results should be read-only. Let's try to break this!
        """
        xdata = np.linspace(1,10,10)
        ydata = 3*xdata**2

        a = Parameter(3.0, min=2.75)
        b = Parameter(2.0, max=2.75)
        x = Variable('x')
        new = a*x**b

        fit = Fit(new, xdata, ydata)
        fit_result = fit.execute()

        # Break it!
        try:
            fit_result.params = 'hello'
        except AttributeError:
            self.assertTrue(True)  # desired result
        else:
            self.assertNotEqual(fit_result.params, 'hello')

        try:
            # Bypass the property getter. This will work, as it sets the instance value of __params.
            fit_result.__params = 'hello'
        except AttributeError as foo:
            self.assertTrue(False)  # undesired result
        else:
            self.assertNotEqual(fit_result.params, 'hello')
            # The assignment will have succeeded on the instance because we set it from the outside.
            # I must admit I don't fully understand why this is allowed and I don't like it.
            # However, the tests below show that it did not influence the class method itself so
            # fitting still works fine.
            self.assertEqual(fit_result.__params, 'hello')

        # Do a second fit and double-check that we do not overwrite something crucial.
xdata = np.arange(-5, 5, 1) ydata = np.arange(-5, 5, 1) xx, yy = np.meshgrid(xdata, ydata, sparse=False) xdata_coor = np.dstack((xx, yy)) zdata = (2.5*xx**2 + 3.0*yy**2) a = Parameter(2.5, max=2.75) b = Parameter(3.0, min=2.75) x = Variable() y = Variable() new = (a*x**2 + b*y**2) fit_2 = Fit(new, xdata_coor, zdata) fit_result_2 = fit_2.execute() self.assertNotAlmostEqual(fit_result.params.a, fit_result_2.params.a) self.assertAlmostEqual(fit_result.params.a, 3.0) self.assertAlmostEqual(fit_result_2.params.a, 2.5) self.assertNotAlmostEqual(fit_result.params.b, fit_result_2.params.b) self.assertAlmostEqual(fit_result.params.b, 2.0) self.assertAlmostEqual(fit_result_2.params.b, 3.0) def test_fitting(self): xdata = np.linspace(1,10,10) ydata = 3*xdata**2 a = Parameter(3.0) b = Parameter(2.0) x = Variable('x') new = a*x**b fit = Fit(new, xdata, ydata) func = sympy_to_py(new, [x], [a, b]) result = func(xdata, 3, 2) self.assertTrue(np.array_equal(result, ydata)) result = fit.scipy_func(fit.xdata, [3, 2]) self.assertTrue(np.array_equal(result, ydata)) args, varargs, keywords, defaults = inspect.getargspec(func) # self.assertEqual(args, ['x', 'a', 'b']) fit_result = fit.execute() self.assertIsInstance(fit_result, FitResults) self.assertAlmostEqual(fit_result.params.a, 3.0) self.assertAlmostEqual(fit_result.params.b, 2.0) self.assertIsInstance(fit_result.params.a_stdev, float) self.assertIsInstance(fit_result.params.b_stdev, float) self.assertIsInstance(fit_result.r_squared, float) # Test several false ways to access the data. self.assertRaises(AttributeError, getattr, *[fit_result.params, 'a_fdska']) self.assertRaises(AttributeError, getattr, *[fit_result.params, 'c']) self.assertRaises(AttributeError, getattr, *[fit_result.params, 'a_stdev_stdev']) self.assertRaises(AttributeError, getattr, *[fit_result.params, 'a_stdev_']) self.assertRaises(AttributeError, getattr, *[fit_result.params, 'a__stdev']) def test_numpy_functions(self): xdata = np.linspace(1,10,10) ydata = 45*np.log(xdata*2) a = Parameter() b = Parameter(value=2.1, fixed=True) x = Variable() new = a*sympy.log(x*b) def test_grid_fitting(self): xdata = np.arange(-5, 5, 1) ydata = np.arange(-5, 5, 1) xx, yy = np.meshgrid(xdata, ydata, sparse=False) xdata_coor = np.dstack((xx, yy)) zdata = (2.5*xx**2 + 3.0*yy**2) a = Parameter(2.5, max=2.75) b = Parameter(3.0, min=2.75) x = Variable() y = Variable() new = (a*x**2 + b*y**2) fit = Fit(new, xdata_coor, zdata) # Test the flatten function for consistency. xdata_coor_flat, zdata_flat = fit._flatten(xdata_coor, zdata) # _flatten transposes such arrays because the variables are in the deepest dimension instead of the first. # This is normally not a problem because all we want from the fit is the correct parameters. self.assertFalse(np.array_equal(zdata, zdata_flat.reshape((10,10)))) self.assertTrue(np.array_equal(zdata, zdata_flat.reshape((10,10)).T)) self.assertFalse(np.array_equal(xdata_coor, xdata_coor_flat.reshape((10,10,2)))) new_xdata = xdata_coor_flat.reshape((2,10,10)).T self.assertTrue(np.array_equal(xdata_coor, new_xdata)) results = fit.execute() self.assertAlmostEqual(results.params.a, 2.5) self.assertAlmostEqual(results.params.b, 3.) 
    def test_2D_fitting(self):
        xdata = np.random.randint(-10, 11, size=(2, 400))
        zdata = 2.5*xdata[0]**2 + 7.0*xdata[1]**2

        a = Parameter()
        b = Parameter()
        x = Variable()
        y = Variable()
        new = a*x**2 + b*y**2

        fit = Fit(new, xdata, zdata)

        result = fit.scipy_func(fit.xdata, [2, 3])

        import inspect
        args, varargs, keywords, defaults = inspect.getargspec(fit.scipy_func)
        self.assertEqual(args, ['x', 'p'])

        fit_result = fit.execute()
        self.assertIsInstance(fit_result, FitResults)

    def test_gaussian_fitting(self):
        xdata = 2*np.random.rand(10000) - 1  # random between [-1, 1]
        ydata = scipy.stats.norm.pdf(xdata, loc=0.0, scale=1.0)

        x0 = Parameter()
        sig = Parameter()
        A = Parameter()
        x = Variable()
        g = A * Gaussian(x, x0, sig)

        fit = Fit(g, xdata, ydata)
        fit_result = fit.execute()

        self.assertAlmostEqual(fit_result.params.A, 0.3989423)
        self.assertAlmostEqual(np.abs(fit_result.params.sig), 1.0)
        self.assertAlmostEqual(fit_result.params.x0, 0.0)
        # raise Exception([i for i in fit_result.params])
        sexy = g(x=2.0, **fit_result.params)
        ugly = g(
            x=2.0,
            x0=fit_result.params.x0,
            A=fit_result.params.A,
            sig=fit_result.params.sig,
        )
        self.assertEqual(sexy, ugly)

    def test_2_gaussian_2d_fitting(self):
        np.random.seed(4242)
        mean = (0.3, 0.3)  # x, y mean: 0.3, 0.3
        cov = [[0.01**2, 0], [0, 0.01**2]]
        data = np.random.multivariate_normal(mean, cov, 1000000)
        mean = (0.7, 0.7)  # x, y mean: 0.7, 0.7
        cov = [[0.01**2, 0], [0, 0.01**2]]
        data_2 = np.random.multivariate_normal(mean, cov, 1000000)
        data = np.vstack((data, data_2))

        # Insert them as y,x here, as np does not follow Cartesian conventions.
        ydata, xedges, yedges = np.histogram2d(data[:,1], data[:,0], bins=100, range=[[0.0, 1.0], [0.0, 1.0]])
        xcentres = (xedges[:-1] + xedges[1:]) / 2
        ycentres = (yedges[:-1] + yedges[1:]) / 2

        # Make a valid grid to match ydata
        xx, yy = np.meshgrid(xcentres, ycentres, sparse=False)
        xdata = np.dstack((xx, yy)).T

        x = Variable()
        y = Variable()

        x0_1 = Parameter(0.7, min=0.6, max=0.8)
        sig_x_1 = Parameter(0.1, min=0.0, max=0.2)
        y0_1 = Parameter(0.7, min=0.6, max=0.8)
        sig_y_1 = Parameter(0.1, min=0.0, max=0.2)
        A_1 = Parameter()
        g_1 = A_1 * Gaussian(x, x0_1, sig_x_1) * Gaussian(y, y0_1, sig_y_1)

        x0_2 = Parameter(0.3, min=0.2, max=0.4)
        sig_x_2 = Parameter(0.1, min=0.0, max=0.2)
        y0_2 = Parameter(0.3, min=0.2, max=0.4)
        sig_y_2 = Parameter(0.1, min=0.0, max=0.2)
        A_2 = Parameter()
        g_2 = A_2 * Gaussian(x, x0_2, sig_x_2) * Gaussian(y, y0_2, sig_y_2)

        model = g_1 + g_2
        fit = Fit(model, xdata, ydata)
        fit_result = fit.execute()

        img = model(x=xx, y=yy, **fit_result.params)
        img_g_1 = g_1(x=xx, y=yy, **fit_result.params)

        # Equal up to some precision. Not much obviously.
        self.assertAlmostEqual(fit_result.params.x0_1, 0.7, 2)
        self.assertAlmostEqual(fit_result.params.y0_1, 0.7, 2)
        self.assertAlmostEqual(fit_result.params.x0_2, 0.3, 2)
        self.assertAlmostEqual(fit_result.params.y0_2, 0.3, 2)

    def test_gaussian_2d_fitting(self):
        mean = (0.6, 0.4)  # x, y mean: 0.6, 0.4
        cov = [[0.2**2, 0], [0, 0.1**2]]
        data = np.random.multivariate_normal(mean, cov, 1000000)

        # Insert them as y,x here, as np does not follow Cartesian conventions.
        ydata, xedges, yedges = np.histogram2d(data[:,0], data[:,1], bins=100, range=[[0.0, 1.0], [0.0, 1.0]])
        xcentres = (xedges[:-1] + xedges[1:]) / 2
        ycentres = (yedges[:-1] + yedges[1:]) / 2

        # Make a valid grid to match ydata
        xx, yy = np.meshgrid(xcentres, ycentres, sparse=False)
        xdata = np.dstack((xx, yy)).T  # T, as np does not follow Cartesian conventions.
x0 = Parameter(0.6) sig_x = Parameter(0.2, min=0.0) x = Variable() y0 = Parameter(0.4) sig_y = Parameter(0.1, min=0.0) A = Parameter() y = Variable() g = A * Gaussian(x, x0, sig_x) * Gaussian(y, y0, sig_y) fit = Fit(g, xdata, ydata) fit_result = fit.execute() # Again, the order seems to be swapped for py3k self.assertAlmostEqual(fit_result.params.x0, np.mean(data[:,0]), 3) self.assertAlmostEqual(fit_result.params.y0, np.mean(data[:,1]), 3) self.assertAlmostEqual(np.abs(fit_result.params.sig_x), np.std(data[:,0]), 3) self.assertAlmostEqual(np.abs(fit_result.params.sig_y), np.std(data[:,1]), 3) self.assertGreaterEqual(fit_result.r_squared, 0.99) def test_minimize(self): x = Parameter(-1.) y = Parameter() model = 2*x*y + 2*x - x**2 - 2*y**2 from sympy import Eq, Ge constraints = [ Ge(y - 1, 0), #y - 1 >= 0, Eq(x**3 - y, 0), # x**3 - y == 0, ] # raise Exception(model.atoms(), model.as_ordered_terms()) # self.assertIsInstance(constraints[0], Eq) # Unbounded fit = Maximize(model) fit_result = fit.execute() self.assertAlmostEqual(fit_result.params.y, 1.) self.assertAlmostEqual(fit_result.params.x, 2.) fit = Maximize(model, constraints=constraints) fit_result = fit.execute() self.assertAlmostEqual(fit_result.params.x, 1.00000009) self.assertAlmostEqual(fit_result.params.y, 1.) def test_scipy_style(self): def func(x, sign=1.0): """ Objective function """ return sign*(2*x[0]*x[1] + 2*x[0] - x[0]**2 - 2*x[1]**2) def func_deriv(x, sign=1.0): """ Derivative of objective function """ dfdx0 = sign*(-2*x[0] + 2*x[1] + 2) dfdx1 = sign*(2*x[0] - 4*x[1]) return np.array([ dfdx0, dfdx1 ]) cons = ( {'type': 'eq', 'fun' : lambda x: np.array([x[0]**3 - x[1]]), 'jac' : lambda x: np.array([3.0*(x[0]**2.0), -1.0])}, {'type': 'ineq', 'fun' : lambda x: np.array([x[1] - 1]), 'jac' : lambda x: np.array([0.0, 1.0])}) from scipy.optimize import minimize res = minimize(func, [-1.0,1.0], args=(-1.0,), jac=func_deriv, method='SLSQP', options={'disp': True}) res = minimize(func, [-1.0,1.0], args=(-1.0,), jac=func_deriv, constraints=cons, method='SLSQP', options={'disp': True}) def test_likelihood_fitting(self): """ Fit using the likelihood method. """ b = Parameter(4, min=3.0) x = Variable() pdf = (1/b) * exp(- x / b) # Draw 100 points from an exponential distribution. 
        # np.random.seed(100)
        xdata = np.random.exponential(5, 100000)

        fit = Likelihood(pdf, xdata)
        fit_result = fit.execute()

        self.assertAlmostEqual(fit_result.params.b, 5., 1)

    def test_parameter_add(self):
        a = Parameter(value=1.0, min=0.5, max=1.5)
        b = Parameter(1.0, min=0.0)
        new = a + b
        self.assertIsInstance(new, sympy.Add)

    def test_argument_name(self):
        a = Parameter()
        b = Parameter(name='b')
        c = Parameter(name='d')
        self.assertEqual(a.name, 'a')
        self.assertEqual(b.name, 'b')
        self.assertEqual(c.name, 'd')

    def test_symbol_add(self):
        x, y = symbols('x y')
        new = x + y
        self.assertIsInstance(new, sympy.Add)

    def test_evaluate_model(self):
        A = Parameter()
        x = Variable()
        new = A * x ** 2

        self.assertEqual(new(x=2, A=2), 8)
        self.assertNotEqual(new(x=2, A=3), 8)

    def test_symbol_object_add(self):
        from sympy.core.symbol import Symbol
        x = Symbol('x')
        y = Symbol('y')
        new = x + y
        self.assertIsInstance(new, sympy.Add)

    def test_simple_sigma(self):
        from symfit.api import Variable, Parameter, Fit

        t_data = np.array([1.4, 2.1, 2.6, 3.0, 3.3])
        y_data = np.array([10, 20, 30, 40, 50])

        sigma = 0.2
        n = np.array([5, 3, 8, 15, 30])
        sigma_t = sigma / np.sqrt(n)

        # We now define our model
        y = Variable()
        g = Parameter()
        t_model = (2 * y / g)**0.5

        fit = Fit(t_model, y_data, t_data)#, sigma=sigma_t)
        fit_result = fit.execute()

        # h_smooth = np.linspace(0,60,100)
        # t_smooth = t_model(y=h_smooth, **fit_result.params)

        # Let's compare with the results from curve_fit, no weights
        popt_noweights, pcov_noweights = curve_fit(lambda y, p: (2 * y / p)**0.5, y_data, t_data)

        self.assertAlmostEqual(fit_result.params.g, popt_noweights[0])
        self.assertAlmostEqual(fit_result.params.g_stdev, np.sqrt(pcov_noweights[0, 0]))

        # Same sigma everywhere
        fit = Fit(t_model, y_data, t_data, sigma=0.0031, absolute_sigma=False)
        fit_result = fit.execute()
        popt_sameweights, pcov_sameweights = curve_fit(lambda y, p: (2 * y / p)**0.5, y_data, t_data, sigma=0.0031, absolute_sigma=False)

        self.assertAlmostEqual(fit_result.params.g, popt_sameweights[0], 4)
        self.assertAlmostEqual(fit_result.params.g_stdev, np.sqrt(pcov_sameweights[0, 0]), 4)
        # Same weight everywhere should be the same as no weight.
        self.assertAlmostEqual(fit_result.params.g, popt_noweights[0], 4)
        self.assertAlmostEqual(fit_result.params.g_stdev, np.sqrt(pcov_noweights[0, 0]), 4)

        # Different sigma for every point
        fit = Fit(t_model, y_data, t_data, sigma=0.1*sigma_t, absolute_sigma=False)
        fit_result = fit.execute()
        popt, pcov = curve_fit(lambda y, p: (2 * y / p)**0.5, y_data, t_data, sigma=.1*sigma_t)

        self.assertAlmostEqual(fit_result.params.g, popt[0])
        self.assertAlmostEqual(fit_result.params.g_stdev, np.sqrt(pcov[0, 0]))

        self.assertAlmostEqual(fit_result.params.g, 9.095, 3)
        self.assertAlmostEqual(fit_result.params.g_stdev, 0.102, 3)  # according to Mathematica

    def test_error_advanced(self):
        """
        Models an example from the mathematica docs and tries to replicate it:
        http://reference.wolfram.com/language/howto/FitModelsWithMeasurementErrors.html
        """
        data = [
            [0.9, 6.1, 9.5], [3.9, 6., 9.7], [0.3, 2.8, 6.6],
            [1., 2.2, 5.9], [1.8, 2.4, 7.2], [9., 1.7, 7.],
            [7.9, 8., 10.4], [4.9, 3.9, 9.], [2.3, 2.6, 7.4],
            [4.7, 8.4, 10.]
        ]
        x, y, z = zip(*data)
        xy = np.vstack((x, y))
        z = np.array(z)
        errors = np.array([.4, .4, .2, .4, .1, .3, .1, .2, .2, .2])

        # raise Exception(xy, z)
        a = Parameter()
        b = Parameter(0.9)
        c = Parameter(5)
        x = Variable()
        y = Variable()
        model = a * log(b * x + c * y)

        fit = Fit(model, xy, z, absolute_sigma=False)
        fit_result = fit.execute()
        print(fit_result)

        # Same as Mathematica default behavior.
        self.assertAlmostEqual(fit_result.params.a, 2.9956, 4)
        self.assertAlmostEqual(fit_result.params.b, 0.563212, 4)
        self.assertAlmostEqual(fit_result.params.c, 3.59732, 4)
        self.assertAlmostEqual(fit_result.params.a_stdev, 0.278304, 4)
        self.assertAlmostEqual(fit_result.params.b_stdev, 0.224107, 4)
        self.assertAlmostEqual(fit_result.params.c_stdev, 0.980352, 4)

        fit = Fit(model, xy, z, absolute_sigma=True)
        fit_result = fit.execute()
        # Same as Mathematica in Measurement error mode, but without supplying
        # any errors.
        self.assertAlmostEqual(fit_result.params.a, 2.9956, 4)
        self.assertAlmostEqual(fit_result.params.b, 0.563212, 4)
        self.assertAlmostEqual(fit_result.params.c, 3.59732, 4)
        self.assertAlmostEqual(fit_result.params.a_stdev, 0.643259, 4)
        self.assertAlmostEqual(fit_result.params.b_stdev, 0.517992, 4)
        self.assertAlmostEqual(fit_result.params.c_stdev, 2.26594, 4)

        fit = Fit(model, xy, z, sigma=errors)
        fit_result = fit.execute()

        popt, pcov, infodict, errmsg, ier = curve_fit(lambda x_vec, a, b, c: a * np.log(b * x_vec[0] + c * x_vec[1]), xy, z, sigma=errors, absolute_sigma=True, full_output=True)

        # Same as curve_fit?
        self.assertAlmostEqual(fit_result.params.a, popt[0], 4)
        self.assertAlmostEqual(fit_result.params.b, popt[1], 4)
        self.assertAlmostEqual(fit_result.params.c, popt[2], 4)
        self.assertAlmostEqual(fit_result.params.a_stdev, np.sqrt(pcov[0,0]), 4)
        self.assertAlmostEqual(fit_result.params.b_stdev, np.sqrt(pcov[1,1]), 4)
        self.assertAlmostEqual(fit_result.params.c_stdev, np.sqrt(pcov[2,2]), 4)

        # Same as Mathematica with MEASUREMENT ERROR
        self.assertAlmostEqual(fit_result.params.a, 2.68807, 4)
        self.assertAlmostEqual(fit_result.params.b, 0.941344, 4)
        self.assertAlmostEqual(fit_result.params.c, 5.01541, 4)
        self.assertAlmostEqual(fit_result.params.a_stdev, 0.0974628, 4)
        self.assertAlmostEqual(fit_result.params.b_stdev, 0.247018, 4)
        self.assertAlmostEqual(fit_result.params.c_stdev, 0.597661, 4)

    def test_error_analytical(self):
        """
        Test using a case where the analytical answer is known.
Modeled after: http://nbviewer.ipython.org/urls/gist.github.com/taldcroft/5014170/raw/31e29e235407e4913dc0ec403af7ed524372b612/curve_fit.ipynb """ N = 10000 sigma = 10 xn = np.arange(N, dtype=np.float) yn = np.zeros_like(xn) yn = yn + np.random.normal(size=len(yn), scale=sigma) a = Parameter() model = a fit = Fit(model, xn, yn, sigma=sigma) fit_result = fit.execute() popt, pcov = curve_fit(lambda x, a: a * np.ones_like(x), xn, yn, sigma=sigma, absolute_sigma=True) self.assertAlmostEqual(fit_result.params.a, popt[0], 5) self.assertAlmostEqual(fit_result.params.a_stdev, np.sqrt(np.diag(pcov))[0], 2) fit_no_sigma = Fit(model, xn, yn) fit_result_no_sigma = fit_no_sigma.execute() popt, pcov = curve_fit(lambda x, a: a * np.ones_like(x), xn, yn,) # With or without sigma, the bestfit params should be in agreement in case of equal weights self.assertAlmostEqual(fit_result.params.a, fit_result_no_sigma.params.a, 5) # Since symfit is all about absolute errors, the sigma will not be in agreement self.assertNotEqual(fit_result.params.a_stdev, fit_result_no_sigma.params.a_stdev, 5) self.assertAlmostEqual(fit_result_no_sigma.params.a, popt[0], 5) self.assertAlmostEqual(fit_result_no_sigma.params.a_stdev, pcov[0][0]**0.5, 5) # Analytical answer for mean of N(0,1): mu = 0.0 sigma_mu = sigma/N**0.5 # self.assertAlmostEqual(fit_result.params.a, mu, 5) self.assertAlmostEqual(fit_result.params.a_stdev, sigma_mu, 5) def test_straight_line_analytical(self): """ Test symfit against a straight line, for which the parameters and their uncertainties are known analytically. Assuming equal weights. :return: """ data = [[0, 1], [1, 0], [3, 2], [5, 4]] x, y = (np.array(i, dtype='float64') for i in zip(*data)) # x = np.arange(0, 100, 0.1) # np.random.seed(10) # y = 3.0*x + 105.0 + np.random.normal(size=x.shape) dx = x - x.mean() dy = y - y.mean() mean_squared_x = np.mean(x**2) - np.mean(x)**2 mean_xy = np.mean(x * y) - np.mean(x)*np.mean(y) a = mean_xy/mean_squared_x b = y.mean() - a * x.mean() self.assertAlmostEqual(a, 0.694915, 6) # values from Mathematica self.assertAlmostEqual(b, 0.186441, 6) print(a, b) S = np.sum((y - (a*x + b))**2) var_a_exact = S/(len(x) * (len(x) - 2) * mean_squared_x) var_b_exact = var_a_exact*np.mean(x ** 2) a_exact = a b_exact = b # We will now compare these exact results with values from symfit a, b, x_var = Parameter(name='a', value=3.0), Parameter(name='b'), Variable(name='x') model = a*x_var + b fit = Fit(model, x, y, absolute_sigma=False) fit_result = fit.execute() popt, pcov = curve_fit(lambda z, c, d: c * z + d, x, y, Dfun=lambda p, x, y, func: np.transpose([x, np.ones_like(x)])) # Dfun=lambda p, x, y, func: print(p, func, x, y)) # curve_fit self.assertAlmostEqual(a_exact, popt[0], 4) self.assertAlmostEqual(b_exact, popt[1], 4) self.assertAlmostEqual(var_a_exact, pcov[0][0], 6) self.assertAlmostEqual(var_b_exact, pcov[1][1], 6) self.assertAlmostEqual(a_exact, fit_result.params.a, 4) self.assertAlmostEqual(b_exact, fit_result.params.b, 4) self.assertAlmostEqual(var_a_exact**0.5, fit_result.params.a_stdev, 6) self.assertAlmostEqual(var_b_exact**0.5, fit_result.params.b_stdev, 6) if __name__ == '__main__': unittest.main()
Eljee/symfit
symfit/tests/tests.py
Python
gpl-2.0
23,874
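Distilled from test_fitting above, this is the smallest symfit round trip these tests exercise. It assumes the same early symfit API the tests use (Parameter/Variable, Fit, and attribute access like fit_result.params.a); later symfit releases changed the FitResults interface:

import numpy as np
from symfit.api import Parameter, Variable, Fit

xdata = np.linspace(1, 10, 10)
ydata = 3 * xdata ** 2

a = Parameter(3.0)   # initial guess
b = Parameter(2.0)
x = Variable('x')
model = a * x ** b   # symbolic model, exactly as in the tests

fit_result = Fit(model, xdata, ydata).execute()
print(fit_result.params.a, fit_result.params.b)          # ~3.0, ~2.0
print(fit_result.params.a_stdev, fit_result.r_squared)   # uncertainty, fit quality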
# Copyright (C) 2008-2010 Adam Olsen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # # The developers of the Exaile media player hereby grant permission # for non-GPL compatible GStreamer and Exaile plugins to be used and # distributed together with GStreamer and Exaile. This permission is # above and beyond the permissions granted by the GPL license by which # Exaile is covered. If you modify this code, you may extend this # exception to your version of the code, but you are not obligated to # do so. If you do not wish to do so, delete this exception statement # from your version. from gi.repository import GLib from gi.repository import Gtk import xl.unicode from xl import event, main, plugins, xdg from xlgui.widgets import common, dialogs from xl.nls import gettext as _, ngettext import logging logger = logging.getLogger(__name__) name = _('Plugins') ui = xdg.get_data_path('ui', 'preferences', 'plugin.ui') class PluginManager(object): """ Gui to manage plugins """ def __init__(self, preferences, builder): """ Initializes the manager """ self.preferences = preferences builder.connect_signals(self) self.plugins = main.exaile().plugins self.message = dialogs.MessageBar( parent=builder.get_object('preferences_pane'), buttons=Gtk.ButtonsType.CLOSE ) self.message.connect('response', self.on_messagebar_response) self.list = builder.get_object('plugin_tree') self.enabled_cellrenderer = builder.get_object('enabled_cellrenderer') if main.exaile().options.Debug: reload_cellrenderer = common.ClickableCellRendererPixbuf() reload_cellrenderer.props.icon_name = 'view-refresh' reload_cellrenderer.props.xalign = 1 reload_cellrenderer.connect('clicked', self.on_reload_cellrenderer_clicked) name_column = builder.get_object('name_column') name_column.pack_start(reload_cellrenderer, True) name_column.add_attribute(reload_cellrenderer, 'visible', 3) self.version_label = builder.get_object('version_label') self.author_label = builder.get_object('author_label') self.name_label = builder.get_object('name_label') self.description = builder.get_object('description_view') self.model = builder.get_object('model') self.filter_model = self.model.filter_new() self.show_incompatible_cb = builder.get_object('show_incompatible_cb') self.filter_model.set_visible_func(self._model_visible_func) selection = self.list.get_selection() selection.connect('changed', self.on_selection_changed) self._load_plugin_list() self._evt_rm1 = event.add_ui_callback( self.on_plugin_event, 'plugin_enabled', None, True ) self._evt_rm2 = event.add_ui_callback( self.on_plugin_event, 'plugin_disabled', None, False ) self.list.connect('destroy', self.on_destroy) GLib.idle_add(selection.select_path, (0,)) GLib.idle_add(self.list.grab_focus) def _load_plugin_list(self): """ Loads the plugin list """ plugins = self.plugins.list_installed_plugins() uncategorized = _('Uncategorized') plugins_dict = {uncategorized: []} 
failed_list = [] self.plugin_to_path = {} for plugin_name in plugins: try: info = self.plugins.get_plugin_info(plugin_name) compatible = self.plugins.is_compatible(info) broken = self.plugins.is_potentially_broken(info) except Exception: failed_list += [plugin_name] continue # determine icon to show if not compatible: icon = 'dialog-error' elif broken: icon = 'dialog-warning' else: icon = None enabled = plugin_name in self.plugins.enabled_plugins plugin_data = ( plugin_name, info['Name'], str(info['Version']), enabled, icon, broken, compatible, True, ) if 'Category' in info: cat = plugins_dict.setdefault(info['Category'], []) cat.append(plugin_data) else: plugins_dict[uncategorized].append(plugin_data) self.list.set_model(None) self.model.clear() def categorykey(item): if item[0] == uncategorized: return '\xff' * 10 return xl.unicode.strxfrm(item[0]) plugins_dict = sorted(plugins_dict.iteritems(), key=categorykey) for category, plugins_list in plugins_dict: plugins_list.sort(key=lambda x: xl.unicode.strxfrm(x[1])) it = self.model.append( None, (None, category, '', False, '', False, True, False) ) for plugin_data in plugins_list: pit = self.model.append(it, plugin_data) path = self.model.get_string_from_iter(pit) self.plugin_to_path[plugin_data[0]] = path self.list.set_model(self.filter_model) # TODO: Keep track of which categories are already expanded, and only expand those self.list.expand_all() if failed_list: self.message.show_error( _('Could not load plugin info!'), ngettext('Failed plugin: %s', 'Failed plugins: %s', len(failed_list)) % ', '.join(failed_list), ) def on_destroy(self, widget): self._evt_rm1() self._evt_rm2() def on_messagebar_response(self, widget, response): """ Hides the messagebar if requested """ if response == Gtk.ResponseType.CLOSE: widget.hide() def on_plugin_tree_row_activated(self, tree, path, column): """ Enables or disables the selected plugin """ self.enabled_cellrenderer.emit('toggled', path[0]) def on_reload_cellrenderer_clicked(self, cellrenderer, path): """ Reloads a plugin from scratch """ plugin_name = self.filter_model[path][0] enabled = self.filter_model[path][3] if enabled: try: self.plugins.disable_plugin(plugin_name) except Exception as e: self.message.show_error(_('Could not disable plugin!'), str(e)) return logger.info('Reloading plugin %s...', plugin_name) self.plugins.load_plugin(plugin_name, reload_plugin=True) if enabled: try: self.plugins.enable_plugin(plugin_name) except Exception as e: self.message.show_error(_('Could not enable plugin!'), str(e)) return def on_install_plugin_button_clicked(self, button): """ Shows a dialog allowing the user to choose a plugin to install from the filesystem """ dialog = Gtk.FileChooserDialog( _('Choose a Plugin'), self.preferences.parent, buttons=( Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_ADD, Gtk.ResponseType.OK, ), ) filter = Gtk.FileFilter() filter.set_name(_('Plugin Archives')) filter.add_pattern("*.exz") filter.add_pattern("*.tar.gz") filter.add_pattern("*.tar.bz2") dialog.add_filter(filter) filter = Gtk.FileFilter() filter.set_name(_('All Files')) filter.add_pattern('*') dialog.add_filter(filter) result = dialog.run() dialog.hide() if result == Gtk.ResponseType.OK: try: self.plugins.install_plugin(dialog.get_filename()) except plugins.InvalidPluginError as e: self.message.show_error(_('Plugin file installation failed!'), str(e)) return self._load_plugin_list() def on_selection_changed(self, selection, user_data=None): """ Called when a row is selected """ model, paths = 
selection.get_selected_rows() if not paths: return row = model[paths[0]] if not row[7]: self.author_label.set_label('') self.description.get_buffer().set_text('') self.name_label.set_label('') return info = self.plugins.get_plugin_info(row[0]) self.author_label.set_label(",\n".join(info['Authors'])) self.description.get_buffer().set_text(info['Description'].replace(r'\n', "\n")) self.name_label.set_markup( "<b>%s</b> <small>%s</small>" % (info['Name'], info['Version']) ) def on_enabled_cellrenderer_toggled(self, cellrenderer, path): """ Called when the checkbox is toggled """ path = Gtk.TreePath.new_from_string(path) plugin_name = self.filter_model[path][0] if plugin_name is None: return enable = not self.filter_model[path][3] if enable: try: self.plugins.enable_plugin(plugin_name) except Exception as e: self.message.show_error(_('Could not enable plugin!'), str(e)) return else: try: self.plugins.disable_plugin(plugin_name) except Exception as e: self.message.show_error(_('Could not disable plugin!'), str(e)) return self.on_selection_changed(self.list.get_selection()) def on_plugin_event(self, evtname, obj, plugin_name, enabled): if hasattr(self.plugins.loaded_plugins[plugin_name], 'get_preferences_pane'): self.preferences._load_plugin_pages() path = self.plugin_to_path[plugin_name] self.model[path][3] = enabled def on_show_incompatible_cb_toggled(self, widget): self.filter_model.refilter() def _model_visible_func(self, model, iter, data): row = model[iter] compatible = row[6] return compatible or self.show_incompatible_cb.get_active() def init(preferences, xml): PluginManager(preferences, xml)
genodeftest/exaile
xlgui/preferences/plugin.py
Python
gpl-2.0
11,178
######################################################################
#
# FSP3000R7NCU object class
#
# Copyright (C) 2011 Russell Dwarshuis, Merit Network, Inc.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
######################################################################

__doc__ = """FSP3000R7NCU

FSP3000R7NCU is a component of a FSP3000R7Device Device
"""

from ZenPacks.Merit.AdvaFSP3000R7.lib.FSP3000R7Component import *

import logging
log = logging.getLogger('FSP3000R7NCU')


class FSP3000R7NCU(FSP3000R7Component):
    """FSP3000R7NCU object"""

    portal_type = meta_type = 'FSP3000R7NCU'

    _relations = (("FSP3000R7Dev",
                   ToOne(ToManyCont,
                         "ZenPacks.Merit.AdvaFSP3000R7.FSP3000R7Device",
                         "FSP3000R7Ncu")), )


InitializeClass(FSP3000R7NCU)
kb8u/ZenPacks.Merit.AdvaFSP3000R7
ZenPacks/Merit/AdvaFSP3000R7/FSP3000R7NCU.py
Python
gpl-2.0
933
# Copyright (C) 2013-2014 Fox Wilson, Peter Foley, Srijay Kasturi, Samuel Damashek, James Forcier and Reed Koser
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import re
from random import randint
from helpers.orm import Scores
from helpers.command import Command


def pluralize(s, n):
    if n == 1:
        return s
    else:
        return s + 's'


@Command('score', ['config', 'db', 'botnick'])
def cmd(send, msg, args):
    """Gets scores.

    Syntax: {command} <--high|--low|nick>
    """
    if not args['config']['feature'].getboolean('hooks'):
        send("Hooks are disabled, and this command depends on hooks. Please contact the bot admin(s).")
        return
    session = args['db']
    match = re.match('--(.+)', msg)
    if match:
        if match.group(1) == 'high':
            data = session.query(Scores).order_by(Scores.score.desc()).limit(3).all()
            send('High Scores:')
            for x in data:
                send("%s: %s" % (x.nick, x.score))
        elif match.group(1) == 'low':
            data = session.query(Scores).order_by(Scores.score).limit(3).all()
            send('Low Scores:')
            for x in data:
                send("%s: %s" % (x.nick, x.score))
        else:
            send("%s is not a valid flag" % match.group(1))
        return
    matches = re.findall('(%s+)' % args['config']['core']['nickregex'], msg)
    if matches:
        for match in matches:
            name = match.lower()
            if name == 'c':
                send("We all know you love C better than anything else, so why rub it in?")
                return
            score = session.query(Scores).filter(Scores.nick == name).scalar()
            if score is not None:
                if name == args['botnick'].lower():
                    output = 'has %s %s! :)' % (score.score, pluralize('point', score.score))
                    send(output, 'action')
                else:
                    send("%s has %i %s!" % (name, score.score, pluralize('point', score.score)))
            else:
                send("Nobody cares about %s" % name)
    elif msg:
        send("Invalid nick")
    else:
        count = session.query(Scores).count()
        if count == 0:
            send("Nobody cares about anything =(")
        else:
            randid = randint(1, count)
            query = session.query(Scores).get(randid)
            send("%s has %i %s!" % (query.nick, query.score, pluralize('point', query.score)))
sckasturi/saltlake
commands/score.py
Python
gpl-2.0
3,127
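The --high/--low branches above are ordinary SQLAlchemy ordering queries against the bot's Scores model. A self-contained sketch of the same pattern follows, with an in-memory stand-in for helpers.orm.Scores (the real model lives in the bot's helpers package; this uses the SQLAlchemy 1.4+ API):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Scores(Base):  # stand-in; the columns match how cmd() uses them
    __tablename__ = 'scores'
    id = Column(Integer, primary_key=True)
    nick = Column(String)
    score = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Scores(nick='alice', score=5), Scores(nick='bob', score=-2)])
    session.commit()
    # Same shape as the '--high' branch: top three scores, descending.
    for row in session.query(Scores).order_by(Scores.score.desc()).limit(3):
        print("%s: %s" % (row.nick, row.score))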
import logging
import time
import types

from autotest.client.shared import error
from virttest import utils_misc, utils_test, aexpect


def run(test, params, env):
    """
    KVM migration test:
    1) Get a live VM and clone it.
    2) Verify that the source VM supports migration. If it does, proceed with
       the test.
    3) Send a migration command to the source VM and wait until it's finished.
    4) Kill off the source VM.
    5) Log into the destination VM after the migration is finished.
    6) Compare the output of a reference command executed on the source with
       the output of the same command on the destination machine.

    :param test: QEMU test object.
    :param params: Dictionary with test parameters.
    :param env: Dictionary with the test environment.
    """
    def guest_stress_start(guest_stress_test):
        """
        Start a stress test in the guest. Could be 'iozone', 'dd' or 'stress'.

        :param guest_stress_test: type of stress test.
        """
        from tests import autotest_control
        timeout = 0

        if guest_stress_test == "autotest":
            test_type = params.get("test_type")
            func = autotest_control.run_autotest_control
            new_params = params.copy()
            new_params["test_control_file"] = "%s.control" % test_type
            args = (test, new_params, env)
            timeout = 60
        elif guest_stress_test == "dd":
            vm = env.get_vm(env, params.get("main_vm"))
            vm.verify_alive()
            session = vm.wait_for_login(timeout=login_timeout)
            func = session.cmd_output
            args = ("for((;;)) do dd if=/dev/zero of=/tmp/test bs=5M "
                    "count=100; rm -f /tmp/test; done",
                    login_timeout, logging.info)

        logging.info("Start %s test in guest", guest_stress_test)
        bg = utils_test.BackgroundTest(func, args)
        params["guest_stress_test_pid"] = bg
        bg.start()
        if timeout:
            logging.info("sleep %ds waiting guest test start.", timeout)
            time.sleep(timeout)
        if not bg.is_alive():
            raise error.TestFail("Failed to start guest test!")

    def guest_stress_deamon():
        """
        This daemon keeps watch on the status of the stress test in the
        guest. If the stress program finishes before the migration does,
        this restarts it.
        """
        while True:
            bg = params.get("guest_stress_test_pid")
            action = params.get("action")
            if action == "run":
                logging.debug("Check if guest stress is still running")
                guest_stress_test = params.get("guest_stress_test")
                if bg and not bg.is_alive():
                    logging.debug("Stress process finished, restart it")
                    guest_stress_start(guest_stress_test)
                    time.sleep(30)
                else:
                    logging.debug("Stress still on")
            else:
                if bg and bg.is_alive():
                    try:
                        stress_stop_cmd = params.get("stress_stop_cmd")
                        vm = env.get_vm(env, params.get("main_vm"))
                        vm.verify_alive()
                        session = vm.wait_for_login()
                        if stress_stop_cmd:
                            logging.warn("Killing background stress process "
                                         "with cmd '%s', you would see some "
                                         "error message in client test result,"
                                         "it's harmless.", stress_stop_cmd)
                            session.cmd(stress_stop_cmd)
                        bg.join(10)
                    except Exception:
                        pass
                break
            time.sleep(10)

    def get_functions(func_names, locals_dict):
        """
        Find sub function(s) in this function with the given name(s).
        """
        if not func_names:
            return []
        funcs = []
        for f in func_names.split():
            f = locals_dict.get(f)
            if isinstance(f, types.FunctionType):
                funcs.append(f)
        return funcs

    def mig_set_speed():
        mig_speed = params.get("mig_speed", "1G")
        return vm.monitor.migrate_set_speed(mig_speed)

    login_timeout = int(params.get("login_timeout", 360))
    mig_timeout = float(params.get("mig_timeout", "3600"))
    mig_protocol = params.get("migration_protocol", "tcp")
    mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2
    mig_exec_cmd_src = params.get("migration_exec_cmd_src")
    mig_exec_cmd_dst = params.get("migration_exec_cmd_dst")
    if mig_exec_cmd_src and "gzip" in mig_exec_cmd_src:
        mig_exec_file = params.get("migration_exec_file", "/var/tmp/exec")
        mig_exec_file += "-%s" % utils_misc.generate_random_string(8)
        mig_exec_cmd_src = mig_exec_cmd_src % mig_exec_file
        mig_exec_cmd_dst = mig_exec_cmd_dst % mig_exec_file
    offline = params.get("offline", "no") == "yes"
    check = params.get("vmstate_check", "no") == "yes"
    living_guest_os = params.get("migration_living_guest", "yes") == "yes"
    deamon_thread = None

    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()

    if living_guest_os:
        session = vm.wait_for_login(timeout=login_timeout)

        # Get the output of migration_test_command
        test_command = params.get("migration_test_command")
        reference_output = session.cmd_output(test_command)

        # Start some process in the background (and leave the session open)
        background_command = params.get("migration_bg_command", "")
        session.sendline(background_command)
        time.sleep(5)

        # Start another session with the guest and make sure the background
        # process is running
        session2 = vm.wait_for_login(timeout=login_timeout)

        try:
            check_command = params.get("migration_bg_check_command", "")
            session2.cmd(check_command, timeout=30)
            session2.close()

            # run some functions before migrate start.
            pre_migrate = get_functions(params.get("pre_migrate"), locals())
            for func in pre_migrate:
                func()

            # Start stress test in guest.
            guest_stress_test = params.get("guest_stress_test")
            if guest_stress_test:
                guest_stress_start(guest_stress_test)
                params["action"] = "run"
                deamon_thread = utils_test.BackgroundTest(
                    guest_stress_deamon, ())
                deamon_thread.start()

            # Migrate the VM
            ping_pong = params.get("ping_pong", 1)
            for i in xrange(int(ping_pong)):
                if i % 2 == 0:
                    logging.info("Round %s ping..." % str(i / 2))
                else:
                    logging.info("Round %s pong..." % str(i / 2))
                vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay,
                           offline, check,
                           migration_exec_cmd_src=mig_exec_cmd_src,
                           migration_exec_cmd_dst=mig_exec_cmd_dst)

            # Set daemon thread action to stop after migrate
            params["action"] = "stop"

            # run some functions after migrate finishes.
            post_migrate = get_functions(params.get("post_migrate"), locals())
            for func in post_migrate:
                func()

            # Log into the guest again
            logging.info("Logging into guest after migration...")
            session2 = vm.wait_for_login(timeout=30)
            logging.info("Logged in after migration")

            # Make sure the background process is still running
            session2.cmd(check_command, timeout=30)

            # Get the output of migration_test_command
            output = session2.cmd_output(test_command)

            # Compare output to reference output
            if output != reference_output:
                logging.info("Command output before migration differs from "
                             "command output after migration")
                logging.info("Command: %s", test_command)
                logging.info("Output before:" +
                             utils_misc.format_str_for_message(reference_output))
                logging.info("Output after:" +
                             utils_misc.format_str_for_message(output))
                raise error.TestFail("Command '%s' produced different output "
                                     "before and after migration" % test_command)

        finally:
            # Kill the background process
            if session2 and session2.is_alive():
                bg_kill_cmd = params.get("migration_bg_kill_command", None)
                if bg_kill_cmd is not None:
                    try:
                        session2.cmd(bg_kill_cmd)
                    except aexpect.ShellTimeoutError:
                        logging.debug("Remote session not responsive, "
                                      "shutting down VM %s", vm.name)
                        vm.destroy(gracefully=True)
            if deamon_thread is not None:
                # Set daemon thread action to stop after migrate
                params["action"] = "stop"
                deamon_thread.join()
    else:
        # Just migrate without depending on a living guest OS
        vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay, offline,
                   check,
                   migration_exec_cmd_src=mig_exec_cmd_src,
                   migration_exec_cmd_dst=mig_exec_cmd_dst)
spcui/tp-qemu
qemu/tests/migration.py
Python
gpl-2.0
9,706
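The get_functions() helper above is the hook mechanism for this test: step names arrive as a space-separated string in params (e.g. pre_migrate = "mig_set_speed") and are resolved against locals(), so unknown names are silently skipped. A runnable sketch of the same pattern, with made-up step names:

import types

def get_functions(func_names, locals_dict):
    # Identical logic to the helper above: resolve each name against the
    # given locals() dict, keeping only plain functions.
    if not func_names:
        return []
    funcs = []
    for f in func_names.split():
        f = locals_dict.get(f)
        if isinstance(f, types.FunctionType):
            funcs.append(f)
    return funcs

def demo():
    def warm_up():        # hypothetical pre-migrate step
        print("warming up")
    def set_speed():      # hypothetical pre-migrate step
        print("setting speed")
    # 'missing' resolves to nothing and is skipped, as in the test.
    for func in get_functions("warm_up set_speed missing", locals()):
        func()

demo()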
import os
from setuptools import setup


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name="blake-archive",
    version="0.1",
    description="Blake archive web app",
    license="Closed source",
    packages=['blake', 'test'],
    long_description=read('README'),
    classifiers=["Development Status :: 3 - Alpha"],
    install_requires=["flask", "sqlalchemy", "flask-sqlalchemy", 'lxml',
                      'xmltodict', "nose", 'tablib']
)
blakearchive/archive
setup.py
Python
gpl-2.0
480
# -*- encoding: utf-8 -*-
###########################################################################
#    Module Written to OpenERP, Open Source Management Solution
#
#    Copyright (c) 2013 Vauxoo - http://www.vauxoo.com/
#    All Rights Reserved.
#    info Vauxoo ([email protected])
############################################################################
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import osv, fields


class res_partner(osv.Model):
    _inherit = 'res.partner'
    _order = "parent_left"
    _parent_order = "ref"
    _parent_store = True

    _columns = {
        'parent_right': fields.integer('Parent Right', select=1),
        'parent_left': fields.integer('Parent Left', select=1),
    }
3dfxsoftware/cbss-addons
res_partner_btree/model/res_partner_btree.py
Python
gpl-2.0
1,459
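Why store parent_left/parent_right at all? With _parent_store enabled, OpenERP maintains a nested-set (modified preorder tree) numbering over the partner hierarchy, so "is X in the subtree of Y" becomes a pure interval check instead of a recursive query. A minimal sketch of that check, with made-up interval values:

def is_descendant(child, ancestor):
    # In a nested set, a node's [parent_left, parent_right] interval is
    # strictly contained in the interval of every one of its ancestors.
    return (ancestor['parent_left'] < child['parent_left']
            and child['parent_right'] < ancestor['parent_right'])

company = {'parent_left': 1, 'parent_right': 8}
branch = {'parent_left': 2, 'parent_right': 5}    # child of company
contact = {'parent_left': 3, 'parent_right': 4}   # child of branch
other = {'parent_left': 6, 'parent_right': 7}     # sibling of branch

print(is_descendant(contact, company))  # True
print(is_descendant(other, branch))     # False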
# -*- coding: utf-8 -*- from py.test import mark from translate.filters import checks from translate.lang import data from translate.storage import po, xliff def strprep(str1, str2, message=None): return data.normalized_unicode(str1), data.normalized_unicode(str2), data.normalized_unicode(message) def passes(filterfunction, str1, str2): """returns whether the given strings pass on the given test, handling FilterFailures""" str1, str2, no_message = strprep(str1, str2) try: filterresult = filterfunction(str1, str2) except checks.FilterFailure, e: filterresult = False return filterresult def fails(filterfunction, str1, str2, message=None): """returns whether the given strings fail on the given test, handling only FilterFailures""" str1, str2, message = strprep(str1, str2, message) try: filterresult = filterfunction(str1, str2) except checks.SeriousFilterFailure, e: filterresult = True except checks.FilterFailure, e: if message: exc_message = e.messages[0] filterresult = exc_message != message print exc_message.encode('utf-8') else: filterresult = False return not filterresult def fails_serious(filterfunction, str1, str2, message=None): """returns whether the given strings fail on the given test, handling only SeriousFilterFailures""" str1, str2, message = strprep(str1, str2, message) try: filterresult = filterfunction(str1, str2) except checks.SeriousFilterFailure, e: if message: exc_message = e.messages[0] filterresult = exc_message != message print exc_message.encode('utf-8') else: filterresult = False return not filterresult def test_defaults(): """tests default setup and that checks aren't altered by other constructions""" stdchecker = checks.StandardChecker() assert stdchecker.config.varmatches == [] mozillachecker = checks.MozillaChecker() stdchecker = checks.StandardChecker() assert stdchecker.config.varmatches == [] def test_construct(): """tests that the checkers can be constructed""" stdchecker = checks.StandardChecker() mozillachecker = checks.MozillaChecker() ooochecker = checks.OpenOfficeChecker() gnomechecker = checks.GnomeChecker() kdechecker = checks.KdeChecker() def test_accelerator_markers(): """test that we have the correct accelerator marker for the various default configs""" stdchecker = checks.StandardChecker() assert stdchecker.config.accelmarkers == [] mozillachecker = checks.MozillaChecker() assert mozillachecker.config.accelmarkers == ["&"] ooochecker = checks.OpenOfficeChecker() assert ooochecker.config.accelmarkers == ["~"] gnomechecker = checks.GnomeChecker() assert gnomechecker.config.accelmarkers == ["_"] kdechecker = checks.KdeChecker() assert kdechecker.config.accelmarkers == ["&"] def test_messages(): """test that our helpers can check for messages and that these error messages can contain Unicode""" stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz')) assert fails(stdchecker.validchars, "Some unexpected characters", "©", "Invalid characters: '©' (\\u00a9)") stdchecker = checks.StandardChecker() assert fails_serious(stdchecker.escapes, r"A tab", r"'n Ṱab\t", r"""Escapes in original () don't match escapes in translation ('Ṱab\t')""") def test_accelerators(): """tests accelerators""" stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&")) assert passes(stdchecker.accelerators, "&File", "&Fayile") assert fails(stdchecker.accelerators, "&File", "Fayile") assert fails(stdchecker.accelerators, "File", "&Fayile") assert passes(stdchecker.accelerators, "Mail && News", "Pos en 
Nuus") assert fails(stdchecker.accelerators, "Mail &amp; News", "Pos en Nuus") assert passes(stdchecker.accelerators, "&Allow", u'&\ufeb2\ufee3\ufe8e\ufea3') assert fails(stdchecker.accelerators, "Open &File", "Vula& Ifayile") kdechecker = checks.KdeChecker() assert passes(kdechecker.accelerators, "&File", "&Fayile") assert fails(kdechecker.accelerators, "&File", "Fayile") assert fails(kdechecker.accelerators, "File", "&Fayile") gnomechecker = checks.GnomeChecker() assert passes(gnomechecker.accelerators, "_File", "_Fayile") assert fails(gnomechecker.accelerators, "_File", "Fayile") assert fails(gnomechecker.accelerators, "File", "_Fayile") assert fails(gnomechecker.accelerators, "_File", "_Fayil_e") mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.accelerators, "&File", "&Fayile") assert passes(mozillachecker.accelerators, "Warn me if this will disable any of my add&-ons", "&Waarsku my as dit enige van my byvoegings sal deaktiveer") assert fails_serious(mozillachecker.accelerators, "&File", "Fayile") assert fails_serious(mozillachecker.accelerators, "File", "&Fayile") assert passes(mozillachecker.accelerators, "Mail &amp; News", "Pos en Nuus") assert fails_serious(mozillachecker.accelerators, "Mail &amp; News", "Pos en &Nuus") assert fails_serious(mozillachecker.accelerators, "&File", "Fayile") ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.accelerators, "~File", "~Fayile") assert fails(ooochecker.accelerators, "~File", "Fayile") assert fails(ooochecker.accelerators, "File", "~Fayile") # We don't want an accelerator for letters with a diacritic assert fails(ooochecker.accelerators, "F~ile", "L~êer") # Bug 289: accept accented accelerator characters afchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&", targetlanguage="fi")) assert passes(afchecker.accelerators, "&Reload Frame", "P&äivitä kehys") # Problems: # Accelerator before variable - see test_acceleratedvariables @mark.xfail(reason="Accelerated variables needs a better implementation") def test_acceleratedvariables(): """test for accelerated variables""" # FIXME: disabled since acceleratedvariables has been removed, but these checks are still needed mozillachecker = checks.MozillaChecker() assert fails(mozillachecker.acceleratedvariables, "%S &Options", "&%S Ikhetho") assert passes(mozillachecker.acceleratedvariables, "%S &Options", "%S &Ikhetho") ooochecker = checks.OpenOfficeChecker() assert fails(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "~%PRODUCTNAME% Ikhetho") assert passes(ooochecker.acceleratedvariables, "%PRODUCTNAME% ~Options", "%PRODUCTNAME% ~Ikhetho") def test_acronyms(): """tests acronyms""" stdchecker = checks.StandardChecker() assert passes(stdchecker.acronyms, "An HTML file", "'n HTML leer") assert fails(stdchecker.acronyms, "An HTML file", "'n LMTH leer") assert passes(stdchecker.acronyms, "It is HTML.", "Dit is HTML.") # We don't mind if you add an acronym to correct bad capitalisation in the original assert passes(stdchecker.acronyms, "An html file", "'n HTML leer") # We shouldn't worry about acronyms that appear in a musttranslate file stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=["OK"])) assert passes(stdchecker.acronyms, "OK", "Kulungile") # Assert punctuation should not hide accronyms assert fails(stdchecker.acronyms, "Location (URL) not found", "Blah blah blah") # Test '-W' (bug 283) assert passes(stdchecker.acronyms, "%s: option `-W %s' is ambiguous", "%s: opsie '-W %s' is dubbelsinnig") def test_blank(): 
"""tests blank""" stdchecker = checks.StandardChecker() assert fails(stdchecker.blank, "Save as", " ") assert fails(stdchecker.blank, "_: KDE comment\\n\nSimple string", " ") def test_brackets(): """tests brackets""" stdchecker = checks.StandardChecker() assert passes(stdchecker.brackets, "N number(s)", "N getal(le)") assert fails(stdchecker.brackets, "For {sic} numbers", "Vier getalle") assert fails(stdchecker.brackets, "For }sic{ numbers", "Vier getalle") assert fails(stdchecker.brackets, "For [sic] numbers", "Vier getalle") assert fails(stdchecker.brackets, "For ]sic[ numbers", "Vier getalle") assert passes(stdchecker.brackets, "{[(", "[({") def test_compendiumconflicts(): """tests compendiumconflicts""" stdchecker = checks.StandardChecker() assert fails(stdchecker.compendiumconflicts, "File not saved", r"""#-#-#-#-# file1.po #-#-#-#-#\n Leer nie gestoor gestoor nie\n #-#-#-#-# file1.po #-#-#-#-#\n Leer nie gestoor""") def test_doublequoting(): """tests double quotes""" stdchecker = checks.StandardChecker() assert fails(stdchecker.doublequoting, "Hot plate", "\"Ipuleti\" elishisa") assert passes(stdchecker.doublequoting, "\"Hot\" plate", "\"Ipuleti\" elishisa") assert fails(stdchecker.doublequoting, "'Hot' plate", "\"Ipuleti\" elishisa") assert passes(stdchecker.doublequoting, "\\\"Hot\\\" plate", "\\\"Ipuleti\\\" elishisa") # We don't want the filter to complain about "untranslated" quotes in xml attributes frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr")) assert passes(frchecker.doublequoting, "Click <a href=\"page.html\">", "Clique <a href=\"page.html\">") assert fails(frchecker.doublequoting, "Do \"this\"", "Do \"this\"") assert passes(frchecker.doublequoting, "Do \"this\"", "Do « this »") assert fails(frchecker.doublequoting, "Do \"this\"", "Do « this » « this »") # This used to fail because we strip variables, and was left with an empty quotation that was not converted assert passes(frchecker.doublequoting, u"Copying `%s' to `%s'", u"Copie de « %s » vers « %s »") vichecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="vi")) assert passes(vichecker.doublequoting, 'Save "File"', u"Lưu « Tập tin »") # Had a small exception with such a case: eschecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="es")) assert passes(eschecker.doublequoting, "<![CDATA[ Enter the name of the Windows workgroup that this server should appear in. ]]>", "<![CDATA[ Ingrese el nombre del grupo de trabajo de Windows en el que debe aparecer este servidor. ]]>") def test_doublespacing(): """tests double spacing""" stdchecker = checks.StandardChecker() assert passes(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. 'n Ander sin.") assert passes(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. No double spacing.") assert fails(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. Missing the double space.") assert fails(stdchecker.doublespacing, "Sentence. Another sentence.", "Sin. 
Uneeded double space in translation.") ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.doublespacing, "Execute %PROGRAMNAME Calc", "Blah %PROGRAMNAME Calc") assert passes(ooochecker.doublespacing, "Execute %PROGRAMNAME Calc", "Blah % PROGRAMNAME Calc") def test_doublewords(): """tests doublewords""" stdchecker = checks.StandardChecker() assert passes(stdchecker.doublewords, "Save the rhino", "Save the rhino") assert fails(stdchecker.doublewords, "Save the rhino", "Save the the rhino") # Double variables are not an error stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", 1)])) assert passes(stdchecker.doublewords, "%s %s installation", "tsenyo ya %s %s") # Double XML tags are not an error stdchecker = checks.StandardChecker() assert passes(stdchecker.doublewords, "Line one <br> <br> line two", "Lyn een <br> <br> lyn twee") # In some language certain double words are not errors st_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="st")) assert passes(st_checker.doublewords, "Color to draw the name of a message you sent.", "Mmala wa ho taka bitso la molaetsa oo o o rometseng.") assert passes(st_checker.doublewords, "Ten men", "Banna ba ba leshome") assert passes(st_checker.doublewords, "Give SARS the tax", "Lekgetho le le fe SARS") def test_endpunc(): """tests endpunc""" stdchecker = checks.StandardChecker() assert passes(stdchecker.endpunc, "Question?", "Correct?") assert fails(stdchecker.endpunc, " Question?", "Wrong ?") # Newlines must not mask end punctuation assert fails(stdchecker.endpunc, "Exit change recording mode?\n\n", "Phuma esimeni sekugucula kubhalisa.\n\n") mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.endpunc, "Upgrades an existing $ProductShortName$ installation.", "Ku antswisiwa ka ku nghenisiwa ka $ProductShortName$.") # Real examples assert passes(stdchecker.endpunc, "A nickname that identifies this publishing site (e.g.: 'MySite')", "Vito ro duvulela leri tirhisiwaka ku kuma sayiti leri ro kandziyisa (xik.: 'Sayiti ra Mina')") assert fails(stdchecker.endpunc, "Question", u"Wrong\u2026") # Making sure singlequotes don't confuse things assert passes(stdchecker.endpunc, "Pseudo-elements can't be negated '%1$S'.", "Pseudo-elemente kan nie '%1$S' ontken word nie.") stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='km')) assert passes(stdchecker.endpunc, "In this new version, there are some minor conversion improvements on complex style in Openoffice.org Writer.", u"នៅ​ក្នុង​កំណែ​ថ្មីនេះ មាន​ការ​កែសម្រួល​មួយ​ចំនួន​តូច​ទាក់​ទង​នឹង​ការ​បំលែង​ពុម្ពអក្សរ​ខ្មែរ​ ក្នុង​កម្មវិធី​ការិយាល័យ​ ស្លឹករឹត ដែល​មាន​ប្រើ​ប្រាស់​រចនាប័ទ្មស្មុគស្មាញច្រើន\u00a0។") stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='zh')) assert passes(stdchecker.endpunc, "To activate your account, follow this link:\n", u"要啟用戶口,請瀏覽這個鏈結:\n") stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='vi')) assert passes(stdchecker.endpunc, "Do you want to delete the XX dialog?", u"Bạn có muốn xoá hộp thoại XX không?") stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='fr')) assert passes(stdchecker.endpunc, "Header:", u"En-tête :") assert passes(stdchecker.endpunc, "Header:", u"En-tête\u00a0:") def test_endwhitespace(): """tests endwhitespace""" stdchecker = checks.StandardChecker() assert passes(stdchecker.endwhitespace, "A setence.", "I'm correct.") assert passes(stdchecker.endwhitespace, "A setence. ", "I'm correct. 
") assert fails(stdchecker.endwhitespace, "A setence. ", "'I'm incorrect.") assert passes(stdchecker.endwhitespace, "Problem with something: %s\n", "Probleem met iets: %s\n") zh_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='zh')) # This should pass since the space is not needed in Chinese assert passes(zh_checker.endwhitespace, "Init. Limit: ", "起始时间限制:") def test_escapes(): """tests escapes""" stdchecker = checks.StandardChecker() assert passes(stdchecker.escapes, r"""A sentence""", "I'm correct.") assert passes(stdchecker.escapes, "A file\n", "'n Leer\n") assert fails_serious(stdchecker.escapes, r"blah. A file", r"bleah.\n'n leer") assert passes(stdchecker.escapes, r"A tab\t", r"'n Tab\t") assert fails_serious(stdchecker.escapes, r"A tab\t", r"'n Tab") assert passes(stdchecker.escapes, r"An escape escape \\", r"Escape escape \\") assert fails_serious(stdchecker.escapes, r"An escape escape \\", "Escape escape") assert passes(stdchecker.escapes, r"A double quote \"", r"Double quote \"") assert fails_serious(stdchecker.escapes, r"A double quote \"", "Double quote") # Escaped escapes assert passes(stdchecker.escapes, "An escaped newline \\n", "Escaped newline \\n") assert fails_serious(stdchecker.escapes, "An escaped newline \\n", "Escaped newline \n") # Real example ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.escapes, ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32", ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32") def test_newlines(): """tests newlines""" stdchecker = checks.StandardChecker() assert passes(stdchecker.newlines, "Nothing to see", "Niks te sien") assert passes(stdchecker.newlines, "Correct\n", "Korrek\n") assert passes(stdchecker.newlines, "Correct\r", "Korrek\r") assert passes(stdchecker.newlines, "Correct\r\n", "Korrek\r\n") assert fails(stdchecker.newlines, "A file\n", "'n Leer") assert fails(stdchecker.newlines, "A file", "'n Leer\n") assert fails(stdchecker.newlines, "A file\r", "'n Leer") assert fails(stdchecker.newlines, "A file", "'n Leer\r") assert fails(stdchecker.newlines, "A file\n", "'n Leer\r\n") assert fails(stdchecker.newlines, "A file\r\n", "'n Leer\n") assert fails(stdchecker.newlines, "blah.\nA file", "bleah. 
'n leer") # msgfmt errors assert fails(stdchecker.newlines, "One two\n", "Een\ntwee") assert fails(stdchecker.newlines, "\nOne two", "Een\ntwee") # Real example ooochecker = checks.OpenOfficeChecker() assert fails(ooochecker.newlines, "The arrowhead was modified without saving.\nWould you like to save the arrowhead now?", "Ṱhoho ya musevhe yo khwinifhadzwa hu si na u seiva.Ni khou ṱoda u seiva thoho ya musevhe zwino?") def test_tabs(): """tests tabs""" stdchecker = checks.StandardChecker() assert passes(stdchecker.tabs, "Nothing to see", "Niks te sien") assert passes(stdchecker.tabs, "Correct\t", "Korrek\t") assert passes(stdchecker.tabs, "Correct\tAA", "Korrek\tAA") assert fails_serious(stdchecker.tabs, "A file\t", "'n Leer") assert fails_serious(stdchecker.tabs, "A file", "'n Leer\t") ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.tabs, ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32", ",\t44\t;\t59\t:\t58\t{Tab}\t9\t{space}\t32") def test_filepaths(): """tests filepaths""" stdchecker = checks.StandardChecker() assert passes(stdchecker.filepaths, "%s to the file /etc/hosts on your system.", "%s na die leer /etc/hosts op jou systeem.") assert fails(stdchecker.filepaths, "%s to the file /etc/hosts on your system.", "%s na die leer /etc/gasheer op jou systeem.") def test_kdecomments(): """tests kdecomments""" stdchecker = checks.StandardChecker() assert passes(stdchecker.kdecomments, r"""_: I am a comment\n A string to translate""", "'n String om te vertaal") assert fails(stdchecker.kdecomments, r"""_: I am a comment\n A string to translate""", r"""_: Ek is 'n commment\n 'n String om te vertaal""") assert fails(stdchecker.kdecomments, """_: I am a comment\\n\n""", """_: I am a comment\\n\n""") def test_long(): """tests long messages""" stdchecker = checks.StandardChecker() assert passes(stdchecker.long, "I am normal", "Ek is ook normaal") assert fails(stdchecker.long, "Short.", "Kort.......................................................................................") assert fails(stdchecker.long, "a", "bc") def test_musttranslatewords(): """tests stopwords""" stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=[])) assert passes(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik") stdchecker = checks.StandardChecker(checks.CheckerConfig(musttranslatewords=["Mozilla"])) assert passes(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik") assert fails(stdchecker.musttranslatewords, "This uses Mozilla of course", "hierdie gebruik Mozilla natuurlik") assert passes(stdchecker.musttranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik le mozille soos jy") assert fails(stdchecker.musttranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik Mozilla soos jy") # should always pass if there are no stopwords in the original assert passes(stdchecker.musttranslatewords, "This uses something else. 
Don't you?", "hierdie gebruik Mozilla soos jy") # check that we can find words surrounded by punctuation assert passes(stdchecker.musttranslatewords, "Click 'Mozilla' button", "Kliek 'Motzille' knoppie") assert fails(stdchecker.musttranslatewords, "Click 'Mozilla' button", "Kliek 'Mozilla' knoppie") assert passes(stdchecker.musttranslatewords, 'Click "Mozilla" button', 'Kliek "Motzille" knoppie') assert fails(stdchecker.musttranslatewords, 'Click "Mozilla" button', 'Kliek "Mozilla" knoppie') assert fails(stdchecker.musttranslatewords, 'Click "Mozilla" button', u'Kliek «Mozilla» knoppie') assert passes(stdchecker.musttranslatewords, "Click (Mozilla) button", "Kliek (Motzille) knoppie") assert fails(stdchecker.musttranslatewords, "Click (Mozilla) button", "Kliek (Mozilla) knoppie") assert passes(stdchecker.musttranslatewords, "Click Mozilla!", "Kliek Motzille!") assert fails(stdchecker.musttranslatewords, "Click Mozilla!", "Kliek Mozilla!") ## We need to define more word separators to allow us to find those hidden untranslated items #assert fails(stdchecker.musttranslatewords, "Click OK", "Blah we-OK") # Don't get confused when variables are the same as a musttranslate word stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", None), ], musttranslatewords=["OK"])) assert passes(stdchecker.musttranslatewords, "Click %OK to start", "Kliek %OK om te begin") # Unicode assert fails(stdchecker.musttranslatewords, "Click OK", u"Kiḽikani OK") def test_notranslatewords(): """tests stopwords""" stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=[])) assert passes(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik") stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Mozilla", "Opera"])) assert fails(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik le mozille natuurlik") assert passes(stdchecker.notranslatewords, "This uses Mozilla of course", "hierdie gebruik Mozilla natuurlik") assert fails(stdchecker.notranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik le mozille soos jy") assert passes(stdchecker.notranslatewords, "This uses Mozilla. Don't you?", "hierdie gebruik Mozilla soos jy") # should always pass if there are no stopwords in the original assert passes(stdchecker.notranslatewords, "This uses something else. 
Don't you?", "hierdie gebruik Mozilla soos jy") # Cope with commas assert passes(stdchecker.notranslatewords, "using Mozilla Task Manager", u"šomiša Selaola Mošomo sa Mozilla, gomme") # Find words even if they are embedded in punctuation assert fails(stdchecker.notranslatewords, "Click 'Mozilla' button", "Kliek 'Motzille' knoppie") assert passes(stdchecker.notranslatewords, "Click 'Mozilla' button", "Kliek 'Mozilla' knoppie") assert fails(stdchecker.notranslatewords, "Click Mozilla!", "Kliek Motzille!") assert passes(stdchecker.notranslatewords, "Click Mozilla!", "Kliek Mozilla!") assert fails(stdchecker.notranslatewords, "Searches (From Opera)", "adosako (kusukela ku- Ophera)") stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Sun", "NeXT"])) assert fails(stdchecker.notranslatewords, "Sun/NeXT Audio", "Odio dza Ḓuvha/TeVHELAHO") assert passes(stdchecker.notranslatewords, "Sun/NeXT Audio", "Odio dza Sun/NeXT") stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["sendmail"])) assert fails(stdchecker.notranslatewords, "because 'sendmail' could", "ngauri 'rumelameiḽi' a yo") assert passes(stdchecker.notranslatewords, "because 'sendmail' could", "ngauri 'sendmail' a yo") stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Base"])) assert fails(stdchecker.notranslatewords, " - %PRODUCTNAME Base: Relation design", " - %PRODUCTNAME Sisekelo: Umsiko wekuhlobana") stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Writer"])) assert fails(stdchecker.notranslatewords, "&[ProductName] Writer/Web", "&[ProductName] Umbhali/iWebhu") # Unicode - different decompositions stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=[u"\u1e3cike"])) assert passes(stdchecker.notranslatewords, u"You \u1e3cike me", u"Ek \u004c\u032dike jou") def test_numbers(): """test numbers""" stdchecker = checks.StandardChecker() assert passes(stdchecker.numbers, "Netscape 4 was not as good as Netscape 7.", "Netscape 4 was nie so goed soos Netscape 7 nie.") # Check for correct detection of degree. Also check that we aren't getting confused with 1 and 2 byte UTF-8 characters assert fails(stdchecker.numbers, "180° turn", "180 turn") assert passes(stdchecker.numbers, "180° turn", "180° turn") assert fails(stdchecker.numbers, "180° turn", "360 turn") assert fails(stdchecker.numbers, "180° turn", "360° turn") assert passes(stdchecker.numbers, "180~ turn", "180 turn") assert passes(stdchecker.numbers, "180¶ turn", "180 turn") # Numbers with multiple decimal points assert passes(stdchecker.numbers, "12.34.56", "12.34.56") assert fails(stdchecker.numbers, "12.34.56", "98.76.54") # Currency # FIXME we should probably be able to handle currency checking with locale intelligence assert passes(stdchecker.numbers, "R57.60", "R57.60") # FIXME - again locale intelligence should allow us to use other decimal separators assert fails(stdchecker.numbers, "R57.60", "R57,60") assert fails(stdchecker.numbers, "1,000.00", "1 000,00") # You should be able to reorder numbers assert passes(stdchecker.numbers, "40-bit RC2 encryption with RSA and an MD5", "Umbhalo ocashile i-RC2 onamabhithi angu-40 one-RSA ne-MD5") # Don't fail the numbers check if the entry is a dialogsize entry mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.numbers, 'width: 12em;', 'width: 20em;') def test_options(): """tests command line options e.g. 
--option""" stdchecker = checks.StandardChecker() assert passes(stdchecker.options, "--help", "--help") assert fails(stdchecker.options, "--help", "--hulp") assert fails(stdchecker.options, "--input=FILE", "--input=FILE") assert passes(stdchecker.options, "--input=FILE", "--input=LÊER") assert fails(stdchecker.options, "--input=FILE", "--tovoer=LÊER") # We don't want just any '--' to trigger this test - the error will be confusing assert passes(stdchecker.options, "Hello! -- Hi", "Hallo! &mdash; Haai") assert passes(stdchecker.options, "--blank--", "--vide--") def test_printf(): """tests printf style variables""" # This should really be a subset of the variable checks # Ideally we should be able to adapt based on #, directives also stdchecker = checks.StandardChecker() assert passes(stdchecker.printf, "I am %s", "Ek is %s") assert fails(stdchecker.printf, "I am %s", "Ek is %d") assert passes(stdchecker.printf, "I am %#100.50hhf", "Ek is %#100.50hhf") assert fails(stdchecker.printf, "I am %#100s", "Ek is %10s") assert fails(stdchecker.printf, "... for user %.100s on %.100s:", "... lomuntu osebenzisa i-%. I-100s e-100s:") assert passes(stdchecker.printf, "%dMB", "%d MG") # Reordering assert passes(stdchecker.printf, "String %s and number %d", "String %1$s en nommer %2$d") assert passes(stdchecker.printf, "String %1$s and number %2$d", "String %1$s en nommer %2$d") assert passes(stdchecker.printf, "String %s and number %d", "Nommer %2$d and string %1$s") assert passes(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %2$f") assert passes(stdchecker.printf, "String %1$s and real number %2$f and number %3$d", "String %1$s en nommer %3$d en reële getal %2$f") assert passes(stdchecker.printf, "Real number %2$f and string %1$s and number %3$d", "String %1$s en nommer %3$d en reële getal %2$f") assert fails(stdchecker.printf, "String %s and number %d", "Nommer %1$d and string %2$s") assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %2$d") assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %1$s en nommer %3$d en reële getal %4$f") assert fails(stdchecker.printf, "String %s and real number %f and number %d", "String %2$s en nommer %3$d en reële getal %2$f") assert fails(stdchecker.printf, "Real number %2$f and string %1$s and number %3$d", "String %1$f en nommer %3$d en reële getal %2$f") # checking python format strings assert passes(stdchecker.printf, "String %(1)s and number %(2)d", "Nommer %(2)d en string %(1)s") assert passes(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(num)d en string %(str)s") assert fails(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(nommer)d en string %(str)s") assert fails(stdchecker.printf, "String %(str)s and number %(num)d", "Nommer %(num)d en string %s") # checking omitted plural format string placeholder %.0s stdchecker.hasplural = 1 assert passes(stdchecker.printf, "%d plurals", "%.0s plural") def test_puncspacing(): """tests spacing after punctuation""" stdchecker = checks.StandardChecker() assert passes(stdchecker.puncspacing, "One, two, three.", "Kunye, kubili, kuthathu.") assert passes(stdchecker.puncspacing, "One, two, three. ", "Kunye, kubili, kuthathu.") assert fails(stdchecker.puncspacing, "One, two, three. 
", "Kunye, kubili,kuthathu.") assert passes(stdchecker.puncspacing, "One, two, three!?", "Kunye, kubili, kuthathu?") # Some languages have padded puntuation marks frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr")) assert passes(frchecker.puncspacing, "Do \"this\"", "Do « this »") assert passes(frchecker.puncspacing, u"Do \"this\"", u"Do «\u00a0this\u00a0»") assert fails(frchecker.puncspacing, "Do \"this\"", "Do «this»") def test_purepunc(): """tests messages containing only punctuation""" stdchecker = checks.StandardChecker() assert passes(stdchecker.purepunc, ".", ".") assert passes(stdchecker.purepunc, "", "") assert fails(stdchecker.purepunc, ".", " ") assert fails(stdchecker.purepunc, "Find", "'") assert fails(stdchecker.purepunc, "'", "Find") assert passes(stdchecker.purepunc, "year measurement template|2000", "2000") def test_sentencecount(): """tests sentencecount messages""" stdchecker = checks.StandardChecker() assert passes(stdchecker.sentencecount, "One. Two. Three.", "Een. Twee. Drie.") assert passes(stdchecker.sentencecount, "One two three", "Een twee drie.") assert fails(stdchecker.sentencecount, "One. Two. Three.", "Een Twee. Drie.") assert passes(stdchecker.sentencecount, "Sentence with i.e. in it.", "Sin met d.w.s. in dit.") # bug 178, description item 8 el_checker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='el')) assert fails(el_checker.sentencecount, "First sentence. Second sentence.", "Πρώτη πρόταση. δεύτερη πρόταση.") def test_short(): """tests short messages""" stdchecker = checks.StandardChecker() assert passes(stdchecker.short, "I am normal", "Ek is ook normaal") assert fails(stdchecker.short, "I am a very long sentence", "Ek") assert fails(stdchecker.short, "abcde", "c") def test_singlequoting(): """tests single quotes""" stdchecker = checks.StandardChecker() assert passes(stdchecker.singlequoting, "A 'Hot' plate", "Ipuleti 'elishisa' kunye") # FIXME this should pass but doesn't probably to do with our logic that got confused at the end of lines assert passes(stdchecker.singlequoting, "'Hot' plate", "Ipuleti 'elishisa'") # FIXME newlines also confuse our algorithm for single quotes assert passes(stdchecker.singlequoting, "File '%s'\n", "'%s' Faele\n") assert fails(stdchecker.singlequoting, "'Hot' plate", "Ipuleti \"elishisa\"") assert passes(stdchecker.singlequoting, "It's here.", "Dit is hier.") # Don't get confused by punctuation that touches a single quote assert passes(stdchecker.singlequoting, "File '%s'.", "'%s' Faele.") assert passes(stdchecker.singlequoting, "Blah 'format' blah.", "Blah blah 'sebopego'.") assert passes(stdchecker.singlequoting, "Blah 'format' blah!", "Blah blah 'sebopego'!") assert passes(stdchecker.singlequoting, "Blah 'format' blah?", "Blah blah 'sebopego'?") # Real examples assert passes(stdchecker.singlequoting, "A nickname that identifies this publishing site (e.g.: 'MySite')", "Vito ro duvulela leri tirhisiwaka ku kuma sayiti leri ro kandziyisa (xik.: 'Sayiti ra Mina')") assert passes(stdchecker.singlequoting, "isn't", "ayikho") assert passes(stdchecker.singlequoting, "Required (can't send message unless all recipients have certificates)", "Verlang (kan nie boodskappe versend tensy al die ontvangers sertifikate het nie)") # Afrikaans 'n assert passes(stdchecker.singlequoting, "Please enter a different site name.", "Tik 'n ander werfnaam in.") assert passes(stdchecker.singlequoting, "\"%name%\" already exists. Please enter a different site name.", "\"%name%\" bestaan reeds. 
Tik 'n ander werfnaam in.") # Check that accelerators don't mess with removing singlequotes mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.singlequoting, "&Don't import anything", "&Moenie enigiets invoer nie") ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.singlequoting, "~Don't import anything", "~Moenie enigiets invoer nie") vichecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="vi")) assert passes(vichecker.singlequoting, "Save 'File'", u"Lưu « Tập tin »") assert passes(vichecker.singlequoting, "Save `File'", u"Lưu « Tập tin »") def test_simplecaps(): """tests simple caps""" # Simple caps is a very vague test, so the checks here are mostly for obviously fixable problems # or for checking obviously correct situations that are triggering a failure. stdchecker = checks.StandardChecker() assert passes(stdchecker.simplecaps, "MB of disk space for the cache.", "MB yendzawo yediski etsala.") # We should squash 'I' in the source text as it messes with capital detection assert passes(stdchecker.simplecaps, "if you say I want", "as jy se ek wil") assert passes(stdchecker.simplecaps, "sentence. I want more.", "sin. Ek wil meer he.") assert passes(stdchecker.simplecaps, "Where are we? I can't see where we are going.", "Waar is ons? Ek kan nie sien waar ons gaan nie.") ## We should remove variables before checking stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("%", 1)])) assert passes(stdchecker.simplecaps, "Could not load %s", "A swi koteki ku panga %S") assert passes(stdchecker.simplecaps, "The element \"%S\" is not recognized.", "Elemente \"%S\" a yi tiveki.") stdchecker = checks.StandardChecker(checks.CheckerConfig(varmatches=[("&", ";")])) assert passes(stdchecker.simplecaps, "Determine how &brandShortName; connects to the Internet.", "Kuma &brandShortName; hlanganisa eka Internete.") ## If source is ALL CAPS then we should just check that target is also ALL CAPS assert passes(stdchecker.simplecaps, "COUPDAYS", "COUPMALANGA") # Just some that at times have failed but should always pass assert passes(stdchecker.simplecaps, "Create a query entering an SQL statement directly.", "Yakha sibuti singena SQL inkhomba yesitatimende.") ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.simplecaps, "SOLK (%PRODUCTNAME Link)", "SOLK (%PRODUCTNAME Thumanyo)") assert passes(ooochecker.simplecaps, "%STAROFFICE Image", "Tshifanyiso tsha %STAROFFICE") assert passes(stdchecker.simplecaps, "Flies, flies, everywhere! Ack!", u"Vlieë, oral vlieë! 
Jig!") def test_spellcheck(): """tests spell checking""" stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af")) assert passes(stdchecker.spellcheck, "Great trek", "Groot trek") assert fails(stdchecker.spellcheck, "Final deadline", "End of the road") # Bug 289: filters accelerators before spell checking stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&", targetlanguage="fi")) assert passes(stdchecker.spellcheck, "&Reload Frame", "P&äivitä kehys") # Ensure we don't check notranslatewords stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af")) assert fails(stdchecker.spellcheck, "Mozilla is wonderful", "Mozillaaa is wonderlik") # We should pass the test if the "error" occurs in the English assert passes(stdchecker.spellcheck, "Mozilla is wonderful", "Mozilla is wonderlik") stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="af", notranslatewords=["Mozilla"])) assert passes(stdchecker.spellcheck, "Mozilla is wonderful", "Mozilla is wonderlik") def test_startcaps(): """tests starting capitals""" stdchecker = checks.StandardChecker() assert passes(stdchecker.startcaps, "Find", "Vind") assert passes(stdchecker.startcaps, "find", "vind") assert fails(stdchecker.startcaps, "Find", "vind") assert fails(stdchecker.startcaps, "find", "Vind") assert passes(stdchecker.startcaps, "'", "'") assert passes(stdchecker.startcaps, "\\.,/?!`'\"[]{}()@#$%^&*_-;:<>Find", "\\.,/?!`'\"[]{}()@#$%^&*_-;:<>Vind") # With leading whitespace assert passes(stdchecker.startcaps, " Find", " Vind") assert passes(stdchecker.startcaps, " find", " vind") assert fails(stdchecker.startcaps, " Find", " vind") assert fails(stdchecker.startcaps, " find", " Vind") # Leading punctuation assert passes(stdchecker.startcaps, "'Find", "'Vind") assert passes(stdchecker.startcaps, "'find", "'vind") assert fails(stdchecker.startcaps, "'Find", "'vind") assert fails(stdchecker.startcaps, "'find", "'Vind") # Unicode assert passes(stdchecker.startcaps, "Find", u"Šind") assert passes(stdchecker.startcaps, "find", u"šind") assert fails(stdchecker.startcaps, "Find", u"šind") assert fails(stdchecker.startcaps, "find", u"Šind") # Unicode further down the Unicode tables assert passes(stdchecker.startcaps, "A text enclosed...", u"Ḽiṅwalwa ḽo katelwaho...") assert fails(stdchecker.startcaps, "A text enclosed...", u"ḽiṅwalwa ḽo katelwaho...") # Accelerators stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&")) assert passes(stdchecker.startcaps, "&Find", "Vi&nd") # Language specific stuff stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='af')) assert passes(stdchecker.startcaps, "A cow", "'n Koei") assert passes(stdchecker.startcaps, "A list of ", "'n Lys van ") # should pass: #assert passes(stdchecker.startcaps, "A 1k file", u"'n 1k-lêer") assert passes(stdchecker.startcaps, "'Do it'", "'Doen dit'") assert fails(stdchecker.startcaps, "'Closer than'", "'nader as'") assert passes(stdchecker.startcaps, "List", "Lys") assert passes(stdchecker.startcaps, "a cow", "'n koei") assert fails(stdchecker.startcaps, "a cow", "'n Koei") assert passes(stdchecker.startcaps, "(A cow)", "('n Koei)") assert fails(stdchecker.startcaps, "(a cow)", "('n Koei)") def test_startpunc(): """tests startpunc""" stdchecker = checks.StandardChecker() assert passes(stdchecker.startpunc, "<< Previous", "<< Correct") assert fails(stdchecker.startpunc, " << Previous", "Wrong") assert fails(stdchecker.startpunc, "Question", u"\u2026Wrong") assert 
passes(stdchecker.startpunc, "<fish>hello</fish> world", "world <fish>hello</fish>") # The inverted Spanish question mark should be accepted stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='es')) assert passes(stdchecker.startpunc, "Do you want to reload the file?", u"¿Quiere recargar el archivo?") # The Afrikaans indefinite article should be accepted stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='af')) assert passes(stdchecker.startpunc, "A human?", u"'n Mens?") def test_startwhitespace(): """tests startwhitespace""" stdchecker = checks.StandardChecker() assert passes(stdchecker.startwhitespace, "A setence.", "I'm correct.") assert fails(stdchecker.startwhitespace, " A setence.", "I'm incorrect.") def test_unchanged(): """tests unchanged entries""" stdchecker = checks.StandardChecker(checks.CheckerConfig(accelmarkers="&")) assert fails(stdchecker.unchanged, "Unchanged", "Unchanged") assert fails(stdchecker.unchanged, "&Unchanged", "Un&changed") assert passes(stdchecker.unchanged, "Unchanged", "Changed") assert passes(stdchecker.unchanged, "1234", "1234") assert passes(stdchecker.unchanged, "2×2", "2×2") # bug 178, description item 14 assert passes(stdchecker.unchanged, "I", "I") assert passes(stdchecker.unchanged, " ", " ") # bug 178, description item 5 assert passes(stdchecker.unchanged, "???", "???") # bug 178, description item 15 assert passes(stdchecker.unchanged, "&ACRONYM", "&ACRONYM") # bug 178, description item 7 assert passes(stdchecker.unchanged, "F1", "F1") # bug 178, description item 20 assert fails(stdchecker.unchanged, "Two words", "Two words") #TODO: this still fails # assert passes(stdchecker.unchanged, "NOMINAL", "NOMİNAL") gnomechecker = checks.GnomeChecker() assert fails(gnomechecker.unchanged, "Entity references, such as &amp; and &#169;", "Entity references, such as &amp; and &#169;") # Variable only and variable plus punctuation messages should be ignored mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.unchanged, "$ProgramName$", "$ProgramName$") assert passes(mozillachecker.unchanged, "$file$ : $dir$", "$file$ : $dir$") # bug 178, description item 13 assert fails(mozillachecker.unchanged, "$file$ in $dir$", "$file$ in $dir$") assert passes(mozillachecker.unchanged, "&brandShortName;", "&brandShortName;") # Don't translate words should be ignored stdchecker = checks.StandardChecker(checks.CheckerConfig(notranslatewords=["Mozilla"])) assert passes(stdchecker.unchanged, "Mozilla", "Mozilla") # bug 178, description item 10 # Don't fail unchanged if the entry is a dialogsize, quite plausible that you won't change it mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.unchanged, 'width: 12em;', 'width: 12em;') def test_untranslated(): """tests untranslated entries""" stdchecker = checks.StandardChecker() assert fails(stdchecker.untranslated, "I am untranslated", "") assert passes(stdchecker.untranslated, "I am translated", "Ek is vertaal") # KDE comments that make it into translations should not mask untranslated test assert fails(stdchecker.untranslated, "_: KDE comment\\n\nI am untranslated", "_: KDE comment\\n\n") def test_validchars(): """tests valid characters""" stdchecker = checks.StandardChecker(checks.CheckerConfig()) assert passes(stdchecker.validchars, "The check always passes if you don't specify chars", "Die toets sal altyd werk as jy nie karacters specifisier") stdchecker = 
checks.StandardChecker(checks.CheckerConfig(validchars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz')) assert passes(stdchecker.validchars, "This sentence contains valid characters", "Hierdie sin bevat ware karakters") assert fails(stdchecker.validchars, "Some unexpected characters", "©®°±÷¼½¾") stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars='⠁⠂⠃⠄⠅⠆⠇⠈⠉⠊⠋⠌⠍⠎⠏⠐⠑⠒⠓⠔⠕⠖⠗⠘⠙⠚⠛⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫⠬⠭⠮⠯⠰')) assert passes(stdchecker.validchars, "Our target language is all non-ascii", "⠁⠂⠃⠄⠆⠇⠈⠉⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫") assert fails(stdchecker.validchars, "Our target language is all non-ascii", "Some ascii⠁⠂⠃⠄⠆⠇⠈⠉⠜⠝⠞⠟⠠⠡⠢⠣⠤⠥⠦⠧⠨⠩⠪⠫") stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars=u'\u004c\u032d')) assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u004c\u032d") assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u1e3c") stdchecker = checks.StandardChecker(checks.CheckerConfig(validchars=u'\u1e3c')) assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u1e3c") assert passes(stdchecker.validchars, "This sentence contains valid chars", u"\u004c\u032d") def test_variables_kde(): """tests variables in KDE translations""" # KDE variables kdechecker = checks.KdeChecker() assert passes(kdechecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.") assert fails_serious(kdechecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.") def test_variables_gnome(): """tests variables in GNOME translations""" # GNOME variables gnomechecker = checks.GnomeChecker() assert passes(gnomechecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.") assert fails_serious(gnomechecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.") assert passes(gnomechecker.variables, "Save $(file)", "Stoor $(file)") assert fails_serious(gnomechecker.variables, "Save $(file)", "Stoor $(leer)") def test_variables_mozilla(): """tests variables in Mozilla translations""" # Mozilla variables mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandShortname; weergawe.") assert fails_serious(mozillachecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandKortnaam; weergawe.") assert passes(mozillachecker.variables, "Save %file%", "Stoor %file%") assert fails_serious(mozillachecker.variables, "Save %file%", "Stoor %leer%") assert passes(mozillachecker.variables, "Save $file$", "Stoor $file$") assert fails_serious(mozillachecker.variables, "Save $file$", "Stoor $leer$") assert passes(mozillachecker.variables, "%d files of type %s saved.", "%d leers van %s tipe gestoor.") assert fails_serious(mozillachecker.variables, "%d files of type %s saved.", "%s leers van %s tipe gestoor.") assert passes(mozillachecker.variables, "Save $file", "Stoor $file") assert fails_serious(mozillachecker.variables, "Save $file", "Stoor $leer") assert passes(mozillachecker.variables, "About $ProgramName$", "Oor $ProgramName$") assert fails_serious(mozillachecker.variables, "About $ProgramName$", "Oor $NaamVanProgam$") assert passes(mozillachecker.variables, "About $_CLICK", "Oor $_CLICK") assert fails_serious(mozillachecker.variables, "About $_CLICK", "Oor $_KLIK") assert passes(mozillachecker.variables, "About $_CLICK and more", "Oor $_CLICK en meer") assert fails_serious(mozillachecker.variables, "About $_CLICK and more", "Oor $_KLIK en meer") assert 
passes(mozillachecker.variables, "About $(^NameDA)", "Oor $(^NameDA)") assert fails_serious(mozillachecker.variables, "About $(^NameDA)", "Oor $(^NaamDA)") # Double variable problem assert fails_serious(mozillachecker.variables, "Create In &lt;&lt;", "Etsa ka Ho &lt;lt;") # Variables at the end of a sentence assert fails_serious(mozillachecker.variables, "...time you start &brandShortName;.", "...lekgetlo le latelang ha o qala &LebitsoKgutshwane la kgwebo;.") # Ensure that we can detect two variables of the same name with one faulty assert fails_serious(mozillachecker.variables, "&brandShortName; successfully downloaded and installed updates. You will have to restart &brandShortName; to complete the update.", "&brandShortName; ḽo dzhenisa na u longela khwinifhadzo zwavhuḓi. Ni ḓo tea u thoma hafhu &DzinaḼipfufhi ḽa pfungavhuṇe; u itela u fhedzisa khwinifha dzo.") # We must detect entities in their fullform, ie with fullstop in the middle. assert fails_serious(mozillachecker.variables, "Welcome to the &pluginWizard.title;", "Wamkelekile kwi&Sihloko Soncedo lwe-plugin;") # Variables that are missing in quotes should be detected assert fails_serious(mozillachecker.variables, "\"%S\" is an executable file.... Are you sure you want to launch \"%S\"?", ".... Uyaqiniseka ukuthi ufuna ukuqalisa I\"%S\"?") # False positive $ style variables assert passes(mozillachecker.variables, "for reporting $ProductShortName$ crash information", "okokubika ukwaziswa kokumosheka kwe-$ProductShortName$") # We shouldn't mask variables within variables. This should highlight &brandShortName as missing and &amp as extra assert fails_serious(mozillachecker.variables, "&brandShortName;", "&amp;brandShortName;") def test_variables_openoffice(): """tests variables in OpenOffice translations""" # OpenOffice.org variables ooochecker = checks.OpenOfficeChecker() assert passes(ooochecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandShortname; weergawe.") assert fails_serious(ooochecker.variables, "Use the &brandShortname; instance.", "Gebruik die &brandKortnaam; weergawe.") assert passes(ooochecker.variables, "Save %file%", "Stoor %file%") assert fails_serious(ooochecker.variables, "Save %file%", "Stoor %leer%") assert passes(ooochecker.variables, "Save %file", "Stoor %file") assert fails_serious(ooochecker.variables, "Save %file", "Stoor %leer") assert passes(ooochecker.variables, "Save %1", "Stoor %1") assert fails_serious(ooochecker.variables, "Save %1", "Stoor %2") assert passes(ooochecker.variables, "Save %", "Stoor %") assert fails_serious(ooochecker.variables, "Save %", "Stoor") assert passes(ooochecker.variables, "Save $(file)", "Stoor $(file)") assert fails_serious(ooochecker.variables, "Save $(file)", "Stoor $(leer)") assert passes(ooochecker.variables, "Save $file$", "Stoor $file$") assert fails_serious(ooochecker.variables, "Save $file$", "Stoor $leer$") assert passes(ooochecker.variables, "Save ${file}", "Stoor ${file}") assert fails_serious(ooochecker.variables, "Save ${file}", "Stoor ${leer}") assert passes(ooochecker.variables, "Save #file#", "Stoor #file#") assert fails_serious(ooochecker.variables, "Save #file#", "Stoor #leer#") assert passes(ooochecker.variables, "Save #1", "Stoor #1") assert fails_serious(ooochecker.variables, "Save #1", "Stoor #2") assert passes(ooochecker.variables, "Save #", "Stoor #") assert fails_serious(ooochecker.variables, "Save #", "Stoor") assert passes(ooochecker.variables, "Save ($file)", "Stoor ($file)") assert fails_serious(ooochecker.variables, "Save 
($file)", "Stoor ($leer)") assert passes(ooochecker.variables, "Save $[file]", "Stoor $[file]") assert fails_serious(ooochecker.variables, "Save $[file]", "Stoor $[leer]") assert passes(ooochecker.variables, "Save [file]", "Stoor [file]") assert fails_serious(ooochecker.variables, "Save [file]", "Stoor [leer]") assert passes(ooochecker.variables, "Save $file", "Stoor $file") assert fails_serious(ooochecker.variables, "Save $file", "Stoor $leer") assert passes(ooochecker.variables, "Use @EXTENSION@", "Gebruik @EXTENSION@") assert fails_serious(ooochecker.variables, "Use @EXTENSUION@", "Gebruik @UITBRUIDING@") # Same variable name twice assert fails_serious(ooochecker.variables, r"""Start %PROGRAMNAME% as %PROGRAMNAME%""", "Begin %PROGRAMNAME%") def test_variables_cclicense(): """Tests variables in Creative Commons translations.""" checker = checks.CCLicenseChecker() assert passes(checker.variables, "CC-GNU @license_code@.", "CC-GNU @license_code@.") assert fails_serious(checker.variables, "CC-GNU @license_code@.", "CC-GNU @lisensie_kode@.") assert passes(checker.variables, "Deed to the @license_name_full@", "Akte vir die @license_name_full@") assert fails_serious(checker.variables, "Deed to the @license_name_full@", "Akte vir die @volle_lisensie@") assert passes(checker.variables, "The @license_name_full@ is", "Die @license_name_full@ is") assert fails_serious(checker.variables, "The @license_name_full@ is", "Die @iiilicense_name_full@ is") assert fails_serious(checker.variables, "A @ccvar@", "'n @ccvertaaldeveranderlike@") def test_xmltags(): """tests xml tags""" stdchecker = checks.StandardChecker() assert fails(stdchecker.xmltags, "Do it <b>now</b>", "Doen dit <v>nou</v>") assert passes(stdchecker.xmltags, "Do it <b>now</b>", "Doen dit <b>nou</b>") assert passes(stdchecker.xmltags, "Click <img src=\"img.jpg\">here</img>", "Klik <img src=\"img.jpg\">hier</img>") assert fails(stdchecker.xmltags, "Click <img src=\"image.jpg\">here</img>", "Klik <img src=\"prent.jpg\">hier</img>") assert passes(stdchecker.xmltags, "Click <img src=\"img.jpg\" alt=\"picture\">here</img>", "Klik <img src=\"img.jpg\" alt=\"prentjie\">hier</img>") assert passes(stdchecker.xmltags, "Click <a title=\"tip\">here</a>", "Klik <a title=\"wenk\">hier</a>") assert passes(stdchecker.xmltags, "Click <div title=\"tip\">here</div>", "Klik <div title=\"wenk\">hier</div>") assert passes(stdchecker.xmltags, "Start with the &lt;start&gt; tag", "Begin met die &lt;begin&gt;") assert fails(stdchecker.xmltags, "Click <a href=\"page.html\">", "Klik <a hverw=\"page.html\">") assert passes(stdchecker.xmltags, "Click <a xml-lang=\"en\" href=\"page.html\">", "Klik <a xml-lang=\"af\" href=\"page.html\">") assert passes(stdchecker.xmltags, "Click <div lang=\"en\" dir=\"ltr\">", "Klik <div lang=\"ar\" dir=\"rtl\">") assert fails(stdchecker.xmltags, "Click <a href=\"page.html\" target=\"koei\">", "Klik <a href=\"page.html\">") assert fails(stdchecker.xmltags, "<b>Current Translation</b>", "<b>Traducción Actual:<b>") assert passes(stdchecker.xmltags, "<Error>", "<Fout>") assert fails(stdchecker.xmltags, "%d/%d translated\n(%d blank, %d fuzzy)", "<br>%d/%d μεταφρασμένα\n<br>(%d κενά, %d ασαφή)") assert fails(stdchecker.xmltags, '(and <a href="http://www.schoolforge.net/education-software" class="external">other open source software</a>)', '(en <a href="http://www.schoolforge.net/education-software" class="external">ander Vry Sagteware</a)') assert fails(stdchecker.xmltags, 'Because Tux Paint (and <a 
href="http://www.schoolforge.net/education-software" class="external">other open source software</a>) is free of cost and not limited in any way, a school can use it <i>today</i>, without waiting for procurement or a budget!', 'Omdat Tux Paint (en <a href="http://www.schoolforge.net/education-software" class="external">ander Vry Sagteware</a)gratis is en nie beperk is op enige manier nie, kan \'n skool dit vandag</i> gebruik sonder om te wag vir goedkeuring of \'n begroting!') assert fails(stdchecker.xmltags, "test <br />", "test <br>") assert fails(stdchecker.xmltags, "test <img src='foo.jpg'/ >", "test <img src='foo.jpg' >") frchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage="fr")) assert fails(frchecker.xmltags, "Click <a href=\"page.html\">", "Klik <a href=« page.html »>") def test_ooxmltags(): """Tests the xml tags in OpenOffice.org translations for quality as done in gsicheck""" ooochecker = checks.OpenOfficeChecker() #some attributes can be changed or removed assert fails(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"b.jpg\" width=\"500\">") assert passes(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"a.jpg\" width=\"500\">") assert passes(ooochecker.xmltags, "<img src=\"a.jpg\" width=\"400\">", "<img src=\"a.jpg\">") assert passes(ooochecker.xmltags, "<img src=\"a.jpg\">", "<img src=\"a.jpg\" width=\"400\">") assert passes(ooochecker.xmltags, "<alt xml-lang=\"ab\">text</alt>", "<alt>teks</alt>") assert passes(ooochecker.xmltags, "<ahelp visibility=\"visible\">bla</ahelp>", "<ahelp>blu</ahelp>") assert fails(ooochecker.xmltags, "<ahelp visibility=\"visible\">bla</ahelp>", "<ahelp visibility=\"invisible\">blu</ahelp>") assert fails(ooochecker.xmltags, "<ahelp visibility=\"invisible\">bla</ahelp>", "<ahelp>blu</ahelp>") #some attributes can be changed, but not removed assert passes(ooochecker.xmltags, "<link name=\"John\">", "<link name=\"Jan\">") assert fails(ooochecker.xmltags, "<link name=\"John\">", "<link naam=\"Jan\">") # Reported OOo error ## Bug 1910 assert fails(ooochecker.xmltags, u"""<variable id="FehlendesElement">In a database file window, click the <emph>Queries</emph> icon, then choose <emph>Edit - Edit</emph>. When referenced fields no longer exist, you see this dialog</variable>""", u"""<variable id="FehlendesElement">Dans une fenêtre de fichier de base de données, cliquez sur l'icône <emph>Requêtes</emph>, puis choisissez <emph>Éditer - Éditer</emp>. 
Lorsque les champs de référence n'existent plus, vous voyez cette boîte de dialogue</variable>""") assert fails(ooochecker.xmltags, "<variable> <emph></emph> <emph></emph> </variable>", "<variable> <emph></emph> <emph></emp> </variable>") def test_functions(): """tests to see that functions() are not translated""" stdchecker = checks.StandardChecker() assert fails(stdchecker.functions, "blah rgb() blah", "blee brg() blee") assert passes(stdchecker.functions, "blah rgb() blah", "blee rgb() blee") assert fails(stdchecker.functions, "percentage in rgb()", "phesenthe kha brg()") assert passes(stdchecker.functions, "percentage in rgb()", "phesenthe kha rgb()") assert fails(stdchecker.functions, "rgb() in percentage", "brg() kha phesenthe") assert passes(stdchecker.functions, "rgb() in percentage", "rgb() kha phesenthe") assert fails(stdchecker.functions, "blah string.rgb() blah", "blee bleeb.rgb() blee") assert passes(stdchecker.functions, "blah string.rgb() blah", "blee string.rgb() blee") assert passes(stdchecker.functions, "or domain().", "domain() verwag.") assert passes(stdchecker.functions, "Expected url(), url-prefix(), or domain().", "url(), url-prefix() of domain() verwag.") def test_emails(): """tests to see that email addresses are not translated""" stdchecker = checks.StandardChecker() assert fails(stdchecker.emails, "blah [email protected] blah", "blee [email protected] blee") assert passes(stdchecker.emails, "blah [email protected] blah", "blee [email protected] blee") def test_urls(): """tests to see that URLs are not translated""" stdchecker = checks.StandardChecker() assert fails(stdchecker.urls, "blah http://translate.org.za blah", "blee http://vertaal.org.za blee") assert passes(stdchecker.urls, "blah http://translate.org.za blah", "blee http://translate.org.za blee") def test_simpleplurals(): """test that we can find English style plural(s)""" stdchecker = checks.StandardChecker() assert passes(stdchecker.simpleplurals, "computer(s)", "rekenaar(s)") assert fails(stdchecker.simpleplurals, "plural(s)", "meervoud(e)") assert fails(stdchecker.simpleplurals, "Ungroup Metafile(s)...", "Kuvhanganyululani Metafaela(dzi)...") # Test a language that doesn't use plurals stdchecker = checks.StandardChecker(checks.CheckerConfig(targetlanguage='vi')) assert passes(stdchecker.simpleplurals, "computer(s)", u"Máy tính") assert fails(stdchecker.simpleplurals, "computer(s)", u"Máy tính(s)") def test_nplurals(): """Test that we can find the wrong number of plural forms. 
Note that this test uses a UnitChecker, not a translation checker.""" checker = checks.StandardUnitChecker() unit = po.pounit("") unit.source = ["%d file", "%d files"] unit.target = [u"%d lêer", u"%d lêers"] assert checker.nplurals(unit) checker = checks.StandardUnitChecker(checks.CheckerConfig(targetlanguage='af')) unit.source = "%d files" unit.target = "%d lêer" assert checker.nplurals(unit) unit.source = ["%d file", "%d files"] unit.target = [u"%d lêer", u"%d lêers"] assert checker.nplurals(unit) unit.source = ["%d file", "%d files"] unit.target = [u"%d lêer", u"%d lêers", u"%d lêeeeers"] assert not checker.nplurals(unit) unit.source = ["%d file", "%d files"] unit.target = [u"%d lêer"] assert not checker.nplurals(unit) checker = checks.StandardUnitChecker(checks.CheckerConfig(targetlanguage='km')) unit.source = "%d files" unit.target = "%d ឯកសារ" assert checker.nplurals(unit) unit.source = ["%d file", "%d files"] unit.target = [u"%d ឯកសារ"] assert checker.nplurals(unit) unit.source = ["%d file", "%d files"] unit.target = [u"%d ឯកសារ", u"%d lêers"] assert not checker.nplurals(unit) def test_credits(): """tests credits""" stdchecker = checks.StandardChecker() assert passes(stdchecker.credits, "File", "iFayile") assert passes(stdchecker.credits, "&File", "&Fayile") assert passes(stdchecker.credits, "translator-credits", "Ekke, ekke!") assert passes(stdchecker.credits, "Your names", "Ekke, ekke!") assert passes(stdchecker.credits, "ROLES_OF_TRANSLATORS", "Ekke, ekke!") kdechecker = checks.KdeChecker() assert passes(kdechecker.credits, "File", "iFayile") assert passes(kdechecker.credits, "&File", "&Fayile") assert passes(kdechecker.credits, "translator-credits", "Ekke, ekke!") assert fails(kdechecker.credits, "Your names", "Ekke, ekke!") assert fails(kdechecker.credits, "ROLES_OF_TRANSLATORS", "Ekke, ekke!") gnomechecker = checks.GnomeChecker() assert passes(gnomechecker.credits, "File", "iFayile") assert passes(gnomechecker.credits, "&File", "&Fayile") assert fails(gnomechecker.credits, "translator-credits", "Ekke, ekke!") assert passes(gnomechecker.credits, "Your names", "Ekke, ekke!") assert passes(gnomechecker.credits, "ROLES_OF_TRANSLATORS", "Ekke, ekke!") def test_gconf(): """test GNOME gconf errors""" gnomechecker = checks.GnomeChecker() # Let's cheat a bit and prepare the checker as the run_filters() method # would do by adding locations needed by the gconf test gnomechecker.locations = [] assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"') assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"') gnomechecker.locations = ['file.schemas.in.h:24'] assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"') assert fails(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"') # redo the same, but with the new location comment: gnomechecker.locations = ['file.gschema.xml.in.in.h:24'] assert passes(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_setting"') assert fails(gnomechecker.gconf, 'Blah "gconf_setting"', 'Bleh "gconf_steling"') def test_hassuggestion(): """test that hassuggestion() works""" checker = checks.StandardUnitChecker() po_store = po.pofile() po_store.addsourceunit("koeie") assert checker.hassuggestion(po_store.units[-1]) xliff_store = xliff.xlifffile.parsestring(''' <xliff version='1.2' xmlns='urn:oasis:names:tc:xliff:document:1.2'> <file original='hello.txt' source-language='en' target-language='fr' datatype='plaintext'> <body> <trans-unit id='hi'> <source>Hello 
world</source> <target>Bonjour le monde</target> <alt-trans> <target xml:lang='es'>Hola mundo</target> </alt-trans> </trans-unit> </body> </file> </xliff> ''') assert not checker.hassuggestion(xliff_store.units[0]) def test_dialogsizes(): """test Mozilla dialog sizes""" mozillachecker = checks.MozillaChecker() assert passes(mozillachecker.dialogsizes, 'width: 12em;', 'width: 12em;') assert passes(mozillachecker.dialogsizes, 'width: 12em; height: 36em', 'width: 12em; height: 36em') assert fails(mozillachecker.dialogsizes, 'height: 12em;', 'hoogde: 12em;') assert passes(mozillachecker.dialogsizes, 'height: 12em;', 'height: 24px;') assert fails(mozillachecker.dialogsizes, 'height: 12em;', 'height: 24xx;') assert fails(mozillachecker.dialogsizes, 'height: 12.5em;', 'height: 12,5em;')
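# ---------------------------------------------------------------------------
# Editorial addition, not part of the original test suite: a minimal sketch of
# how the checks exercised above might be driven directly.  It relies only on
# the convention this file's passes()/fails() helpers depend on -- a checker
# method returns a true value when the unit is acceptable and raises
# checks.FilterFailure (or its subclass checks.SeriousFilterFailure) when it
# is not -- and it assumes the module-level `checks` import used throughout
# this file.  demo_run_checks() and its sample data are hypothetical.
def demo_run_checks():
    """Run a few standard checks on sample source/target pairs and print any failures."""
    stdchecker = checks.StandardChecker()
    samples = [
        # The target drops the trailing newline, so the newlines check should fail.
        ("A file\n", "'n Leer"),
        # Source and target agree on these properties, so all three checks should pass.
        ("Save the file.", "Stoor die leer."),
    ]
    for source, target in samples:
        for checkname in ("newlines", "doublespacing", "endpunc"):
            checkmethod = getattr(stdchecker, checkname)
            try:
                checkmethod(source, target)
            except checks.FilterFailure as failure:
                print("%s failed for %r -> %r: %s" % (checkname, source, target, failure))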
rdio/translate-toolkit
filters/test_checks.py
Python
gpl-2.0
65,604
""" This page is in the table of contents. Skeinlayer is an analyze viewer to display each layer of a gcode file. The skeinlayer manual page is at: http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Skeinlayer Skeinlayer is derived from Nophead's preview script. The extruded lines are in the resistor colors red, orange, yellow, green, blue, purple & brown. When the extruder is off, the travel line is grey. Skeinlayer is useful for a detailed view of the extrusion, skeiniso is better to see the orientation of the shape. To get an initial overview of the skein, when the skeinlayer display window appears, click the Soar button (double right arrow button beside the layer field). ==Operation== The default 'Activate Skeinlayer' checkbox is on. When it is on, the functions described below will work when called from the skeinforge toolchain, when it is off, the functions will not be called from the toolchain. The functions will still be called, whether or not the 'Activate Skeinlayer' checkbox is on, when skeinlayer is run directly. Skeinlayer has trouble separating the layers when it reads gcode without comments. ==Settings== ===Animation=== ====Animation Line Quickening==== Default is one. The quickness of the tool animation over the quickness of the actual tool. ====Animation Slide Show Rate==== Default is two layers per second. The rate, in layers per second, at which the layer changes when the soar or dive button is pressed.. ===Draw Arrows=== Default is on. When selected, arrows will be drawn at the end of each line segment. ===Export Menu=== When the submenu in the export menu item in the file menu is clicked, an export canvas dialog will be displayed, which can export the canvas to a file. ===Go Around Extruder Off Travel=== Default is off. When selected, the display will include the travel when the extruder is off, which means it will include the nozzle wipe path if any. ===Layers=== ====Layer==== Default is zero. On the display window, the Up button increases the 'Layer' by one, and the Down button decreases the layer by one. When the layer displayed in the layer spin box is changed then <Return> is hit, the layer shown will be set to the spin box, to a mimimum of zero and to a maximum of the highest index layer.The Soar button increases the layer at the 'Animation Slide Show Rate', and the Dive (double left arrow button beside the layer field) button decreases the layer at the slide show rate. ====Layer Extra Span==== Default is zero. The viewer will draw the layers in the range including the 'Layer' index and the 'Layer' index plus the 'Layer Extra Span'. If the 'Layer Extra Span' is negative, the layers viewed will start at the 'Layer' index, plus the 'Layer Extra Span', and go up to and include the 'Layer' index. If the 'Layer Extra Span' is zero, only the 'Layer' index layer will be displayed. If the 'Layer Extra Span' is positive, the layers viewed will start at the 'Layer' index, and go up to and include the 'Layer' index plus the 'Layer Extra Span'. ===Line=== Default is zero. The index of the selected line on the layer that is highlighted when the 'Display Line' mouse tool is chosen. The line spin box up button increases the 'Line' by one. If the line index of the layer goes over the index of the last line, the layer index will be increased by one and the new line index will be zero. The down button decreases the line index by one. 
If the line index goes below the index of the first line, the layer index will be decreased by one and the new line index will be at the last line. When the line displayed in the line field is changed then <Return> is hit, the line shown will be set to the line field, to a minimum of zero and to a maximum of the highest index line. The Soar button increases the line at the speed at which the extruder would move, times the 'Animation Line Quickening' ratio, and the Dive (double left arrow button beside the line field) button decreases the line at the animation line quickening ratio. ===Mouse Mode=== Default is 'Display Line'. The mouse tool can be changed from the 'Mouse Mode' menu button or picture button. The mouse tools listen to the arrow keys when the canvas has the focus. Clicking in the canvas gives the canvas the focus, and when the canvas has the focus a thick black border is drawn around the canvas. ====Display Line==== The 'Display Line' tool will highlight the selected line, and display the file line count, counting from one, and the gcode line itself. When the 'Display Line' tool is active, clicking the canvas will select the closest line to the mouse click. ====Viewpoint Move==== The 'Viewpoint Move' tool will move the viewpoint in the xy plane when the mouse is clicked and dragged on the canvas. ===Numeric Pointer=== Default is on. When selected, the distance along the ruler of the arrow pointers will be drawn next to the pointers. ===Scale=== Default is ten. The scale setting is the scale of the image in pixels per millimeter; the higher the number, the greater the size of the display. The zoom in mouse tool will zoom in the display at the point where the mouse was clicked, increasing the scale by a factor of two. The zoom out tool will zoom out the display at the point where the mouse was clicked, decreasing the scale by a factor of two. ===Screen Inset=== ====Screen Horizontal Inset==== Default is one hundred. The "Screen Horizontal Inset" determines how much the canvas will be inset in the horizontal direction from the edge of the screen; the higher the number, the more it will be inset and the smaller it will be. ====Screen Vertical Inset==== Default is two hundred and twenty. The "Screen Vertical Inset" determines how much the canvas will be inset in the vertical direction from the edge of the screen; the higher the number, the more it will be inset and the smaller it will be. ===Width=== The width of each type of thread and of each axis can be changed. If the width is set to zero, the thread will not be visible. ====Width of Extrusion Thread==== Default is three. The "Width of Extrusion Thread" sets the width of the extrusion threads. ====Width of Selection Thread==== Default is six. The "Width of Selection Thread" sets the width of the selected line. ====Width of Travel Thread==== Default is one. The "Width of Travel Thread" sets the width of the grey extruder off travel threads. ==Icons== The dive, soar and zoom icons are from Mark James' Silk icon set 1.3 at: http://www.famfamfam.com/lab/icons/silk/ ==Gcodes== An explanation of the gcodes is at: http://reprap.org/bin/view/Main/Arduino_GCode_Interpreter and at: http://reprap.org/bin/view/Main/MCodeReference A gcode example is at: http://forums.reprap.org/file.php?12,file=565 ==Examples== Below are examples of skeinlayer being used. These examples are run in a terminal in the folder which contains Screw Holder_penultimate.gcode and skeinlayer.py. > python skeinlayer.py This brings up the skeinlayer dialog. 
> python skeinlayer.py Screw Holder_penultimate.gcode This brings up the skeinlayer viewer to view each layer of a gcode file. """ from __future__ import absolute_import #Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module. import __init__ from fabmetheus_utilities.vector3 import Vector3 from fabmetheus_utilities import archive from fabmetheus_utilities import euclidean from fabmetheus_utilities import gcodec from fabmetheus_utilities import settings from skeinforge_application.skeinforge_plugins.analyze_plugins.analyze_utilities import display_line from skeinforge_application.skeinforge_plugins.analyze_plugins.analyze_utilities import tableau from skeinforge_application.skeinforge_plugins.analyze_plugins.analyze_utilities import view_move from skeinforge_application.skeinforge_utilities import skeinforge_polyfile from skeinforge_application.skeinforge_utilities import skeinforge_profile import os import sys __author__ = 'Enrique Perez ([email protected])' __date__ = '$Date: 2008/21/04 $' __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html' def getNewRepository(): 'Get new repository.' return SkeinlayerRepository() def getRankIndex( rulingSeparationWidthMillimeters, screenOrdinate ): "Get rank index." return int( round( screenOrdinate / rulingSeparationWidthMillimeters ) ) def getWindowAnalyzeFile(fileName): "Display a gcode file in a skeinlayer window." gcodeText = archive.getFileText(fileName) return getWindowAnalyzeFileGivenText(fileName, gcodeText) def getWindowAnalyzeFileGivenText( fileName, gcodeText, repository=None): "Display a gcode file in a skeinlayer window given the text." if gcodeText == '': return None if repository == None: repository = settings.getReadRepository( SkeinlayerRepository() ) skeinWindow = getWindowGivenTextRepository( fileName, gcodeText, repository ) skeinWindow.updateDeiconify() return skeinWindow def getWindowGivenTextRepository( fileName, gcodeText, repository ): "Display a gcode file in a skeinlayer window given the text and settings." skein = SkeinlayerSkein() skein.parseGcode( fileName, gcodeText, repository ) return SkeinWindow( repository, skein ) def writeOutput(fileName, fileNamePenultimate, fileNameSuffix, filePenultimateWritten, gcodeText=''): "Display a skeinlayered gcode file for a skeinforge gcode file, if 'Activate Skeinlayer' is selected." try: import Tkinter except: try: import tkinter as Tkinter except: print('Warning, skeinlayer will do nothing because Tkinter is not installed.') return repository = settings.getReadRepository( SkeinlayerRepository() ) if repository.activateSkeinlayer.value: gcodeText = archive.getTextIfEmpty( fileNameSuffix, gcodeText ) return getWindowAnalyzeFileGivenText( fileNameSuffix, gcodeText, repository ) class SkeinlayerRepository( tableau.TableauRepository ): "A class to handle the skeinlayer settings." def __init__(self): "Set the default settings, execute title & settings fileName." 
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.analyze_plugins.skeinlayer.html', self) self.baseNameSynonym = 'skeinview.csv' self.fileNameInput = settings.FileNameInput().getFromFileName( [ ('Gcode text files', '*.gcode') ], 'Open File for Skeinlayer', self, '') self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Skeinlayer') self.activateSkeinlayer = settings.BooleanSetting().getFromValue('Activate Skeinlayer', self, True ) self.addAnimation() self.drawArrows = settings.BooleanSetting().getFromValue('Draw Arrows', self, True ) self.goAroundExtruderOffTravel = settings.BooleanSetting().getFromValue('Go Around Extruder Off Travel', self, False ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Layers -', self ) self.layer = settings.IntSpinNotOnMenu().getSingleIncrementFromValue( 0, 'Layer (index):', self, 912345678, 0 ) self.layerExtraSpan = settings.IntSpinUpdate().getSingleIncrementFromValue( - 3, 'Layer Extra Span (integer):', self, 3, 0 ) settings.LabelSeparator().getFromRepository(self) self.line = settings.IntSpinNotOnMenu().getSingleIncrementFromValue( 0, 'Line (index):', self, 912345678, 0 ) self.mouseMode = settings.MenuButtonDisplay().getFromName('Mouse Mode:', self ) self.displayLine = settings.MenuRadio().getFromMenuButtonDisplay( self.mouseMode, 'Display Line', self, True ) self.viewMove = settings.MenuRadio().getFromMenuButtonDisplay( self.mouseMode, 'View Move', self, False ) self.addScaleScreenSlide() self.showPosition = settings.BooleanSetting().getFromValue('Show Position', self, True ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Width -', self ) self.widthOfExtrusionThread = settings.IntSpinUpdate().getSingleIncrementFromValue( 0, 'Width of Extrusion Thread (pixels):', self, 5, 3 ) self.widthOfSelectionThread = settings.IntSpinUpdate().getSingleIncrementFromValue( 0, 'Width of Selection Thread (pixels):', self, 10, 6 ) self.widthOfTravelThread = settings.IntSpinUpdate().getSingleIncrementFromValue( 0, 'Width of Travel Thread (pixels):', self, 5, 1 ) self.executeTitle = 'Skeinlayer' def execute(self): "Write button has been clicked." fileNames = skeinforge_polyfile.getFileOrGcodeDirectory( self.fileNameInput.value, self.fileNameInput.wasCancelled ) for fileName in fileNames: getWindowAnalyzeFile(fileName) class SkeinlayerSkein: "A class to write a get a scalable vector graphics text for a gcode skein." def __init__(self): 'Initialize.' self.extrusionNumber = 0 self.feedRateMinute = 960.1 self.isThereALayerStartWord = False self.layerCount = settings.LayerCount() self.oldZ = - 999987654321.0 self.skeinPane = None self.skeinPanes = [] def addToPath( self, line, location ): "Add a point to travel and maybe extrusion." 
if self.oldLocation == None: return colorName = 'gray' locationComplex = location.dropAxis() oldLocationComplex = self.oldLocation.dropAxis() begin = self.getScreenCoordinates( oldLocationComplex ) end = self.getScreenCoordinates( locationComplex ) if self.extruderActive: colorName = self.colorNames[ self.extrusionNumber % len( self.colorNames ) ] displayString = '%s %s' % ( self.lineIndex + 1, line ) tagString = 'colored_line_index: %s %s' % ( len( self.skeinPane ), len( self.skeinPanes ) - 1 ) coloredLine = tableau.ColoredLine( begin, colorName, displayString, end, tagString ) coloredLine.isExtrusionThread = self.extruderActive self.skeinPane.append( coloredLine ) def getModelCoordinates( self, screenCoordinates ): "Get the model coordinates." modelCoordinates = ( screenCoordinates + self.marginCornerLow ) / self.scale return complex( modelCoordinates.real, self.cornerImaginaryTotal - modelCoordinates.imag ) def getScreenCoordinates( self, pointComplex ): "Get the screen coordinates." pointComplex = complex( pointComplex.real, self.cornerImaginaryTotal - pointComplex.imag ) return self.scale * pointComplex - self.marginCornerLow def initializeActiveLocation(self): "Set variables to default." self.extruderActive = False self.oldLocation = None def linearCorner( self, splitLine ): "Update the bounding corners." location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine) if self.extruderActive or self.repository.goAroundExtruderOffTravel.value: self.cornerMaximum.maximize(location) self.cornerMinimum.minimize(location) self.oldLocation = location def linearMove( self, line, location ): "Get statistics for a linear move." if self.skeinPane != None: self.addToPath(line, location) def parseCorner(self, line): "Parse a gcode line and use the location to update the bounding corners." splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line) if len(splitLine) < 1: return firstWord = splitLine[0] if firstWord == 'G1': self.linearCorner(splitLine) elif firstWord == 'M101': self.extruderActive = True elif firstWord == 'M103': self.extruderActive = False def parseGcode( self, fileName, gcodeText, repository ): "Parse gcode text and store the vector output." 
self.fileName = fileName self.gcodeText = gcodeText self.repository = repository self.initializeActiveLocation() self.cornerMaximum = Vector3(-987654321.0, -987654321.0, -987654321.0) self.cornerMinimum = Vector3(987654321.0, 987654321.0, 987654321.0) self.lines = archive.getTextLines(gcodeText) self.isThereALayerStartWord = (gcodec.getFirstWordIndexReverse('(<layer>', self.lines, 1) > -1) self.parseInitialization() for line in self.lines[self.lineIndex :]: self.parseCorner(line) self.cornerMaximumComplex = self.cornerMaximum.dropAxis() self.cornerMinimumComplex = self.cornerMinimum.dropAxis() self.scale = repository.scale.value self.scaleCornerHigh = self.scale * self.cornerMaximumComplex self.scaleCornerLow = self.scale * self.cornerMinimumComplex self.cornerImaginaryTotal = self.cornerMaximum.y + self.cornerMinimum.y self.margin = complex( 10.0, 10.0 ) self.marginCornerHigh = self.scaleCornerHigh + self.margin self.marginCornerLow = self.scaleCornerLow - self.margin self.screenSize = self.marginCornerHigh - self.marginCornerLow self.initializeActiveLocation() self.colorNames = ['brown', 'red', 'orange', 'yellow', 'green', 'blue', 'purple'] for self.lineIndex in xrange(self.lineIndex, len(self.lines)): line = self.lines[self.lineIndex] self.parseLine(line) def parseInitialization(self): 'Parse gcode initialization and store the parameters.' for self.lineIndex in xrange(len(self.lines)): line = self.lines[self.lineIndex] splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line) firstWord = gcodec.getFirstWord(splitLine) if firstWord == '(</extruderInitialization>)': return elif firstWord == '(<operatingFeedRatePerSecond>': self.feedRateMinute = 60.0 * float(splitLine[1]) self.lineIndex = 0 def parseLine(self, line): "Parse a gcode line and add it to the vector output." 
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line) if len(splitLine) < 1: return firstWord = splitLine[0] if tableau.getIsLayerStart(firstWord, self, splitLine): self.extrusionNumber = 0 self.layerCount.printProgressIncrement('skeinlayer') self.skeinPane = [] self.skeinPanes.append( self.skeinPane ) if firstWord == 'G1': location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine) self.linearMove(line, location) self.oldLocation = location elif firstWord == 'M101': self.extruderActive = True self.extrusionNumber += 1 elif firstWord == 'M103': self.extruderActive = False if firstWord == 'G2' or firstWord == 'G3': relativeLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine) relativeLocation.z = 0.0 location = self.oldLocation + relativeLocation self.linearMove(line, location) self.oldLocation = location class SkeinWindow( tableau.TableauWindow ): def __init__(self, repository, skein): "Initialize the skein window.setWindowNewMouseTool" self.addCanvasMenuRootScrollSkein(repository, skein, '_skeinlayer', 'Skeinlayer') horizontalRulerBoundingBox = (0, 0, int( skein.screenSize.real ), self.rulingExtent) self.horizontalRulerCanvas = settings.Tkinter.Canvas(self.root, width = self.canvasWidth, height = self.rulingExtent, scrollregion=horizontalRulerBoundingBox) self.horizontalRulerCanvas.grid(row=1, column=2, columnspan=96, sticky=settings.Tkinter.E+settings.Tkinter.W) self.horizontalRulerCanvas['xscrollcommand'] = self.xScrollbar.set verticalRulerBoundingBox = (0, 0, self.rulingExtent, int(skein.screenSize.imag)) self.verticalRulerCanvas = settings.Tkinter.Canvas(self.root, width=self.rulingExtent, height=self.canvasHeight, scrollregion=verticalRulerBoundingBox) self.verticalRulerCanvas.grid(row=2, rowspan=96, column=1, sticky=settings.Tkinter.N+settings.Tkinter.S) self.verticalRulerCanvas['yscrollcommand'] = self.yScrollbar.set self.xStringVar = settings.Tkinter.StringVar(self.root) self.xLabel = settings.Tkinter.Label(self.root, textvariable=self.xStringVar) self.xLabel.grid(row=0, column=3, sticky=settings.Tkinter.W) self.yStringVar = settings.Tkinter.StringVar(self.root) self.yLabel = settings.Tkinter.Label(self.root, textvariable=self.yStringVar) self.yLabel.grid(row=0, column=4, sticky=settings.Tkinter.W) self.setWindowNewMouseTool(display_line.getNewMouseTool, repository.displayLine) self.setWindowNewMouseTool(view_move.getNewMouseTool, repository.viewMove) repository.showPosition.setUpdateFunction(self.setWindowToDisplaySaveUpdate) repository.widthOfExtrusionThread.setUpdateFunction(self.setWindowToDisplaySaveUpdate) self.addMouseToolsBind() self.createRulers() def addHorizontalRulerRuling( self, xMillimeters ): "Add a ruling to the horizontal ruler." 
xPixel = self.skein.getScreenCoordinates( complex( xMillimeters, 0.0 ) ).real self.createVerticalLine( 0.0, xPixel ) self.horizontalRulerCanvas.create_text( xPixel + 2, 0, anchor = settings.Tkinter.NW, text = self.getRoundedRulingText( 1, xMillimeters ) ) cumulativeDistance = xMillimeters self.createVerticalLine( self.rulingExtentTiny, self.skein.getScreenCoordinates( complex( xMillimeters + self.separationWidthMillimetersTenth, 0.0 ) ).real ) for subRulingIndex in xrange(4): cumulativeDistance += self.separationWidthMillimetersFifth self.createVerticalLine( self.rulingExtentShort, self.skein.getScreenCoordinates( complex( cumulativeDistance, 0.0 ) ).real ) self.createVerticalLine( self.rulingExtentTiny, self.skein.getScreenCoordinates( complex( cumulativeDistance + self.separationWidthMillimetersTenth, 0.0 ) ).real ) def addVerticalRulerRuling( self, yMillimeters ): "Add a ruling to the vertical ruler." fontHeight = 12 yPixel = self.skein.getScreenCoordinates( complex( 0.0, yMillimeters ) ).imag self.createHorizontalLine( 0.0, yPixel ) yPixel += 2 roundedRulingText = self.getRoundedRulingText( 1, yMillimeters ) effectiveRulingTextLength = len( roundedRulingText ) if roundedRulingText.find('.') != - 1: effectiveRulingTextLength -= 1 cumulativeDistance = yMillimeters self.createHorizontalLine( self.rulingExtentTiny, self.skein.getScreenCoordinates( complex( 0.0, yMillimeters + self.separationWidthMillimetersTenth ) ).imag ) for subRulingIndex in xrange(4): cumulativeDistance += self.separationWidthMillimetersFifth self.createHorizontalLine( self.rulingExtentShort, self.skein.getScreenCoordinates( complex( 0.0, cumulativeDistance ) ).imag ) self.createHorizontalLine( self.rulingExtentTiny, self.skein.getScreenCoordinates( complex( 0.0, cumulativeDistance + self.separationWidthMillimetersTenth ) ).imag ) if effectiveRulingTextLength < 4: self.verticalRulerCanvas.create_text( 0, yPixel, anchor = settings.Tkinter.NW, text = roundedRulingText ) return for character in roundedRulingText: if character == '.': yPixel -= fontHeight * 2 / 3 self.verticalRulerCanvas.create_text( 0, yPixel, anchor = settings.Tkinter.NW, text = character ) yPixel += fontHeight def createHorizontalLine( self, begin, yPixel ): "Create a horizontal line for the horizontal ruler." self.verticalRulerCanvas.create_line( begin, yPixel, self.rulingExtent, yPixel, fill = 'black') def createRulers(self): "Create the rulers.." 
self.rulingExtentShort = 0.382 * self.rulingExtent self.rulingExtentTiny = 0.764 * self.rulingExtent self.rulingExtentPointer = 0.5 * ( self.rulingExtentShort + self.rulingExtentTiny ) self.rulingPointerRadius = self.rulingExtent - self.rulingExtentPointer self.textBoxHeight = int( 0.8 * self.rulingExtent ) self.textBoxWidth = int( 2.5 * self.rulingExtent ) self.separationWidthMillimetersFifth = 0.2 * self.rulingSeparationWidthMillimeters self.separationWidthMillimetersTenth = 0.1 * self.rulingSeparationWidthMillimeters rulingSeparationWidthPixels = self.getRulingSeparationWidthPixels( self.rank ) marginOverScale = self.skein.margin / self.skein.scale cornerMaximumMargin = self.skein.cornerMaximumComplex + marginOverScale cornerMinimumMargin = self.skein.cornerMinimumComplex - marginOverScale xRankIndexHigh = getRankIndex( self.rulingSeparationWidthMillimeters, cornerMaximumMargin.real ) xRankIndexLow = getRankIndex( self.rulingSeparationWidthMillimeters, cornerMinimumMargin.real ) for xRankIndex in xrange( xRankIndexLow - 2, xRankIndexHigh + 2 ): # 1 is enough, 2 is to be on the safe side self.addHorizontalRulerRuling( xRankIndex * self.rulingSeparationWidthMillimeters ) yRankIndexHigh = getRankIndex( self.rulingSeparationWidthMillimeters, cornerMaximumMargin.imag ) yRankIndexLow = getRankIndex( self.rulingSeparationWidthMillimeters, cornerMinimumMargin.imag ) for yRankIndex in xrange( yRankIndexLow - 2, yRankIndexHigh + 2 ): # 1 is enough, 2 is to be on the safe side self.addVerticalRulerRuling( yRankIndex * self.rulingSeparationWidthMillimeters ) def createVerticalLine( self, begin, xPixel ): "Create a vertical line for the horizontal ruler." self.horizontalRulerCanvas.create_line( xPixel, begin, xPixel, self.rulingExtent, fill = 'black') def getColoredLines(self): "Get the colored lines from the skein pane." if len(self.skeinPanes) == 0: return [] return self.skeinPanes[self.repository.layer.value] def getCopy(self): "Get a copy of this window." return SkeinWindow(self.repository, self.skein) def getCopyWithNewSkein(self): "Get a copy of this window with a new skein." return getWindowGivenTextRepository( self.skein.fileName, self.skein.gcodeText, self.repository ) def getDrawnColoredLine( self, coloredLine, tags, width ): "Get the drawn colored line." return self.canvas.create_line( coloredLine.begin.real, coloredLine.begin.imag, coloredLine.end.real, coloredLine.end.imag, fill = coloredLine.colorName, arrow = self.arrowType, tags = tags, width = width ) def getDrawnColoredLineIfThick( self, coloredLine, width ): "Get the drawn colored line if it has a positive thickness." if width > 0: return self.getDrawnColoredLine( coloredLine, coloredLine.tagString, width ) def getDrawnSelectedColoredLine(self, coloredLine): "Get the drawn selected colored line." return self.getDrawnColoredLine(coloredLine, 'selection_line', self.repository.widthOfSelectionThread.value) def motion(self, event): "The mouse moved." 
self.mouseTool.motion(event) xString = '' yString = '' x = self.canvas.canvasx( event.x ) y = self.canvas.canvasy( event.y ) self.horizontalRulerCanvas.delete('pointer') self.horizontalRulerCanvas.create_polygon( x - self.rulingPointerRadius, self.rulingExtentPointer, x + self.rulingPointerRadius, self.rulingExtentPointer, x, self.rulingExtent, tag = 'pointer') self.verticalRulerCanvas.delete('pointer') self.verticalRulerCanvas.create_polygon( self.rulingExtentPointer, y - self.rulingPointerRadius, self.rulingExtentPointer, y + self.rulingPointerRadius, self.rulingExtent, y, tag = 'pointer') if self.repository.showPosition.value: motionCoordinate = complex(x, y) modelCoordinates = self.skein.getModelCoordinates( motionCoordinate ) roundedXText = self.getRoundedRulingText(3, modelCoordinates.real) roundedYText = self.getRoundedRulingText(3, modelCoordinates.imag) xString = 'X: ' + roundedXText yString = 'Y: ' + roundedYText self.xStringVar.set(xString) self.yStringVar.set(yString) def qqqmotion(self, event): "The mouse moved." self.mouseTool.motion(event) x = self.canvas.canvasx( event.x ) y = self.canvas.canvasy( event.y ) self.horizontalRulerCanvas.delete('pointer') self.horizontalRulerCanvas.create_polygon( x - self.rulingPointerRadius, self.rulingExtentPointer, x + self.rulingPointerRadius, self.rulingExtentPointer, x, self.rulingExtent, tag = 'pointer') self.verticalRulerCanvas.delete('pointer') self.verticalRulerCanvas.create_polygon( self.rulingExtentPointer, y - self.rulingPointerRadius, self.rulingExtentPointer, y + self.rulingPointerRadius, self.rulingExtent, y, tag = 'pointer') if not self.repository.numericPointer.value: return motionCoordinate = complex(x, y) modelCoordinates = self.skein.getModelCoordinates( motionCoordinate ) roundedXText = self.getRoundedRulingText( 3, modelCoordinates.real ) yStart = self.canvas.canvasy( 0 ) self.canvas.create_rectangle( x - 2, yStart, x + self.textBoxWidth, yStart + self.textBoxHeight + 5, fill = self.canvas['background'], tag = 'pointer') self.canvas.create_text( x, yStart + 5, anchor = settings.Tkinter.NW, tag = 'pointer', text = roundedXText ) roundedYText = self.getRoundedRulingText( 3, modelCoordinates.imag ) xStart = self.canvas.canvasx( 0 ) self.canvas.create_rectangle( xStart, y - 2, xStart + self.textBoxWidth + 5, y + self.textBoxHeight, fill = self.canvas['background'], tag = 'pointer') self.canvas.create_text( xStart + 5, y, anchor = settings.Tkinter.NW, tag = 'pointer', text = roundedYText ) xString = '' xString = 'X: ' + roundedXText self.xStringVar.set(xString) def relayXview( self, *args ): "Relay xview changes." self.canvas.xview( *args ) self.horizontalRulerCanvas.xview( *args ) def relayYview( self, *args ): "Relay yview changes." self.canvas.yview( *args ) self.verticalRulerCanvas.yview( *args ) def update(self): "Update the window." if len( self.skeinPanes ) < 1: return self.limitIndexSetArrowMouseDeleteCanvas() for coloredLines in self.getUpdateSkeinPanes(): for coloredLine in coloredLines: if coloredLine.isExtrusionThread: self.getDrawnColoredLineIfThick( coloredLine, self.repository.widthOfExtrusionThread.value ) else: self.getDrawnColoredLineIfThick( coloredLine, self.repository.widthOfTravelThread.value ) self.setDisplayLayerIndex() def main(): "Display the skeinlayer dialog." if len(sys.argv) > 1: settings.startMainLoopFromWindow(getWindowAnalyzeFile(' '.join(sys.argv[1 :]))) else: settings.startMainLoopFromConstructor(getNewRepository()) if __name__ == "__main__": main()
jetty840/ReplicatorG
skein_engines/skeinforge-50/skeinforge_application/skeinforge_plugins/analyze_plugins/skeinlayer.py
Python
gpl-2.0
29,441
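# --------------------------------------------------------------------
# Hedged sketch (not part of skeinlayer.py above): the screen/model
# coordinate mapping that SkeinlayerSkein.getScreenCoordinates() and
# getModelCoordinates() implement, restated as standalone functions so
# the y-axis flip and margin handling are easier to follow. The scale,
# margin_corner_low and corner_imaginary_total defaults are
# illustrative values, not taken from the source.

def get_screen_coordinates(point, scale=10.0,
                           margin_corner_low=complex(-10.0, -10.0),
                           corner_imaginary_total=40.0):
    "Map a model-space complex point to screen pixels, flipping y."
    flipped = complex(point.real, corner_imaginary_total - point.imag)
    return scale * flipped - margin_corner_low

def get_model_coordinates(screen_point, scale=10.0,
                          margin_corner_low=complex(-10.0, -10.0),
                          corner_imaginary_total=40.0):
    "Invert get_screen_coordinates: screen pixels back to model space."
    model = (screen_point + margin_corner_low) / scale
    return complex(model.real, corner_imaginary_total - model.imag)

if __name__ == '__main__':
    point = complex(12.5, 7.25)
    roundtrip = get_model_coordinates(get_screen_coordinates(point))
    assert abs(roundtrip - point) < 1e-9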
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2016 CERN. # # Invenio is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the # Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """Persistent identifier minters.""" from __future__ import absolute_import, print_function from .providers import CDSRecordIdProvider def recid_minter(record_uuid, data): """Mint record identifiers.""" assert 'recid' not in data provider = CDSRecordIdProvider.create( object_type='rec', object_uuid=record_uuid) data['recid'] = int(provider.pid.pid_value) return provider.pid
drjova/cds-demosite
cds/modules/records/minters.py
Python
gpl-2.0
1,381
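# --------------------------------------------------------------------
# Hedged sketch (not part of minters.py above): the minter contract
# that recid_minter implements, demonstrated with a toy in-memory
# provider. _CounterProvider is a stand-in for CDSRecordIdProvider,
# which needs a database session; everything below is illustrative.

import itertools
import uuid

class _CounterPID(object):
    def __init__(self, pid_value):
        self.pid_value = pid_value

class _CounterProvider(object):
    _ids = itertools.count(1)

    def __init__(self, pid):
        self.pid = pid

    @classmethod
    def create(cls, object_type=None, object_uuid=None):
        # Real providers persist the PID; this one just counts.
        return cls(_CounterPID(str(next(cls._ids))))

def toy_recid_minter(record_uuid, data):
    """Same shape as recid_minter: inject data['recid'] exactly once."""
    assert 'recid' not in data
    provider = _CounterProvider.create(object_type='rec',
                                       object_uuid=record_uuid)
    data['recid'] = int(provider.pid.pid_value)
    return provider.pid

record = {}
toy_recid_minter(uuid.uuid4(), record)
assert record['recid'] == 1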
# # My first attempt at python # calibrate accelerometer # import re import scipy from scipy import optimize from scipy import linalg from pylab import * # # parse the log # def read_log(ac_id, filename, sensor): f = open(filename, 'r') pattern = re.compile("(\S+) "+ac_id+" IMU_"+sensor+"_RAW (\S+) (\S+) (\S+)") list_meas = [] while 1: line = f.readline().strip() if line == '': break m=re.match(pattern, line) if m: list_meas.append([float(m.group(2)), float(m.group(3)), float(m.group(4))]) return scipy.array(list_meas) # # select only non-noisy data # def filter_meas(meas, window_size, noise_threshold): filtered_meas = [] filtered_idx = [] for i in range(window_size,len(meas)-window_size): noise = meas[i-window_size:i+window_size,:].std(axis=0) if linalg.norm(noise) < noise_threshold: filtered_meas.append(meas[i,:]) filtered_idx.append(i) return scipy.array(filtered_meas), filtered_idx # # initial boundary based calibration # def get_min_max_guess(meas, scale): max_meas = meas[:,:].max(axis=0) min_meas = meas[:,:].min(axis=0) n = (max_meas + min_meas) / 2 sf = 2*scale/(max_meas - min_meas) return scipy.array([n[0], n[1], n[2], sf[0], sf[1], sf[2]]) # # scale the set of measurements # def scale_measurements(meas, p): l_comp = []; l_norm = []; for m in meas[:,]: sm = (m - p[0:3])*p[3:6] l_comp.append(sm) l_norm.append(linalg.norm(sm)) return scipy.array(l_comp), scipy.array(l_norm) # # print xml for airframe file # def print_xml(p, sensor, res): print "" print "<define name=\""+sensor+"_X_NEUTRAL\" value=\""+str(int(round(p[0])))+"\"/>" print "<define name=\""+sensor+"_Y_NEUTRAL\" value=\""+str(int(round(p[1])))+"\"/>" print "<define name=\""+sensor+"_Z_NEUTRAL\" value=\""+str(int(round(p[2])))+"\"/>" print "<define name=\""+sensor+"_X_SENS\" value=\""+str(p[3]*2**res)+"\" integer=\"16\"/>" print "<define name=\""+sensor+"_Y_SENS\" value=\""+str(p[4]*2**res)+"\" integer=\"16\"/>" print "<define name=\""+sensor+"_Z_SENS\" value=\""+str(p[5]*2**res)+"\" integer=\"16\"/>" filename = 'log_accel_booz2_a2' ac_id = "151" if 1: sensor = "ACCEL" sensor_ref = 9.81 sensor_res = 10 noise_window = 20; noise_threshold = 40; else: sensor = "MAG" sensor_ref = 1. 
sensor_res = 11 noise_window = 10; noise_threshold = 1000; print "reading file "+filename+" for aircraft "+ac_id+" and sensor "+sensor measurements = read_log(ac_id, filename, sensor) print "found "+str(len(measurements))+" records" flt_meas, flt_idx = filter_meas(measurements, noise_window, noise_threshold) print "remaining "+str(len(flt_meas))+" after low pass" p0 = get_min_max_guess(flt_meas, sensor_ref) cp0, np0 = scale_measurements(flt_meas, p0) print "initial guess : "+str(np0.mean())+" "+str(np0.std()) print p0 def err_func(p,meas,y): cp, np = scale_measurements(meas, p) err = y*scipy.ones(len(meas)) - np return err p1, success = optimize.leastsq(err_func, p0[:], args=(flt_meas, sensor_ref)) cp1, np1 = scale_measurements(flt_meas, p1) print "optimized guess : "+str(np1.mean())+" "+str(np1.std()) print p1 print_xml(p1, sensor, sensor_res) subplot(3,1,1) plot(measurements[:,0]) plot(measurements[:,1]) plot(measurements[:,2]) plot(flt_idx, flt_meas[:,0], 'ro') plot(flt_idx, flt_meas[:,1], 'ro') plot(flt_idx, flt_meas[:,2], 'ro') subplot(3,2,3) plot(cp0[:,0]); plot(cp0[:,1]); plot(cp0[:,2]); plot(-sensor_ref*scipy.ones(len(flt_meas))); plot(sensor_ref*scipy.ones(len(flt_meas))); subplot(3,2,4) plot(np0); plot(sensor_ref*scipy.ones(len(flt_meas))); subplot(3,2,5) plot(cp1[:,0]); plot(cp1[:,1]); plot(cp1[:,2]); plot(-sensor_ref*scipy.ones(len(flt_meas))); plot(sensor_ref*scipy.ones(len(flt_meas))); subplot(3,2,6) plot(np1); plot(sensor_ref*scipy.ones(len(flt_meas))); show();
pchickey/paparazzi-linux-release
sw/tools/calibration/calib.py
Python
gpl-2.0
3,989
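# --------------------------------------------------------------------
# Hedged sketch (not part of calib.py above): the calibration model the
# script fits with optimize.leastsq, demonstrated on synthetic data in
# modern numpy. p[0:3] are per-axis neutrals and p[3:6] per-axis scale
# factors, chosen so the norm of every scaled measurement matches the
# reference (9.81 for an accelerometer at rest). The "true" parameters
# and the generated measurements below are invented for demonstration.

import numpy as np
from scipy import optimize

def scale_measurements(meas, p):
    return (meas - p[0:3]) * p[3:6]

def err_func(p, meas, ref):
    return ref - np.linalg.norm(scale_measurements(meas, p), axis=1)

rng = np.random.default_rng(0)
true_p = np.array([512.0, 498.0, 505.0, 0.019, 0.021, 0.020])
directions = rng.normal(size=(200, 3))
directions /= np.linalg.norm(directions, axis=1, keepdims=True)
raw = directions * 9.81 / true_p[3:6] + true_p[0:3]

p0 = np.array([500.0, 500.0, 500.0, 0.02, 0.02, 0.02])
p1, _ = optimize.leastsq(err_func, p0, args=(raw, 9.81))
# p1 should be close to true_p, with residuals near zero.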
#! /usr/bin/env python import os from setuptools import setup readme = os.path.join(os.path.dirname(__file__), 'README.md') setup(name = 'bottleneck', version = '0.1.0', description = 'performance report generator for OpenMP programs in GNU/Linux', long_description = open(readme).read(), author = 'Andres More', author_email='[email protected]', url='https://github.com/moreandres/bottleneck.git', packages= [ 'bottleneck' ], entry_points = { 'console_scripts': [ 'bt = bottleneck.bottleneck:main' ] }, data_files = [ ( 'config', [ 'cfg/bt.cfg', 'cfg/bt.tex' ] ) ], classifiers = [ 'Development Status :: 1 - Planning', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Operating System :: POSIX', 'Natural Language :: English', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Software Development :: Quality Assurance', 'Topic :: System :: Benchmark', 'Topic :: Utilities', ], zip_safe = False, test_suite = 'tests', # include_package_data = True, # install_requires=[ 'numpy', 'scipy', 'matplotlib' ], )
moreandres/bottleneck
setup.py
Python
gpl-2.0
1,379
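# --------------------------------------------------------------------
# Hedged sketch (not part of setup.py above): what the
# 'bt = bottleneck.bottleneck:main' console_scripts entry point
# resolves to. pip generates a wrapper roughly equivalent to this;
# the import only works once the bottleneck package is installed.

import sys

def _bt_wrapper():
    from bottleneck.bottleneck import main  # module:function taken from the entry point string
    sys.exit(main())

if __name__ == '__main__':
    _bt_wrapper()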
import os from PyQt4.QtCore import pyqtSignal from PyQt4.QtGui import QComboBox, QDoubleValidator from configmanager.editorwidgets.core import ConfigWidget from configmanager.editorwidgets.uifiles.ui_numberwidget_config import Ui_Form class NumberWidgetConfig(Ui_Form, ConfigWidget): description = 'Number entry widget' def __init__(self, parent=None): super(NumberWidgetConfig, self).__init__(parent) self.setupUi(self) self.minEdit.setValidator( QDoubleValidator() ) self.maxEdit.setValidator( QDoubleValidator() ) self.minEdit.textChanged.connect(self.widgetchanged) self.maxEdit.textChanged.connect(self.widgetchanged) self.prefixEdit.textChanged.connect(self.widgetchanged) self.suffixEdit.textChanged.connect(self.widgetchanged) def getconfig(self): config = {} config['max'] = self.maxEdit.text() config['min'] = self.minEdit.text() config['prefix'] = self.prefixEdit.text() config['suffix'] = self.suffixEdit.text() return config def setconfig(self, config): self.blockSignals(True) max = config.get('max', '') min = config.get('min', '') prefix = config.get('prefix', '') suffix = config.get('suffix', '') self.minEdit.setText(min) self.maxEdit.setText(max) self.prefixEdit.setText(prefix) self.suffixEdit.setText(suffix) self.blockSignals(False)
HeatherHillers/RoamMac
src/configmanager/editorwidgets/numberwidget.py
Python
gpl-2.0
1,472
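# --------------------------------------------------------------------
# Hedged sketch (not part of numberwidget.py above): the config dict
# round trip NumberWidgetConfig implements, with a plain-Python
# stand-in for the four QLineEdit fields so it runs without PyQt4.
# Missing keys default to the empty string, exactly as in setconfig().

class _ToyNumberConfig(object):
    FIELDS = ('max', 'min', 'prefix', 'suffix')

    def __init__(self):
        self._values = dict.fromkeys(self.FIELDS, '')

    def setconfig(self, config):
        for name in self.FIELDS:
            self._values[name] = config.get(name, '')

    def getconfig(self):
        return dict(self._values)

widget = _ToyNumberConfig()
widget.setconfig({'min': '0', 'max': '100', 'prefix': '$'})
assert widget.getconfig() == {'max': '100', 'min': '0',
                              'prefix': '$', 'suffix': ''}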
# -*- coding: utf-8 -*- # This file is part of the xc2424scan package # Copyright (C) 2005 Mathieu Bouchard <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ This is the main widget of the xc2424scan application This widget is self contained and can be included in any other Qt4 application. """ __all__ = ["ScanWidget"] from PyQt4.QtCore import QDir, QObject, QRect, Qt, SIGNAL from PyQt4.QtGui import QWidget, QFileDialog, QListWidgetItem, QPixmap, \ QIcon, QMessageBox, QInputDialog, QLineEdit, QPainter, \ QProgressDialog, QMessageBox, QSizePolicy, QDialog, \ QLabel, QVBoxLayout, QHBoxLayout, QSpacerItem, \ QSizePolicy, QPushButton import os from xc2424scan import config from xc2424scan.threadedscanlib import ThreadedXeroxC2424 from xc2424scan.scanlib import ProtectedError, SocketError, NoPreviewError from xc2424scan.ui.widgets.scanwidgetbase import Ui_ScanWidgetBase class ProgressFullDialog(QProgressDialog): def __init__(self, parent = None): QProgressDialog.__init__(self, parent) self.setWindowTitle(_("Downloading")) # Top level fixed size dialog self.setWindowModality(Qt.WindowModal) # Do not close when reaching 100% self.setAutoClose(False) self.setAutoReset(False) self.__nbr_pages_ = -1 def setNbrPages(self, nbr_pages): self.__nbr_pages_ = nbr_pages def newpage(self, current_page, file_size): if self.isVisible(): # Set progress value to 0 and range to file size self.setValue(0) self.setRange(0, file_size) # Set label text if self.__nbr_pages_ == 1: self.setLabelText(_("Getting page %d") % current_page) else: self.setLabelText(_("Getting page %d of %d") % \ (current_page, self.__nbr_pages_)) def progress(self, received_size): if self.isVisible(): self.setValue(self.value() + received_size) class ProgressDialog(QDialog): def __init__(self, parent = None): QDialog.__init__(self, parent) self.setWindowTitle(_("Downloading")) # Top level fixed size dialog self.setWindowModality(Qt.WindowModal) self.__page_ = QLabel(self) self.__progress_ = QLabel(self) self.__cancel_ = QPushButton(self) self.__downloaded_ = 0 self.__nbr_pages_ = 0 vboxlayout = QVBoxLayout(self) # Page status labellayout = QHBoxLayout() labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)) labellayout.addWidget(self.__page_) labellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)) vboxlayout.addLayout(labellayout) # Progress status progresslayout = QHBoxLayout() progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)) progresslayout.addWidget(self.__progress_) progresslayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)) vboxlayout.addLayout(progresslayout) # Cancel button cancellayout = QHBoxLayout() cancellayout.addItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)) cancellayout.addWidget(self.__cancel_) 
vboxlayout.addLayout(cancellayout) self.__cancel_.setDefault(True) self.__cancel_.setText("Cancel") QObject.connect(self.__cancel_, SIGNAL("clicked()"), self.__ui_progress_canceled_) QObject.connect(self, SIGNAL("rejected()"), self.__ui_progress_canceled_) def __ui_progress_canceled_(self): self.emit(SIGNAL("canceled()")) def setLabelText(self, text): self.__page_.setText(text) def setValue(self, value): self.__downloaded_ = value self.progress(0) def setNbrPages(self, nbr_pages): self.__nbr_pages_ = nbr_pages def newpage(self, current_page, file_size = None): if self.isVisible(): # Set progress value to 0 self.setValue(0) # Set label text if self.__nbr_pages_ == 0: # Only happens when getting a pdf file self.__page_.setText(_("Getting file")) elif self.__nbr_pages_ == 1: self.__page_.setText(_("Getting page %d") % current_page) else: self.__page_.setText(_("Getting page %d of %d") % \ (current_page, self.__nbr_pages_)) def progress(self, received_size): self.__downloaded_ += received_size if self.isVisible(): size = self.__downloaded_ / 1024 if size > 1024: size = float(size) / 1024 self.__progress_.setText("Received %.3f mb" % size) else: self.__progress_.setText("Received %d kb" % size) class ProgressWrapper(QObject): def __init__(self, parent = None): QObject.__init__(self) self.__progress_full_ = ProgressFullDialog(parent) self.__progress_ = ProgressDialog(parent) self.__current_ = None QObject.connect(self.__progress_full_, SIGNAL("canceled()"), self.__ui_progress_canceled_) QObject.connect(self.__progress_, SIGNAL("canceled()"), self.__ui_progress_canceled_) def show(self, format, nbr_pages): if format in ["tiff", "bmp"]: self.__current_ = self.__progress_full_ else: self.__current_ = self.__progress_ self.__current_.setLabelText(_("Waiting for transfer to begin")) self.__current_.setValue(0) self.__current_.setNbrPages(nbr_pages) self.__current_.show() def __ui_progress_canceled_(self): self.emit(SIGNAL("canceled()")) def newpage(self, current_page, file_size): if self.__current_ is not None: self.__current_.newpage(current_page, file_size) def progress(self, received_size): if self.__current_ is not None: self.__current_.progress(received_size) def isVisible(self): if self.__current_ is not None: return self.__current_.isVisible() else: return False def hide(self): if self.__current_ is not None: self.__current_.hide() class ScanWidget(QWidget): """The main scanning widget""" def __init__(self, parent = None): """Create a new scanning widget @param parent: The parent widget @type parent: QWidget """ QWidget.__init__(self, parent) self.__basewidget_ = Ui_ScanWidgetBase() self.__basewidget_.setupUi(self) # The threaded scanner object self.__scanner_ = ThreadedXeroxC2424() # List of files available on the scanner self.__scanned_files_ = None # Last folder visited self.__old_folder_ = "Public" # Progress dialog self.__progress_ = ProgressWrapper(self) # UI: Buttons QObject.connect(self.__basewidget_.refresh, SIGNAL("clicked()"), self.__ui_refresh_clicked_) QObject.connect(self.__basewidget_.delete, SIGNAL("clicked()"), self.__ui_delete_clicked_) QObject.connect(self.__basewidget_.save, SIGNAL("clicked()"), self.__ui_save_clicked_) # UI: An option has been modified QObject.connect(self.__basewidget_.folder, SIGNAL("activated(const QString&)"), self.__ui_folder_currentChanged_) # UI: List widget QObject.connect(self.__basewidget_.imageList, SIGNAL("currentTextChanged(const QString&)"), self.__ui_imageList_currentChanged_) QObject.connect(self.__basewidget_.format, 
SIGNAL("currentIndexChanged(const QString&)"), self.__ui_format_currentChanged_) # Signals emited from threads QObject.connect(self.__scanner_, SIGNAL("foldersList()"), self.__foldersListReceived_) QObject.connect(self.__scanner_, SIGNAL("filesList()"), self.__filesListReceived_) QObject.connect(self.__scanner_, SIGNAL("folderSet(const QString&)"), self.__folderSetReceived_) QObject.connect(self.__scanner_, SIGNAL("folderProtected(const QString&)"), self.__folderProtectedReceived_) QObject.connect(self.__scanner_, SIGNAL("fileReceived(const QString&)"), self.__fileReceived_) QObject.connect(self.__scanner_, SIGNAL("previewReceived(const QString&)"), self.__previewReceived_) QObject.connect(self.__scanner_, SIGNAL("allPreviewReceived()"), self.__allPreviewReceived_) QObject.connect(self.__scanner_, SIGNAL("fileDeleted(const QString&)"), self.__fileDeletedReceived_) QObject.connect(self.__scanner_, SIGNAL("connectedToScanner()"), self.__connectedToScannerReceived_) QObject.connect(self.__scanner_, SIGNAL("scanlibError(const QString&)"), self.__scanlibErrorReceived) QObject.connect(self.__scanner_, SIGNAL("newPage(int, int)"), self.__progress_.newpage) QObject.connect(self.__scanner_, SIGNAL("progress(int)"), self.__progress_.progress) # Progress dialog QObject.connect(self.__progress_, SIGNAL("canceled()"), self.__ui_progress_canceled_) self.__lock_() # # Methods connected to thread signals # def __scanlibErrorReceived(self, text): """Called when there is an error in the scan library @param text: The text of the error @type text: str """ if self.__progress_.isVisible(): self.__progress_.hide() QMessageBox.critical(self, "Critical error", text) if self.__scanner_.connected: self.__unlock_() def __connectedToScannerReceived_(self): """Called when we are connected to a new scanner""" # Show the public directory if config.DEBUG_GUI: print "<-- Connected to scanner" # Clear the list of files and request the available folders self.__basewidget_.imageList.clear() self.__scanner_.getFolders() def __folderSetReceived_(self, folder): """Called when we have changed the current folder @param folder: The folder name @type folder: str """ if config.DEBUG_GUI: print "<-- Folder has been set:", str(folder) # Save old folder self.__old_folder_ = str(folder) # Refresh the contents of the folder self.__refreshPreviews_() def __folderProtectedReceived_(self, folder): """Called when we are trying to access a protected folder @param folder: The folder name @type folder: str """ if config.DEBUG_GUI: print "<-- Protected folder:", folder folder = str(folder) password, result = QInputDialog.getText(self, "Accessing a protected folder", "Please enter the password for the protected " \ "folder %s" % folder, QLineEdit.Password) if result is True: self.__scanner_.setFolder(folder, str(password)) else: folder_index = self.__basewidget_.folder.findText(self.__old_folder_) self.__basewidget_.folder.setCurrentIndex(folder_index) self.__unlock_() def __fileReceived_(self, filename): """Called when a file tranfert has been successfully completed @param filename: The file name @type filename: str """ if config.DEBUG_GUI: print "<-- File transfer finished for:", filename # Reset the progress dialog and unlock the widget self.__progress_.hide() self.__unlock_() def __allPreviewReceived_(self): """Received when we have received all previews""" if config.DEBUG_GUI: print "<-- All previews received" self.__unlock_() self.__basewidget_.imageList.setCurrentItem(self.__basewidget_.imageList.item(0)) def __previewReceived_(self, 
filename): """Received when a preview has been received @param filename: The filename of the preview @type filename: str """ if config.DEBUG_GUI: print "<-- Preview received:", filename filename = str(filename) preview = self.__scanner_.previews[filename] del self.__scanner_.previews[filename] # Create the pixmap item pixmap = QPixmap() if preview == None: pixmap.load(config.NO_PREVIEW_FILENAME) else: pixmap.loadFromData(preview) # Add a black border self.__add_black_border_(pixmap) # Add the new icon to the list items = self.__basewidget_.imageList.findItems(filename, Qt.MatchExactly) items[0].setIcon(QIcon(pixmap)) def __fileDeletedReceived_(self, filename): """Called when a file has been deleted @param filename: The name of the deleted file @type filename: str """ if config.DEBUG_GUI: print "<-- File deleted:", filename # Remove the deleted item from the list items = self.__basewidget_.imageList.findItems(filename, Qt.MatchExactly) item = self.__basewidget_.imageList.takeItem(self.__basewidget_.imageList.row(items[0])) del item # Unlock the widget self.__unlock_() def __foldersListReceived_(self): """Called when the folders listing has arrived""" if config.DEBUG_GUI: print "<-- Received folder listing" # Add the folders to the list of folders for folder in self.__scanner_.folders: self.__basewidget_.folder.addItem(folder) # Refresh the files of the current folder self.__refreshPreviews_() def __filesListReceived_(self): """Called when the files listing of the current folder has arrived""" if config.DEBUG_GUI: print "<-- Received files listing" self.__scanned_files_ = self.__scanner_.files # Add the files to the list and request their previews if len(self.__scanned_files_) != 0: # Sort by filename (wich is also by date) filenames = self.__scanned_files_.keys() filenames.sort() # Create the Waiting for preview pixmap pixmap = QPixmap() pixmap.load(config.WAITING_PREVIEW_FILENAME) self.__add_black_border_(pixmap) # Add the files to the list for filename in filenames: self.__basewidget_.imageList.addItem(QListWidgetItem(QIcon(pixmap), filename)) # Request the previews if config.DEBUG_GUI: print "--> Requesting previews" self.__scanner_.getPreviews(filenames) else: self.__unlock_() # # Methods connected to the UI # def __ui_refresh_clicked_(self): """Called when the user activates the refresh button This method clears the files list and request the current files list again """ # Refresh the folder contents self.__refreshPreviews_() def __ui_delete_clicked_(self): """Called when the user activates the delete button This method delete the current selected file """ if config.DEBUG_GUI: print "--> Deleting file" filename = self.currentFilename() if filename is not None: result = QMessageBox.question(self, "Confirmation of file deletion", "Do you really want to delete the file %s " \ "from the scanner?" 
% filename, QMessageBox.Yes, QMessageBox.No) if result == QMessageBox.Yes: self.__scanner_.deleteFile(filename) else: print "WARNING: No file selected (save), this should not happen" def __ui_save_clicked_(self): """Called when the user activates the save button This method ask for a filename and download the selected pages """ if config.DEBUG_GUI: print "--> Saving file" filename = self.currentFilename() # Check if a file has been selected if filename is not None: # Ask for filename save_filter = self.__get_format_filter_() default_save_filename = os.path.join(str(QDir.homePath()), "%s.%s" % (os.path.splitext(filename)[0], self.getFormat())) save_filename = str(QFileDialog.getSaveFileName(self, "Saving scanned file", default_save_filename, save_filter)) if save_filename != "": self.__lock_() # Add file format if not specified if os.path.splitext(save_filename)[1] == "": save_filename += ".%s" % self.getFormat() # Call the saving thread method format = self.getFormat() pages = self.getPages() dpi = self.getDpi() if dpi == None: dpi = self.__scanned_files_[filename]["dpi"] samplesize = self.getSamplesize() self.__scanner_.getFile(filename, save_filename, pages, format, dpi, samplesize) # Show the progress dialog self.__progress_.show(format, len(pages)) else: print "WARNING: No file selected (save), this should not happen" def __ui_folder_currentChanged_(self, folder): """Called when the current folder has been changed If the user has selected another directory, we need to list the contents of this directory """ if config.DEBUG_GUI: print "--> Changing folder" folder = str(folder) if folder != self.__old_folder_: self.__lock_() # Request the new folder self.__scanner_.setFolder(folder) def __ui_imageList_currentChanged_(self, filename): """Called when the user select an image in the image list @param filename: The file name of the selected file @type filename: str """ filename = str(filename) if config.DEBUG_GUI: print "--- Selected file: \"%s\"" % filename if filename == "": self.__basewidget_.info_nbPages.setText("") self.__basewidget_.info_dpi.setText("") self.__basewidget_.info_resolution.setText("") self.__clearOptions_() self.__basewidget_.delete.setEnabled(False) self.__basewidget_.save.setEnabled(False) self.__basewidget_.format.setEnabled(False) self.__basewidget_.page.setEnabled(False) self.__basewidget_.resolution.setEnabled(False) self.__basewidget_.color.setEnabled(False) else: file_infos = self.__scanned_files_[filename] # Show basic informations self.__basewidget_.info_nbPages.setText(str(file_infos["nbpages"])) self.__basewidget_.info_dpi.setText("%dx%d dpi" % \ (file_infos["dpi"][0], file_infos["dpi"][1])) self.__basewidget_.info_resolution.setText("%dx%d" % \ (file_infos["resolution"][0], file_infos["resolution"][1])) # Create file options self.__clearOptions_() # Add pages pages = [] if file_infos["nbpages"] > 1: pages.append("all") pages.extend([str(x) for x in range(1, file_infos["nbpages"] + 1)]) self.__basewidget_.page.addItems(pages) # Add dpi dpis = ["max"] dpis.extend(["%dx%d" % (x, x) for x in [100, 200, 300, 400, 600] if x <= file_infos["dpi"][0]]) self.__basewidget_.resolution.addItems(dpis) # Add samplesize if file_infos["samplesize"] == 24: self.__basewidget_.color.addItem("Color") if file_infos["samplesize"] >= 8: self.__basewidget_.color.addItem("Grayscale") self.__basewidget_.color.addItem("Black & White") # Enable buttons self.__basewidget_.delete.setEnabled(True) self.__basewidget_.save.setEnabled(True) # Enable options 
self.__basewidget_.format.setEnabled(True) self.__basewidget_.resolution.setEnabled(True) self.__basewidget_.color.setEnabled(True) self.__ui_format_currentChanged_(self.__basewidget_.format.currentText()) def __ui_format_currentChanged_(self, format): """Called when file format has changed If the file format is pdf, we cannot select a page. If it is not pdf, we need to enable the page selector """ format = str(format).lower() if format == "pdf": self.__basewidget_.page.setCurrentIndex(0) self.__basewidget_.page.setEnabled(False) else: self.__basewidget_.page.setEnabled(True) def __ui_progress_canceled_(self): """Called when the user click on the progress cancel button""" if config.DEBUG_GUI: print "--- Canceled saving" self.__scanner_.stop() # # Other methods # def __get_format_filter_(self): format = self.getFormat() if format == "tiff": filter = _("TIFF images (*.tif *.tiff)") elif format == "gif": filter = _("GIF images (*.gif)") elif format == "jpeg": filter = _("JPEG images (*.jpg *.jpeg)") elif format == "bmp": filter = _("BMP images (*.bmp)") elif format == "pdf": filter = _("PDF files (*.pdf)") else: filter = "" return filter + ";;All files (*)" def __add_black_border_(self, pixmap): """Add a black border around a pixmap @param pixmap: The pixmap @type pixmap: QPixmap """ painter = QPainter() painter.begin(pixmap) painter.setPen(Qt.black); painter.drawRect(QRect(0, 0, pixmap.width() - 1, pixmap.height() - 1)) painter.end() def __refreshPreviews_(self): if config.DEBUG_GUI: print "--> Refreshing previews" self.__basewidget_.imageList.clear() self.__lock_() self.__scanner_.getFilesList() def __clearOptions_(self): self.__basewidget_.page.clear() self.__basewidget_.resolution.clear() self.__basewidget_.color.clear() def __lock_(self): self.__basewidget_.refresh.setEnabled(False) self.__basewidget_.folder.setEnabled(False) self.__basewidget_.imageList.setEnabled(False) self.__basewidget_.save.setEnabled(False) self.__basewidget_.delete.setEnabled(False) self.__basewidget_.format.setEnabled(False) self.__basewidget_.page.setEnabled(False) self.__basewidget_.resolution.setEnabled(False) self.__basewidget_.color.setEnabled(False) def __unlock_(self): self.__basewidget_.refresh.setEnabled(True) self.__basewidget_.folder.setEnabled(True) self.__basewidget_.imageList.setEnabled(True) if self.currentFilename() is not None: self.__basewidget_.save.setEnabled(True) self.__basewidget_.delete.setEnabled(True) self.__basewidget_.format.setEnabled(True) self.__basewidget_.page.setEnabled(True) self.__basewidget_.resolution.setEnabled(True) self.__basewidget_.color.setEnabled(True) # # API public # def currentFilename(self): currentItem = self.__basewidget_.imageList.currentItem() # Vérification inutile, car le bouton delete est activé seulement # s'il y a un item sélectionné, mais on ne sais jamais if currentItem is not None: return str(currentItem.text()) def currentFolder(self): return str(self.__basewidget_.folder.currentText()) def getFormat(self): return str(self.__basewidget_.format.currentText()).lower() def getDpi(self): dpi = str(self.__basewidget_.resolution.currentText()) if dpi == "max": return None elif dpi == "100x100": return [100, 100] elif dpi == "200x200": return [200, 200] elif dpi == "300x300": return [300, 300] elif dpi == "400x400": return [400, 400] elif dpi == "600x600": return [600, 600] def getPages(self): if self.getFormat() == "pdf": return [] if str(self.__basewidget_.page.currentText()) == "all": return [x for x in range(1, 
self.__scanned_files_[self.currentFilename()]["nbpages"] + 1)] else: return [int(str(self.__basewidget_.page.currentText()))] def getSamplesize(self): samplesize = str(self.__basewidget_.color.currentText()) # 24 bits color if samplesize == "Color": return 24 # 8 tones grayscale elif samplesize == "Grayscale": return 8 # black and white else: return 1 def connectToScanner(self, host, port): if config.DEBUG_GUI: print "--> Connecting to scanner" self.__scanner_.connectToScanner(host, port) def disconnect(self): if config.DEBUG_GUI: print "--> Disconnecting from scanner" self.__scanner_.disconnect()
mbouchar/xc2424scan
src/xc2424scan/ui/widgets/scanwidget.py
Python
gpl-2.0
27,979
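# --------------------------------------------------------------------
# Hedged sketch (not part of scanwidget.py above): the received-size
# formatting inside ProgressDialog.progress(), extracted as a pure
# function. The original runs under Python 2, where '/' on ints
# truncates, so the kilobyte figure is an integer; this standalone
# version mirrors that behaviour with '//'.

def format_received(total_bytes):
    size = total_bytes // 1024                  # truncated kilobytes
    if size > 1024:
        return "Received %.3f mb" % (float(size) / 1024)
    return "Received %d kb" % size

assert format_received(2048) == "Received 2 kb"
assert format_received(5 * 1024 * 1024) == "Received 5.000 mb"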
# synthrepo.py - repo synthesis # # Copyright 2012 Facebook # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. '''synthesize structurally interesting change history This extension is useful for creating a repository with properties that are statistically similar to an existing repository. During analysis, a simple probability table is constructed from the history of an existing repository. During synthesis, these properties are reconstructed. Properties that are analyzed and synthesized include the following: - Lines added or removed when an existing file is modified - Number and sizes of files added - Number of files removed - Line lengths - Topological distance to parent changeset(s) - Probability of a commit being a merge - Probability of a newly added file being added to a new directory - Interarrival time, and time zone, of commits - Number of files in each directory A few obvious properties that are not currently handled realistically: - Merges are treated as regular commits with two parents, which is not realistic - Modifications are not treated as operations on hunks of lines, but as insertions and deletions of randomly chosen single lines - Committer ID (always random) - Executability of files - Symlinks and binary files are ignored ''' from __future__ import absolute_import import bisect import collections import itertools import json import os import random import sys import time from mercurial.i18n import _ from mercurial.node import ( nullid, nullrev, short, ) from mercurial import ( cmdutil, context, error, hg, patch, scmutil, util, ) # Note for extension authors: ONLY specify testedwith = 'internal' for # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should # be specifying the version(s) of Mercurial they are tested with, or # leave the attribute unspecified. testedwith = 'internal' cmdtable = {} command = cmdutil.command(cmdtable) newfile = set(('new fi', 'rename', 'copy f', 'copy t')) def zerodict(): return collections.defaultdict(lambda: 0) def roundto(x, k): if x > k * 2: return int(round(x / float(k)) * k) return int(round(x)) def parsegitdiff(lines): filename, mar, lineadd, lineremove = None, None, zerodict(), 0 binary = False for line in lines: start = line[:6] if start == 'diff -': if filename: yield filename, mar, lineadd, lineremove, binary mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False filename = patch.gitre.match(line).group(1) elif start in newfile: mar = 'a' elif start == 'GIT bi': binary = True elif start == 'delete': mar = 'r' elif start: s = start[0] if s == '-' and not line.startswith('--- '): lineremove += 1 elif s == '+' and not line.startswith('+++ '): lineadd[roundto(len(line) - 1, 5)] += 1 if filename: yield filename, mar, lineadd, lineremove, binary @command('analyze', [('o', 'output', '', _('write output to given file'), _('FILE')), ('r', 'rev', [], _('analyze specified revisions'), _('REV'))], _('hg analyze'), optionalrepo=True) def analyze(ui, repo, *revs, **opts): '''create a simple model of a repository to use for later synthesis This command examines every changeset in the given range (or all of history if none are specified) and creates a simple statistical model of the history of the repository. It also measures the directory structure of the repository as checked out. 
The model is written out to a JSON file, and can be used by :hg:`synthesize` to create or augment a repository with synthetic commits that have a structure that is statistically similar to the analyzed repository. ''' root = repo.root if not root.endswith(os.path.sep): root += os.path.sep revs = list(revs) revs.extend(opts['rev']) if not revs: revs = [':'] output = opts['output'] if not output: output = os.path.basename(root) + '.json' if output == '-': fp = sys.stdout else: fp = open(output, 'w') # Always obtain file counts of each directory in the given root directory. def onerror(e): ui.warn(_('error walking directory structure: %s\n') % e) dirs = {} rootprefixlen = len(root) for dirpath, dirnames, filenames in os.walk(root, onerror=onerror): dirpathfromroot = dirpath[rootprefixlen:] dirs[dirpathfromroot] = len(filenames) if '.hg' in dirnames: dirnames.remove('.hg') lineschanged = zerodict() children = zerodict() p1distance = zerodict() p2distance = zerodict() linesinfilesadded = zerodict() fileschanged = zerodict() filesadded = zerodict() filesremoved = zerodict() linelengths = zerodict() interarrival = zerodict() parents = zerodict() dirsadded = zerodict() tzoffset = zerodict() # If a mercurial repo is available, also model the commit history. if repo: revs = scmutil.revrange(repo, revs) revs.sort() progress = ui.progress _analyzing = _('analyzing') _changesets = _('changesets') _total = len(revs) for i, rev in enumerate(revs): progress(_analyzing, i, unit=_changesets, total=_total) ctx = repo[rev] pl = ctx.parents() pctx = pl[0] prev = pctx.rev() children[prev] += 1 p1distance[rev - prev] += 1 parents[len(pl)] += 1 tzoffset[ctx.date()[1]] += 1 if len(pl) > 1: p2distance[rev - pl[1].rev()] += 1 if prev == rev - 1: lastctx = pctx else: lastctx = repo[rev - 1] if lastctx.rev() != nullrev: timedelta = ctx.date()[0] - lastctx.date()[0] interarrival[roundto(timedelta, 300)] += 1 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), []) fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff): if isbin: continue added = sum(lineadd.itervalues(), 0) if mar == 'm': if added and lineremove: lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1 filechanges += 1 elif mar == 'a': fileadds += 1 if '/' in filename: filedir = filename.rsplit('/', 1)[0] if filedir not in pctx.dirs(): diradds += 1 linesinfilesadded[roundto(added, 5)] += 1 elif mar == 'r': fileremoves += 1 for length, count in lineadd.iteritems(): linelengths[length] += count fileschanged[filechanges] += 1 filesadded[fileadds] += 1 dirsadded[diradds] += 1 filesremoved[fileremoves] += 1 invchildren = zerodict() for rev, count in children.iteritems(): invchildren[count] += 1 if output != '-': ui.status(_('writing output to %s\n') % output) def pronk(d): return sorted(d.iteritems(), key=lambda x: x[1], reverse=True) json.dump({'revs': len(revs), 'initdirs': pronk(dirs), 'lineschanged': pronk(lineschanged), 'children': pronk(invchildren), 'fileschanged': pronk(fileschanged), 'filesadded': pronk(filesadded), 'linesinfilesadded': pronk(linesinfilesadded), 'dirsadded': pronk(dirsadded), 'filesremoved': pronk(filesremoved), 'linelengths': pronk(linelengths), 'parents': pronk(parents), 'p1distance': pronk(p1distance), 'p2distance': pronk(p2distance), 'interarrival': pronk(interarrival), 'tzoffset': pronk(tzoffset), }, fp) fp.close() @command('synthesize', [('c', 'count', 0, _('create given number of commits'), _('COUNT')), ('', 'dict', '', _('path to a 
dictionary of words'), _('FILE')), ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))], _('hg synthesize [OPTION].. DESCFILE')) def synthesize(ui, repo, descpath, **opts): '''synthesize commits based on a model of an existing repository The model must have been generated by :hg:`analyze`. Commits will be generated randomly according to the probabilities described in the model. If --initfiles is set, the repository will be seeded with the given number files following the modeled repository's directory structure. When synthesizing new content, commit descriptions, and user names, words will be chosen randomly from a dictionary that is presumed to contain one word per line. Use --dict to specify the path to an alternate dictionary to use. ''' try: fp = hg.openpath(ui, descpath) except Exception as err: raise error.Abort('%s: %s' % (descpath, err[0].strerror)) desc = json.load(fp) fp.close() def cdf(l): if not l: return [], [] vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True)) t = float(sum(probs, 0)) s, cdfs = 0, [] for v in probs: s += v cdfs.append(s / t) return vals, cdfs lineschanged = cdf(desc['lineschanged']) fileschanged = cdf(desc['fileschanged']) filesadded = cdf(desc['filesadded']) dirsadded = cdf(desc['dirsadded']) filesremoved = cdf(desc['filesremoved']) linelengths = cdf(desc['linelengths']) parents = cdf(desc['parents']) p1distance = cdf(desc['p1distance']) p2distance = cdf(desc['p2distance']) interarrival = cdf(desc['interarrival']) linesinfilesadded = cdf(desc['linesinfilesadded']) tzoffset = cdf(desc['tzoffset']) dictfile = opts.get('dict') or '/usr/share/dict/words' try: fp = open(dictfile, 'rU') except IOError as err: raise error.Abort('%s: %s' % (dictfile, err.strerror)) words = fp.read().splitlines() fp.close() initdirs = {} if desc['initdirs']: for k, v in desc['initdirs']: initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v initdirs = renamedirs(initdirs, words) initdirscdf = cdf(initdirs) def pick(cdf): return cdf[0][bisect.bisect_left(cdf[1], random.random())] def pickpath(): return os.path.join(pick(initdirscdf), random.choice(words)) def makeline(minimum=0): total = max(minimum, pick(linelengths)) c, l = 0, [] while c < total: w = random.choice(words) c += len(w) + 1 l.append(w) return ' '.join(l) wlock = repo.wlock() lock = repo.lock() nevertouch = set(('.hgsub', '.hgignore', '.hgtags')) progress = ui.progress _synthesizing = _('synthesizing') _files = _('initial files') _changesets = _('changesets') # Synthesize a single initial revision adding files to the repo according # to the modeled directory structure. initcount = int(opts['initfiles']) if initcount and initdirs: pctx = repo[None].parents()[0] dirs = set(pctx.dirs()) files = {} def validpath(path): # Don't pick filenames which are already directory names. if path in dirs: return False # Don't pick directories which were used as file names. 
while path: if path in files: return False path = os.path.dirname(path) return True for i in xrange(0, initcount): ui.progress(_synthesizing, i, unit=_files, total=initcount) path = pickpath() while not validpath(path): path = pickpath() data = '%s contents\n' % path files[path] = context.memfilectx(repo, path, data) dir = os.path.dirname(path) while dir and dir not in dirs: dirs.add(dir) dir = os.path.dirname(dir) def filectxfn(repo, memctx, path): return files[path] ui.progress(_synthesizing, None) message = 'synthesized wide repo with %d files' % (len(files),) mc = context.memctx(repo, [pctx.node(), nullid], message, files.iterkeys(), filectxfn, ui.username(), '%d %d' % util.makedate()) initnode = mc.commit() if ui.debugflag: hexfn = hex else: hexfn = short ui.status(_('added commit %s with %d files\n') % (hexfn(initnode), len(files))) # Synthesize incremental revisions to the repository, adding repo depth. count = int(opts['count']) heads = set(map(repo.changelog.rev, repo.heads())) for i in xrange(count): progress(_synthesizing, i, unit=_changesets, total=count) node = repo.changelog.node revs = len(repo) def pickhead(heads, distance): if heads: lheads = sorted(heads) rev = revs - min(pick(distance), revs) if rev < lheads[-1]: rev = lheads[bisect.bisect_left(lheads, rev)] else: rev = lheads[-1] return rev, node(rev) return nullrev, nullid r1 = revs - min(pick(p1distance), revs) p1 = node(r1) # the number of heads will grow without bound if we use a pure # model, so artificially constrain their proliferation toomanyheads = len(heads) > random.randint(1, 20) if p2distance[0] and (pick(parents) == 2 or toomanyheads): r2, p2 = pickhead(heads.difference([r1]), p2distance) else: r2, p2 = nullrev, nullid pl = [p1, p2] pctx = repo[r1] mf = pctx.manifest() mfk = mf.keys() changes = {} if mfk: for __ in xrange(pick(fileschanged)): for __ in xrange(10): fctx = pctx.filectx(random.choice(mfk)) path = fctx.path() if not (path in nevertouch or fctx.isbinary() or 'l' in fctx.flags()): break lines = fctx.data().splitlines() add, remove = pick(lineschanged) for __ in xrange(remove): if not lines: break del lines[random.randrange(0, len(lines))] for __ in xrange(add): lines.insert(random.randint(0, len(lines)), makeline()) path = fctx.path() changes[path] = context.memfilectx(repo, path, '\n'.join(lines) + '\n') for __ in xrange(pick(filesremoved)): path = random.choice(mfk) for __ in xrange(10): path = random.choice(mfk) if path not in changes: changes[path] = None break if filesadded: dirs = list(pctx.dirs()) dirs.insert(0, '') for __ in xrange(pick(filesadded)): pathstr = '' while pathstr in dirs: path = [random.choice(dirs)] if pick(dirsadded): path.append(random.choice(words)) path.append(random.choice(words)) pathstr = '/'.join(filter(None, path)) data = '\n'.join(makeline() for __ in xrange(pick(linesinfilesadded))) + '\n' changes[pathstr] = context.memfilectx(repo, pathstr, data) def filectxfn(repo, memctx, path): return changes[path] if not changes: continue if revs: date = repo['tip'].date()[0] + pick(interarrival) else: date = time.time() - (86400 * count) # dates in mercurial must be positive, fit in 32-bit signed integers. 
date = min(0x7fffffff, max(0, date)) user = random.choice(words) + '@' + random.choice(words) mc = context.memctx(repo, pl, makeline(minimum=2), sorted(changes.iterkeys()), filectxfn, user, '%d %d' % (date, pick(tzoffset))) newnode = mc.commit() heads.add(repo.changelog.rev(newnode)) heads.discard(r1) heads.discard(r2) lock.release() wlock.release() def renamedirs(dirs, words): '''Randomly rename the directory names in the per-dir file count dict.''' wordgen = itertools.cycle(words) replacements = {'': ''} def rename(dirpath): '''Recursively rename the directory and all path prefixes. The mapping from path to renamed path is stored for all path prefixes as in dynamic programming, ensuring linear runtime and consistent renaming regardless of iteration order through the model. ''' if dirpath in replacements: return replacements[dirpath] head, _ = os.path.split(dirpath) if head: head = rename(head) else: head = '' renamed = os.path.join(head, next(wordgen)) replacements[dirpath] = renamed return renamed result = [] for dirpath, count in dirs.iteritems(): result.append([rename(dirpath.lstrip(os.sep)), count]) return result
dscho/hg
contrib/synthrepo.py
Python
gpl-2.0
18,180
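# Editor's note: a minimal, self-contained sketch of the weighted-sampling
# technique used by synthrepo's cdf()/pick() helpers above -- build a
# cumulative distribution from (value, weight) pairs, then draw values with
# bisect. The names here are illustrative, not part of the original module.
import bisect
import random

def make_cdf(pairs):
    """Turn [(value, weight), ...] into (values, cumulative_probs)."""
    if not pairs:
        return [], []
    vals, weights = zip(*sorted(pairs, key=lambda x: x[1], reverse=True))
    total = float(sum(weights))
    running, cdfs = 0, []
    for w in weights:
        running += w
        cdfs.append(running / total)
    return vals, cdfs

def pick(cdf):
    """Sample one value; each is chosen proportionally to its weight."""
    vals, cdfs = cdf
    return vals[bisect.bisect_left(cdfs, random.random())]

# Usage: 'a' is drawn roughly three times as often as 'b'.
counts = {'a': 0, 'b': 0}
dist = make_cdf([('a', 3), ('b', 1)])
for _ in range(1000):
    counts[pick(dist)] += 1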
from Xml.Xslt import test_harness sheet_str = """<?xml version="1.0"?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0"> <xsl:template match="/"> <root> <xsl:apply-templates/> </root> </xsl:template> <xsl:template name="do-the-rest"> <xsl:param name="start"/> <xsl:param name="count"/> <tr> <xsl:for-each select="item[position()&gt;=$start and position()&lt;$start+$count]"> <td> <xsl:value-of select="."/> </td> </xsl:for-each> </tr> <xsl:if test="$start + $count - 1 &lt; count(child::item)"> <xsl:call-template name="do-the-rest"> <xsl:with-param name="start" select="$start + $count"/> <xsl:with-param name="count" select="$count"/> </xsl:call-template> </xsl:if> </xsl:template> <xsl:template match="data"> <xsl:call-template name="do-the-rest"> <xsl:with-param name="start" select="1"/> <xsl:with-param name="count" select="2"/> </xsl:call-template> </xsl:template> </xsl:stylesheet> """ source_str = """<?xml version = "1.0"?> <data> <item>b</item> <item>a</item> <item>d</item> <item>c</item> </data> """ expected = """<?xml version='1.0' encoding='UTF-8'?> <root><tr><td>b</td><td>a</td></tr><tr><td>d</td><td>c</td></tr></root>""" def Test(tester): source = test_harness.FileInfo(string=source_str) sheet = test_harness.FileInfo(string=sheet_str) test_harness.XsltTest(tester, source, [sheet], expected, title='xsl:call-template') return
Pikecillo/genna
external/4Suite-XML-1.0.2/test/Xml/Xslt/Core/test_call_template.py
Python
gpl-2.0
1,488
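# Editor's note: the recursive xsl:call-template above paginates a node list
# into fixed-width rows because XSLT 1.0 has no loop with mutable state. For
# comparison, a rough Python equivalent of the same grouping (names are
# illustrative only):
def rows(items, count):
    """Yield successive groups of `count` items, like the do-the-rest template."""
    for start in range(0, len(items), count):
        yield items[start:start + count]

assert list(rows(['b', 'a', 'd', 'c'], 2)) == [['b', 'a'], ['d', 'c']]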
#!/usr/bin/python -tt # by [email protected] # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. # copyright 2012 Red Hat, Inc. # SUMMARY # mockchain # take a mock config and a series of srpms # rebuild them one at a time # adding each to a local repo # so they are available as build deps to next pkg being built import sys import subprocess import os import optparse import tempfile import shutil from urlgrabber import grabber import time import mockbuild.util # all of the variables below are substituted by the build system __VERSION__ = "unreleased_version" SYSCONFDIR = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), "..", "etc") PYTHONDIR = os.path.dirname(os.path.realpath(sys.argv[0])) PKGPYTHONDIR = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), "mockbuild") MOCKCONFDIR = os.path.join(SYSCONFDIR, "mock") # end build system subs mockconfig_path='/etc/mock' def createrepo(path): if os.path.exists(path + '/repodata/repomd.xml'): comm = ['/usr/bin/createrepo', '--update', path] else: comm = ['/usr/bin/createrepo', path] cmd = subprocess.Popen(comm, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = cmd.communicate() return out, err def parse_args(args): parser = optparse.OptionParser('\nmockchain -r mockcfg pkg1 [pkg2] [pkg3]') parser.add_option('-r', '--root', default=None, dest='chroot', help="chroot config name/base to use in the mock build") parser.add_option('-l', '--localrepo', default=None, help="local path for the local repo, defaults to making its own") parser.add_option('-c', '--continue', default=False, action='store_true', dest='cont', help="if a pkg fails to build, continue to the next one") parser.add_option('-a','--addrepo', default=[], action='append', dest='repos', help="add these repo baseurls to the chroot's yum config") parser.add_option('--recurse', default=False, action='store_true', help="if more than one pkg and it fails to build, try to build the rest and come back to it") parser.add_option('--log', default=None, dest='logfile', help="log to the file named by this option, defaults to not logging") parser.add_option('--tmp_prefix', default=None, dest='tmp_prefix', help="tmp dir prefix - will default to username-pid if not specified") #FIXME? # figure out how to pass other args to mock? 
opts, args = parser.parse_args(args) if opts.recurse: opts.cont = True if not opts.chroot: print "You must provide an argument to -r for the mock chroot" sys.exit(1) if len(sys.argv) < 3: print "You must specifiy at least 1 package to build" sys.exit(1) return opts, args def add_local_repo(infile, destfile, baseurl, repoid=None): """take a mock chroot config and add a repo to it's yum.conf infile = mock chroot config file destfile = where to save out the result baseurl = baseurl of repo you wish to add""" global config_opts try: execfile(infile) if not repoid: repoid=baseurl.split('//')[1].replace('/','_') localyumrepo=""" [%s] name=%s baseurl=%s enabled=1 skip_if_unavailable=1 metadata_expire=30 cost=1 """ % (repoid, baseurl, baseurl) config_opts['yum.conf'] += localyumrepo br_dest = open(destfile, 'w') for k,v in config_opts.items(): br_dest.write("config_opts[%r] = %r\n" % (k, v)) br_dest.close() return True, '' except (IOError, OSError): return False, "Could not write mock config to %s" % destfile return True, '' def do_build(opts, cfg, pkg): # returns 0, cmd, out, err = failure # returns 1, cmd, out, err = success # returns 2, None, None, None = already built s_pkg = os.path.basename(pkg) pdn = s_pkg.replace('.src.rpm', '') resdir = '%s/%s' % (opts.local_repo_dir, pdn) resdir = os.path.normpath(resdir) if not os.path.exists(resdir): os.makedirs(resdir) success_file = resdir + '/success' fail_file = resdir + '/fail' if os.path.exists(success_file): return 2, None, None, None # clean it up if we're starting over :) if os.path.exists(fail_file): os.unlink(fail_file) mockcmd = ['/usr/bin/mock', '--configdir', opts.config_path, '--resultdir', resdir, '--uniqueext', opts.uniqueext, '-r', cfg, ] print 'building %s' % s_pkg mockcmd.append(pkg) cmd = subprocess.Popen(mockcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) out, err = cmd.communicate() if cmd.returncode == 0: open(success_file, 'w').write('done\n') ret = 1 else: open(fail_file, 'w').write('undone\n') ret = 0 return ret, cmd, out, err def log(lf, msg): if lf: now = time.time() try: open(lf, 'a').write(str(now) + ':' + msg + '\n') except (IOError, OSError), e: print 'Could not write to logfile %s - %s' % (lf, str(e)) print msg config_opts = {} def main(args): global config_opts config_opts = mockbuild.util.setup_default_config_opts(os.getgid(), __VERSION__, PKGPYTHONDIR) opts, args = parse_args(args) # take mock config + list of pkgs cfg=opts.chroot pkgs=args[1:] mockcfg = mockconfig_path + '/' + cfg + '.cfg' if not os.path.exists(mockcfg): print "could not find config: %s" % mockcfg sys.exit(1) if not opts.tmp_prefix: try: opts.tmp_prefix = os.getlogin() except OSError, e: print "Could not find login name for tmp dir prefix add --tmp_prefix" sys.exit(1) pid = os.getpid() opts.uniqueext = '%s-%s' % (opts.tmp_prefix, pid) # create a tempdir for our local info if opts.localrepo: local_tmp_dir = os.path.abspath(opts.localrepo) if not os.path.exists(local_tmp_dir): os.makedirs(local_tmp_dir) else: pre = 'mock-chain-%s-' % opts.uniqueext local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp') os.chmod(local_tmp_dir, 0755) if opts.logfile: opts.logfile = os.path.join(local_tmp_dir, opts.logfile) if os.path.exists(opts.logfile): os.unlink(opts.logfile) log(opts.logfile, "starting logfile: %s" % opts.logfile) opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + cfg + '/') if not os.path.exists(opts.local_repo_dir): os.makedirs(opts.local_repo_dir, mode=0755) local_baseurl="file://%s" % opts.local_repo_dir 
log(opts.logfile, "results dir: %s" % opts.local_repo_dir) opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + cfg + '/') if not os.path.exists(opts.config_path): os.makedirs(opts.config_path, mode=0755) log(opts.logfile, "config dir: %s" % opts.config_path) my_mock_config = opts.config_path + '/' + os.path.basename(mockcfg) # modify with localrepo res, msg = add_local_repo(mockcfg, my_mock_config, local_baseurl, 'local_build_repo') if not res: log(opts.logfile, "Error: Could not write out local config: %s" % msg) sys.exit(1) for baseurl in opts.repos: res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl) if not res: log(opts.logfile, "Error: Could not add: %s to yum config in mock chroot: %s" % (baseurl, msg)) sys.exit(1) # these files needed from the mock.config dir to make mock run for fn in ['site-defaults.cfg', 'logging.ini']: pth = mockconfig_path + '/' + fn shutil.copyfile(pth, opts.config_path + '/' + fn) # createrepo on it out, err = createrepo(opts.local_repo_dir) if err.strip(): log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir) log(opts.logfile, "Err: %s" % err) sys.exit(1) download_dir = tempfile.mkdtemp() downloaded_pkgs = {} built_pkgs = [] try_again = True to_be_built = pkgs while try_again: failed = [] for pkg in to_be_built: if not pkg.endswith('.rpm'): log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg) failed.append(pkg) continue elif pkg.startswith('http://') or pkg.startswith('https://'): url = pkg cwd = os.getcwd() os.chdir(download_dir) try: log(opts.logfile, 'Fetching %s' % url) ug = grabber.URLGrabber() fn = ug.urlgrab(url) pkg = download_dir + '/' + fn except Exception, e: log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e))) failed.append(url) os.chdir(cwd) continue else: os.chdir(cwd) downloaded_pkgs[pkg] = url log(opts.logfile, "Start build: %s" % pkg) ret, cmd, out, err = do_build(opts, cfg, pkg) log(opts.logfile, "End build: %s" % pkg) if ret == 0: if opts.recurse: failed.append(pkg) log(opts.logfile, "Error building %s, will try again" % os.path.basename(pkg)) else: log(opts.logfile,"Error building %s" % os.path.basename(pkg)) log(opts.logfile,"See logs/results in %s" % opts.local_repo_dir) if not opts.cont: sys.exit(1) elif ret == 1: log(opts.logfile, "Success building %s" % os.path.basename(pkg)) built_pkgs.append(pkg) # createrepo with the new pkgs out, err = createrepo(opts.local_repo_dir) if err.strip(): log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir) log(opts.logfile, "Err: %s" % err) elif ret == 2: log(opts.logfile, "Skipping already built pkg %s" % os.path.basename(pkg)) if failed: if len(failed) != len(to_be_built): to_be_built = failed try_again = True log(opts.logfile, 'Trying to rebuild %s failed pkgs' % len(failed)) else: log(opts.logfile, "Tried twice - following pkgs could not be successfully built:") for pkg in failed: msg = pkg if pkg in downloaded_pkgs: msg = downloaded_pkgs[pkg] log(opts.logfile, msg) try_again = False else: try_again = False # cleaning up our download dir shutil.rmtree(download_dir, ignore_errors=True) log(opts.logfile, "Results out to: %s" % opts.local_repo_dir) log(opts.logfile, "Pkgs built: %s" % len(built_pkgs)) log(opts.logfile, "Packages successfully built in this order:") for pkg in built_pkgs: log(opts.logfile, pkg) if __name__ == "__main__": main(sys.argv) sys.exit(0)
alanfranz/mock
py/mockchain.py
Python
gpl-2.0
12,065
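# Editor's note: mockchain's outer while-loop above is a fixed-point retry:
# with --recurse it rebuilds the failed set until everything builds or a full
# pass makes no progress. A minimal sketch of that control flow (build() is a
# stand-in for do_build, not part of mockchain):
def build_until_stuck(pkgs, build):
    remaining = list(pkgs)
    while remaining:
        failed = [p for p in remaining if not build(p)]
        if len(failed) == len(remaining):
            return failed          # no progress this pass: give up
        remaining = failed         # something built; its deps may now resolve
    return []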
# -*- coding: utf-8 -*- # # Copyright (C) 2005-2010 TUBITAK/UEKAE # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation; either version 2 of the License, or (at your option) # any later version. # # Please read the COPYING file. # import gettext _ = gettext.translation('yali', fallback=True).ugettext from PyQt5.Qt import QWidget, pyqtSignal, QVariant import yali.util import yali.localedata import yali.postinstall import yali.context as ctx from yali.gui import ScreenWidget from yali.gui.Ui.keyboardwidget import Ui_KeyboardWidget ## # Keyboard setup screen class Widget(QWidget, ScreenWidget): name = "keyboardSetup" def __init__(self): QWidget.__init__(self) self.ui = Ui_KeyboardWidget() self.ui.setupUi(self) index = 0 # comboBox.addItem doesn't increase the currentIndex self.default_layout_index = None locales = sorted([(country, data) for country, data in yali.localedata.locales.items()]) for country, data in locales: if data["xkbvariant"]: i = 0 for variant in data["xkbvariant"]: _d = dict(data) _d["xkbvariant"] = variant[0] _d["name"] = variant[1] _d["consolekeymap"] = data["consolekeymap"][i] self.ui.keyboard_list.addItem(_d["name"], QVariant(_d)) i += 1 else: self.ui.keyboard_list.addItem(data["name"], QVariant(data)) if ctx.consts.lang == country: if ctx.consts.lang == "tr": self.default_layout_index = index + 1 else: self.default_layout_index = index index += 1 self.ui.keyboard_list.setCurrentIndex(self.default_layout_index) self.ui.keyboard_list.currentIndexChanged[int].connect(self.slotLayoutChanged) def shown(self): self.slotLayoutChanged() def slotLayoutChanged(self): index = self.ui.keyboard_list.currentIndex() keymap = self.ui.keyboard_list.itemData(index)#.toMap() # Gökmen's converter keymap = dict(map(lambda x: (str(x[0]), unicode(x[1])), keymap.iteritems())) ctx.installData.keyData = keymap ctx.interface.informationWindow.hide() if "," in keymap["xkblayout"]: message = _("Use Alt-Shift to toggle between alternative keyboard layouts") ctx.interface.informationWindow.update(message, type="warning") else: ctx.interface.informationWindow.hide() yali.util.setKeymap(keymap["xkblayout"], keymap["xkbvariant"]) def execute(self): ctx.interface.informationWindow.hide() ctx.logger.debug("Selected keymap is : %s" % ctx.installData.keyData["name"]) return True
forYali/yali
yali/gui/ScrKeyboard.py
Python
gpl-2.0
2,959
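# Editor's note: the keyboard screen above flattens layouts that carry
# several xkb variants into one combo-box entry per variant. A sketch of the
# same expansion with plain dicts (the locale data shape is assumed from the
# code, not taken from yali.localedata):
def expand_layouts(locales):
    entries = []
    for country, data in sorted(locales.items()):
        if data.get("xkbvariant"):
            for i, (variant, name) in enumerate(data["xkbvariant"]):
                entries.append(dict(data, xkbvariant=variant, name=name,
                                    consolekeymap=data["consolekeymap"][i]))
        else:
            entries.append(data)
    return entries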
""" urlresolver Kodi plugin Copyright (C) 2011 t0mm0 Updated by Gujal (C) 2016 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re from urlresolver import common from urlresolver.resolver import UrlResolver, ResolverError class NowvideoResolver(UrlResolver): name = "nowvideo" domains = ['nowvideo.eu', 'nowvideo.ch', 'nowvideo.sx', 'nowvideo.co', 'nowvideo.li', 'nowvideo.fo', 'nowvideo.at', 'nowvideo.ec'] pattern = '(?://|\.)(nowvideo\.(?:eu|ch|sx|co|li|fo|at|ec))/(?:video/|embed\.php\?\S*v=)([A-Za-z0-9]+)' def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) stream_url = '' html = self.net.http_GET(web_url).content try: r = re.search('flashvars.filekey=(.+?);', html) if r: r = r.group(1) try: filekey = re.compile('\s+%s="(.+?)"' % r).findall(html)[-1] except: filekey = r player_url = 'http://www.nowvideo.sx/api/player.api.php?key=%s&file=%s' % (filekey, media_id) html = self.net.http_GET(player_url).content r = re.search('url=(.+?)&', html) if r: stream_url = r.group(1) else: raise ResolverError('File Not Found or removed') except: print "no embedded urls found using first method" try: r = re.search('id="player".*?src="(.*?)"', html, re.DOTALL) if r: stream_url = r.group(1) except: print "no embedded urls found using second method" if stream_url: return '%s%s' % (stream_url, '|Referer=' + web_url) else: raise ResolverError('File Not Found or removed') def get_url(self, host, media_id): return 'http://embed.nowvideo.sx/embed/?v=%s' % media_id
koditraquinas/koditraquinas.repository
script.module.urlresolver/lib/urlresolver/plugins/nowvideo.py
Python
gpl-2.0
2,602
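# Editor's note: urlresolver plugins like the one above are dispatched by a
# single host/media-id regex. A quick demonstration of how that `pattern`
# splits a page URL into its two capture groups (the sample URL is made up):
import re

pattern = (r'(?://|\.)(nowvideo\.(?:eu|ch|sx|co|li|fo|at|ec))'
           r'/(?:video/|embed\.php\?\S*v=)([A-Za-z0-9]+)')
match = re.search(pattern, 'http://www.nowvideo.sx/video/abc123DEF')
assert match.groups() == ('nowvideo.sx', 'abc123DEF')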
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys, os my_site = os.path.join(os.environ["HOME"], ".local/lib/python2.7/site-packages") sys.path.insert(0, my_site) import h5py import networkx as nx import numpy as np import pycuda.driver as cuda import scipy.stats as st import sys import aux from consts import * def to_graph(connections): graph = nx.DiGraph() ca_size = connections.shape[0] for cell in xrange(ca_size): for neighbor in connections[cell]: graph.add_edge(neighbor, cell) # Count the number of rewired connection this cell has graph.node[cell]['rew'] = (connections[cell] != (np.arange(cell - 3, cell + 4) % ca_size)).sum() return graph class AnalysisIndividual: __cuda_module = False def __init__(self, individual, correct, executions, ca_size, connection_radius, ca_iterations, ca_repeat, k_history, save_executions=0): self.__ca_size = ca_size self.__connection_radius = connection_radius self.__n_connections = 2 * self.__connection_radius + 1 self.__ca_iterations = ca_iterations self.__ca_repeat = ca_repeat self.__k_history = k_history self.__n_possible_history = 2 ** self.__k_history self.__n_observations = self.__ca_repeat * \ (self.__ca_iterations - self.__k_history + 1) self.__save_executions = save_executions self.__individual = individual self.__individual_number = self.__individual.number self.__rules = self.__individual.gene_rules self.__connections = self.__individual.connections self.__graph = to_graph(self.__connections) self.__executions = executions density = np.mean(self.__executions[:, 0], axis=1) self.__majority = np.round(density).astype(np.uint32) # The closer the density is to .5 the harder the configuration is to # decide self.__difficult = 1 - np.abs(density - .5) / .5 # Checking which repetitions converged to a single state self.__converged = np.all(self.__executions[:, -1] == self.__executions[:, -1, 0].reshape(-1, 1), axis=1) # Checking how many cells in each repetition converged to the right # state self.__cells_correct = np.mean(self.__executions[:, -1] == self.__majority.reshape(-1, 1), axis=1) self.__correct = correct self.__fitness = np.mean(self.__correct) self.__gini = None self.__limits = None self.__entropy_rate = None self.__base_table = None self.__correlations = None # Initialize the CUDA module if not AnalysisIndividual.__cuda_module: AnalysisIndividual.__cuda_module = True cuda_module = aux.CudaModule('analysis.cu', (self.__ca_size, self.__ca_iterations, self.__ca_repeat, self.__connection_radius, self.__n_connections, self.__n_observations, self.__k_history, self.__n_possible_history)) AnalysisIndividual.__kernel_calc_diffs = \ cuda_module.get_function("kernel_calc_diffs") AnalysisIndividual.__kernel_probabilities = \ cuda_module.get_function("kernel_probabilities") AnalysisIndividual.__kernel_active_storage = \ cuda_module.get_function("kernel_active_storage") AnalysisIndividual.__kernel_entropy_rate = \ cuda_module.get_function("kernel_entropy_rate") def __calculate_gini(self, values): # Calculate the Gini coefficient to measure the inequality in a # distribution of values cum_values = np.sort(values).cumsum() return 1 - (cum_values[0] + (cum_values[1:] + cum_values[:-1]).sum()) \ / float(cum_values[-1] * cum_values.size) def __get_limits(self): # This function implements a heuristic to calculate how many times a # cell has the role of "limit" of a diffusion in a simulation. # The main idea here is that, usually, information in cellular automata # flows in a given direction at a constant speed. 
If we know this # direction and speed, we can check how many times a cell interrupts a # flow. sum_diffs = np.zeros(self.__ca_size, dtype=np.uint32) try: self.__kernel_calc_diffs(cuda.In(self.__majority), cuda.In(self.__executions), cuda.InOut(sum_diffs), block=(self.__ca_size, 1, 1), grid=(1,)) cuda.Context.synchronize() except cuda.Error as e: sys.exit("CUDA: Execution failed ('%s')!" % e) # For all repetitions, calculate the ratio of total iterations each # cell acted as a "limit" self.__limits = sum_diffs / \ float(self.__ca_repeat * self.__ca_iterations) def get_individual_info(self): if self.__gini != None: # If all metrics are already computed, just return them! return self.__fitness, self.__gini, self.__prop_max_min, \ self.__individual.epoch, self.__individual_number, \ self.__clustering, self.__average_k_neigh, \ self.__average_shortest_path, self.__diameter self.__get_limits() self.__gini = self.__calculate_gini(self.__limits) self.__prop_max_min = self.__limits.max() / self.__limits.min() # As clustering coefficient is not defined for directed graphs, we # convert the graph to its undirected version self.__clustering = nx.average_clustering(nx.Graph(self.__graph)) self.__average_shortest_path = \ nx.average_shortest_path_length(self.__graph) try: self.__diameter = nx.diameter(self.__graph) except nx.exception.NetworkXError: self.__diameter = float('nan') self.__convergence = np.mean(self.__converged) table_individual = { # Serial number "i_num": np.array([self.__individual_number], dtype=np.int), # Individual fitness "fit": np.array([self.__fitness], dtype=np.float), # Ratio of the repetitions that converged to a single state "conv": np.array([self.__convergence], dtype=np.float), # gini and max_min are metrics intended to measure the inequality # in the number of times each cell is a "limit" "gini": np.array([self.__gini], dtype=np.float), "max_min": np.array([self.__prop_max_min], dtype=np.float), # Epoch in the evolution "epoch": np.array([self.__individual.epoch], dtype=np.float), # Clustering coefficient "clust": np.array([self.__clustering], dtype=np.float), # Average shortests path between each pair of cells "short": np.array([self.__average_shortest_path], dtype=np.float), # Maximum distance between any two cells "diam": np.array([self.__diameter], dtype=np.float)} return table_individual def __get_probs_entropy(self): # Calculate information theoretical metrics to evaluate the # computational role of each cell if self.__entropy_rate != None: # If all metrics are already computed, just return them! 
return self.__entropy_rate, self.__active_storage, \ self.__cond_entropy p_joint_table = np.zeros((self.__ca_size, self.__n_possible_history, 2), dtype=np.float32) p_prev_table = np.zeros((self.__ca_size, self.__n_possible_history), dtype=np.float32) p_curr_table = np.zeros((self.__ca_size, 2), dtype=np.float32) try: self.__kernel_probabilities(cuda.In(self.__executions), cuda.InOut(p_joint_table), cuda.InOut(p_prev_table), cuda.InOut(p_curr_table), block=(self.__ca_size, 1, 1), grid=(self.__ca_repeat, 1, 1)) cuda.Context.synchronize() except cuda.Error as e: sys.exit("CUDA: Execution failed!\n'%s'" % e) # The entropy rate is a measure of the uncertainty in a cell's state # given its past self.__entropy_rate = np.zeros(self.__ca_size, dtype=np.float32) # The active information storage is the amount of past information # currently in use by a cell, i.e., its memory self.__active_storage = np.zeros(self.__ca_size, dtype=np.float32) try: self.__kernel_entropy_rate(cuda.In(p_joint_table), cuda.In(p_prev_table), cuda.InOut(self.__entropy_rate), block=(self.__ca_size, 1, 1)) cuda.Context.synchronize() for i in xrange(self.__ca_iterations - self.__k_history): ca_aux = np.array(self.__executions[:, i:i + self.__k_history + 1, :]) self.__kernel_active_storage(cuda.In(ca_aux), cuda.In(p_joint_table), cuda.In(p_prev_table), cuda.In(p_curr_table), cuda.InOut(self.__active_storage), block=(self.__ca_size, 1, 1), grid=(self.__ca_repeat, 1, 1)) cuda.Context.synchronize() except cuda.Error as e: sys.exit("CUDA: Execution failed!\n'%s'" % e) aux = np.multiply(p_joint_table, np.log2(np.divide(p_prev_table. reshape(p_prev_table.shape + (1,)), p_joint_table))) aux[p_joint_table == 0] = 0 self.__cond_entropy = np.sum(aux, axis=(1, 2)) / self.__n_observations return self.__entropy_rate, self.__active_storage, self.__cond_entropy def get_cells_info(self): self.__get_limits() self.__get_probs_entropy() full_data = { "lim": self.__limits, "ent_rt": self.__entropy_rate, "act_st": self.__active_storage, "cond_ent": self.__cond_entropy} if self.__base_table == None: # Calculate graph measures order = sorted(self.__graph.nodes()) pagerank = nx.pagerank(self.__graph) pagerank = np.array([pagerank[k] for k in order], dtype=np.float) try: hubs, authorities = nx.hits(self.__graph, 1000) hubs = np.array([hubs[k] for k in order], dtype=np.float) authorities = np.array([authorities[k] for k in order], dtype=np.float) except nx.exception.NetworkXError: hubs = np.repeat(float('nan'), self.__ca_size).astype(np.float) authorities = hubs try: eccentricity = nx.eccentricity(self.__graph) eccentricity = np.array([eccentricity[k] for k in order], dtype=np.float) except nx.exception.NetworkXError: eccentricity = np.repeat(float('nan'), self.__ca_size). \ astype(np.float) closeness = nx.closeness_centrality(self.__graph) closeness = np.array([closeness[k] for k in order], dtype=np.float) closeness_reverse = nx.closeness_centrality( self.__graph.reverse(True)) closeness_reverse = np.array([closeness_reverse[k] for k in order], dtype=np.float) betweenness = nx.betweenness_centrality(self.__graph) betweenness = np.array([betweenness[k] for k in order], dtype=np.float) try: eigenvector = nx.eigenvector_centrality(self.__graph, 1000) eigenvector = np.array([eigenvector[k] for k in order], dtype=np.float) except nx.exception.NetworkXError: eigenvector = np.repeat(float('nan'), self.__ca_size). 
\ astype(np.float) load = nx.load_centrality(self.__graph) load = np.array([load[k] for k in order], dtype=np.float) clustering = nx.clustering(nx.Graph(self.__graph)) clustering = np.array([clustering[k] for k in order], dtype=np.float) in_degree = nx.in_degree_centrality(self.__graph) in_degree = np.array([in_degree[k] for k in order], dtype=np.float) out_degree = nx.out_degree_centrality(self.__graph) out_degree = np.array([out_degree[k] for k in order], dtype=np.float) rewires = np.array([self.__graph.node[k]['rew'] for k in order], dtype=np.float) average_k_neigh = nx.average_neighbor_degree(self.__graph) average_k_neigh = np.array([average_k_neigh[k] for k in order], dtype=np.float) self.__base_table = { "epoch": np.repeat(self.__individual.epoch, self.__ca_size). \ astype(np.int), "i_num": np.repeat(self.__individual_number, self.__ca_size). \ astype(np.int), "pr": pagerank, "hub": hubs, "auth": authorities, "ecc": eccentricity, "cls": closeness, "cls_rev": closeness_reverse, "btw": betweenness, "eig": eigenvector, "load": load, "cltr": clustering, "ind": in_degree, "outd": out_degree, "rew": rewires, "kneigh": average_k_neigh} return dict(full_data.items() + self.__base_table.items()) def save_executions(self): # Save space-time diagrams of some executions for i in np.random.choice(range(self.__executions.shape[0]), self.__save_executions, replace=False): aux.save_as_image(self.__executions[i], "images/i%04d" % self.__individual_number, "execution-%06d.png" % i) class Analysis: elems = 0 def __init__(self, data_file, ca_size, ca_iterations, ca_repeat, connection_radius, k_history, save_executions=0): self.__ca_size = ca_size self.__ca_iterations = ca_iterations self.__ca_repeat = ca_repeat self.__connection_radius = connection_radius self.__k_history = k_history self.__save_executions = save_executions self.__data_file = h5py.File(data_file, "w-") def add_individual(self, individual): # Run simulations with densities uniformly distributed in [0, 1], # storing execution data for posterio analysis correct, executions = individual.get_execution_data(UNIFORM_RHO) # Perform individual analysis individual = AnalysisIndividual(individual, correct, executions, self.__ca_size, self.__connection_radius, self.__ca_iterations, self.__ca_repeat, self.__k_history, save_executions=self.__save_executions) Analysis.elems += 1 table_cells = individual.get_cells_info() table_individual = individual.get_individual_info() individual.save_executions() del correct del executions del individual # Store the individual analysis in a HDF5 file group = self.__data_file.create_group("individual%d" % table_individual["i_num"]) cells_grp = group.create_group("cells") for key, values in table_cells.iteritems(): cells_grp.create_dataset(key, data=values, shape=values.shape, dtype=values.dtype) individuals_grp = group.create_group("individuals") for key, values in table_individual.iteritems(): individuals_grp.create_dataset(key, data=values, shape=values.shape, dtype=values.dtype) self.__data_file.flush() def get_table(self): table = { "cells": {}, "individuals": {}} for individual_grp in self.__data_file.values(): for group in ["cells", "individuals"]: for key, values in individual_grp[group].iteritems(): try: table[group][key].append(values.value) except KeyError: table[group][key] = [values.value] for group_values in table.values(): for key, values in group_values.iteritems(): group_values[key] = np.concatenate(values) return table def get_correlations(self): table = self.get_table() correlations = {'cells': {}, 
'individuals': {}} refs_cells = ['lim', 'cls_rev'] for ref in refs_cells: correlations['cells'][ref] = {} ref_cell = table['cells'][ref] for key, values in table['cells'].iteritems(): if key == ref: continue correlations['cells'][ref][key] = \ st.spearmanr(ref_cell, values) refs_individuals = ['gini', 'max_min', 'short', 'fit'] for ref in refs_individuals: correlations['individuals'][ref] = {} ref_individual = table['individuals'][ref] for key, values in table['individuals'].iteritems(): if key == ref: continue correlations['individuals'][ref][key] = \ st.spearmanr(ref_individual, values) return correlations
unicamp-lbic/small_world_ca
analysis.py
Python
gpl-2.0
19,617
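# Editor's note: __calculate_gini above compresses the usual Gini formula
# into one pass over the cumulative sums. An equivalent, more explicit NumPy
# version to make the math visible (illustrative only):
import numpy as np

def gini(values):
    """Gini coefficient: 0 = perfectly equal, -> 1 = one cell holds all."""
    cum = np.sort(np.asarray(values, dtype=float)).cumsum()
    # Twice the (unnormalised) trapezoidal area under the Lorenz curve.
    lorenz_area = cum[0] + (cum[1:] + cum[:-1]).sum()
    return 1 - lorenz_area / (cum[-1] * cum.size)

assert gini([1, 1, 1, 1]) == 0.0                 # uniform distribution
assert abs(gini([0, 0, 0, 4]) - 0.75) < 1e-9     # maximal inequality for n=4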
# coding=utf-8 from __future__ import print_function, unicode_literals __author__ = "Sally Wilsak" import codecs import os import sys import textwrap import unittest import import_resolver # This isn't strictly correct; it will only work properly if your terminal is set to UTF-8. # However, Linux is usually set to UTF-8 and Windows' English code page 437 is at least ASCII-compatible this will work well enough for our purposes if sys.stdout.encoding != 'utf8': sys.stdout = codecs.getwriter('utf8')(sys.stdout, 'strict') if sys.stderr.encoding != 'utf8': sys.stderr = codecs.getwriter('utf8')(sys.stderr, 'strict') def simple_normpath(path): """On Windows, normpath substitutes back slashes into the file path. This makes cross-platform testing difficult since we're checking string output. But the test cases have simple filepaths so we can substitute something simpler for the tests. """ return path.replace("./", "") def simple_join(path, *args): """ Make os.path.join work the same on Windows and Linux. Again this is ok because the test cases have simple paths """ elements = [path] elements.extend(args) return "/".join(elements) class TestImportResolver(unittest.TestCase): def setUp(self): # Monkey-patch some path manipulations so we can string match with Unix-style paths and Windows won't mess them up import_resolver.os.path.normpath = simple_normpath import_resolver.os.path.join = simple_join def test_line_extraction(self): self.assertEqual(import_resolver.extract_import_files(""), []) self.assertEqual(import_resolver.extract_import_files("This isn't TypeScript.\nBut it does have multiple lines."), []) self.assertEqual(import_resolver.extract_import_files("import thing = require('./thing.ts');"), ["./thing.ts"]) import_statements = textwrap.dedent(""" // Comments should get ignored, of course import first = require('./lib/first.ts'); // Different amounts of whitespace should be ok import second=require('./second.ts') ; // so should other stuff at the end // Double quotes are also ok import _THIRD = require("./third.ts") // So is something that's not a ts file, but it gets .ts added import fourth = require("../fourth/file/path") // A Windows-style path doesn't match... import fifth = require("C:\\fifth.ts") // ...neither does an absolute Unix-style path... 
import sixth = require("/home/user6/sixth.ts") // ...but this mixed-up one does import seventh = require('./folder\\folder\\seventh.ts') // Capitalizing the keywords means it doesn't match Import eighth = Require('./eighth.ts') // Something that's not a file path doesn't match import ninth = require('ninth') // If it's not at the start of the line, it doesn't match some stuff import tenth = require('./tenth.ts') // And for good measure, a non-ASCII file path should work import eleventh = require('./одиннадцать.ts') """) expected_filenames = [ "./lib/first.ts", "./second.ts", "./third.ts", "../fourth/file/path.ts", "./folder\\folder\\seventh.ts", "./одиннадцать.ts", ] self.assertEqual(import_resolver.extract_import_files(import_statements), expected_filenames) def test_format(self): files = ["/badger/badger", "C:\\badger.ts", "/bad ger/snake.ts"] self.assertEqual(import_resolver.format_line("/file/name.ts", files), "/file/name.ts <- /badger/badger C:\\badger.ts /bad\\ ger/snake.ts") def test_circular_deps(self): circular_deps = { "/home/badger/a.ts": "import b = require('./b.ts');\nimport c = require('./c.ts');", "/home/badger/b.ts": "import d = require('./d.ts');", "/home/badger/c.ts": "", "/home/badger/d.ts": "import a = require('./a.ts');", } import_resolver.read_file = lambda x: circular_deps[x] expected_string = "\n".join([ "/home/badger/c.ts <- /home/badger/a.ts", "/home/badger/d.ts <- /home/badger/b.ts", "/home/badger/a.ts <- /home/badger/d.ts", "/home/badger/b.ts <- /home/badger/a.ts", ]) self.assertEqual(import_resolver.do_dependency_resolve(["/home/badger/a.ts"]), expected_string) def test_triangle_deps(self): triangle_deps = { "/home/badger/a.ts": "import b = require('./b.ts');\nimport c = require('./c.ts');", "/home/badger/b.ts": "import c = require('./c.ts');", "/home/badger/c.ts": "", } import_resolver.read_file = lambda x: triangle_deps[x] expected_string = "\n".join([ "/home/badger/c.ts <- /home/badger/a.ts /home/badger/b.ts", "/home/badger/a.ts <- ", "/home/badger/b.ts <- /home/badger/a.ts", ]) self.assertEqual(import_resolver.do_dependency_resolve(["/home/badger/a.ts"]), expected_string) def test_inaccessible_deps(self): def inaccessible_deps(filename): if "a.ts" in filename: return "import b = require('./b.ts');" elif "b.ts" in filename: return "import c = require('./c.ts');" raise IOError import_resolver.read_file = inaccessible_deps expected_string = "\n".join([ "/home/badger/c.ts <- /home/badger/b.ts", "/home/badger/a.ts <- ", "/home/badger/b.ts <- /home/badger/a.ts", "Cannot read file '/home/badger/c.ts'", ]) self.assertEqual(import_resolver.do_dependency_resolve(["/home/badger/a.ts"]), expected_string) def test_lists(self): lists_deps = { "/home/badger/a.ts": "import b = require('./b.ts');\nimport c = require('./c.ts');\nimport d = require('./d.ts');", "/home/badger/b.ts": "import c = require('./c.ts');\nimport d = require('./d.ts');", "/home/badger/c.ts": "import d = require('./d.ts');", "/home/badger/d.ts": "", } import_resolver.read_file = lambda x: lists_deps[x] expected_string = "\n".join([ "/home/badger/c.ts <- /home/badger/a.ts /home/badger/b.ts", "/home/badger/d.ts <- /home/badger/a.ts /home/badger/b.ts /home/badger/c.ts", "/home/badger/a.ts <- ", "/home/badger/b.ts <- /home/badger/a.ts", ]) self.assertEqual(import_resolver.do_dependency_resolve(["/home/badger/a.ts"]), expected_string)
starcruiseromega/insolent-meow
import_resolver/test_import_resolver.py
Python
gpl-2.0
6,124
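# Editor's note: the tests above stub out import_resolver.read_file and the
# os.path helpers by plain attribute assignment ("monkey-patching"). A tiny
# self-contained illustration of the pattern with a made-up module; unlike
# the tests above, it also restores the original so later tests are
# unaffected:
import types

mod = types.ModuleType("demo")          # stands in for import_resolver
mod.read_file = lambda path: open(path).read()

def fake_read_file(path):
    return {"a.ts": "import b = require('./b.ts');"}[path]

original = mod.read_file
mod.read_file = fake_read_file          # patch: no real file I/O in the test
try:
    assert "require" in mod.read_file("a.ts")
finally:
    mod.read_file = original            # restore the real implementation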
from twitter_rec import Api
import time

USERNAME = "[email protected]"
PASSWD = "bigdata"

s = Api.Session(USERNAME, PASSWD, debug=False)
s.connect()

# Poll the followers page in a loop, printing a running count of successful
# reads; the assert trips as soon as the expected marker string disappears
# from the response (i.e. once the account gets blocked).
counter = 0
while True:
    _ = s.read("/AllenboChina/followers")
    if "eason" in _:
        print counter
        counter += 1
    else:
        assert False
WeakGroup/twitter-rec
test/test_block.py
Python
gpl-2.0
304
#!/usr/bin/python
import cv

if __name__ == "__main__":
    # Open the default camera (-1 lets OpenCV pick any available device)
    # and show a live preview window until Esc (key code 27) is pressed.
    capture = cv.CaptureFromCAM(-1)
    cv.NamedWindow("image")
    while True:
        frame = cv.QueryFrame(capture)
        cv.ShowImage("image", frame)
        k = cv.WaitKey(10)
        if k % 256 == 27:
            break
    cv.DestroyWindow("image")
jquacinella/pyIsbnScanner
scan.py
Python
gpl-2.0
317
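# Editor's note: scan.py above uses the long-removed OpenCV 1.x `cv` API.
# The same webcam preview loop under the modern cv2 bindings would look
# roughly like this (a sketch, assuming the opencv-python package):
import cv2

capture = cv2.VideoCapture(0)           # 0 = first camera
while True:
    ok, frame = capture.read()
    if not ok:
        break
    cv2.imshow("image", frame)
    if cv2.waitKey(10) % 256 == 27:     # Esc quits, as in the original
        break
capture.release()
cv2.destroyWindow("image")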
#coding: utf-8 # # Ailurus - a simple application installer and GNOME tweaker # # Copyright (C) 2009-2010, Ailurus developers and Ailurus contributors # Copyright (C) 2007-2010, Trusted Digital Technology Laboratory, Shanghai Jiao Tong University, China. # # Ailurus is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Ailurus is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ailurus; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA from __future__ import with_statement import gtk, gobject, sys, os import pango from lib import * from libu import * class ComputerDoctorPane(gtk.VBox): icon = D+'sora_icons/m_computer_doctor.png' text = _('Computer\nDoctor') def render_type_func(self, column, cell, model, iter): cure_obj = model.get_value(iter, 1) pixbuf = [self.icon_must_fix, self.icon_suggestion][cure_obj.type] cell.set_property('pixbuf', pixbuf) def render_text_func(self, column, cell, model, iter): cure_obj = model.get_value(iter, 1) markup = '<b>%s</b>' % cure_obj.__doc__ if cure_obj.detail: markup += '\n' + cure_obj.detail cell.set_property('markup', markup) def toggled(self, render_toggle, path, sortedstore): path = sortedstore.convert_path_to_child_path(path) self.liststore[path][0] = not self.liststore[path][0] sensitive = False for row in self.liststore: to_apply = row[0] sensitive = sensitive or to_apply self.button_apply.set_sensitive(sensitive) def sort_by_type(self, model, iter1, iter2): obj1 = model.get_value(iter1, 1) obj2 = model.get_value(iter2, 1) if obj1 and obj2: return cmp(obj1.type, obj2.type) or cmp(obj1.__doc__, obj2.__doc__) else: return 0 def sort_by_text(self, model, iter1, iter2): obj1 = model.get_value(iter1, 1) obj2 = model.get_value(iter2, 1) if obj1 and obj2: return cmp(obj1.__doc__, obj2.__doc__) else: return 0 def refresh(self): self.liststore.clear() for obj in self.cure_objs: if obj.exists(): self.liststore.append([False, obj]) self.sortedstore.set_sort_column_id(1000, gtk.SORT_ASCENDING) self.button_apply.set_sensitive(False) self.show_text('') must_fix = 0 for row in self.liststore: obj = row[1] if obj.type == C.MUST_FIX: must_fix += 1 text = '' if len(self.liststore): if must_fix: text += _('Found %s errors in your system.') % must_fix text += ' ' text += _('There is a total of %s suggestions.') % len(self.liststore) else: text = _('Found no error :)') self.show_text(text) def apply(self): success = 0 for row in self.liststore: apply = row[0] if apply: obj = row[1] try: obj.cure() success += 1 except: print_traceback() self.refresh() if success: notify(_('Computer doctor'), _('Successfully applied %s suggestions.') % success) def show_text(self, text): self.column_text.set_title(text) def __init__(self, main_view, cure_objs): self.cure_objs = cure_objs self.icon_must_fix = get_pixbuf(D+'sora_icons/c_must_fix.png', 24, 24) self.icon_suggestion = get_pixbuf(D+'sora_icons/c_suggestion.png', 24, 24) self.liststore = liststore = gtk.ListStore(bool, gobject.TYPE_PYOBJECT) # apply?, cure_object self.sortedstore = sortedstore = gtk.TreeModelSort(liststore) 
sortedstore.set_sort_func(1000, self.sort_by_type) sortedstore.set_sort_func(1001, self.sort_by_text) render_toggle = gtk.CellRendererToggle() render_toggle.connect('toggled', self.toggled, sortedstore) render_type = gtk.CellRendererPixbuf() render_text = gtk.CellRendererText() render_text.set_property('ellipsize', pango.ELLIPSIZE_END) column_toggle = gtk.TreeViewColumn() column_toggle.pack_start(render_toggle, False) column_toggle.add_attribute(render_toggle, 'active', 0) column_toggle.set_sort_column_id(0) column_type = gtk.TreeViewColumn() column_type.pack_start(render_type, False) column_type.set_cell_data_func(render_type, self.render_type_func) column_type.set_sort_column_id(1000) self.column_text = column_text = gtk.TreeViewColumn() column_text.pack_start(render_text) column_text.set_cell_data_func(render_text, self.render_text_func) column_text.set_sort_column_id(1001) self.view = view = gtk.TreeView(sortedstore) view.set_rules_hint(True) view.append_column(column_toggle) view.append_column(column_type) view.append_column(column_text) scroll = gtk.ScrolledWindow() scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC) scroll.set_shadow_type(gtk.SHADOW_IN) scroll.add(view) button_refresh = image_stock_button(gtk.STOCK_REFRESH, _('Refresh')) button_refresh.connect('clicked', lambda *w: self.refresh()) self.button_apply = button_apply = image_stock_button(gtk.STOCK_APPLY, _('Apply')) button_apply.connect('clicked', lambda *w: self.apply()) button_apply.set_sensitive(False) button_box = gtk.HBox(False, 10) button_box.pack_start(button_refresh, False) button_box.pack_start(button_apply, False) gtk.VBox.__init__(self, False, 10) self.set_border_width(5) self.pack_start(button_box, False) self.pack_start(scroll) self.show_text(_('Please click "refresh" button.')) self.refresh()
killpanda/Ailurus
ailurus/computer_doctor_pane.py
Python
gpl-2.0
6,390
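# Editor's note: sort_by_type above chains two cmp() calls so that must-fix
# items sort before suggestions and ties break alphabetically. The same
# ordering expressed as a plain tuple key (the cure objects here are
# stand-ins for the GTK model values):
class Cure(object):
    def __init__(self, type_, doc):
        self.type, self.__doc__ = type_, doc

cures = [Cure(1, 'b suggestion'), Cure(0, 'z error'), Cure(1, 'a suggestion')]
cures.sort(key=lambda c: (c.type, c.__doc__))
assert [c.__doc__ for c in cures] == ['z error', 'a suggestion', 'b suggestion']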
import os import re import netifaces as ni from socket import * from Components.Console import Console from Components.PluginComponent import plugins from Plugins.Plugin import PluginDescriptor from boxbranding import getBoxType class Network: def __init__(self): self.ifaces = {} self.configuredNetworkAdapters = [] self.NetworkState = 0 self.DnsState = 0 self.nameservers = [] self.ethtool_bin = "/usr/sbin/ethtool" self.console = Console() self.linkConsole = Console() self.restartConsole = Console() self.deactivateInterfaceConsole = Console() self.activateInterfaceConsole = Console() self.resetNetworkConsole = Console() self.dnsConsole = Console() self.pingConsole = Console() self.config_ready = None self.friendlyNames = {} self.lan_interfaces = [] self.wlan_interfaces = [] self.remoteRootFS = None self.getInterfaces() def onRemoteRootFS(self): if self.remoteRootFS is None: import Harddisk for parts in Harddisk.getProcMounts(): if parts[1] == '/' and parts[2] == 'nfs': self.remoteRootFS = True break else: self.remoteRootFS = False return self.remoteRootFS def isBlacklisted(self, iface): return iface in ('lo', 'wifi0', 'wmaster0', 'sit0', 'tun0', 'sys0', 'p2p0') def getInterfaces(self, callback=None): self.configuredInterfaces = [] for device in self.getInstalledAdapters(): self.getAddrInet(device, callback) # helper function def regExpMatch(self, pattern, string): if string is None: return None try: return pattern.search(string).group() except AttributeError: return None # helper function to convert ips from a sring to a list of ints def convertIP(self, ip): return [int(n) for n in ip.split('.')] def getAddrInet(self, iface, callback): data = {'up': False, 'dhcp': False, 'preup': False, 'predown': False} try: data['up'] = int(open('/sys/class/net/%s/flags' % iface).read().strip(), 16) & 1 == 1 if data['up']: self.configuredInterfaces.append(iface) nit = ni.ifaddresses(iface) data['ip'] = self.convertIP(nit[ni.AF_INET][0]['addr']) # ipv4 data['netmask'] = self.convertIP(nit[ni.AF_INET][0]['netmask']) data['bcast'] = self.convertIP(nit[ni.AF_INET][0]['broadcast']) data['mac'] = nit[ni.AF_LINK][0]['addr'] # mac data['gateway'] = self.convertIP(ni.gateways()['default'][ni.AF_INET][0]) # default gw except: data['dhcp'] = True data['ip'] = [0, 0, 0, 0] data['netmask'] = [0, 0, 0, 0] data['gateway'] = [0, 0, 0, 0] self.ifaces[iface] = data self.loadNetworkConfig(iface, callback) def writeNetworkConfig(self): self.configuredInterfaces = [] fp = file('/etc/network/interfaces', 'w') fp.write("# automatically generated by enigma2\n# do NOT change manually!\n\n") fp.write("auto lo\n") fp.write("iface lo inet loopback\n\n") for ifacename, iface in self.ifaces.items(): if iface['up']: fp.write("auto " + ifacename + "\n") self.configuredInterfaces.append(ifacename) if iface['dhcp']: fp.write("iface " + ifacename + " inet dhcp\n") fp.write("udhcpc_opts -T1 -t9\n") if not iface['dhcp']: fp.write("iface " + ifacename + " inet static\n") if 'ip' in iface: print tuple(iface['ip']) fp.write(" address %d.%d.%d.%d\n" % tuple(iface['ip'])) fp.write(" netmask %d.%d.%d.%d\n" % tuple(iface['netmask'])) if 'gateway' in iface: fp.write(" gateway %d.%d.%d.%d\n" % tuple(iface['gateway'])) if "configStrings" in iface: fp.write(iface["configStrings"]) if iface["preup"] is not False and "configStrings" not in iface: fp.write(iface["preup"]) if iface["predown"] is not False and "configStrings" not in iface: fp.write(iface["predown"]) fp.write("\n") fp.close() self.configuredNetworkAdapters = self.configuredInterfaces 
self.writeNameserverConfig() def writeNameserverConfig(self): fp = file('/etc/resolv.conf', 'w') for nameserver in self.nameservers: fp.write("nameserver %d.%d.%d.%d\n" % tuple(nameserver)) fp.close() def loadNetworkConfig(self, iface, callback=None): interfaces = [] # parse the interfaces-file try: fp = file('/etc/network/interfaces', 'r') interfaces = fp.readlines() fp.close() except: print "[Network.py] interfaces - opening failed" ifaces = {} currif = "" for i in interfaces: split = i.strip().split(' ') if split[0] == "iface": currif = split[1] ifaces[currif] = {} if len(split) == 4 and split[3] == "dhcp": ifaces[currif]["dhcp"] = True else: ifaces[currif]["dhcp"] = False if currif == iface: #read information only for available interfaces if split[0] == "address": ifaces[currif]["address"] = map(int, split[1].split('.')) if "ip" in self.ifaces[currif]: if self.ifaces[currif]["ip"] != ifaces[currif]["address"] and ifaces[currif]["dhcp"] == False: self.ifaces[currif]["ip"] = map(int, split[1].split('.')) if split[0] == "netmask": ifaces[currif]["netmask"] = map(int, split[1].split('.')) if "netmask" in self.ifaces[currif]: if self.ifaces[currif]["netmask"] != ifaces[currif]["netmask"] and ifaces[currif]["dhcp"] == False: self.ifaces[currif]["netmask"] = map(int, split[1].split('.')) if split[0] == "gateway": ifaces[currif]["gateway"] = map(int, split[1].split('.')) if "gateway" in self.ifaces[currif]: if self.ifaces[currif]["gateway"] != ifaces[currif]["gateway"] and ifaces[currif]["dhcp"] == False: self.ifaces[currif]["gateway"] = map(int, split[1].split('.')) if split[0] == "pre-up": if "preup" in self.ifaces[currif]: self.ifaces[currif]["preup"] = i if split[0] in ("pre-down", "post-down"): if "predown" in self.ifaces[currif]: self.ifaces[currif]["predown"] = i for ifacename, iface in ifaces.items(): if ifacename in self.ifaces: self.ifaces[ifacename]["dhcp"] = iface["dhcp"] if not self.console.appContainers: # save configured interfacelist self.configuredNetworkAdapters = self.configuredInterfaces # load ns only once self.loadNameserverConfig() print "read configured interface:", ifaces print "self.ifaces after loading:", self.ifaces self.config_ready = True self.msgPlugins() if callback is not None: callback(True) def loadNameserverConfig(self): ipRegexp = "[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}" nameserverPattern = re.compile("nameserver +" + ipRegexp) ipPattern = re.compile(ipRegexp) resolv = [] try: fp = file('/etc/resolv.conf', 'r') resolv = fp.readlines() fp.close() self.nameservers = [] except: print "[Network.py] resolv.conf - opening failed" for line in resolv: if self.regExpMatch(nameserverPattern, line) is not None: ip = self.regExpMatch(ipPattern, line) if ip: self.nameservers.append(self.convertIP(ip)) print "nameservers:", self.nameservers def getInstalledAdapters(self): return [x for x in os.listdir('/sys/class/net') if not self.isBlacklisted(x)] def getConfiguredAdapters(self): return self.configuredNetworkAdapters def getNumberOfAdapters(self): return len(self.ifaces) def getFriendlyAdapterName(self, x): if x in self.friendlyNames.keys(): return self.friendlyNames.get(x, x) self.friendlyNames[x] = self.getFriendlyAdapterNaming(x) return self.friendlyNames.get(x, x) # when we have no friendly name, use adapter name def getFriendlyAdapterNaming(self, iface): name = None if self.isWirelessInterface(iface): if iface not in self.wlan_interfaces: name = _("WLAN connection") if len(self.wlan_interfaces): name += " " + str(len(self.wlan_interfaces) + 1) 
self.wlan_interfaces.append(iface) else: if iface not in self.lan_interfaces: if iface == "eth1": name = _("VLAN connection") else: name = _("LAN connection") if len(self.lan_interfaces) and not iface == "eth1": name += " " + str(len(self.lan_interfaces) + 1) self.lan_interfaces.append(iface) return name def getFriendlyAdapterDescription(self, iface): if not self.isWirelessInterface(iface): return _('Ethernet network interface') moduledir = self.getWlanModuleDir(iface) if moduledir: name = os.path.basename(os.path.realpath(moduledir)) if name.startswith('ath') or name.startswith('carl'): name = 'Atheros' elif name.startswith('rt2') or name.startswith('rt3') or name.startswith('rt5') or name.startswith('rt6') or name.startswith('rt7'): name = 'Ralink' elif name.startswith('zd'): name = 'Zydas' elif name.startswith('rtl') or name.startswith('r8'): name = 'Realtek' elif name.startswith('smsc'): name = 'SMSC' elif name.startswith('peg'): name = 'Pegasus' elif name.startswith('rn'): name = 'RNDIS' elif name.startswith('mw') or name.startswith('libertas'): name = 'Marvel' elif name.startswith('p5'): name = 'Prism' elif name.startswith('as') or name.startswith('ax'): name = 'ASIX' elif name.startswith('dm'): name = 'Davicom' elif name.startswith('mcs'): name = 'MosChip' elif name.startswith('at'): name = 'Atmel' elif name.startswith('iwm'): name = 'Intel' elif name.startswith('brcm') or name.startswith('bcm'): name = 'Broadcom' elif os.path.isdir('/tmp/bcm/' + iface): name = 'Broadcom' else: name = _('Unknown') return name + ' ' + _('wireless network interface') def getAdapterName(self, iface): return iface def getAdapterList(self): return self.ifaces.keys() def getAdapterAttribute(self, iface, attribute): return self.ifaces.get(iface, {}).get(attribute) def setAdapterAttribute(self, iface, attribute, value): print "setting for adapter", iface, "attribute", attribute, " to value", value if iface in self.ifaces: self.ifaces[iface][attribute] = value def removeAdapterAttribute(self, iface, attribute): if iface in self.ifaces and attribute in self.ifaces[iface]: del self.ifaces[iface][attribute] def getNameserverList(self): if len(self.nameservers) == 0: return [[0, 0, 0, 0], [0, 0, 0, 0]] else: return self.nameservers def clearNameservers(self): self.nameservers = [] def addNameserver(self, nameserver): if nameserver not in self.nameservers: self.nameservers.append(nameserver) def removeNameserver(self, nameserver): if nameserver in self.nameservers: self.nameservers.remove(nameserver) def changeNameserver(self, oldnameserver, newnameserver): if oldnameserver in self.nameservers: for i in range(len(self.nameservers)): if self.nameservers[i] == oldnameserver: self.nameservers[i] = newnameserver def resetNetworkConfig(self, mode='lan', callback=None): self.commands = [] self.commands.append("/etc/init.d/avahi-daemon stop") for iface in self.ifaces.keys(): if iface != 'eth0' or not self.onRemoteRootFS(): self.commands.append("/sbin/ip addr flush dev " + iface + " scope global") self.commands.append("/etc/init.d/networking stop") self.commands.append("killall -9 udhcpc") self.commands.append("rm /var/run/udhcpc*") self.resetNetworkConsole.eBatch(self.commands, self.resetNetworkFinishedCB, [mode, callback], debug=True) def resetNetworkFinishedCB(self, extra_args): (mode, callback) = extra_args if not self.resetNetworkConsole.appContainers: self.writeDefaultNetworkConfig(mode, callback) def writeDefaultNetworkConfig(self, mode='lan', callback=None): fp = file('/etc/network/interfaces', 'w') fp.write("# 
automatically generated by enigma2\n# do NOT change manually!\n\n") fp.write("auto lo\n") fp.write("iface lo inet loopback\n\n") if mode == 'wlan': fp.write("auto wlan0\n") fp.write("iface wlan0 inet dhcp\n") if mode == 'wlan-mpci': fp.write("auto ath0\n") fp.write("iface ath0 inet dhcp\n") if mode == 'lan': fp.write("auto eth0\n") fp.write("iface eth0 inet dhcp\n") fp.write("\n") fp.close() self.commands = [] if mode == 'wlan': self.commands.append("/sbin/ifconfig eth0 down") self.commands.append("/sbin/ifconfig ath0 down") self.commands.append("/sbin/ifconfig wlan0 up") if mode == 'wlan-mpci': self.commands.append("/sbin/ifconfig eth0 down") self.commands.append("/sbin/ifconfig wlan0 down") self.commands.append("/sbin/ifconfig ath0 up") if mode == 'lan': self.commands.append("/sbin/ifconfig eth0 up") self.commands.append("/sbin/ifconfig wlan0 down") self.commands.append("/sbin/ifconfig ath0 down") self.commands.append("/etc/init.d/avahi-daemon start") self.resetNetworkConsole.eBatch(self.commands, self.resetNetworkFinished, [mode, callback], debug=True) def resetNetworkFinished(self, extra_args): (mode, callback) = extra_args if not self.resetNetworkConsole.appContainers: if callback is not None: callback(True, mode) def checkNetworkState(self, statecallback): self.NetworkState = 0 self.pingConsole = Console() for server in ("www.openpli.org", "www.google.nl", "www.google.com"): self.pingConsole.ePopen(("/bin/ping", "/bin/ping", "-c", "1", server), self.checkNetworkStateFinished, statecallback) def checkNetworkStateFinished(self, result, retval, extra_args): (statecallback) = extra_args if self.pingConsole is not None: if retval == 0: self.pingConsole = None statecallback(self.NetworkState) else: self.NetworkState += 1 if not self.pingConsole.appContainers: statecallback(self.NetworkState) def restartNetwork(self, callback=None): self.config_ready = False self.msgPlugins() self.commands = [] self.commands.append("/etc/init.d/avahi-daemon stop") for iface in self.ifaces.keys(): if iface != 'eth0' or not self.onRemoteRootFS(): self.commands.append(("/sbin/ifdown", "/sbin/ifdown", iface)) self.commands.append("/sbin/ip addr flush dev " + iface + " scope global") self.commands.append("/etc/init.d/networking stop") self.commands.append("killall -9 udhcpc") self.commands.append("rm /var/run/udhcpc*") self.commands.append("/etc/init.d/networking start") self.commands.append("/etc/init.d/avahi-daemon start") self.restartConsole.eBatch(self.commands, self.restartNetworkFinished, callback, debug=True) def restartNetworkFinished(self, extra_args): (callback) = extra_args if callback is not None: callback(True) def getLinkState(self, iface, callback): self.linkConsole.ePopen((self.ethtool_bin, self.ethtool_bin, iface), self.getLinkStateFinished, callback) def getLinkStateFinished(self, result, retval, extra_args): (callback) = extra_args if not self.linkConsole.appContainers: callback(result) def stopPingConsole(self): if self.pingConsole is not None: self.pingConsole.killAll() def stopLinkStateConsole(self): self.linkConsole.killAll() def stopDNSConsole(self): if self.dnsConsole is not None: self.dnsConsole.killAll() def stopRestartConsole(self): self.restartConsole.killAll() def stopGetInterfacesConsole(self): self.console.killAll() def stopDeactivateInterfaceConsole(self): self.deactivateInterfaceConsole.killAll() def stopActivateInterfaceConsole(self): self.activateInterfaceConsole.killAll() def checkforInterface(self, iface): if self.getAdapterAttribute(iface, 'up') is True: return True else: 
ret = os.system("ifconfig " + iface + " up") os.system("ifconfig " + iface + " down") if ret == 0: return True else: return False def checkDNSLookup(self, statecallback): self.DnsState = 0 self.dnsConsole = Console() for server in ("www.openpli.org", "www.google.nl", "www.google.com"): self.dnsConsole.ePopen(("/usr/bin/nslookup", "/usr/bin/nslookup", server), self.checkDNSLookupFinished, statecallback) def checkDNSLookupFinished(self, result, retval, extra_args): (statecallback) = extra_args if self.dnsConsole is not None: if retval == 0: self.dnsConsole = None statecallback(self.DnsState) else: self.DnsState += 1 if not self.dnsConsole.appContainers: statecallback(self.DnsState) def deactivateInterface(self, ifaces, callback=None): self.config_ready = False self.msgPlugins() commands = [] def buildCommands(iface): commands.append(("/sbin/ifdown", "/sbin/ifdown", "-f", iface)) commands.append(("/sbin/ip", "/sbin/ip", "addr", "flush", "dev", iface, "scope", "global")) #wpa_supplicant sometimes doesn't quit properly on SIGTERM if os.path.exists('/var/run/wpa_supplicant/' + iface): commands.append("wpa_cli -i" + iface + " terminate") if isinstance(ifaces, (list, tuple)): for iface in ifaces: if iface != 'eth0' or not self.onRemoteRootFS(): buildCommands(iface) else: if ifaces == 'eth0' and self.onRemoteRootFS(): if callback is not None: callback(True) return buildCommands(ifaces) self.deactivateInterfaceConsole.eBatch(commands, self.deactivateInterfaceFinished, (ifaces, callback), debug=True) def deactivateInterfaceFinished(self, extra_args): (ifaces, callback) = extra_args if not self.deactivateInterfaceConsole.appContainers: if callback is not None: callback(True) def activateInterface(self, iface, callback=None): if self.config_ready: self.config_ready = False self.msgPlugins() if iface == 'eth0' and self.onRemoteRootFS(): if callback is not None: callback(True) return commands = [] commands.append(("/sbin/ifup", "/sbin/ifup", iface)) self.activateInterfaceConsole.eBatch(commands, self.activateInterfaceFinished, callback, debug=True) def activateInterfaceFinished(self, extra_args): callback = extra_args if not self.activateInterfaceConsole.appContainers: if callback is not None: callback(True) def sysfsPath(self, iface): return '/sys/class/net/' + iface def isWirelessInterface(self, iface): if iface in self.wlan_interfaces: return True if os.path.isdir(self.sysfsPath(iface) + '/wireless'): return True # r871x_usb_drv on kernel 2.6.12 is not identifiable over /sys/class/net/'ifacename'/wireless so look also inside /proc/net/wireless device = re.compile('[a-z]{2,}[0-9]*:') ifnames = [] fp = open('/proc/net/wireless', 'r') for line in fp: try: ifnames.append(device.search(line).group()[:-1]) except AttributeError: pass if iface in ifnames: return True return False def getWlanModuleDir(self, iface=None): devicedir = self.sysfsPath(iface) + '/device' if not os.path.isdir(devicedir): return None moduledir = devicedir + '/driver/module' if os.path.isdir(moduledir): return moduledir # identification is not possible over default moduledir for x in os.listdir(devicedir): # rt3070 on kernel 2.6.18 registers wireless devices as usb_device (e.g. 
1-1.3:1.0) and identification is only possible over /sys/class/net/'ifacename'/device/1-xxx if x.startswith("1-"): moduledir = devicedir + '/' + x + '/driver/module' if os.path.isdir(moduledir): return moduledir # rt73, zd1211b, r871x_usb_drv on kernel 2.6.12 can be identified over /sys/class/net/'ifacename'/device/driver, so look also here moduledir = devicedir + '/driver' if os.path.isdir(moduledir): return moduledir return None def detectWlanModule(self, iface=None): if not self.isWirelessInterface(iface): return None devicedir = self.sysfsPath(iface) + '/device' if os.path.isdir(devicedir + '/ieee80211'): return 'nl80211' moduledir = self.getWlanModuleDir(iface) if moduledir: module = os.path.basename(os.path.realpath(moduledir)) if module in ('ath_pci', 'ath5k'): return 'madwifi' if module in ('rt73', 'rt73'): return 'ralink' if module == 'zd1211b': return 'zydas' if module == 'brcm-systemport': return 'brcm-wl' return 'wext' def calc_netmask(self, nmask): from struct import pack from socket import inet_ntoa mask = 1L << 31 xnet = (1L << 32) - 1 cidr_range = range(0, 32) cidr = long(nmask) if cidr not in cidr_range: print 'cidr invalid: %d' % cidr return None else: nm = ((1L << cidr) - 1) << (32 - cidr) netmask = str(inet_ntoa(pack('>L', nm))) return netmask def msgPlugins(self): if self.config_ready is not None: for p in plugins.getPlugins(PluginDescriptor.WHERE_NETWORKCONFIG_READ): p(reason=self.config_ready) def hotplug(self, event): interface = event['INTERFACE'] if self.isBlacklisted(interface): return action = event['ACTION'] if action == "add": print "[Network] Add new interface:", interface self.getAddrInet(interface, None) elif action == "remove": print "[Network] Removed interface:", interface try: del self.ifaces[interface] except KeyError: pass iNetwork = Network() def InitNetwork(): pass
Openeight/enigma2
lib/python/Components/Network.py
Python
gpl-2.0
20,757
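For reference, the calc_netmask method in the Network class above converts a CIDR prefix length to a dotted-quad netmask using Python 2 long literals (1L). A minimal standalone Python 3 sketch of the same conversion (an illustration only, not part of the enigma2 API; unlike the original it also accepts /32):

from socket import inet_ntoa
from struct import pack

def calc_netmask(cidr):
    # Build a 32-bit mask with the top cidr bits set, e.g. 24 -> 255.255.255.0
    if not 0 <= cidr <= 32:
        raise ValueError("CIDR prefix must be between 0 and 32")
    nm = ((1 << cidr) - 1) << (32 - cidr)
    return inet_ntoa(pack('>L', nm))

print(calc_netmask(24))  # 255.255.255.0
print(calc_netmask(16))  # 255.255.0.0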
# return_codes.py # # Copyright (C) 2018 Kano Computing Ltd. # License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2 # # Return codes of binaries used throughout this project. class RC(object): """Return codes of binaries used throughout this project. See ``source`` for more details.""" SUCCESS = 0 INCORRECT_ARGS = 1 NO_INTERNET = 2 NO_KANO_WORLD_ACC = 3 CANNOT_CREATE_FLAG = 4 # read-only fs? # kano-feedback-cli specific. ERROR_SEND_DATA = 10 ERROR_COPY_ARCHIVE = 11 ERROR_CREATE_FLAG = 12
KanoComputing/kano-feedback
kano_feedback/return_codes.py
Python
gpl-2.0
550
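A hypothetical usage sketch for the RC codes above: a CLI entry point that exits with a documented code so shell callers can branch on it. The connectivity check is a stand-in, not part of kano-feedback; only the import path comes from the file above.

import sys

from kano_feedback.return_codes import RC

def main():
    have_internet = False  # stand-in for a real connectivity check
    if not have_internet:
        return RC.NO_INTERNET  # shells see exit status 2
    return RC.SUCCESS

if __name__ == '__main__':
    sys.exit(main())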
# -----------------------------------------------------------
# reads the text from the given file, and outputs its
# character statistics
#
# (C) 2015 Frank Hofmann, Berlin, Germany
# Released under GNU Public License (GPL)
# email [email protected]
# -----------------------------------------------------------

# call the program this way:
# python character-statistics.py inputfile.txt > statistics.csv

# import required python standard modules
import sys, csv
import codecs
import os

# third-party module used for encoding detection
import chardet

# define character count function
def charStat(text):
    # set default value
    stat = {}
    # go through the characters one by one
    for character in text:
        # retrieve current value for a character,
        # 0 if not yet counted, and update the count
        stat[character] = stat.get(character, 0) + 1
    # return statistics dictionary
    return stat

# count number of program parameters
numPara = len(sys.argv)
if numPara < 2:
    print("invalid number of parameters: 1 filename required.")
    print("call for output on-screen: python %s inputfile.txt" % sys.argv[0])
    print("call for file output: python %s inputfile.txt > statistics.csv" % sys.argv[0])
    print("Exiting.")
    sys.exit(2)

# read name of the datafile
textfileName = sys.argv[1]
# print ("reading text from", textfileName, "...")

# sniff the encoding from the first bytes: a UTF-8 BOM is decisive,
# otherwise let chardet guess
numBytes = min(32, os.path.getsize(textfileName))
raw = open(textfileName, 'rb').read(numBytes)

if raw.startswith(codecs.BOM_UTF8):
    encoding = 'utf-8-sig'
else:
    result = chardet.detect(raw)
    encoding = result['encoding']

# open file for reading
fileHandle = open(textfileName, "r", encoding=encoding)

# read content
data = fileHandle.read()

# close file
fileHandle.close()

# calculate the character statistics
statistics = charStat(data)

# retrieve the single items
items = statistics.items()

# print ("sorting by character ...")
# sort the items
sortedItems = sorted(items)

lines = []
# output sorted list as CSV data
for singleItem in sortedItems:
    lines.append("%s,%i\n" % (singleItem[0], singleItem[1]))

# open file for writing
fileHandle = open("s.txt", "w", encoding=encoding)

# write content
fileHandle.writelines(lines)

# close file
fileHandle.close()
hofmannedv/training-python
text-analysis/character-statistics.py
Python
gpl-2.0
2,207
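The script above sniffs the input encoding before reading: a UTF-8 BOM short-circuits to 'utf-8-sig', otherwise chardet guesses from the first bytes. The same pattern as a self-contained helper (a sketch; it assumes the third-party chardet package is installed):

import codecs

import chardet  # third-party: pip install chardet

def detect_encoding(path, sample_size=32):
    # Read a small sample of raw bytes, as the script above does
    with open(path, 'rb') as fh:
        raw = fh.read(sample_size)
    # A UTF-8 BOM is decisive; otherwise fall back to statistical detection
    if raw.startswith(codecs.BOM_UTF8):
        return 'utf-8-sig'
    guess = chardet.detect(raw)
    return guess['encoding'] or 'utf-8'  # chardet may return None on tiny samples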
#!/usr/bin/env python

import optparse
import socket

def main():
    p = optparse.OptionParser()
    # port must be parsed as an int or socket.connect() will reject it
    p.add_option("--port", "-p", type="int", default=8888)
    # NOTE: --input is accepted for compatibility but unused by this test script
    p.add_option("--input", "-i", default="test.txt")
    options, arguments = p.parse_args()

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", options.port))

    # announce a sheet, then stream one row per line
    ii = 0
    sock.sendall("^0^1^sheet1^1000000^3\n")
    while ii < 1000000:
        sock.sendall("^%d^0^sheet1^%d^0^^0\n" % (ii, ii))
        ii = ii + 1

    sock.close()

if __name__ == '__main__':
    main()
johnbellone/gtkworkbook
etc/socketTest.py
Python
gpl-2.0
626
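The client above streams about a million caret-delimited rows to localhost:8888. A throwaway sink server to run it against, counting received lines (a Python 3 sketch for local testing; not part of gtkworkbook):

import socket

def sink(port=8888):
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind(("localhost", port))
    srv.listen(1)
    conn, _ = srv.accept()
    received = 0
    while True:
        data = conn.recv(65536)
        if not data:  # client closed the connection
            break
        received += data.count(b"\n")
    print("received %d lines" % received)
    conn.close()
    srv.close()

if __name__ == "__main__":
    sink()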
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA # # Author: Zhongjie Wang <[email protected]> # Tianwei Liu <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA """ Entrance of ICM Desktop Agent """ import os import signal import sys import time import socket from twisted.internet import reactor from twisted.internet import task from umit.icm.agent.logger import g_logger from umit.icm.agent.BasePaths import * from umit.icm.agent.Global import * from umit.icm.agent.Version import VERSION from umit.icm.agent.rpc.message import * from umit.icm.agent.rpc.MessageFactory import MessageFactory from umit.icm.agent.I18N import _ # Script found at http://www.py2exe.org/index.cgi/HowToDetermineIfRunningFromExe import imp frozen = (hasattr(sys, "frozen") or # new py2exe hasattr(sys, "importers") # old py2exe or imp.is_frozen("__main__")) # tools/freeze del(imp) def main_is_frozen(): return frozen class Application(object): def __init__(self): pass def _init_components(self, aggregator): from umit.icm.agent.core.PeerInfo import PeerInfo self.peer_info = PeerInfo() from umit.icm.agent.core.PeerManager import PeerManager self.peer_manager = PeerManager() from umit.icm.agent.core.EventManager import EventManager self.event_manager = EventManager() from umit.icm.agent.core.TaskManager import TaskManager self.task_manager = TaskManager() from umit.icm.agent.core.ReportManager import ReportManager self.report_manager = ReportManager() from umit.icm.agent.core.ReportUploader import ReportUploader self.report_uploader = ReportUploader(self.report_manager) from umit.icm.agent.core.TaskScheduler import TaskScheduler self.task_scheduler = TaskScheduler(self.task_manager, self.report_manager) from umit.icm.agent.core.TaskAssignFetch import TaskAssignFetch self.task_assign = TaskAssignFetch(self.task_manager) from umit.icm.agent.core.TestSetsFetcher import TestSetsFetcher self.test_sets = TestSetsFetcher(self.task_manager, self.report_manager) from umit.icm.agent.secure.KeyManager import KeyManager self.key_manager = KeyManager() from umit.icm.agent.core.Statistics import Statistics self.statistics = Statistics() from umit.icm.agent.rpc.aggregator import AggregatorAPI self.aggregator = AggregatorAPI(aggregator) from umit.icm.agent.super.SuperBehaviourByManual import SuperBehaviourByManual self.speer_by_manual = SuperBehaviourByManual(self) self.quitting = False self.is_auto_login = False self.is_successful_login = False #fix the login failure, save DB problem def _load_from_db(self): """ """ self.peer_manager.load_from_db() # restore unsent reports self.report_manager.load_unsent_reports() # desktop agent stats saving self.statistics.load_from_db() def init_after_running(self, port=None, username=None, password=None, server_enabled=True, skip_server_check=False): """ """ 
##################################################### # Create agent service(need to add the port confilct) if server_enabled: self.listen_port = port if port is not None else g_config.getint('network', 'listen_port') try: from umit.icm.agent.rpc.AgentService import AgentFactory self.factory = AgentFactory() g_logger.info("Listening on port %d.", self.listen_port) reactor.listenTCP(self.listen_port, self.factory) except Exception,info: #There can add more information self.quit_window_in_wrong(primary_text = _("The Listen Port has been used by other applications"), \ secondary_text = _("Please check the Port") ) ############################# # Create mobile agent service from umit.icm.agent.rpc.mobile import MobileAgentService self.ma_service = MobileAgentService() if self.use_gui: import gtk # Init GUI from umit.icm.agent.gui.GtkMain import GtkMain self.gtk_main = GtkMain() self.is_auto_login = g_config.getboolean('application', 'auto_login_swittch') ################################################################### #debug switch: It can show the gtkWindow without any authentication if g_config.getboolean('debug','debug_switch') and self.use_gui: self.login_simulate() ###################################### #check aggregator can be reached first if not skip_server_check: defer_ = self.aggregator.check_aggregator_website() defer_.addCallback(self.check_aggregator_success) defer_.addErrback(self.check_aggregator_failed) def check_aggregator_success(self,response): """ """ if response == True: self.login_window_show() else: self.speer_by_manual.peer_communication() def login_window_show(self): """ """ if self.is_auto_login and self.use_gui : ####################################################### #login with saved username or password, not credentials self.peer_info.load_from_db() ######################################## #Add more condition to check login legal self.login(self.peer_info.Username,self.peer_info.Password, True) else: if self.use_gui: self.gtk_main.show_login() else: self.login_without_gui() g_logger.info("Auto-login is disabled. 
You need to manually login.") def check_aggregator_failed(self,message): """ """ self.aggregator.available = False self.speer_by_manual.peer_communication() def login_without_gui(self): """ Users login without username or password """ username = False password = False if g_config.has_section("credentials"): username = g_config.get("credentials", "user") password = g_config.get("credentials", "password") if not username: username = raw_input("User Name:") if not password: password = raw_input("Password:") self.login(username, password, save_login=True) def check_software_auto(self): """ check software: according the time and other configurations """ from umit.icm.agent.core.Updater import auto_check_update ############################## #Software update automatically if g_config.getboolean('application','auto_update'): defer_ = auto_check_update(auto_upgrade=True) defer_.addErrback(self._handle_errback) else: ############################ #Detect update automatically if g_config.getboolean('update', 'update_detect'): #Here can set some update attributes defer_ = auto_check_update(auto_upgrade=False) defer_.addErrback(self._handle_errback) def register_agent(self, username, password): """ """ defer_ = self.aggregator.register(username, password) defer_.addCallback(self._handle_register) defer_.addErrback(self._handle_errback) return defer_ def _handle_register(self, result): if result: self.peer_info.ID = result['id'] self.peer_info.CipheredPublicKeyHash = result['hash'] self.peer_info.is_registered = True g_logger.debug("Register to Aggregator: %s" % result['id']) return result def _handle_errback(self, failure): """ """ failure.printTraceback() g_logger.error(">>> Failure from Application: %s" % failure) def login(self, username, password, save_login=False, login_only=False): """ """ if self.use_gui: self.gtk_main.set_to_logging_in() if self.is_auto_login and self.use_gui and self.check_username(username,password): #auto-login, select the credentials username and password from DB return self._login_after_register_callback(None, username, password, save_login, login_only) else: #manually login, we should check whether the username and password exists in database #If *NOT*, we should register the username and password to aggregator #IF *YES*, we will use credentials in DB g_config.set('application', 'auto_login_swittch', False) if self.check_username(username,password): return self._login_after_register_callback(None, username, password, save_login, login_only) else: self.peer_info.clear_db() deferred = self.register_agent(username, password) deferred.addCallback(self._login_after_register_callback, username, password, save_login, login_only) deferred.addErrback(self._handle_errback) return deferred def check_username(self,username="",password=""): """ check username and password in DB, the information is got from Login-Window """ rs = g_db_helper.select("select * from peer_info where username='%s' and \ password='%s'"%(username,password)) if not rs: g_logger.info("No matching peer info in db.\ icm-agent will register the username or password") return False else: g_logger.info("Match the username and password, \ we will change the default credentials") g_logger.debug(rs[0]) self.peer_info.ID = rs[0][0] self.peer_info.Username = rs[0][1] self.peer_info.Password = rs[0][2] self.peer_info.Email = rs[0][3] self.peer_info.CipheredPublicKeyHash = rs[0][4] self.peer_info.Type = rs[0][5] self.peer_info.is_registered = True return True def _login_after_register_callback(self, message, username, 
password, save_login, login_only): """ """ defer_ = self.aggregator.login(username, password) defer_.addCallback(self._handle_login, username, password, save_login, login_only) defer_.addErrback(self._handle_login_errback) return defer_ def _handle_login_errback(self,failure): """ """ print "------------------login failed!-------------------" failure.printTraceback() g_logger.error(">>> Failure from Application: %s" % failure) def _handle_login(self, result, username, password, save_login,login_only=False): """ """ #login successfully if result: self.peer_info.Username = username if username !="" and username != None else self.peer_info.Username self.peer_info.Password = password if password !="" and password != None else self.peer_info.Password #print self.peer_info.Username, self.peer_info.Password self.peer_info.is_logged_in = True #self.peer_info.clear_db() self.peer_info.save_to_db() g_logger.debug("Login Successfully :%s@%s" % (username,password)) if save_login: g_config.set('application', 'auto_login_swittch', True) else: g_config.set('application', 'auto_login_swittch', False) if self.use_gui: self.gtk_main.set_login_status(True) if login_only: return result #Load peers and reports from DB self._load_from_db() #check the new software(should appear after login successfully) self.check_software_auto() #mark login-successful self.is_successful_login = True #Task Looping manager self.task_loop_manager() return result def login_simulate(self): """ Only test GTK features """ #GTK show if self.use_gui == True: self.gtk_main.set_login_status(True) #Basic Information self.peer_info.load_from_db() self._load_from_db() #mark login-successful self.is_successful_login = True #TASK LOOP self.task_loop_manager() def task_loop_manager(self): """""" # Add looping calls if not hasattr(self, 'peer_maintain_lc'): self.peer_maintain_lc = task.LoopingCall(self.peer_manager.maintain) self.peer_maintain_lc.start(7200) if not hasattr(self, 'task_run_lc'): g_logger.info("Starting task scheduler looping ") self.task_run_lc = task.LoopingCall(self.task_scheduler.schedule) task_scheduler_text = g_config.get("Timer","task_scheduler_timer") if task_scheduler_text != "": indival = float(task_scheduler_text) else: indival = 30 self.task_run_lc.start(indival) if not hasattr(self, 'report_proc_lc'): g_logger.info("Starting report upload looping ") self.report_proc_lc = task.LoopingCall(self.report_uploader.process) report_uploade_text = g_config.get("Timer","send_report_timer") if report_uploade_text != "": indival = float(report_uploade_text) else: indival = 30 self.report_proc_lc.start(indival) if not hasattr(self,'task_assign_lc'): g_logger.info("Starting get assigned task from Aggregator") self.task_assgin_lc = task.LoopingCall(self.task_assign.fetch_task) task_assign_text = g_config.get("Timer","task_assign_timer") if task_assign_text != "": indival = float(task_assign_text) else: indival = 30 self.task_assgin_lc.start(indival) if not hasattr(self,'test_sets_fetch_lc'): g_logger.info("Starting get test sets from Aggregator") self.test_sets_fetch_lc = task.LoopingCall(self.test_sets.fetch_tests) test_fetch_text = g_config.get("Timer","test_fetch_timer") if test_fetch_text != "": indival = float(test_fetch_text) else: indival = 30 self.test_sets_fetch_lc.start(indival) def logout(self): defer_ = self.aggregator.logout() defer_.addCallback(self._handle_logout) return defer_ def _handle_logout(self, result): if self.use_gui: self.gtk_main.set_login_status(False) g_config.set('application', 'auto_login_swittch', 
False) return result def start(self, run_reactor=True, managed_mode=False, aggregator=None): """ The Main function """ g_logger.info("Starting ICM agent. Version: %s", VERSION) self._init_components(aggregator) reactor.addSystemEventTrigger('before', 'shutdown', self.on_quit) if not managed_mode: # This is necessary so the bot can take over and control the agent reactor.callWhenRunning(self.init_after_running) if run_reactor: # This is necessary so the bot can take over and control the agent reactor.run() def quit_window_in_wrong(self,primary_text = "",secondary_text = ""): """ """ #There can add more information from higwidgets.higwindows import HIGAlertDialog #print 'The exception is %s'%(info) alter = HIGAlertDialog(primary_text = primary_text,\ secondary_text = secondary_text) alter.show_all() result = alter.run() #cannot write return, if so the program cannot quit, and run in background self.terminate() def terminate(self): #print 'quit' reactor.callWhenRunning(reactor.stop) def on_quit(self): if hasattr(self, 'peer_info') and self.is_successful_login: g_logger.info("[quit]:save peer_info into DB") self.peer_info.save_to_db() if hasattr(self, 'peer_manager') and self.is_successful_login: g_logger.info("[quit]:save peer_manager into DB") self.peer_manager.save_to_db() if hasattr(self, 'statistics') and self.is_successful_login: g_logger.info("[quit]:save statistics into DB") self.statistics.save_to_db() if hasattr(self,'test_sets') and self.is_successful_login \ and os.path.exists(CONFIG_PATH): #store test_version id self.test_sets.set_test_version(self.test_sets.current_test_version) m = os.path.join(ROOT_DIR, 'umit', 'icm', 'agent', 'agent_restart_mark') if os.path.exists(m): os.remove(m) self.quitting = True g_logger.info("ICM Agent quit.") theApp = Application() if __name__ == "__main__": #theApp.start() pass
umitproject/openmonitor-desktop-agent
umit/icm/agent/Application.py
Python
gpl-2.0
19,850
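task_loop_manager above repeatedly applies one pattern: read an interval from config, fall back to 30 seconds when the value is empty, and start a twisted LoopingCall. A stripped-down sketch of that pattern in isolation (the function names here are illustrative, not the agent's API):

from twisted.internet import reactor, task

def schedule():
    print("running one scheduling pass")

def start_loop(interval_text=""):
    # Empty config value -> 30 second default, mirroring task_loop_manager above
    interval = float(interval_text) if interval_text else 30
    lc = task.LoopingCall(schedule)
    lc.start(interval)
    return lc

if __name__ == "__main__":
    start_loop("5")
    reactor.callLater(12, reactor.stop)  # let a few passes run, then exit
    reactor.run()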
import ArtusConfigBase as base
import mc


def config():
    conf = mc.config()

    # collect the names first: the dict must not change size while iterating
    l = []
    for pipeline in conf['Pipelines']:
        if not pipeline.startswith('all'):
            l.append(pipeline)
        elif 'CHS' not in pipeline:
            l.append(pipeline)

    # keep only the 'all*' pipelines that use CHS
    for pipeline in l:
        del conf['Pipelines'][pipeline]

    for pipeline in conf['Pipelines']:
        conf['Pipelines'][pipeline]['Consumer'] = [
            #"muonntuple",
            "jetntuple",
        ]
    return conf
dhaitz/CalibFW
cfg/artus/mc_noc.py
Python
gpl-2.0
508
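config() above collects the pipeline names to drop in a separate list and only deletes them afterwards, because removing keys while iterating over the dict would raise a RuntimeError ("dictionary changed size during iteration"). The same filter as a standalone sketch with hypothetical pipeline names:

def keep_all_chs(pipelines):
    # Collect first, delete second: mutating during iteration is an error
    doomed = [name for name in pipelines
              if not name.startswith('all') or 'CHS' not in name]
    for name in doomed:
        del pipelines[name]
    return pipelines

print(keep_all_chs({'all': {}, 'allCHS': {}, 'zl1CHS': {}}))  # {'allCHS': {}}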
import os

import pytest

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')

DEB_PACKAGES = ['percona-server-mongodb', 'percona-server-mongodb-server', 'percona-server-mongodb-mongos',
                'percona-server-mongodb-shell', 'percona-server-mongodb-tools', 'percona-server-mongodb-dbg']

RPM_PACKAGES = ['percona-server-mongodb', 'percona-server-mongodb-server', 'percona-server-mongodb-mongos',
                'percona-server-mongodb-shell', 'percona-server-mongodb-tools', 'percona-server-mongodb-debuginfo']

RPM_NEW_CENTOS_PACKAGES = ['percona-server-mongodb', 'percona-server-mongodb-mongos-debuginfo',
                           'percona-server-mongodb-server-debuginfo', 'percona-server-mongodb-shell-debuginfo',
                           'percona-server-mongodb-tools-debuginfo', 'percona-server-mongodb-debugsource']

BINARIES = ['mongo', 'mongod', 'mongos', 'bsondump', 'mongoexport',
            'mongofiles', 'mongoimport', 'mongorestore', 'mongotop', 'mongostat']

PSMDB42_VER = "4.2"


def test_package_script(host):
    with host.sudo():
        result = host.run("/package-testing/package_check.sh psmdb42")
        print(result.stdout)
        print(result.stderr)
        assert result.rc == 0, result.stderr


def test_version_script(host):
    with host.sudo():
        result = host.run("/package-testing/version_check.sh psmdb42")
        print(result.stdout)
        print(result.stderr)
        assert result.rc == 0, result.stderr


@pytest.mark.parametrize("package", DEB_PACKAGES)
def test_deb_packages(host, package):
    dist = host.system_info.distribution
    if dist.lower() in ["redhat", "centos", 'rhel']:
        pytest.skip("This test only for Debian based platforms")
    pkg = host.package(package)
    assert pkg.is_installed
    assert PSMDB42_VER in pkg.version


@pytest.mark.parametrize("package", RPM_PACKAGES)
def test_rpm_packages(host, package):
    dist = host.system_info.distribution
    if dist in ["debian", "ubuntu"]:
        pytest.skip("This test only for RHEL based platforms")
    if float(host.system_info.release) >= 8.0:
        pytest.skip("This test only for centos7 and older")
    pkg = host.package(package)
    assert pkg.is_installed
    assert PSMDB42_VER in pkg.version


@pytest.mark.parametrize("package", RPM_NEW_CENTOS_PACKAGES)
def test_rpm8_packages(host, package):
    dist = host.system_info.distribution
    if dist in ["debian", "ubuntu"]:
        pytest.skip("This test only for RHEL based platforms")
    if float(host.system_info.release) < 8.0:
        pytest.skip("This test only for centos8 and newer")
    pkg = host.package(package)
    assert pkg.is_installed
    assert PSMDB42_VER in pkg.version


@pytest.mark.parametrize("binary", BINARIES)
def test_binary_version(host, binary):
    cmd = '{} --version|head -n1|grep -c "{}"'.format(binary, PSMDB42_VER)
    result = host.run(cmd)
    assert result.rc == 0, result.stdout


def test_bats(host):
    cmd = "/usr/local/bin/bats /package-testing/bats/mongo-init-scripts.bats"
    with host.sudo():
        result = host.run(cmd)
        print(result.stdout)
        print(result.stderr)
        assert result.rc == 0, result.stdout


def test_service(host):
    with host.sudo():
        assert host.service("mongod").is_running


def test_data_is_there(host):
    cmd = "/package-testing/scripts/mongo_check.sh"
    with host.sudo():
        result = host.run(cmd)
        print(result.stdout)
        print(result.stderr)
        assert result.rc == 0, result.stdout


def test_functional(host):
    with host.sudo():
        result = host.run("/package-testing/scripts/psmdb_test.sh 4.2")
        assert result.rc == 0, result.stderr
Percona-QA/package-testing
molecule/psmdb40-upgrade-from/molecule/default/tests/test_psmdb40_upgrade_from.py
Python
gpl-2.0
3,739
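test_binary_version above checks versions through a shell pipeline (--version | head | grep -c). An equivalent check in pure Python with subprocess, which avoids shell quoting issues (a sketch, not part of the Percona suite; it assumes Python 3.7+ and that the binary prints its version on stdout):

import subprocess

def binary_reports_version(binary, version):
    # Run "<binary> --version" and look for the version string on the first line
    out = subprocess.run([binary, '--version'], capture_output=True,
                         text=True, check=False).stdout
    first_line = out.splitlines()[0] if out else ''
    return version in first_line

# e.g. assert binary_reports_version('mongod', '4.2')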
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2014 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Unit tests for utility functions.""" from __future__ import absolute_import from invenio.testsuite import InvenioTestCase, make_test_suite, run_test_suite class HoldingPenUtilsTest(InvenioTestCase): """Test basic utility functions for Holding Pen.""" def test_get_previous_next_objects_empty(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [] self.assertEqual(get_previous_next_objects(objects, 1), (None, None)) def test_get_previous_next_objects_not_there(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 42), (None, None)) def test_get_previous_next_objects_previous(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 4), (3, None)) def test_get_previous_next_objects_next(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 3), (None, 4)) def test_get_previous_next_objects_previous_next(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4, 5] self.assertEqual(get_previous_next_objects(objects, 4), (3, 5)) TEST_SUITE = make_test_suite(HoldingPenUtilsTest) if __name__ == "__main__": run_test_suite(TEST_SUITE)
egabancho/invenio
invenio/modules/workflows/testsuite/test_utils.py
Python
gpl-2.0
2,691
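The tests above pin down the contract of get_previous_next_objects without showing it. A minimal implementation that satisfies all five cases (a sketch for illustration, not Invenio's actual code):

def get_previous_next_objects(objects, current_id):
    try:
        idx = objects.index(current_id)
    except ValueError:
        return (None, None)  # id not in the list at all
    prev_obj = objects[idx - 1] if idx > 0 else None
    next_obj = objects[idx + 1] if idx < len(objects) - 1 else None
    return (prev_obj, next_obj)

assert get_previous_next_objects([], 1) == (None, None)
assert get_previous_next_objects([3, 4], 42) == (None, None)
assert get_previous_next_objects([3, 4], 4) == (3, None)
assert get_previous_next_objects([3, 4], 3) == (None, 4)
assert get_previous_next_objects([3, 4, 5], 4) == (3, 5)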
### Copyright (C) 2002-2006 Stephen Kennedy <[email protected]> ### Copyright (C) 2010-2012 Kai Willadsen <[email protected]> ### This program is free software; you can redistribute it and/or modify ### it under the terms of the GNU General Public License as published by ### the Free Software Foundation; either version 2 of the License, or ### (at your option) any later version. ### This program is distributed in the hope that it will be useful, ### but WITHOUT ANY WARRANTY; without even the implied warranty of ### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ### GNU General Public License for more details. ### You should have received a copy of the GNU General Public License ### along with this program; if not, write to the Free Software ### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, ### USA. from __future__ import print_function import atexit import tempfile import shutil import os import sys from gettext import gettext as _ import gtk import pango from . import melddoc from . import misc from . import paths from . import recent from . import tree from . import vc from .ui import emblemcellrenderer from .ui import gnomeglade ################################################################################ # # Local Functions # ################################################################################ def _commonprefix(files): if len(files) != 1: workdir = misc.commonprefix(files) else: workdir = os.path.dirname(files[0]) or "." return workdir def cleanup_temp(): temp_location = tempfile.gettempdir() # The strings below will probably end up as debug log, and are deliberately # not marked for translation. for f in _temp_files: try: assert os.path.exists(f) and os.path.isabs(f) and \ os.path.dirname(f) == temp_location os.remove(f) except: except_str = "{0[0]}: \"{0[1]}\"".format(sys.exc_info()) print("File \"{0}\" not removed due to".format(f), except_str, file=sys.stderr) for f in _temp_dirs: try: assert os.path.exists(f) and os.path.isabs(f) and \ os.path.dirname(f) == temp_location shutil.rmtree(f, ignore_errors=1) except: except_str = "{0[0]}: \"{0[1]}\"".format(sys.exc_info()) print("Directory \"{0}\" not removed due to".format(f), except_str, file=sys.stderr) _temp_dirs, _temp_files = [], [] atexit.register(cleanup_temp) ################################################################################ # # CommitDialog # ################################################################################ class CommitDialog(gnomeglade.Component): def __init__(self, parent): gnomeglade.Component.__init__(self, paths.ui_dir("vcview.ui"), "commitdialog") self.parent = parent self.widget.set_transient_for( parent.widget.get_toplevel() ) selected = parent._get_selected_files() topdir = _commonprefix(selected) selected = [ s[len(topdir):] for s in selected ] self.changedfiles.set_text( ("(in %s) "%topdir) + " ".join(selected) ) self.widget.show_all() def run(self): self.previousentry.child.set_editable(False) self.previousentry.set_active(0) self.textview.grab_focus() buf = self.textview.get_buffer() buf.place_cursor( buf.get_start_iter() ) buf.move_mark( buf.get_selection_bound(), buf.get_end_iter() ) response = self.widget.run() msg = buf.get_text(buf.get_start_iter(), buf.get_end_iter(), 0) if response == gtk.RESPONSE_OK: self.parent._command_on_selected( self.parent.vc.commit_command(msg) ) if len(msg.strip()): self.previousentry.prepend_text(msg) self.widget.destroy() def on_previousentry_activate(self, gentry): buf = 
self.textview.get_buffer() buf.set_text( gentry.child.get_text() ) COL_LOCATION, COL_STATUS, COL_REVISION, COL_TAG, COL_OPTIONS, COL_END = \ list(range(tree.COL_END, tree.COL_END+6)) class VcTreeStore(tree.DiffTreeStore): def __init__(self): tree.DiffTreeStore.__init__(self, 1, [str] * 5) ################################################################################ # filters ################################################################################ entry_modified = lambda x: (x.state >= tree.STATE_NEW) or (x.isdir and (x.state > tree.STATE_NONE)) entry_normal = lambda x: (x.state == tree.STATE_NORMAL) entry_nonvc = lambda x: (x.state == tree.STATE_NONE) or (x.isdir and (x.state > tree.STATE_IGNORED)) entry_ignored = lambda x: (x.state == tree.STATE_IGNORED) or x.isdir ################################################################################ # # VcView # ################################################################################ class VcView(melddoc.MeldDoc, gnomeglade.Component): # Map action names to VC commands and required arguments list action_vc_cmds_map = { "VcCompare": ("diff_command", ()), "VcCommit": ("commit_command", ("",)), "VcUpdate": ("update_command", ()), "VcAdd": ("add_command", ()), "VcResolved": ("resolved_command", ()), "VcRemove": ("remove_command", ()), "VcRevert": ("revert_command", ()), } state_actions = { "flatten": ("VcFlatten", None), "modified": ("VcShowModified", entry_modified), "normal": ("VcShowNormal", entry_normal), "unknown": ("VcShowNonVC", entry_nonvc), "ignored": ("VcShowIgnored", entry_ignored), } def __init__(self, prefs): melddoc.MeldDoc.__init__(self, prefs) gnomeglade.Component.__init__(self, paths.ui_dir("vcview.ui"), "vcview") actions = ( ("VcCompare", gtk.STOCK_DIALOG_INFO, _("_Compare"), None, _("Compare selected"), self.on_button_diff_clicked), ("VcCommit", "vc-commit-24", _("Co_mmit"), None, _("Commit"), self.on_button_commit_clicked), ("VcUpdate", "vc-update-24", _("_Update"), None, _("Update"), self.on_button_update_clicked), ("VcAdd", "vc-add-24", _("_Add"), None, _("Add to VC"), self.on_button_add_clicked), ("VcRemove", "vc-remove-24", _("_Remove"), None, _("Remove from VC"), self.on_button_remove_clicked), ("VcResolved", "vc-resolve-24", _("_Resolved"), None, _("Mark as resolved for VC"), self.on_button_resolved_clicked), ("VcRevert", gtk.STOCK_REVERT_TO_SAVED, None, None, _("Revert to original"), self.on_button_revert_clicked), ("VcDeleteLocally", gtk.STOCK_DELETE, None, None, _("Delete locally"), self.on_button_delete_clicked), ) toggleactions = ( ("VcFlatten", gtk.STOCK_GOTO_BOTTOM, _("_Flatten"), None, _("Flatten directories"), self.on_button_flatten_toggled, False), ("VcShowModified","filter-modified-24", _("_Modified"), None, _("Show modified"), self.on_filter_state_toggled, False), ("VcShowNormal", "filter-normal-24", _("_Normal"), None, _("Show normal"), self.on_filter_state_toggled, False), ("VcShowNonVC", "filter-nonvc-24", _("Non _VC"), None, _("Show unversioned files"), self.on_filter_state_toggled, False), ("VcShowIgnored", "filter-ignored-24", _("Ignored"), None, _("Show ignored files"), self.on_filter_state_toggled, False), ) self.ui_file = paths.ui_dir("vcview-ui.xml") self.actiongroup = gtk.ActionGroup('VcviewActions') self.actiongroup.set_translation_domain("meld") self.actiongroup.add_actions(actions) self.actiongroup.add_toggle_actions(toggleactions) for action in ("VcCompare", "VcFlatten", "VcShowModified", "VcShowNormal", "VcShowNonVC", "VcShowIgnored"): 
self.actiongroup.get_action(action).props.is_important = True for action in ("VcCommit", "VcUpdate", "VcAdd", "VcRemove", "VcShowModified", "VcShowNormal", "VcShowNonVC", "VcShowIgnored", "VcResolved"): button = self.actiongroup.get_action(action) button.props.icon_name = button.props.stock_id self.model = VcTreeStore() self.widget.connect("style-set", self.model.on_style_set) self.treeview.set_model(self.model) selection = self.treeview.get_selection() selection.set_mode(gtk.SELECTION_MULTIPLE) selection.connect("changed", self.on_treeview_selection_changed) self.treeview.set_headers_visible(1) self.treeview.set_search_equal_func(self.treeview_search_cb) self.current_path, self.prev_path, self.next_path = None, None, None column = gtk.TreeViewColumn( _("Name") ) renicon = emblemcellrenderer.EmblemCellRenderer() rentext = gtk.CellRendererText() column.pack_start(renicon, expand=0) column.pack_start(rentext, expand=1) col_index = self.model.column_index column.set_attributes(renicon, icon_name=col_index(tree.COL_ICON, 0), icon_tint=col_index(tree.COL_TINT, 0)) column.set_attributes(rentext, text=col_index(tree.COL_TEXT, 0), foreground_gdk=col_index(tree.COL_FG, 0), style=col_index(tree.COL_STYLE, 0), weight=col_index(tree.COL_WEIGHT, 0), strikethrough=col_index(tree.COL_STRIKE, 0)) self.treeview.append_column(column) def addCol(name, num): column = gtk.TreeViewColumn(name) rentext = gtk.CellRendererText() column.pack_start(rentext, expand=0) column.set_attributes(rentext, markup=self.model.column_index(num, 0)) self.treeview.append_column(column) return column self.treeview_column_location = addCol( _("Location"), COL_LOCATION) addCol(_("Status"), COL_STATUS) addCol(_("Rev"), COL_REVISION) addCol(_("Tag"), COL_TAG) addCol(_("Options"), COL_OPTIONS) self.state_filters = [] for s in self.state_actions: if s in self.prefs.vc_status_filters: action_name = self.state_actions[s][0] self.state_filters.append(s) self.actiongroup.get_action(action_name).set_active(True) class ConsoleStream(object): def __init__(self, textview): self.textview = textview b = textview.get_buffer() self.mark = b.create_mark("END", b.get_end_iter(), 0) def write(self, s): if s: b = self.textview.get_buffer() b.insert(b.get_end_iter(), s) self.textview.scroll_mark_onscreen( self.mark ) self.consolestream = ConsoleStream(self.consoleview) self.location = None self.treeview_column_location.set_visible(self.actiongroup.get_action("VcFlatten").get_active()) if not self.prefs.vc_console_visible: self.on_console_view_toggle(self.console_hide_box) self.vc = None self.valid_vc_actions = tuple() # VC ComboBox self.combobox_vcs = gtk.ComboBox() self.combobox_vcs.lock = True self.combobox_vcs.set_model(gtk.ListStore(str, object, bool)) cell = gtk.CellRendererText() self.combobox_vcs.pack_start(cell, False) self.combobox_vcs.add_attribute(cell, 'text', 0) self.combobox_vcs.add_attribute(cell, 'sensitive', 2) self.combobox_vcs.lock = False self.hbox2.pack_end(self.combobox_vcs, expand=False) self.combobox_vcs.show() self.combobox_vcs.connect("changed", self.on_vc_change) def on_container_switch_in_event(self, ui): melddoc.MeldDoc.on_container_switch_in_event(self, ui) self.scheduler.add_task(self.on_treeview_cursor_changed) def update_actions_sensitivity(self): """Disable actions that use not implemented VC plugin methods """ valid_vc_actions = ["VcDeleteLocally"] for action_name, (meth_name, args) in self.action_vc_cmds_map.items(): action = self.actiongroup.get_action(action_name) try: getattr(self.vc, meth_name)(*args) 
action.props.sensitive = True valid_vc_actions.append(action_name) except NotImplementedError: action.props.sensitive = False self.valid_vc_actions = tuple(valid_vc_actions) def choose_vc(self, vcs): """Display VC plugin(s) that can handle the location""" self.combobox_vcs.lock = True self.combobox_vcs.get_model().clear() tooltip_texts = [_("Choose one Version Control"), _("Only one Version Control in this directory")] default_active = -1 valid_vcs = [] # Try to keep the same VC plugin active on refresh() for idx, avc in enumerate(vcs): # See if the necessary version control command exists. If so, # make sure what we're diffing is a valid respository. If either # check fails don't let the user select the that version control # tool and display a basic error message in the drop-down menu. err_str = "" if vc._vc.call(["which", avc.CMD]): # TRANSLATORS: this is an error message when a version control # application isn't installed or can't be found err_str = _("%s Not Installed" % avc.CMD) elif not avc.valid_repo(): # TRANSLATORS: this is an error message when a version # controlled repository is invalid or corrupted err_str = _("Invalid Repository") else: valid_vcs.append(idx) if (self.vc is not None and self.vc.__class__ == avc.__class__): default_active = idx if err_str: self.combobox_vcs.get_model().append( \ [_("%s (%s)") % (avc.NAME, err_str), avc, False]) else: self.combobox_vcs.get_model().append([avc.NAME, avc, True]) if valid_vcs and default_active == -1: default_active = min(valid_vcs) self.combobox_vcs.set_tooltip_text(tooltip_texts[len(vcs) == 1]) self.combobox_vcs.set_sensitive(len(vcs) > 1) self.combobox_vcs.lock = False self.combobox_vcs.set_active(default_active) def on_vc_change(self, cb): if not cb.lock: self.vc = cb.get_model()[cb.get_active_iter()][1] self._set_location(self.vc.location) self.update_actions_sensitivity() def set_location(self, location): self.choose_vc(vc.get_vcs(os.path.abspath(location or "."))) def _set_location(self, location): self.location = location self.current_path = None self.model.clear() self.fileentry.set_filename(location) self.fileentry.prepend_history(location) it = self.model.add_entries( None, [location] ) self.treeview.grab_focus() self.treeview.get_selection().select_iter(it) self.model.set_path_state(it, 0, tree.STATE_NORMAL, isdir=1) self.recompute_label() self.scheduler.remove_all_tasks() # If the user is just diffing a file (ie not a directory), there's no # need to scan the rest of the repository if os.path.isdir(self.vc.location): root = self.model.get_iter_root() self.scheduler.add_task(self._search_recursively_iter(root)) self.scheduler.add_task(self.on_treeview_cursor_changed) def get_comparison(self): return recent.TYPE_VC, [self.location] def recompute_label(self): self.label_text = os.path.basename(self.location) # TRANSLATORS: This is the location of the directory the user is diffing self.tooltip_text = _("%s: %s") % (_("Location"), self.location) self.label_changed() def _search_recursively_iter(self, iterstart): yield _("[%s] Scanning %s") % (self.label_text,"") rootpath = self.model.get_path( iterstart ) rootname = self.model.value_path( self.model.get_iter(rootpath), 0 ) prefixlen = 1 + len( self.model.value_path( self.model.get_iter_root(), 0 ) ) todo = [ (rootpath, rootname) ] active_action = lambda a: self.actiongroup.get_action(a).get_active() filters = [a[1] for a in self.state_actions.values() if \ active_action(a[0]) and a[1]] def showable(entry): for f in filters: if f(entry): return 1 recursive = 
self.actiongroup.get_action("VcFlatten").get_active() self.vc.cache_inventory(rootname) while len(todo): todo.sort() # depth first path, name = todo.pop(0) if path: it = self.model.get_iter( path ) root = self.model.value_path( it, 0 ) else: it = self.model.get_iter_root() root = name yield _("[%s] Scanning %s") % (self.label_text, root[prefixlen:]) entries = [f for f in self.vc.listdir(root) if showable(f)] differences = 0 for e in entries: differences |= (e.state != tree.STATE_NORMAL) if e.isdir and recursive: todo.append( (None, e.path) ) continue child = self.model.add_entries(it, [e.path]) self._update_item_state( child, e, root[prefixlen:] ) if e.isdir: todo.append( (self.model.get_path(child), None) ) if not recursive: # expand parents if len(entries) == 0: self.model.add_empty(it, _("(Empty)")) if differences or len(path)==1: self.treeview.expand_to_path(path) else: # just the root self.treeview.expand_row( (0,), 0) self.vc.uncache_inventory() def on_fileentry_activate(self, fileentry): path = fileentry.get_full_path() self.set_location(path) def on_delete_event(self, appquit=0): self.scheduler.remove_all_tasks() return gtk.RESPONSE_OK def on_row_activated(self, treeview, path, tvc): it = self.model.get_iter(path) if self.model.iter_has_child(it): if self.treeview.row_expanded(path): self.treeview.collapse_row(path) else: self.treeview.expand_row(path,0) else: path = self.model.value_path(it, 0) self.run_diff( [path] ) def run_diff_iter(self, path_list): silent_error = hasattr(self.vc, 'switch_to_external_diff') retry_diff = True while retry_diff: retry_diff = False yield _("[%s] Fetching differences") % self.label_text diffiter = self._command_iter(self.vc.diff_command(), path_list, 0) diff = None while type(diff) != type(()): diff = next(diffiter) yield 1 prefix, patch = diff[0], diff[1] try: patch = self.vc.clean_patch(patch) except AttributeError: pass yield _("[%s] Applying patch") % self.label_text if patch: applied = self.show_patch(prefix, patch, silent=silent_error) if not applied and silent_error: silent_error = False self.vc.switch_to_external_diff() retry_diff = True else: for path in path_list: self.emit("create-diff", [path]) def run_diff(self, path_list): try: for path in path_list: comp_path = self.vc.get_path_for_repo_file(path) os.chmod(comp_path, 0o444) _temp_files.append(comp_path) self.emit("create-diff", [comp_path, path]) except NotImplementedError: for path in path_list: self.scheduler.add_task(self.run_diff_iter([path]), atfront=1) def on_treeview_popup_menu(self, treeview): time = gtk.get_current_event_time() self.popup_menu.popup(None, None, None, 0, time) return True def on_button_press_event(self, treeview, event): if event.button == 3: path = treeview.get_path_at_pos(int(event.x), int(event.y)) if path is None: return False selection = treeview.get_selection() model, rows = selection.get_selected_rows() if path[0] not in rows: selection.unselect_all() selection.select_path(path[0]) treeview.set_cursor(path[0]) self.popup_menu.popup(None, None, None, event.button, event.time) return True return False def on_button_flatten_toggled(self, button): action = self.actiongroup.get_action("VcFlatten") self.treeview_column_location.set_visible(action.get_active()) self.on_filter_state_toggled(button) def on_filter_state_toggled(self, button): active_action = lambda a: self.actiongroup.get_action(a).get_active() active_filters = [a for a in self.state_actions if \ active_action(self.state_actions[a][0])] if set(active_filters) == set(self.state_filters): return 
self.state_filters = active_filters self.prefs.vc_status_filters = active_filters self.refresh() def on_treeview_selection_changed(self, selection): model, rows = selection.get_selected_rows() have_selection = bool(rows) for action in self.valid_vc_actions: self.actiongroup.get_action(action).set_sensitive(have_selection) def _get_selected_files(self): model, rows = self.treeview.get_selection().get_selected_rows() sel = [self.model.value_path(self.model.get_iter(r), 0) for r in rows] # Remove empty entries and trailing slashes return [x[-1] != "/" and x or x[:-1] for x in sel if x is not None] def _command_iter(self, command, files, refresh): """Run 'command' on 'files'. Return a tuple of the directory the command was executed in and the output of the command. """ msg = misc.shelljoin(command) yield "[%s] %s" % (self.label_text, msg.replace("\n", "\t")) def relpath(pbase, p): kill = 0 if len(pbase) and p.startswith(pbase): kill = len(pbase) + 1 return p[kill:] or "." if len(files) == 1 and os.path.isdir(files[0]): workdir = self.vc.get_working_directory(files[0]) else: workdir = self.vc.get_working_directory( _commonprefix(files) ) files = [ relpath(workdir, f) for f in files ] r = None self.consolestream.write( misc.shelljoin(command+files) + " (in %s)\n" % workdir) readiter = misc.read_pipe_iter(command + files, self.consolestream, workdir=workdir) try: while r is None: r = next(readiter) self.consolestream.write(r) yield 1 except IOError as e: misc.run_dialog("Error running command.\n'%s'\n\nThe error was:\n%s" % ( misc.shelljoin(command), e), parent=self, messagetype=gtk.MESSAGE_ERROR) if refresh: self.refresh_partial(workdir) yield workdir, r def _command(self, command, files, refresh=1): """Run 'command' on 'files'. """ self.scheduler.add_task(self._command_iter(command, files, refresh)) def _command_on_selected(self, command, refresh=1): files = self._get_selected_files() if len(files): self._command(command, files, refresh) def on_button_update_clicked(self, obj): self._command_on_selected(self.vc.update_command()) def on_button_commit_clicked(self, obj): CommitDialog(self).run() def on_button_add_clicked(self, obj): self._command_on_selected(self.vc.add_command()) def on_button_remove_clicked(self, obj): self._command_on_selected(self.vc.remove_command()) def on_button_resolved_clicked(self, obj): self._command_on_selected(self.vc.resolved_command()) def on_button_revert_clicked(self, obj): self._command_on_selected(self.vc.revert_command()) def on_button_delete_clicked(self, obj): files = self._get_selected_files() for name in files: try: if os.path.isfile(name): os.remove(name) elif os.path.isdir(name): if misc.run_dialog(_("'%s' is a directory.\nRemove recursively?") % os.path.basename(name), parent = self, buttonstype=gtk.BUTTONS_OK_CANCEL) == gtk.RESPONSE_OK: shutil.rmtree(name) except OSError as e: misc.run_dialog(_("Error removing %s\n\n%s.") % (name,e), parent = self) workdir = _commonprefix(files) self.refresh_partial(workdir) def on_button_diff_clicked(self, obj): files = self._get_selected_files() if len(files): self.run_diff(files) def open_external(self): self._open_files(self._get_selected_files()) def show_patch(self, prefix, patch, silent=False): if vc._vc.call(["which", "patch"]): primary = _("Patch tool not found") secondary = _("Meld needs the <i>patch</i> tool to be installed " "to perform comparisons in %s repositories. 
Please " "install <i>patch</i> and try again.") % self.vc.NAME msgarea = self.msgarea_mgr.new_from_text_and_icon( gtk.STOCK_DIALOG_ERROR, primary, secondary) msgarea.add_button(_("Hi_de"), gtk.RESPONSE_CLOSE) msgarea.connect("response", lambda *args: self.msgarea_mgr.clear()) msgarea.show_all() return False tmpdir = tempfile.mkdtemp("-meld") _temp_dirs.append(tmpdir) diffs = [] for fname in self.vc.get_patch_files(patch): destfile = os.path.join(tmpdir,fname) destdir = os.path.dirname( destfile ) if not os.path.exists(destdir): os.makedirs(destdir) pathtofile = os.path.join(prefix, fname) try: shutil.copyfile( pathtofile, destfile) except IOError: # it is missing, create empty file open(destfile,"w").close() diffs.append( (destfile, pathtofile) ) patchcmd = self.vc.patch_command(tmpdir) try: result = misc.write_pipe(patchcmd, patch, error=misc.NULL) except OSError: result = 1 if result == 0: for d in diffs: os.chmod(d[0], 0o444) self.emit("create-diff", d) return True elif not silent: primary = _("Error fetching original comparison file") secondary = _("Meld couldn't obtain the original version of your " "comparison file. If you are using the most recent " "version of Meld, please report a bug, including as " "many details as possible.") msgarea = self.msgarea_mgr.new_from_text_and_icon( gtk.STOCK_DIALOG_ERROR, primary, secondary) msgarea.add_button(_("Hi_de"), gtk.RESPONSE_CLOSE) msgarea.add_button(_("Report a bug"), gtk.RESPONSE_OK) def patch_error_cb(msgarea, response): if response == gtk.RESPONSE_OK: bug_url = "https://bugzilla.gnome.org/enter_bug.cgi?" + \ "product=meld" misc.open_uri(bug_url) else: self.msgarea_mgr.clear() msgarea.connect("response", patch_error_cb) msgarea.show_all() return False def refresh(self): self.set_location( self.model.value_path( self.model.get_iter_root(), 0 ) ) def refresh_partial(self, where): if not self.actiongroup.get_action("VcFlatten").get_active(): it = self.find_iter_by_name( where ) if it: newiter = self.model.insert_after( None, it) self.model.set_value(newiter, self.model.column_index( tree.COL_PATH, 0), where) self.model.set_path_state(newiter, 0, tree.STATE_NORMAL, True) self.model.remove(it) self.scheduler.add_task(self._search_recursively_iter(newiter)) else: # XXX fixme self.refresh() def _update_item_state(self, it, vcentry, location): e = vcentry self.model.set_path_state(it, 0, e.state, e.isdir) def setcol(col, val): self.model.set_value(it, self.model.column_index(col, 0), val) setcol(COL_LOCATION, location) setcol(COL_STATUS, e.get_status()) setcol(COL_REVISION, e.rev) setcol(COL_TAG, e.tag) setcol(COL_OPTIONS, e.options) def on_file_changed(self, filename): it = self.find_iter_by_name(filename) if it: path = self.model.value_path(it, 0) self.vc.update_file_state(path) files = self.vc.lookup_files([], [(os.path.basename(path), path)])[1] for e in files: if e.path == path: prefixlen = 1 + len( self.model.value_path( self.model.get_iter_root(), 0 ) ) self._update_item_state( it, e, e.parent[prefixlen:]) return def find_iter_by_name(self, name): it = self.model.get_iter_root() path = self.model.value_path(it, 0) while it: if name == path: return it elif name.startswith(path): child = self.model.iter_children( it ) while child: path = self.model.value_path(child, 0) if name == path: return child elif name.startswith(path): break else: child = self.model.iter_next( child ) it = child else: break return None def on_console_view_toggle(self, box, event=None): if box == self.console_hide_box: self.prefs.vc_console_visible = 0 
self.console_hbox.hide() self.console_show_box.show() else: self.prefs.vc_console_visible = 1 self.console_hbox.show() self.console_show_box.hide() def on_consoleview_populate_popup(self, text, menu): item = gtk.ImageMenuItem(gtk.STOCK_CLEAR) def activate(*args): buf = text.get_buffer() buf.delete( buf.get_start_iter(), buf.get_end_iter() ) item.connect("activate", activate) item.show() menu.insert( item, 0 ) item = gtk.SeparatorMenuItem() item.show() menu.insert( item, 1 ) def on_treeview_cursor_changed(self, *args): cursor_path, cursor_col = self.treeview.get_cursor() if not cursor_path: self.emit("next-diff-changed", False, False) self.current_path = cursor_path return # If invoked directly rather than through a callback, we always check if not args: skip = False else: try: old_cursor = self.model.get_iter(self.current_path) except (ValueError, TypeError): # An invalid path gives ValueError; None gives a TypeError skip = False else: # We can skip recalculation if the new cursor is between # the previous/next bounds, and we weren't on a changed row state = self.model.get_state(old_cursor, 0) if state not in (tree.STATE_NORMAL, tree.STATE_EMPTY): skip = False else: if self.prev_path is None and self.next_path is None: skip = True elif self.prev_path is None: skip = cursor_path < self.next_path elif self.next_path is None: skip = self.prev_path < cursor_path else: skip = self.prev_path < cursor_path < self.next_path if not skip: prev, next = self.model._find_next_prev_diff(cursor_path) self.prev_path, self.next_path = prev, next have_next_diffs = (prev is not None, next is not None) self.emit("next-diff-changed", *have_next_diffs) self.current_path = cursor_path def next_diff(self, direction): if direction == gtk.gdk.SCROLL_UP: path = self.prev_path else: path = self.next_path if path: self.treeview.expand_to_path(path) self.treeview.set_cursor(path) def on_reload_activate(self, *extra): self.on_fileentry_activate(self.fileentry) def on_find_activate(self, *extra): self.treeview.emit("start-interactive-search") def treeview_search_cb(self, model, column, key, it): """Callback function for searching in VcView treeview""" path = model.get_value(it, tree.COL_PATH) # if query text contains slash, search in full path if key.find('/') >= 0: lineText = path else: lineText = os.path.basename(path) # Perform case-insensitive matching if query text is all lower-case if key.islower(): lineText = lineText.lower() if lineText.find(key) >= 0: # line matches return False else: return True
pedrox/meld
meld/vcview.py
Python
gpl-2.0
35,046
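One GTK detail in vcview.py above is easy to misread: a set_search_equal_func callback must return False when a row matches, so treeview_search_cb returns False on a hit. A toolkit-free sketch of its matching rules, using the more intuitive True-on-match convention:

import os

def search_match(path, key):
    # Query containing a slash searches the full path, otherwise the basename
    text = path if key.find('/') >= 0 else os.path.basename(path)
    # All-lower-case queries match case-insensitively
    if key.islower():
        text = text.lower()
    return key in text

assert search_match('/src/Main.py', 'main')      # case-insensitive hit
assert search_match('/src/Main.py', 'src/Main')  # slash searches full path
assert not search_match('/src/Main.py', 'MAIN')  # mixed/upper case is exact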
""" blank screen to stop game being played out-of-hours """ import random import os import pygame import pygame.locals from ctime_common import go_fullscreen class BlankScreen(): """ a blank screen with no controls """ def __init__(self, ctime, screen_width, screen_height, log): log.info('Time for bed said Zeberdee') self.screen_size = {'width': screen_width, 'height': screen_height} self.screen = pygame.display.get_surface() self.screen.fill(pygame.Color(0, 0, 0, 0), (0, 0, screen_width, screen_height), 0) log.info('Lights out') ctime.button_power.rpi_power() go_fullscreen()
magicalbob/ctime
ctime_blank.py
Python
gpl-2.0
698
# vim: expandtab sw=4 ts=4 sts=4:
#
# Copyright © 2003 - 2018 Michal Čihař <[email protected]>
#
# This file is part of python-gammu <https://wammu.eu/python-gammu/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Asynchronous communication to phone.

Mostly you should use only the L{GammuWorker} class; the others are only
helpers which are used by this class.
"""

import queue
import threading

import gammu


class InvalidCommand(Exception):
    """
    Exception indicating invalid command.
    """

    def __init__(self, value):
        """
        Initializes exception.

        @param value: Name of wrong command.
        @type value: string
        """
        super().__init__()
        self.value = value

    def __str__(self):
        """
        Returns textual representation of exception.
        """
        return f'Invalid command: "{self.value}"'


def check_worker_command(command):
    """
    Checks whether command is valid.

    @param command: Name of command.
    @type command: string
    """
    if hasattr(gammu.StateMachine, command):
        return
    raise InvalidCommand(command)


class GammuCommand:
    """
    Storage of single command for gammu.
    """

    def __init__(self, command, params=None, percentage=100):
        """
        Creates single command instance.
        """
        check_worker_command(command)
        self._command = command
        self._params = params
        self._percentage = percentage

    def get_command(self):
        """
        Returns command name.
        """
        return self._command

    def get_params(self):
        """
        Returns command params.
        """
        return self._params

    def get_percentage(self):
        """
        Returns percentage of current task.
        """
        return self._percentage

    def __str__(self):
        """
        Returns textual representation.
        """
        if self._params is not None:
            return f"{self._command} {self._params}"
        else:
            return f"{self._command} ()"


class GammuTask:
    """
    Storage of tasks for gammu.
    """

    def __init__(self, name, commands):
        """
        Creates a single task instance.

        @param name: Name of task.
        @type name: string
        @param commands: List of commands to execute.
        @type commands: list of tuples or strings
        """
        self._name = name
        self._list = []
        self._pointer = 0
        for i in range(len(commands)):
            if isinstance(commands[i], tuple):
                cmd = commands[i][0]
                try:
                    params = commands[i][1]
                except IndexError:
                    params = None
            else:
                cmd = commands[i]
                params = None
            percents = round(100 * (i + 1) / len(commands))
            self._list.append(GammuCommand(cmd, params, percents))

    def get_next(self):
        """
        Returns next command to be executed as L{GammuCommand}.
        """
        result = self._list[self._pointer]
        self._pointer += 1
        return result

    def get_name(self):
        """
        Returns task name.
        """
        return self._name


def gammu_pull_device(state_machine):
    state_machine.ReadDevice()


class GammuThread(threading.Thread):
    """
    Thread for phone communication.
    """

    def __init__(self, queue, config, callback, pull_func=gammu_pull_device):
        """
        Initialises thread data.

        @param queue: Queue with events.
        @type queue: queue.Queue object.

        @param config: Gammu configuration, same as
        L{StateMachine.SetConfig} accepts.
        @type config: hash
        @param callback: Function which will be called when an operation
        completes.
        @type callback: Function, needs to accept four params: name of
        completed operation, its result, error code and percentage of
        overall operation. This callback is called from a different
        thread, so please take care of various threading issues in other
        modules you use.
        """
        super().__init__()
        self._kill = False
        self._terminate = False
        self._sm = gammu.StateMachine()
        self._callback = callback
        self._queue = queue
        self._sm.SetConfig(0, config)
        self._pull_func = pull_func

    def _do_command(self, name, cmd, params, percentage=100):
        """
        Executes single command on phone.
        """
        func = getattr(self._sm, cmd)
        error = "ERR_NONE"
        result = None
        try:
            if params is None:
                result = func()
            elif isinstance(params, dict):
                result = func(**params)
            else:
                result = func(*params)
        except gammu.GSMError as info:
            errcode = info.args[0]["Code"]
            error = gammu.ErrorNumbers[errcode]

        self._callback(name, result, error, percentage)

    def run(self):
        """
        Thread body, which handles phone communication. This should not
        be used from outside.
        """
        start = True
        while not self._kill:
            try:
                if start:
                    task = GammuTask("Init", ["Init"])
                    start = False
                else:
                    # Wait at most ten seconds for next command
                    task = self._queue.get(True, 10)
                try:
                    while True:
                        cmd = task.get_next()
                        self._do_command(
                            task.get_name(),
                            cmd.get_command(),
                            cmd.get_params(),
                            cmd.get_percentage(),
                        )
                except IndexError:
                    try:
                        if task.get_name() != "Init":
                            self._queue.task_done()
                    except (AttributeError, ValueError):
                        pass
            except queue.Empty:
                if self._terminate:
                    break
                # Read the device to catch possible incoming events
                try:
                    self._pull_func(self._sm)
                except Exception as ex:
                    self._callback("ReadDevice", None, ex, 0)

    def kill(self):
        """
        Forces thread end without emptying queue.
        """
        self._kill = True

    def join(self, timeout=None):
        """
        Terminates thread and waits for it.
        """
        self._terminate = True
        super().join(timeout)


class GammuWorker:
    """
    Wrapper class for asynchronous communication with Gammu. It spawns
    its own thread and then passes all commands to this thread. When a
    task is done, the caller is notified via callback.
    """

    def __init__(self, callback, pull_func=gammu_pull_device):
        """
        Initializes worker class.

        @param callback: See L{GammuThread.__init__} for description.
        """
        self._thread = None
        self._callback = callback
        self._config = {}
        self._lock = threading.Lock()
        self._queue = queue.Queue()
        self._pull_func = pull_func

    def enqueue_command(self, command, params):
        """
        Enqueues command.

        @param command: Command(s) to execute. Each command is a tuple
        containing function name and its parameters.
        @type command: tuple of list of tuples
        @param params: Parameters to command.
        @type params: tuple or string
        """
        self._queue.put(GammuTask(command, [(command, params)]))

    def enqueue_task(self, command, commands):
        """
        Enqueues task.

        @param command: Command(s) to execute. Each command is a tuple
        containing function name and its parameters.
        @type command: tuple of list of tuples
        @param commands: List of commands to execute.
        @type commands: list of tuples or strings
        """
        self._queue.put(GammuTask(command, commands))

    def enqueue(self, command, params=None, commands=None):
        """
        Enqueues command or task.

        @param command: Command(s) to execute. Each command is a tuple
        containing function name and its parameters.
        @type command: tuple of list of tuples
        @param params: Parameters to command.
        @type params: tuple or string
        @param commands: List of commands to execute. When this is not
        None, params are ignored and command is taken as task name.
@type commands: list of tuples or strings """ if commands is not None: self.enqueue_task(command, commands) else: self.enqueue_command(command, params) def configure(self, config): """ Configures gammu instance according to config. @param config: Gammu configuration, same as L{StateMachine.SetConfig} accepts. @type config: hash """ self._config = config def abort(self): """ Aborts any remaining operations. """ raise NotImplementedError def initiate(self): """ Connects to phone. """ self._thread = GammuThread( self._queue, self._config, self._callback, self._pull_func ) self._thread.start() def terminate(self, timeout=None): """ Terminates phone connection. """ self.enqueue("Terminate") self._thread.join(timeout) self._thread = None
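
# Illustrative usage sketch (an assumption, not part of the module): the
# device path and connection values below are placeholders for a real phone
# setup; any gammu.StateMachine method name can be enqueued, as enforced by
# check_worker_command above.
if __name__ == "__main__":
    def print_callback(name, result, error, percents):
        print(f"{name}: {percents}% error={error} result={result!r}")

    worker = GammuWorker(print_callback)
    worker.configure({"Device": "/dev/ttyUSB0", "Connection": "at"})
    worker.initiate()           # connects and runs the implicit "Init" task
    worker.enqueue("GetIMEI")   # a single command, looked up by name
    worker.enqueue("Info", commands=["GetIMEI", "GetManufacturer"])  # a named task
    worker.terminate()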
gammu/python-gammu
gammu/worker.py
Python
gpl-2.0
10,344
#!/usr/bin/python3 import os import sys from merge_utils import * xml_out = etree.Element("packages") funtoo_staging_w = GitTree("funtoo-staging", "master", "repos@localhost:ports/funtoo-staging.git", root="/var/git/dest-trees/funtoo-staging", pull=False, xml_out=xml_out) #funtoo_staging_w = GitTree("funtoo-staging-unfork", "master", "repos@localhost:ports/funtoo-staging-unfork.git", root="/var/git/dest-trees/funtoo-staging-unfork", pull=False, xml_out=None) xmlfile="/home/ports/public_html/packages.xml" nopush=False funtoo_overlay = GitTree("funtoo-overlay", "master", "repos@localhost:funtoo-overlay.git", pull=True) # We treat our Gentoo staging overlay specially, so it's listed separately. This overlay contains all Gentoo # ebuilds, in a git repository. We use a special file in the funtoo-overlay/funtoo/scripts directory (next to # this file) to provide a SHA1 of the commit of the gentoo-staging overlay that we want to use as a basis # for our merges. Let's grab the SHA1 hash from that file: p = os.path.join(funtoo_overlay.root,"funtoo/scripts/commit-staged") if os.path.exists(p): a = open(p,"r") commit = a.readlines()[0].strip() print("Using commit: %s" % commit) else: commit = None gentoo_staging_r = GitTree("gentoo-staging", "master", "repos@localhost:ports/gentoo-staging.git", commit=commit, pull=True) # These overlays are monitored for changes -- if there are changes in these overlays, we regenerate the entire # tree. If there aren't changes in these overlays, we don't. shards = { "perl" : GitTree("gentoo-perl-shard", "1fc10379b04cb4aaa29e824288f3ec22badc6b33", "repos@localhost:gentoo-perl-shard.git", pull=True), "kde" : GitTree("gentoo-kde-shard", "cd4e1129ddddaa21df367ecd4f68aab894e57b31", "repos@localhost:gentoo-kde-shard.git", pull=True), "gnome" : GitTree("gentoo-gnome-shard", "ffabb752f8f4e23a865ffe9caf72f950695e2f26", "repos@localhost:ports/gentoo-gnome-shard.git", pull=True), "x11" : GitTree("gentoo-x11-shard", "12c1bdf9a9bfd28f48d66bccb107c17b5f5af577", "repos@localhost:ports/gentoo-x11-shard.git", pull=True), "office" : GitTree("gentoo-office-shard", "9a702057d23e7fa277e9626344671a82ce59442f", "repos@localhost:ports/gentoo-office-shard.git", pull=True), "core" : GitTree("gentoo-core-shard", "56e5b9edff7dc27e828b71010d019dcbd8e176fd", "repos@localhost:gentoo-core-shard.git", pull=True) } # perl: 1fc10379b04cb4aaa29e824288f3ec22badc6b33 (Updated 6 Dec 2016) # kde: cd4e1129ddddaa21df367ecd4f68aab894e57b31 (Updated 25 Dec 2016) # gnome: ffabb752f8f4e23a865ffe9caf72f950695e2f26 (Updated 20 Sep 2016) # x11: 12c1bdf9a9bfd28f48d66bccb107c17b5f5af577 (Updated 24 Dec 2016) # office: 9a702057d23e7fa277e9626344671a82ce59442f (Updated 29 Nov 2016) # core: 56e5b9edff7dc27e828b71010d019dcbd8e176fd (Updated 17 Dec 2016) # funtoo-toolchain: b97787318b7ffcfeaacde82cd21ddd5e207ad1f4 (Updated 25 Dec 2016) funtoo_overlays = { "funtoo_media" : GitTree("funtoo-media", "master", "repos@localhost:funtoo-media.git", pull=True), "plex_overlay" : GitTree("funtoo-plex", "master", "https://github.com/Ghent/funtoo-plex.git", pull=True), #"gnome_fixups" : GitTree("gnome-3.16-fixups", "master", "repos@localhost:ports/gnome-3.16-fixups.git", pull=True), "gnome_fixups" : GitTree("gnome-3.20-fixups", "master", "repos@localhost:ports/gnome-3.20-fixups.git", pull=True), "funtoo_toolchain" : GitTree("funtoo-toolchain", "b97787318b7ffcfeaacde82cd21ddd5e207ad1f4", "repos@localhost:funtoo-toolchain-overlay.git", pull=True), "ldap_overlay" : GitTree("funtoo-ldap", "master", 
"repos@localhost:funtoo-ldap-overlay.git", pull=True), "deadbeef_overlay" : GitTree("deadbeef-overlay", "master", "https://github.com/damex/deadbeef-overlay.git", pull=True), "gambas_overlay" : GitTree("gambas-overlay", "master", "https://github.com/damex/gambas-overlay.git", pull=True), "wmfs_overlay" : GitTree("wmfs-overlay", "master", "https://github.com/damex/wmfs-overlay.git", pull=True), "flora" : GitTree("flora", "master", "repos@localhost:flora.git", pull=True), } # These are other overlays that we merge into the Funtoo tree. However, we just pull in the most recent versions # of these when we regenerate our tree. other_overlays = { "foo_overlay" : GitTree("foo-overlay", "master", "https://github.com/slashbeast/foo-overlay.git", pull=True), "bar_overlay" : GitTree("bar-overlay", "master", "git://github.com/adessemond/bar-overlay.git", pull=True), "squeezebox_overlay" : GitTree("squeezebox", "master", "git://anongit.gentoo.org/user/squeezebox.git", pull=True), "pantheon_overlay" : GitTree("pantheon", "master", "https://github.com/pimvullers/elementary.git", pull=True), "pinsard_overlay" : GitTree("pinsard", "master", "https://github.com/apinsard/sapher-overlay.git", pull=True), "sabayon_for_gentoo" : GitTree("sabayon-for-gentoo", "master", "git://github.com/Sabayon/for-gentoo.git", pull=True), "tripsix_overlay" : GitTree("tripsix", "master", "https://github.com/666threesixes666/tripsix.git", pull=True), "faustoo_overlay" : GitTree("faustoo", "master", "https://github.com/fmoro/faustoo.git", pull=True), "wltjr_overlay" : GitTree("wltjr", "master", "https://github.com/Obsidian-StudiosInc/os-xtoo", pull=True), "vmware_overlay" : GitTree("vmware", "master", "git://anongit.gentoo.org/proj/vmware.git", pull=True) } funtoo_changes = False if funtoo_overlay.changes: funtoo_changes = True elif gentoo_staging_r.changes: funtoo_changes = True else: for fo in funtoo_overlays: if funtoo_overlays[fo].changes: funtoo_changes = True break # This next code regenerates the contents of the funtoo-staging tree. Funtoo's tree is itself composed of # many different overlays which are merged in an automated fashion. This code does it all. pull = True if nopush: push = False else: push = "master" # base_steps define the initial steps that prepare our destination tree for writing. Checking out the correct # branch, copying almost the full entirety of Gentoo's portage tree to our destination tree, and copying over # funtoo overlay licenses, metadata, and also copying over GLSA's. base_steps = [ GitCheckout("master"), SyncFromTree(gentoo_staging_r, exclude=[ "/metadata/cache/**", "ChangeLog", "dev-util/metro", "skel.ChangeLog", ]), ] # Steps related to generating system profiles. These can be quite order-dependent and should be handled carefully. # Generally, the funtoo_overlay sync should be first, then the gentoo_staging_r SyncFiles, which overwrites some stub # files in the funtoo overlay. 
profile_steps = [ SyncDir(funtoo_overlay.root, "profiles", "profiles", exclude=["categories", "updates"]), CopyAndRename("profiles/funtoo/1.0/linux-gnu/arch/x86-64bit/subarch", "profiles/funtoo/1.0/linux-gnu/arch/pure64/subarch", lambda x: os.path.basename(x) + "-pure64"), SyncFiles(gentoo_staging_r.root, { "profiles/package.mask":"profiles/package.mask/00-gentoo", "profiles/arch/amd64/package.use.mask":"profiles/funtoo/1.0/linux-gnu/arch/x86-64bit/package.use.mask/01-gentoo", "profiles/features/multilib/package.use.mask":"profiles/funtoo/1.0/linux-gnu/arch/x86-64bit/package.use.mask/02-gentoo", "profiles/arch/amd64/use.mask":"profiles/funtoo/1.0/linux-gnu/arch/x86-64bit/use.mask/01-gentoo", "profiles/arch/x86/package.use.mask":"profiles/funtoo/1.0/linux-gnu/arch/x86-32bit/package.use.mask/01-gentoo", "profiles/arch/x86/use.mask":"profiles/funtoo/1.0/linux-gnu/arch/x86-32bit/use.mask/01-gentoo", "profiles/default/linux/package.use.mask":"profiles/funtoo/1.0/linux-gnu/package.use.mask/01-gentoo", "profiles/default/linux/use.mask":"profiles/funtoo/1.0/linux-gnu/use.mask/01-gentoo", "profiles/arch/amd64/no-multilib/package.use.mask":"profiles/funtoo/1.0/linux-gnu/arch/pure64/package.use.mask/01-gentoo", "profiles/arch/amd64/no-multilib/package.mask":"profiles/funtoo/1.0/linux-gnu/arch/pure64/package.mask/01-gentoo", "profiles/arch/amd64/no-multilib/use.mask":"profiles/funtoo/1.0/linux-gnu/arch/pure64/use.mask/01-gentoo" }), SyncFiles(funtoo_overlays["deadbeef_overlay"].root, { "profiles/package.mask":"profiles/package.mask/deadbeef-mask" }), SyncFiles(funtoo_overlays["wmfs_overlay"].root, { "profiles/package.mask":"profiles/package.mask/wmfs-mask" }) ] profile_steps += [ SyncFiles(funtoo_overlays["funtoo_toolchain"].root, { "profiles/package.mask/funtoo-toolchain":"profiles/funtoo/1.0/linux-gnu/build/current/package.mask/funtoo-toolchain", }), SyncFiles(funtoo_overlays["funtoo_toolchain"].root, { "profiles/package.mask/funtoo-toolchain":"profiles/funtoo/1.0/linux-gnu/build/stable/package.mask/funtoo-toolchain", "profiles/package.mask/funtoo-toolchain-experimental":"profiles/funtoo/1.0/linux-gnu/build/experimental/package.mask/funtoo-toolchain", }), RunSed(["profiles/base/make.defaults"], ["/^PYTHON_TARGETS=/d", "/^PYTHON_SINGLE_TARGET=/d"]), ] # Steps related to copying ebuilds. Note that order can make a difference here when multiple overlays are # providing identical catpkgs. # Ebuild additions -- these are less-risky changes because ebuilds are only added, and not replaced. ebuild_additions = [ InsertEbuilds(other_overlays["bar_overlay"], select="all", skip=["app-emulation/qemu"], replace=False), InsertEbuilds(other_overlays["squeezebox_overlay"], select="all", skip=None, replace=False), InsertEbuilds(funtoo_overlays["deadbeef_overlay"], select="all", skip=None, replace=False), InsertEbuilds(funtoo_overlays["gambas_overlay"], select="all", skip=None, replace=False), InsertEbuilds(funtoo_overlays["wmfs_overlay"], select="all", skip=None, replace=False), InsertEbuilds(funtoo_overlays["flora"], select="all", skip=None, replace=True, merge=True), ] # Ebuild modifications -- these changes need to be treated more carefully as ordering can be important # for wholesale replacing as well as merging. 
ebuild_modifications = [ InsertEbuilds(other_overlays["vmware_overlay"], select=[ "app-emulation/vmware-modules" ], skip=None, replace=True, merge=True), InsertEbuilds(other_overlays["pantheon_overlay"], select=[ "x11-libs/granite", "x11-libs/bamf", "x11-themes/plank-theme-pantheon", "pantheon-base/plank", "x11-wm/gala"], skip=None, replace=True, merge=True), InsertEbuilds(other_overlays["faustoo_overlay"], select="all", skip=None, replace=True, merge=True), InsertEbuilds(other_overlays["foo_overlay"], select="all", skip=["sys-fs/mdev-bb", "sys-fs/mdev-like-a-boss", "media-sound/deadbeef", "media-video/handbrake"], replace=["app-shells/rssh"]), InsertEbuilds(funtoo_overlays["plex_overlay"], select=[ "media-tv/plex-media-server" ], skip=None, replace=True), InsertEbuilds(other_overlays["sabayon_for_gentoo"], select=["app-admin/equo", "app-admin/matter", "sys-apps/entropy", "sys-apps/entropy-server", "sys-apps/entropy-client-services","app-admin/rigo", "sys-apps/rigo-daemon", "sys-apps/magneto-core", "x11-misc/magneto-gtk", "x11-misc/magneto-gtk3", "x11-themes/numix-icon-theme", "kde-misc/magneto-kde", "app-misc/magneto-loader", "media-video/kazam" ], replace=True), InsertEbuilds(other_overlays["tripsix_overlay"], select=["media-sound/rakarrack"], skip=None, replace=True, merge=False), InsertEbuilds(other_overlays["pinsard_overlay"], select=["app-portage/chuse", "dev-python/iwlib", "media-sound/pytify", "x11-wm/qtile"], skip=None, replace=True, merge=True), InsertEbuilds(other_overlays["wltjr_overlay"], select=["mail-filter/assp", "mail-mta/netqmail"], skip=None, replace=True, merge=False), ] ebuild_modifications += [ InsertEbuilds(funtoo_overlays["funtoo_media"], select="all", skip=None, replace=True), InsertEbuilds(funtoo_overlays["ldap_overlay"], select="all", skip=["net-nds/openldap"], replace=True), ] # Steps related to eclass copying: eclass_steps = [ SyncDir(funtoo_overlays["deadbeef_overlay"].root,"eclass"), ] # General tree preparation steps -- finishing touches. This is where you should put steps that require all ebuilds # from all trees to all be inserted (like AutoGlobMask calls) as well as misc. copying of files like licenses and # updates files. It also contains misc. tweaks like mirror fixups and Portage tree minification. 
treeprep_steps = [ SyncDir(funtoo_overlays["plex_overlay"].root,"licenses"), ] master_steps = [ InsertEbuilds(shards["perl"], select="all", skip=None, replace=True), InsertEclasses(shards["perl"], select=re.compile(".*\.eclass")), InsertEbuilds(shards["x11"], select="all", skip=None, replace=True), InsertEbuilds(shards["office"], select="all", skip=None, replace=True), InsertEbuilds(shards["kde"], select="all", skip=None, replace=True), InsertEclasses(shards["kde"], select=re.compile(".*\.eclass")), InsertEbuilds(shards["gnome"], select="all", skip=None, replace=True), InsertEbuilds(funtoo_overlays["gnome_fixups"], select="all", skip=None, replace=True), InsertEbuilds(shards["core"], select="all", skip=None, replace=True), InsertEclasses(shards["core"], select=re.compile(".*\.eclass")), InsertEbuilds(funtoo_overlays["funtoo_toolchain"], select="all", skip=None, replace=True, merge=False), InsertEbuilds(funtoo_overlay, select="all", skip=None, replace=True), SyncDir(funtoo_overlay.root, "eclass"), SyncDir(funtoo_overlay.root,"licenses"), SyncDir(funtoo_overlay.root,"metadata"), SyncFiles(funtoo_overlay.root, { "COPYRIGHT.txt":"COPYRIGHT.txt", "LICENSE.txt":"LICENSE.txt", "README.rst":"README.rst", "header.txt":"header.txt", }), ] treeprep_steps += [ MergeUpdates(funtoo_overlay.root), AutoGlobMask("dev-lang/python", "python*_pre*", "funtoo-python_pre"), ThirdPartyMirrors(), ProfileDepFix(), Minify(), # Set name of repository as "gentoo". Unset masters. RunSed(["metadata/layout.conf"], ["s/^repo-name = .*/repo-name = gentoo/", "/^masters =/d"]), RunSed(["profiles/repo_name"], ["s/.*/gentoo/"]) ] all_steps = [ base_steps, profile_steps, ebuild_additions, eclass_steps, master_steps, ebuild_modifications, treeprep_steps ] for step in all_steps: funtoo_staging_w.run(step) funtoo_staging_w.gitCommit(message="glorious funtoo updates",branch=push) if xmlfile: a=open(xmlfile,"wb") etree.ElementTree(xml_out).write(a, encoding='utf-8', xml_declaration=True, pretty_print=True) a.close() print("merge-funtoo-staging.py completed successfully.") sys.exit(0) # vim: ts=4 sw=4 noet
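
# Toy illustration (not part of this script) of the step-runner pattern used
# above: all_steps is a list of step *groups*, and each group is handed to the
# destination tree's run() method in order. The classes here are hypothetical
# stand-ins for GitTree and the merge step objects.
class ToyStep:
    def __init__(self, name):
        self.name = name
    def apply(self, tree):
        print("applying %s to %s" % (self.name, tree.name))

class ToyTree:
    def __init__(self, name):
        self.name = name
    def run(self, steps):
        for step in steps:
            step.apply(self)

if __name__ == "__main__":
    dest = ToyTree("funtoo-staging")
    toy_all_steps = [[ToyStep("base")], [ToyStep("profiles"), ToyStep("ebuilds")]]
    for step_group in toy_all_steps:
        dest.run(step_group)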
apinsard/funtoo-overlay
funtoo/scripts/merge-funtoo-staging.py
Python
gpl-2.0
14,296
# # Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # """ binlog_purge_rpl test for ms test and BUG#22543517 running binlogpurge on second master added to slave replication channels """ import replicate_ms from mysql.utilities.exception import MUTLibError _CHANGE_MASTER = ("CHANGE MASTER TO MASTER_HOST = 'localhost', " "MASTER_USER = 'rpl', MASTER_PASSWORD = 'rpl', " "MASTER_PORT = {0}, MASTER_AUTO_POSITION=1 " "FOR CHANNEL 'master-{1}'") def flush_server_logs_(server, times=5): """Flush logs on a server server[in] the instance server where to flush logs on times[in] number of times to flush the logs. """ # Flush master binary log server.exec_query("SET sql_log_bin = 0") for _ in range(times): server.exec_query("FLUSH LOCAL BINARY LOGS") server.exec_query("SET sql_log_bin = 1") class test(replicate_ms.test): """test binlog purge Utility This test runs the mysqlbinlogpurge utility on a known topology. """ master_datadir = None slaves = None mask_ports = [] def check_prerequisites(self): if not self.servers.get_server(0).check_version_compat(5, 7, 6): raise MUTLibError("Test requires server version 5.7.6 or later") return self.check_num_servers(1) def setup(self): self.res_fname = "result.txt" res = super(test, self).setup() if not res: return False # Setup multiple channels for slave m1_dict = self.get_connection_values(self.server2) m2_dict = self.get_connection_values(self.server3) for master in [self.server2, self.server3]: master.exec_query("SET SQL_LOG_BIN= 0") master.exec_query("GRANT REPLICATION SLAVE ON *.* TO 'rpl'@'{0}' " "IDENTIFIED BY 'rpl'".format(self.server1.host)) master.exec_query("SET SQL_LOG_BIN= 1") self.server1.exec_query("SET GLOBAL relay_log_info_repository = " "'TABLE'") self.server1.exec_query(_CHANGE_MASTER.format(m1_dict[3], 1)) self.server1.exec_query(_CHANGE_MASTER.format(m2_dict[3], 2)) self.server1.exec_query("START SLAVE") return True def run(self): test_num = 0 master1_conn = self.build_connection_string(self.server2).strip(' ') master2_conn = self.build_connection_string(self.server3).strip(' ') cmd_str = "mysqlbinlogpurge.py --master={0} ".format(master1_conn) cmd_opts = ("--discover-slaves={0} --dry-run " "".format(master1_conn.split('@')[0])) test_num += 1 comment = ("Test case {0} - mysqlbinlogpurge: with discover " "and verbose options - master 1".format(test_num)) cmds = ("{0} {1} {2} -vv" "").format(cmd_str, cmd_opts, "binlog_purge{0}.log".format(1)) res = self.run_test_case(0, cmds, comment) if not res: raise MUTLibError("{0}: failed".format(comment)) flush_server_logs_(self.server1) cmd_str = "mysqlbinlogpurge.py --master={0} ".format(master2_conn) test_num += 1 comment = ("Test case {0} - mysqlbinlogpurge: with discover " "and verbose options - master 2".format(test_num)) cmds = ("{0} {1} {2} -vv" "").format(cmd_str, cmd_opts, "binlog_purge{0}.log".format(2)) res = 
self.run_test_case(0, cmds, comment) if not res: raise MUTLibError("{0}: failed".format(comment)) flush_server_logs_(self.server1) super(test, self).reset_ms_topology() return True def get_result(self): # If run method executes successfully without throwing any exceptions, # then test was successful return True, None def record(self): # Not a comparative test return True def cleanup(self): return super(test, self).cleanup()
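
# Illustrative note (not part of the test): _CHANGE_MASTER above renders one
# CHANGE MASTER statement per replication channel. With a hypothetical master
# port of 13001 and channel index 1 it expands to:
#
#   CHANGE MASTER TO MASTER_HOST = 'localhost', MASTER_USER = 'rpl',
#   MASTER_PASSWORD = 'rpl', MASTER_PORT = 13001, MASTER_AUTO_POSITION=1
#   FOR CHANNEL 'master-1'
if __name__ == "__main__":
    print(_CHANGE_MASTER.format(13001, 1))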
mysql/mysql-utilities
mysql-test/suite/replication/t/binlog_purge_ms.py
Python
gpl-2.0
4,696
from abc import ABCMeta, abstractmethod

from my_hue import *


# Would dynamically choose a trigger based on trigger type
def trigger_factory(trigger_type):
    return None


class Trigger(object):
    __metaclass__ = ABCMeta

    def __init__(self):
        self.action()

    @abstractmethod
    def action(self):
        pass


# Subclass Trigger (not object) so the abstract base's contract applies and
# its __init__ fires the action.
class IClickerTrigger(Trigger):

    def __init__(self, clicker_id, response_info, time_of_trigger, sequence_number):
        # Set the attributes before calling the base __init__, because
        # Trigger.__init__ immediately invokes action(), which reads them.
        self.clicker_id = clicker_id
        self.response_info = response_info
        self.time_of_trigger = time_of_trigger
        self.sequence_number = sequence_number
        super(IClickerTrigger, self).__init__()

    def action(self):
        print self.response_info
        button = 'a'
        if button == 'a':
            pass
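
# Hedged sketch (an assumption, not in the original): one way trigger_factory
# could dispatch on trigger_type, mapping type names to Trigger subclasses.
# The 'iclicker' key and the sample arguments are hypothetical.
def trigger_factory_example(trigger_type, *args):
    registry = {
        'iclicker': IClickerTrigger,
    }
    trigger_class = registry.get(trigger_type)
    if trigger_class is None:
        return None
    return trigger_class(*args)

# e.g. trigger_factory_example('iclicker', 'clicker-1', 'A', 1488123456, 0)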
Caveat4U/home_automation
trigger.py
Python
gpl-2.0
793
from __future__ import absolute_import from unittest import TestCase from datetime import datetime, timedelta from voeventdb.server.tests.fixtures import fake, packetgen class TestBasicRoutines(TestCase): def setUp(self): self.start = datetime(2015, 1, 1) self.interval = timedelta(minutes=15) def test_timerange(self): n_interval_added = 5 times = [t for t in packetgen.timerange(self.start, self.start+self.interval*n_interval_added, self.interval)] self.assertEqual(n_interval_added, len(times)) self.assertEqual(self.start, times[0]) def test_heartbeat(self): n_interval = 4*6 packets = fake.heartbeat_packets(self.start, self.interval, n_interval) self.assertEqual(n_interval, len(packets))
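
# Illustrative sketch (an assumption, not part of voeventdb): a generator with
# the semantics these tests exercise - it yields 'start' and then steps by
# 'interval' up to but excluding the end bound, like range() for datetimes.
def timerange_sketch(start, end, interval):
    current = start
    while current < end:
        yield current
        current += interval

# With start=datetime(2015, 1, 1), end=start + interval * 5 and a 15-minute
# interval, this yields exactly 5 datetimes, the first equal to start.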
timstaley/voeventdb
voeventdb/server/tests/test_fixture_creation.py
Python
gpl-2.0
916
import sqlite3
import os.path
import sys
import random

def makeDatabase(databaseName):
    if databaseName[-3:] != ".db":
        databaseName = databaseName + ".db"
    conn = sqlite3.connect(databaseName)
    conn.commit()
    conn.close()

def listToString(list):
    string = ""
    for i in list:
        string += str(i)+"\t"
    return string[:-1]

def stringToList(string):
    list = [str(line) for line in string.split('\t')]
    return list

#class for connecting, inserting, and retrieving information from a sqlite3 database
class SqliteDB:

    #connects to the database, alters its name if named incorrectly
    def __init__(self, databaseName):
        if databaseName[-3:] != ".db":
            databaseName = databaseName + ".db"
        if os.path.isfile(databaseName):
            self.databaseName = databaseName
            self.conn = sqlite3.connect(self.databaseName)
            self.cursor = self.conn.cursor()
        else:
            #the database file must already exist; create it with makeDatabase() first
            #so an existing database is never silently overwritten
            sys.exit("This database does not exist, use the makeDatabase(databaseName) to create it")

    def createTables(self):
        #creates tables if they do not exist
        self.cursor.execute("CREATE TABLE IF NOT EXISTS students (wID text, email text, UNIQUE(wID, email) ON CONFLICT ABORT)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS submissions (labNumber int, wID text, URL text, metadata text, URLsToGrade text)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS uniqueStudentURL (labNumber int, wID text, URL text, UNIQUE(URL) ON CONFLICT ABORT)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS experts (labNumber int, URL text, grade text, hidden int, PRIMARY KEY(labNumber, URL, hidden))")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS responses (labNumber int, URL text, wID text, response text, practice boolean, PRIMARY KEY(labNumber, URL, response))")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS questions (labNumber int, questionNumber int, questionWebassignNumber int, practice boolean)")
        weightString = ''
        for i in range(6):
            weightString += ', weight'+str(i+1)+' num'
        self.cursor.execute("CREATE TABLE IF NOT EXISTS weightsBIBI (labNumber int, wID text"+weightString+", weightSum num)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS rubrics (labNumber int, itemIndex int, itemType text, itemValues text, graded boolean, itemPrompt text)")
        self.cursor.execute("CREATE TABLE IF NOT EXISTS grades(labNumber int, wID text, URL text, finalGrade number, finalGradeVector text, rawGrade number, rawGradeVector text)")
        ##check to see if the tables have already been created
        #creates columns in tables for each lab specified
        self.conn.commit()

    #adds a person into the database, works for both new users and existing ones
    def addEntry(self, wID, URL, labNumber, metadata = None):
        if self.databaseName != None and self.conn != None and self.cursor != None:
            #If the student did not submit a URL (aka the inputted URL is '')
            if URL == '':
                self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL, metadata, ''])
            #try putting the student and its URL into the uniqueStudentURL database to check if the URL is unique
            else:
                try:
                    self.cursor.execute("INSERT INTO uniqueStudentURL VALUES (?,?,?)", [labNumber, wID, URL])
                    #if there is no error in inserting to a table where URL has to be unique, put it in the actual student database
                    self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL, metadata, ''])
                #if the try fails, that means that the URL is already in the db, duplicate URL found!
                except:
                    self.cursor.execute("SELECT wID FROM uniqueStudentURL WHERE URL=?", [URL])
                    print "URL: " + URL + " was initially submitted by: " + self.cursor.fetchall()[0][0]
                    URL = "DUPLICATEURL"
                    self.cursor.execute("INSERT INTO submissions VALUES(?,?,?,?,?)", [labNumber, wID, URL, metadata, ''])
        self.conn.commit()

    def addEmail(self, wID, email):
        try:
            #students has two columns (wID, email), so two placeholders
            self.cursor.execute("INSERT INTO students VALUES (?,?)", [wID, email])
        except:
            print "wID: " + wID + " or email: " + email + " already in database."

    #retrieves URL for a specific student and specific lab number
    def getURL(self, wID, labNumber):
        self.cursor.execute("SELECT URL FROM submissions WHERE labNumber=? AND wID=?", [labNumber, wID])
        URL = self.cursor.fetchone()
        if URL is not None:
            return (URL[0])
        else:
            return None

    def addExpertURL(self, labNumber, URL, grade, hidden):
        self.cursor.execute("SELECT * FROM experts WHERE URL = ?", [URL])
        #adds in a user if not in database already
        presentURL = self.cursor.fetchone()
        if presentURL == None:
            self.cursor.execute("INSERT INTO experts VALUES (?, ?, ?, ?)", [labNumber, URL, listToString(grade), hidden])
            self.conn.commit()
        #fetchone() returns a row tuple, so compare against its URL column
        elif presentURL[1] == URL:
            print "The URL " + URL + " is already in the expert database"
        else:
            sys.exit("Trying to overwrite")
        ##find a way to make separate expert tables for each lab, and then join them together to prevent the staggering of grades in the excel sheet
        #self.cursor.execute("SELECT * FROM expert WHERE Lab1Grade")
        #print self.cursor.fetchall()
        #query = ("SELECT {0} FROM expert WHERE wID

    def getExpertURLs(self, labNumber):
        self.cursor.execute("SELECT URL, grade FROM experts where labNumber=?", [labNumber])
        URLsAndGrades = {}
        for d in self.cursor.fetchall():
            URLsAndGrades[str(d[0])] = stringToList(str(d[1]))
        return URLsAndGrades

    def finalize(self, labNumber, seed, N, MOOC=False):
        ##randomize the youtube URLs
        #for each wID
        #put that into the database under the student ID
        self.cursor.execute("SELECT URL FROM experts WHERE labNumber=? and hidden=0", [labNumber])
        expertURL = [str(d[0]) for d in self.cursor.fetchall()]
        # find all the hidden expert videos
        self.cursor.execute("SELECT URL FROM experts WHERE labNumber=? and hidden=1", [labNumber])
        hiddenURL = [str(d[0]) for d in self.cursor.fetchall()]
        #get all the student URLs
        self.cursor.execute("SELECT URL from submissions WHERE labNumber=?", [labNumber])
        data = [str(d[0]) for d in self.cursor.fetchall()]
        #assign the students whose videos are designated expert graded URLs to grade, and remove them from the URL pool retrieved above
        if len(expertURL) + N + 1 <= len(data):
            pseudoURL = {}
            for d in expertURL:
                #if the expertURL is not in the data list, then it is a video that was not submitted by a student this semester, in which case we skip it
                if d in data:
                    self.cursor.execute("SELECT wID FROM submissions WHERE URL=?", [d])
                    indice = (data.index(d) + 1) % len(data)
                    while data[indice] in expertURL or data[indice] in hiddenURL:
                        indice = (indice + 1) % len(data)
                    pseudoURL[d] = data[indice]
                    data.remove(d)
            for d in hiddenURL:
                if d in data:
                    indice = (data.index(d) + 1) % len(data)
                    while data[indice] in expertURL or data[indice] in hiddenURL:
                        indice = (indice + 1) % len(data)
                    pseudoURL[d] = data[indice]
                    data.remove(d)
and URL is ''", [labNumber]) noURLSubmitted = [str(d[0]) for d in self.cursor.fetchall()] wIDPseudoURL = {} if(data.count('') > 0) and not MOOC: for d in noURLSubmitted: indice = (data.index('') + 1) % len(data) while data[indice] == '': indice = (indice + 1) % len(data) wIDPseudoURL[d] = data[indice] data.remove('') else: while '' in data: data.remove('') self.cursor.execute("SELECT wID FROM submissions WHERE labNumber=? AND URL=?", [labNumber, "DUPLICATEURL"]) noURLSubmitted = [str(d[0]) for d in self.cursor.fetchall()] if(data.count("DUPLICATEURL") > 0) and not MOOC: for d in noURLSubmitted: indice = (data.index("DUPLICATEURL") + 1) % len(data) while data[indice] == "DUPLICATEURL": indice = (indice + 1) % len(data) wIDPseudoURL[d] = data[indice] data.remove("DUPLICATEURL") else: while '' in data: data.remove('') #self.cursor.execute(query) random.shuffle(data) selectFrom = data + data[:N + len(expertURL) + 1] if len(pseudoURL.keys()) > 0: # params = ("Lab" + str(labNumber) + "URLSToGrade", "Lab" + str(labNumber) + "URL") for key in pseudoURL.keys(): startIndex = selectFrom.index(pseudoURL[key]) URLSToGrade = selectFrom[startIndex: startIndex+N+1] for i in hiddenURL: URLSToGrade.append(i) random.shuffle(URLSToGrade) self.cursor.execute("UPDATE submissions SET URLsToGrade=? WHERE URL=?", [listToString(expertURL + URLSToGrade), key]) self.conn.commit() if len(wIDPseudoURL.keys()) > 0: for key in wIDPseudoURL.keys(): startIndex = selectFrom.index(wIDPseudoURL[key]) URLSToGrade = selectFrom[startIndex: startIndex+N+1] for i in hiddenURL: URLSToGrade.append(i) random.shuffle(URLSToGrade) self.cursor.execute("UPDATE submissions SET URLsToGrade=? WHERE wID=?", [listToString(expertURL + URLSToGrade), key]) self.conn.commit() if len(data) > N: for d in data: startIndex = selectFrom.index(d) URLSToGrade = selectFrom[startIndex:startIndex+N+1] for i in hiddenURL: URLSToGrade.append(i) random.shuffle(URLSToGrade) # params = ("Lab" + str(labNumber) + "URLSToGrade", "Lab" + str(labNumber) + "URL") self.cursor.execute("UPDATE submissions SET URLsToGrade=? WHERE URL=? and labNumber=?", [listToString(expertURL + URLSToGrade), d, labNumber]) self.conn.commit() def getURLsToGrade(self, wID, labNumber): self.cursor.execute("Select URLsToGrade FROM submissions WHERE wID=? and labNumber=?", [wID, labNumber]) dbExtract = self.cursor.fetchone() if dbExtract == None: return False else: return [i for i in stringToList(dbExtract[0])] def addGrade(self, wID, labNumber, URL, grade , practice = False): URLsToGrade = self.getURLsToGrade(wID, labNumber) if URLsToGrade != False: if URL in URLsToGrade: self.cursor.execute("INSERT INTO responses VALUES(?, ?, ?, ?, ?)", [labNumber, URL, wID, listToString(grade), practice]) self.conn.commit() else: print "wID: " + wID + " was not assigned to grade URL: " + URL else: print("wID: " + wID + " not in the submissions table") def wIDGradesSubmitted(self, wID, labNumber): URLsToGrade = self.getURLsToGrade(wID, labNumber) gradesSubmitted = {} for URL in URLsToGrade: self.cursor.execute("SElECT grade FROM grades WHERE wID = ? 
AND URL = ?",[wID, URL]) dbExtract = self.cursor.fetchall() #if they did not grade the URL assigned to them if dbExtract!=[]: gradesSubmitted[URL] = stringToList(str(dbExtract[0][0])) else: gradesSubmitted[URL] = None return gradesSubmitted def compareToExpert(self, wID, labNumber): expertURLsAndGrades = self.getExpertURLs(labNumber) userSubmittedGrades = self.wIDGradesSubmitted(wID, labNumber) URLsGraded = userSubmittedGrades.keys() for key in expertURLsAndGrades.keys(): if key in URLsGraded: print expertURLsAndGrades[key] print userSubmittedGrades[key] def getGrades(self, wID, labNumber): URL = self.getURL(wID, labNumber) self.cursor.execute("SELECT grade,wID FROM grades WHERE URL=?", [URL]) grades = {} for d in self.cursor.fetchall(): grades[str(d[1])] = str(d[0]) return grades def check(self, labNumber): # params = ("Lab" + str(labNumber) + "URL", "Lab" + str(labNumber) + "URLsToGrade", None) self.cursor.execute("Select URL, URLsToGrade FROM submissions WHERE URL!= ''") fetch = self.cursor.fetchall() individualURL = [str(d[0]) for d in fetch] URLList = listToString([str(d[1]) for d in fetch]) for i in range(1, len(individualURL)-1): if individualURL[i] not in stringToList(URLList[i]): print individualURL[i] return False return True if False: os.remove("test.db") makeDatabase("test.db") sqldb = SqliteDB("test.db") sqldb.createTables() sqldb.addEntry("1", "1lkjsdf", 1) sqldb.addEntry("2", "1lkjsdf", 1) sqldb.addEntry("3", "1lkjsdf", 1) sqldb.addEntry("4", "4lkjsdf", 1) # sqldb.addEntry("4a",None , 2) sqldb.addEntry("5", "5lkjsdf", 1) sqldb.addEntry("6", "6lkjsdf", 1) sqldb.addEntry("7", "7lkjsdf", 1) sqldb.getURL("1", 1) sqldb.getURL("2", 1) sqldb.addExpertURL(1, "5lkjsdf",[1, 2, 3, 4, 5, 6, 7], 0) sqldb.addExpertURL(1, "2lkjsdf", [1, 7, 3, 1, 6, 3], 0) # sqldb.addEntry("8", None, 2) sqldb.addEntry("8", '', 1) sqldb.addEntry(9, "hidden", 1) sqldb.addExpertURL(1, "hidden", [1, 2, 3], 1) print "testing below" sqldb.finalize(1, 1, 3) print sqldb.getURLsToGrade("1", 1) sqldb.addGrade("1",1, "5lkjsdf", [1, 2, 3, 4]) sqldb.addGrade("12",1, "asdf", 1) sqldb.addGrade("1", 1, "2kjla", 1) sqldb.addGrade("2", "1", "5lkjsdf", [4, 3, 2, 1]) sqldb.wIDGradesSubmitted("1", 1) sqldb.getGrades("5", 1) sqldb.getExpertURLs(1) sqldb.compareToExpert("1",1) sqldb.check(1) # sqldb.addExpert("expertVideo", 1, 1) # sqldb.addExpert("test2", 2, 2)
scott-s-douglas/SWAPR
sqlite1.py
Python
gpl-2.0
12,933
#to get some base functionality for free, including the methods get_params and set_params
#to set and return the classifier's parameters, as well as the score method to calculate the
#prediction accuracy, respectively
from sklearn.base import BaseEstimator
from sklearn.base import ClassifierMixin
from sklearn.preprocessing import LabelEncoder
#import six to make the MajorityVoteClassifier compatible with python2.7
from sklearn.externals import six
from sklearn.base import clone
from sklearn.pipeline import _name_estimators
import numpy as np
import operator

class MajorityVoteClassifier(BaseEstimator, ClassifierMixin):
    """ A majority vote ensemble classifier

    Parameters
    ----------
    classifiers : array-like, shape = [n_classifiers]
      Different classifiers for the ensemble

    vote : str, {'classlabel', 'probability'}
      Default: 'classlabel'
      If 'classlabel' the prediction is based on the argmax of class labels.
      Else if 'probability', the argmax of the sum of probabilities is used
      to predict the class label (recommended for calibrated classifiers).

    weights : array-like, shape = [n_classifiers]
      Optional, default: None
      If a list of `int` or `float` values is provided, the classifiers are
      weighted by importance; uses uniform weights if 'weights=None'

    """
    def __init__(self, classifiers, vote='classlabel', weights=None):
        self.classifiers = classifiers
        self.named_classifiers = {key: value for key, value in _name_estimators(classifiers)}
        self.vote = vote
        self.weights = weights

    def fit(self, X, y):
        """ Fit classifiers.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Matrix of training samples.

        y : array-like, shape = [n_samples]
            Vector of target class labels.

        Returns
        -------
        self : object

        """
        # Use LabelEncoder to ensure class labels start
        # with 0, which is important for the np.argmax
        # call in self.predict
        self.lablenc_ = LabelEncoder()
        self.lablenc_.fit(y)
        self.classes_ = self.lablenc_.classes_
        self.classifiers_ = []
        for clf in self.classifiers:
            fitted_clf = clone(clf).fit(X, self.lablenc_.transform(y))
            self.classifiers_.append(fitted_clf)
        return self

    def predict(self, X):
        """ Predict class labels for X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Matrix of training samples

        Returns
        ----------
        maj_vote : array-like, shape = [n_samples]
            Predicted class labels.

        """
        if self.vote == 'probability':
            maj_vote = np.argmax(self.predict_proba(X), axis=1)
        else:  # 'classlabel' vote
            # Collect results from clf.predict calls
            predictions = np.asarray([clf.predict(X) for clf in self.classifiers_]).T
            maj_vote = np.apply_along_axis(lambda x: np.argmax(np.bincount(x, weights=self.weights)),
                                           axis=1,
                                           arr=predictions)
        maj_vote = self.lablenc_.inverse_transform(maj_vote)
        return maj_vote

    def predict_proba(self, X):
        """ Predict class probabilities for X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        ----------
        avg_proba : array-like, shape = [n_samples, n_classes]
            Weighted average probability for each class per sample.
""" probas = np.asarray([clf.predict_proba(X) for clf in self.classifiers_]) avg_proba = np.average(probas,axis=0, weights=self.weights) return avg_proba def get_params(self, deep=True): """ Get classifier parameter names for GridSearch""" if not deep: return super(MajorityVoteClassifier,self).get_params(deep=False) else: out = self.named_classifiers.copy() for name, step in six.iteritems(self.named_classifiers): for key, value in six.iteritems(step.get_params(deep=True)): out['%s__%s' % (name, key)] = value return out #get datas from sklearn import datasets from sklearn.cross_validation import train_test_split from sklearn.preprocessing import StandardScaler from sklearn.preprocessing import LabelEncoder iris = datasets.load_iris() X,y = iris.data[50:,[1,2]],iris.target[50:] le = LabelEncoder() y = le.fit_transform(y) X_train,X_test,y_train,y_test = train_test_split(X,y,test_size = 0.5,random_state = 1) #train logistic regression classifier, decision tree, k-nearest neightbor respectively from sklearn.cross_validation import cross_val_score from sklearn.linear_model import LogisticRegression from sklearn.tree import DecisionTreeClassifier from sklearn.neighbors import KNeighborsClassifier from sklearn.pipeline import Pipeline import numpy as np clf1 = LogisticRegression(penalty = 'l2',C = 0.001,random_state = 0) clf2 = DecisionTreeClassifier(max_depth = 1,criterion = 'entropy',random_state = 0) clf3 = KNeighborsClassifier(n_neighbors = 1,p=2,metric = 'minkowski') pipe1 = Pipeline([['sc',StandardScaler()],['clf',clf1]]) pipe3 = Pipeline([['sc',StandardScaler()],['clf',clf3]]) clf_labels = ['Logistic Regression','Decision Tree','KNN'] print('10-fold cross validation:\n') for clf,label in zip([pipe1,clf2,pipe3],clf_labels): scores = cross_val_score(estimator = clf, X=X_train, y=y_train, cv=10, scoring = 'roc_auc') print ("ROC AUC: %0.2f (+/- %0.2f) [%s]" % (scores.mean(),scores.std(),label)) #combine the individual classifiers for majority rule voting in our MajorityVoteClassifier #import os #pwd = os.getcwd() #os.chdir('E:\\machine-learning\\19-Ensemble Learning\\') #from majority_voting import MajorityVoteClassifier mv_clf = MajorityVoteClassifier(classifiers=[pipe1, clf2, pipe3]) clf_labels += ['Majority Voting'] all_clf = [pipe1, clf2, pipe3, mv_clf] for clf, label in zip(all_clf, clf_labels): scores = cross_val_score(estimator=clf,X=X_train,y=y_train,cv=10,scoring='roc_auc') print("Accuracy: %0.2f (+/- %0.2f) [%s]"% (scores.mean(), scores.std(), label)) #os.chdir(pwd) #compute the ROC curves from the test set to check if the MajorityVoteClassifier generalizes well to unseen data from sklearn.metrics import roc_curve from sklearn.metrics import auc import matplotlib.pyplot as plt colors = ['black','orange','blue','green'] linestyles = [':', '--', '-.', '-'] for clf,label,clr,ls in zip(all_clf,clf_labels,colors,linestyles): #assuming the label of the positive class is 1 y_pred = clf.fit(X_train,y_train).predict_proba(X_test)[:,1] fpr,tpr,thresholds = roc_curve(y_true = y_test,y_score = y_pred) roc_auc = auc(x= fpr,y=tpr) plt.plot(fpr,tpr,color = clr,linestyle = ls,label = '%s (auc = %0.2f)' % (label,roc_auc)) plt.legend(loc = 'lower right') plt.plot([0, 1], [0, 1],linestyle='--',color='gray',linewidth=2) plt.xlim([-0.1, 1.1]) plt.ylim([-0.1, 1.1]) plt.grid() plt.xlabel('False Positive Rate') plt.ylabel('True Positive Rate') plt.show() #tune the inverse regularization parameter C of the logistic regression classifier and the decision tree #depth via a grid search for demonstration purposes 
from sklearn.grid_search import GridSearchCV params = {'decisiontreeclassifier__max_depth':[1,2],'pipeline-1__clf__C':[0.001,0.1,100.0]} grid = GridSearchCV(estimator = mv_clf,param_grid=params,cv = 10,scoring = 'roc_auc') grid.fit(X_train,y_train) for params,mean_score,scores in grid.grid_scores_: print('%0.3f +/- %0.2f %r' % (mean_score,scores.std()/2,params)) print('Best parameters : %s' % grid.best_params_) print('Accuracy: %.2f' % grid.best_score_)
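
#illustrative note (not part of the original script): the weighted majority
#vote in MajorityVoteClassifier.predict boils down to np.bincount with the
#ensemble weights. For three classifiers predicting labels [0, 0, 1] with
#weights [0.2, 0.2, 0.6]:
#
#   np.bincount([0, 0, 1], weights=[0.2, 0.2, 0.6])  ->  array([0.4, 0.6])
#   np.argmax(...)                                   ->  1
#
#so the single classifier with weight 0.6 outvotes the other two.
print(np.argmax(np.bincount([0, 0, 1], weights=[0.2, 0.2, 0.6])))  # prints 1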
PhenixI/machine-learning
1_supervised_classification/19-Ensemble Learning/majority_voting/majority_voting_test.py
Python
gpl-2.0
8,185
from Tools.Profile import profile profile("LOAD:ElementTree") import xml.etree.cElementTree import os profile("LOAD:enigma_skin") from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, addFont, gRGB, eWindowStyleSkinned, getDesktop from Components.config import ConfigSubsection, ConfigText, config from Components.Converter.Converter import Converter from Components.Sources.Source import Source, ObsoleteSource from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_FONTS, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists, SCOPE_SKIN_IMAGE from Tools.Import import my_import from Tools.LoadPixmap import LoadPixmap from Components.RcModel import rc_model from Components.SystemInfo import SystemInfo colorNames = {} # Predefined fonts, typically used in built-in screens and for components like # the movie list and so. fonts = { "Body": ("Regular", 18, 22, 16), "ChoiceList": ("Regular", 20, 24, 18), } parameters = {} def dump(x, i=0): print " " * i + str(x) try: for n in x.childNodes: dump(n, i + 1) except: None class SkinError(Exception): def __init__(self, message): self.msg = message def __str__(self): return "{%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg) dom_skins = [ ] def addSkin(name, scope = SCOPE_SKIN): # read the skin filename = resolveFilename(scope, name) if fileExists(filename): mpath = os.path.dirname(filename) + "/" try: dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot())) except: print "[SKIN ERROR] error in %s" % filename return False else: return True return False # get own skin_user_skinname.xml file, if exist def skin_user_skinname(): name = "skin_user_" + config.skin.primary_skin.value[:config.skin.primary_skin.value.rfind('/')] + ".xml" filename = resolveFilename(SCOPE_CONFIG, name) if fileExists(filename): return name return None # we do our best to always select the "right" value # skins are loaded in order of priority: skin with # highest priority is loaded last, usually the user-provided # skin. # currently, loadSingleSkinData (colors, bordersets etc.) # are applied one-after-each, in order of ascending priority. # the dom_skin will keep all screens in descending priority, # so the first screen found will be used. 
# example: loadSkin("nemesis_greenline/skin.xml") config.skin = ConfigSubsection() DEFAULT_SKIN = SystemInfo["HasFullHDSkinSupport"] and "PLi-FullNightHD/skin.xml" or "PLi-HD/skin.xml" # on SD hardware, PLi-HD will not be available if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)): # in that case, fallback to Magic (which is an SD skin) DEFAULT_SKIN = "Magic/skin.xml" if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)): DEFAULT_SKIN = "skin.xml" config.skin.primary_skin = ConfigText(default=DEFAULT_SKIN) profile("LoadSkin") res = None name = skin_user_skinname() if name: res = addSkin(name, SCOPE_CONFIG) if not name or not res: addSkin('skin_user.xml', SCOPE_CONFIG) # some boxes lie about their dimensions addSkin('skin_box.xml') # add optional discrete second infobar addSkin('skin_second_infobar.xml') display_skin_id = 1 addSkin('skin_display.xml') addSkin('skin_text.xml') addSkin('skin_subtitles.xml') try: if not addSkin(config.skin.primary_skin.value): raise SkinError, "primary skin not found" except Exception, err: print "SKIN ERROR:", err skin = DEFAULT_SKIN if config.skin.primary_skin.value == skin: skin = 'skin.xml' print "defaulting to standard skin...", skin config.skin.primary_skin.value = skin addSkin(skin) del skin addSkin('skin_default.xml') profile("LoadSkinDefaultDone") # # Convert a string into a number. Used to convert object position and size attributes into a number # s is the input string. # e is the the parent object size to do relative calculations on parent # size is the size of the object size (e.g. width or height) # font is a font object to calculate relative to font sizes # Note some constructs for speeding # up simple cases that are very common. # Can do things like: 10+center-10w+4% # To center the widget on the parent widget, # but move forward 10 pixels and 4% of parent width # and 10 character widths backward # Multiplication, division and subexprsssions are also allowed: 3*(e-c/2) # # Usage: center : center the object on parent based on parent size and object size # e : take the parent size/width # c : take the center point of parent size/width # % : take given percentag of parent size/width # w : multiply by current font width # h : multiply by current font height # def parseCoordinate(s, e, size=0, font=None): s = s.strip() if s == "center": # for speed, can be common case val = (e - size)/2 elif s == '*': return None else: try: val = int(s) # for speed except: if 't' in s: s = s.replace("center", str((e-size)/2.0)) if 'e' in s: s = s.replace("e", str(e)) if 'c' in s: s = s.replace("c", str(e/2.0)) if 'w' in s: s = s.replace("w", "*" + str(fonts[font][3])) if 'h' in s: s = s.replace("h", "*" + str(fonts[font][2])) if '%' in s: s = s.replace("%", "*" + str(e/100.0)) try: val = int(s) # for speed except: val = eval(s) if val < 0: return 0 return int(val) # make sure an integer value is returned def getParentSize(object, desktop): size = eSize() if object: parent = object.getParent() # For some widgets (e.g. ScrollLabel) the skin attributes are applied to # a child widget, instead of to the widget itself. In that case, the parent # we have here is not the real parent, but it is the main widget. # We have to go one level higher to get the actual parent. 
# We can detect this because the 'parent' will not have a size yet # (the main widget's size will be calculated internally, as soon as the child # widget has parsed the skin attributes) if parent and parent.size().isEmpty(): parent = parent.getParent() if parent: size = parent.size() elif desktop: #widget has no parent, use desktop size instead for relative coordinates size = desktop.size() return size def parseValuePair(s, scale, object = None, desktop = None, size = None): x, y = s.split(',') parentsize = eSize() if object and ('c' in x or 'c' in y or 'e' in x or 'e' in y or '%' in x or '%' in y): # need parent size for ce% parentsize = getParentSize(object, desktop) xval = parseCoordinate(x, parentsize.width(), size and size.width() or 0) yval = parseCoordinate(y, parentsize.height(), size and size.height() or 0) return (xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1]) def parsePosition(s, scale, object = None, desktop = None, size = None): (x, y) = parseValuePair(s, scale, object, desktop, size) return ePoint(x, y) def parseSize(s, scale, object = None, desktop = None): (x, y) = parseValuePair(s, scale, object, desktop) return eSize(x, y) def parseFont(s, scale): try: f = fonts[s] name = f[0] size = f[1] except: name, size = s.split(';') return gFont(name, int(size) * scale[0][0] / scale[0][1]) def parseColor(s): if s[0] != '#': try: return colorNames[s] except: raise SkinError("color '%s' must be #aarrggbb or valid named color" % s) return gRGB(int(s[1:], 0x10)) def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))): # walk all attributes size = None pos = None font = None for attrib, value in node.items(): if attrib not in ignore: if attrib in filenames: value = resolveFilename(SCOPE_CURRENT_SKIN, value, path_prefix=skin_path_prefix) # Bit of a hack this, really. When a window has a flag (e.g. wfNoBorder) # it needs to be set at least before the size is set, in order for the # window dimensions to be calculated correctly in all situations. # If wfNoBorder is applied after the size has been set, the window will fail to clear the title area. # Similar situation for a scrollbar in a listbox; when the scrollbar setting is applied after # the size, a scrollbar will not be shown until the selection moves for the first time if attrib == 'size': size = value.encode("utf-8") elif attrib == 'position': pos = value.encode("utf-8") elif attrib == 'font': font = value.encode("utf-8") skinAttributes.append((attrib, font)) else: skinAttributes.append((attrib, value.encode("utf-8"))) if pos is not None: pos, size = context.parse(pos, size, font) skinAttributes.append(('position', pos)) if size is not None: skinAttributes.append(('size', size)) def morphRcImagePath(value): if rc_model.rcIsDefault() is False: if value == '/usr/share/enigma2/skin_default/rc.png' or value == '/usr/share/enigma2/skin_default/rcold.png': value = rc_model.getRcImg() return value def loadPixmap(path, desktop): option = path.find("#") if option != -1: path = path[:option] ptr = LoadPixmap(morphRcImagePath(path), desktop) if ptr is None: raise SkinError("pixmap file %s not found!" 
% path) return ptr class AttributeParser: def __init__(self, guiObject, desktop, scale=((1,1),(1,1))): self.guiObject = guiObject self.desktop = desktop self.scaleTuple = scale def applyOne(self, attrib, value): try: getattr(self, attrib)(value) except AttributeError: print "[Skin] Attribute not implemented:", attrib, "value:", value except SkinError, ex: print "[Skin] Error:", ex def applyAll(self, attrs): for attrib, value in attrs: self.applyOne(attrib, value) def conditional(self, value): pass def position(self, value): if isinstance(value, tuple): self.guiObject.move(ePoint(*value)) else: self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize())) def size(self, value): if isinstance(value, tuple): self.guiObject.resize(eSize(*value)) else: self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop)) def title(self, value): self.guiObject.setTitle(_(value)) def text(self, value): self.guiObject.setText(_(value)) def font(self, value): self.guiObject.setFont(parseFont(value, self.scaleTuple)) def zPosition(self, value): self.guiObject.setZPosition(int(value)) def itemHeight(self, value): self.guiObject.setItemHeight(int(value)) def pixmap(self, value): ptr = loadPixmap(value, self.desktop) self.guiObject.setPixmap(ptr) def backgroundPixmap(self, value): ptr = loadPixmap(value, self.desktop) self.guiObject.setBackgroundPicture(ptr) def selectionPixmap(self, value): ptr = loadPixmap(value, self.desktop) self.guiObject.setSelectionPicture(ptr) def sliderPixmap(self, value): ptr = loadPixmap(value, self.desktop) self.guiObject.setSliderPicture(ptr) def scrollbarbackgroundPixmap(self, value): ptr = loadPixmap(value, self.desktop) self.guiObject.setScrollbarBackgroundPicture(ptr) def alphatest(self, value): self.guiObject.setAlphatest( { "on": 1, "off": 0, "blend": 2, }[value]) def scale(self, value): self.guiObject.setScale(1) def orientation(self, value): # used by eSlider try: self.guiObject.setOrientation(* { "orVertical": (self.guiObject.orVertical, False), "orTopToBottom": (self.guiObject.orVertical, False), "orBottomToTop": (self.guiObject.orVertical, True), "orHorizontal": (self.guiObject.orHorizontal, False), "orLeftToRight": (self.guiObject.orHorizontal, False), "orRightToLeft": (self.guiObject.orHorizontal, True), }[value]) except KeyError: print "oprientation must be either orVertical or orHorizontal!" def valign(self, value): try: self.guiObject.setVAlign( { "top": self.guiObject.alignTop, "center": self.guiObject.alignCenter, "bottom": self.guiObject.alignBottom }[value]) except KeyError: print "valign must be either top, center or bottom!" def halign(self, value): try: self.guiObject.setHAlign( { "left": self.guiObject.alignLeft, "center": self.guiObject.alignCenter, "right": self.guiObject.alignRight, "block": self.guiObject.alignBlock }[value]) except KeyError: print "halign must be either left, center, right or block!" def textOffset(self, value): x, y = value.split(',') self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1])) def flags(self, value): flags = value.split(',') for f in flags: try: fv = eWindow.__dict__[f] self.guiObject.setFlag(fv) except KeyError: print "illegal flag %s!" 
% f def backgroundColor(self, value): self.guiObject.setBackgroundColor(parseColor(value)) def backgroundColorSelected(self, value): self.guiObject.setBackgroundColorSelected(parseColor(value)) def foregroundColor(self, value): self.guiObject.setForegroundColor(parseColor(value)) def foregroundColorSelected(self, value): self.guiObject.setForegroundColorSelected(parseColor(value)) def shadowColor(self, value): self.guiObject.setShadowColor(parseColor(value)) def selectionDisabled(self, value): self.guiObject.setSelectionEnable(0) def transparent(self, value): self.guiObject.setTransparent(int(value)) def borderColor(self, value): self.guiObject.setBorderColor(parseColor(value)) def borderWidth(self, value): self.guiObject.setBorderWidth(int(value)) def scrollbarMode(self, value): self.guiObject.setScrollbarMode(getattr(self.guiObject, value)) # { "showOnDemand": self.guiObject.showOnDemand, # "showAlways": self.guiObject.showAlways, # "showNever": self.guiObject.showNever, # "showLeft": self.guiObject.showLeft # }[value]) def enableWrapAround(self, value): self.guiObject.setWrapAround(True) def itemHeight(self, value): self.guiObject.setItemHeight(int(value)) def pointer(self, value): (name, pos) = value.split(':') pos = parsePosition(pos, self.scaleTuple) ptr = loadPixmap(name, self.desktop) self.guiObject.setPointer(0, ptr, pos) def seek_pointer(self, value): (name, pos) = value.split(':') pos = parsePosition(pos, self.scaleTuple) ptr = loadPixmap(name, self.desktop) self.guiObject.setPointer(1, ptr, pos) def shadowOffset(self, value): self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple)) def noWrap(self, value): self.guiObject.setNoWrap(1) def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))): # Someone still using applySingleAttribute? AttributeParser(guiObject, desktop, scale).applyOne(attrib, value) def applyAllAttributes(guiObject, desktop, attributes, scale): AttributeParser(guiObject, desktop, scale).applyAll(attributes) def loadSingleSkinData(desktop, skin, path_prefix): """loads skin data like colors, windowstyle etc.""" assert skin.tag == "skin", "root element in skin must be 'skin'!" 
for c in skin.findall("output"): id = c.attrib.get('id') if id: id = int(id) else: id = 0 if id == 0: # framebuffer for res in c.findall("resolution"): get_attr = res.attrib.get xres = get_attr("xres") if xres: xres = int(xres) else: xres = 720 yres = get_attr("yres") if yres: yres = int(yres) else: yres = 576 bpp = get_attr("bpp") if bpp: bpp = int(bpp) else: bpp = 32 #print "Resolution:", xres,yres,bpp from enigma import gMainDC gMainDC.getInstance().setResolution(xres, yres) desktop.resize(eSize(xres, yres)) if bpp != 32: # load palette (not yet implemented) pass if yres >= 1080: parameters["FileListName"] = (68,4,1000,34) parameters["FileListIcon"] = (7,4,52,37) parameters["FileListMultiName"] = (90,3,1000,32) parameters["FileListMultiIcon"] = (45, 4, 30, 30) parameters["FileListMultiLock"] = (2,0,36,36) parameters["ChoicelistDash"] = (0,3,1000,30) parameters["ChoicelistName"] = (68,3,1000,30) parameters["ChoicelistIcon"] = (7,0,52,38) parameters["PluginBrowserName"] = (180,8,38) parameters["PluginBrowserDescr"] = (180,42,25) parameters["PluginBrowserIcon"] = (15,8,150,60) parameters["PluginBrowserDownloadName"] = (120,8,38) parameters["PluginBrowserDownloadDescr"] = (120,42,25) parameters["PluginBrowserDownloadIcon"] = (15,0,90,76) parameters["ServiceInfo"] = (0,0,450,50) parameters["ServiceInfoLeft"] = (0,0,450,45) parameters["ServiceInfoRight"] = (450,0,1000,45) parameters["SelectionListDescr"] = (45,3,1000,32) parameters["SelectionListLock"] = (0,2,36,36) parameters["ConfigListSeperator"] = 300 parameters["VirtualKeyboard"] = (68,68) parameters["PartnerBoxEntryListName"] = (8,2,225,38) parameters["PartnerBoxEntryListIP"] = (180,2,225,38) parameters["PartnerBoxEntryListPort"] = (405,2,150,38) parameters["PartnerBoxEntryListType"] = (615,2,150,38) parameters["PartnerBoxTimerServicename"] = (0,0,45) parameters["PartnerBoxTimerName"] = (0,42,30) parameters["PartnerBoxE1TimerTime"] = (0,78,255,30) parameters["PartnerBoxE1TimerState"] = (255,78,255,30) parameters["PartnerBoxE2TimerTime"] = (0,78,225,30) parameters["PartnerBoxE2TimerState"] = (225,78,225,30) parameters["PartnerBoxE2TimerIcon"] = (1050,8,20,20) parameters["PartnerBoxE2TimerIconRepeat"] = (1050,38,20,20) parameters["PartnerBoxBouquetListName"] = (0,0,45) parameters["PartnerBoxChannelListName"] = (0,0,45) parameters["PartnerBoxChannelListTitle"] = (0,42,30) parameters["PartnerBoxChannelListTime"] = (0,78,225,30) parameters["HelpMenuListHlp"] = (0,0,900,42) parameters["HelpMenuListExtHlp0"] = (0,0,900,39) parameters["HelpMenuListExtHlp1"] = (0,42,900,30) parameters["AboutHddSplit"] = 1 parameters["DreamexplorerName"] = (62,0,1200,38) parameters["DreamexplorerIcon"] = (15,4,30,30) parameters["PicturePlayerThumb"] = (30,285,45,300,30,25) parameters["PlayListName"] = (38,2,1000,34) parameters["PlayListIcon"] = (7,7,24,24) parameters["SHOUTcastListItem"] = (30,27,35,96,35,33,60,32) for skininclude in skin.findall("include"): filename = skininclude.attrib.get("filename") if filename: skinfile = resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix) if not fileExists(skinfile): skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix) if fileExists(skinfile): print "[SKIN] loading include:", skinfile loadSkin(skinfile) for c in skin.findall("colors"): for color in c.findall("color"): get_attr = color.attrib.get name = get_attr("name") color = get_attr("value") if name and color: colorNames[name] = parseColor(color) #print "Color:", name, color else: raise SkinError("need color and name, got %s 
%s" % (name, color)) for c in skin.findall("fonts"): for font in c.findall("font"): get_attr = font.attrib.get filename = get_attr("filename", "<NONAME>") name = get_attr("name", "Regular") scale = get_attr("scale") if scale: scale = int(scale) else: scale = 100 is_replacement = get_attr("replacement") and True or False render = get_attr("render") if render: render = int(render) else: render = 0 resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix) if not fileExists(resolved_font): #when font is not available look at current skin path skin_path = resolveFilename(SCOPE_CURRENT_SKIN, filename) if fileExists(skin_path): resolved_font = skin_path addFont(resolved_font, name, scale, is_replacement, render) #print "Font: ", resolved_font, name, scale, is_replacement for alias in c.findall("alias"): get = alias.attrib.get try: name = get("name") font = get("font") size = int(get("size")) height = int(get("height", size)) # to be calculated some day width = int(get("width", size)) global fonts fonts[name] = (font, size, height, width) except Exception, ex: print "[SKIN] bad font alias", ex for c in skin.findall("parameters"): for parameter in c.findall("parameter"): get = parameter.attrib.get try: name = get("name") value = get("value") parameters[name] = "," in value and map(int, value.split(",")) or int(value) except Exception, ex: print "[SKIN] bad parameter", ex for c in skin.findall("subtitles"): from enigma import eSubtitleWidget scale = ((1,1),(1,1)) for substyle in c.findall("sub"): get_attr = substyle.attrib.get font = parseFont(get_attr("font"), scale) col = get_attr("foregroundColor") if col: foregroundColor = parseColor(col) haveColor = 1 else: foregroundColor = gRGB(0xFFFFFF) haveColor = 0 col = get_attr("borderColor") if col: borderColor = parseColor(col) else: borderColor = gRGB(0) borderwidth = get_attr("borderWidth") if borderwidth is None: # default: use a subtitle border borderWidth = 3 else: borderWidth = int(borderwidth) face = eSubtitleWidget.__dict__[get_attr("name")] eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth) for windowstyle in skin.findall("windowstyle"): style = eWindowStyleSkinned() style_id = windowstyle.attrib.get("id") if style_id: style_id = int(style_id) else: style_id = 0 # defaults font = gFont("Regular", 20) offset = eSize(20, 5) for title in windowstyle.findall("title"): get_attr = title.attrib.get offset = parseSize(get_attr("offset"), ((1,1),(1,1))) font = parseFont(get_attr("font"), ((1,1),(1,1))) style.setTitleFont(font); style.setTitleOffset(offset) #print " ", font, offset for borderset in windowstyle.findall("borderset"): bsName = str(borderset.attrib.get("name")) for pixmap in borderset.findall("pixmap"): get_attr = pixmap.attrib.get bpName = get_attr("pos") filename = get_attr("filename") if filename and bpName: png = loadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix), desktop) style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png) #print " borderset:", bpName, filename for color in windowstyle.findall("color"): get_attr = color.attrib.get colorType = get_attr("name") color = parseColor(get_attr("color")) try: style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color) except: raise SkinError("Unknown color %s" % colorType) #pass #print " color:", type, color x = eWindowStyleManager.getInstance() x.setStyle(style_id, style) for margin in skin.findall("margin"): style_id = 
margin.attrib.get("id") if style_id: style_id = int(style_id) else: style_id = 0 r = eRect(0,0,0,0) v = margin.attrib.get("left") if v: r.setLeft(int(v)) v = margin.attrib.get("top") if v: r.setTop(int(v)) v = margin.attrib.get("right") if v: r.setRight(int(v)) v = margin.attrib.get("bottom") if v: r.setBottom(int(v)) # the "desktop" parameter is hardcoded to the UI screen, so we must ask # for the one that this actually applies to. getDesktop(style_id).setMargins(r) dom_screens = {} def loadSkin(name, scope = SCOPE_SKIN): # Now a utility for plugins to add skin data to the screens global dom_screens, display_skin_id filename = resolveFilename(scope, name) if fileExists(filename): path = os.path.dirname(filename) + "/" for elem in xml.etree.cElementTree.parse(filename).getroot(): if elem.tag == 'screen': name = elem.attrib.get('name', None) if name: sid = elem.attrib.get('id', None) if sid and (sid != display_skin_id): # not for this display elem.clear() continue if name in dom_screens: print "loadSkin: Screen already defined elsewhere:", name elem.clear() else: dom_screens[name] = (elem, path) else: elem.clear() else: elem.clear() def loadSkinData(desktop): # Kinda hackish, but this is called once by mytest.py global dom_skins skins = dom_skins[:] skins.reverse() for (path, dom_skin) in skins: loadSingleSkinData(desktop, dom_skin, path) for elem in dom_skin: if elem.tag == 'screen': name = elem.attrib.get('name', None) if name: sid = elem.attrib.get('id', None) if sid and (sid != display_skin_id): # not for this display elem.clear() continue if name in dom_screens: # Kill old versions, save memory dom_screens[name][0].clear() dom_screens[name] = (elem, path) else: # without name, it's useless! elem.clear() else: # non-screen element, no need for it any longer elem.clear() # no longer needed, we know where the screens are now. del dom_skins class additionalWidget: pass # Class that makes a tuple look like something else. Some plugins just assume # that size is a string and try to parse it. This class makes that work. 
class SizeTuple(tuple): def split(self, *args): return (str(self[0]), str(self[1])) def strip(self, *args): return '%s,%s' % self def __str__(self): return '%s,%s' % self class SkinContext: def __init__(self, parent=None, pos=None, size=None, font=None): if parent is not None: if pos is not None: pos, size = parent.parse(pos, size, font) self.x, self.y = pos self.w, self.h = size else: self.x = None self.y = None self.w = None self.h = None def __str__(self): return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h) def parse(self, pos, size, font): if pos == "fill": pos = (self.x, self.y) size = (self.w, self.h) self.w = 0 self.h = 0 else: w,h = size.split(',') w = parseCoordinate(w, self.w, 0, font) h = parseCoordinate(h, self.h, 0, font) if pos == "bottom": pos = (self.x, self.y + self.h - h) size = (self.w, h) self.h -= h elif pos == "top": pos = (self.x, self.y) size = (self.w, h) self.h -= h self.y += h elif pos == "left": pos = (self.x, self.y) size = (w, self.h) self.x += w self.w -= w elif pos == "right": pos = (self.x + self.w - w, self.y) size = (w, self.h) self.w -= w else: size = (w, h) pos = pos.split(',') pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font)) return (SizeTuple(pos), SizeTuple(size)) class SkinContextStack(SkinContext): # A context that stacks things instead of aligning them def parse(self, pos, size, font): if pos == "fill": pos = (self.x, self.y) size = (self.w, self.h) else: w,h = size.split(',') w = parseCoordinate(w, self.w, 0, font) h = parseCoordinate(h, self.h, 0, font) if pos == "bottom": pos = (self.x, self.y + self.h - h) size = (self.w, h) elif pos == "top": pos = (self.x, self.y) size = (self.w, h) elif pos == "left": pos = (self.x, self.y) size = (w, self.h) elif pos == "right": pos = (self.x + self.w - w, self.y) size = (w, self.h) else: size = (w, h) pos = pos.split(',') pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font)) return (SizeTuple(pos), SizeTuple(size)) def readSkin(screen, skin, names, desktop): if not isinstance(names, list): names = [names] # try all skins, first existing one have priority global dom_screens for n in names: myscreen, path = dom_screens.get(n, (None,None)) if myscreen is not None: # use this name for debug output name = n break else: name = "<embedded-in-'%s'>" % screen.__class__.__name__ # otherwise try embedded skin if myscreen is None: myscreen = getattr(screen, "parsedSkin", None) # try uncompiled embedded skin if myscreen is None and getattr(screen, "skin", None): skin = screen.skin print "[SKIN] Parsing embedded skin", name if isinstance(skin, tuple): for s in skin: candidate = xml.etree.cElementTree.fromstring(s) if candidate.tag == 'screen': sid = candidate.attrib.get('id', None) if (not sid) or (int(sid) == display_skin_id): myscreen = candidate break; else: print "[SKIN] Hey, no suitable screen!" else: myscreen = xml.etree.cElementTree.fromstring(skin) if myscreen: screen.parsedSkin = myscreen if myscreen is None: print "[SKIN] No skin to read..." 
myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>") screen.skinAttributes = [ ] skin_path_prefix = getattr(screen, "skin_path", path) context = SkinContextStack() s = desktop.bounds() context.x = s.left() context.y = s.top() context.w = s.width() context.h = s.height() del s collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",)) context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size')) screen.additionalWidgets = [ ] screen.renderer = [ ] visited_components = set() # now walk all widgets and stuff def process_none(widget, context): pass def process_widget(widget, context): get_attr = widget.attrib.get # ok, we either have 1:1-mapped widgets ('old style'), or 1:n-mapped # widgets (source->renderer). wname = get_attr('name') wsource = get_attr('source') if wname is None and wsource is None: print "widget has no name and no source!" return if wname: #print "Widget name=", wname visited_components.add(wname) # get corresponding 'gui' object try: attributes = screen[wname].skinAttributes = [ ] except: raise SkinError("component with name '" + wname + "' was not found in skin of screen '" + name + "'!") # assert screen[wname] is not Source collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',)) elif wsource: # get corresponding source #print "Widget source=", wsource while True: # until we found a non-obsolete source # parse our current "wsource", which might specifiy a "related screen" before the dot, # for example to reference a parent, global or session-global screen. scr = screen # resolve all path components path = wsource.split('.') while len(path) > 1: scr = screen.getRelatedScreen(path[0]) if scr is None: #print wsource #print name raise SkinError("specified related screen '" + wsource + "' was not found in screen '" + name + "'!") path = path[1:] # resolve the source. source = scr.get(path[0]) if isinstance(source, ObsoleteSource): # however, if we found an "obsolete source", issue warning, and resolve the real source. print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source) print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % (source.removal_date) if source.description: print source.description wsource = source.new_source else: # otherwise, use that source. 
break if source is None: raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!") wrender = get_attr('render') if not wrender: raise SkinError("you must define a renderer with render= for source '%s'" % wsource) for converter in widget.findall("convert"): ctype = converter.get('type') assert ctype, "'convert'-tag needs a 'type'-attribute" #print "Converter:", ctype try: parms = converter.text.strip() except: parms = "" #print "Params:", parms converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype) c = None for i in source.downstream_elements: if isinstance(i, converter_class) and i.converter_arguments == parms: c = i if c is None: c = converter_class(parms) c.connect(source) source = c renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender) renderer = renderer_class() # instantiate renderer renderer.connect(source) # connect to source attributes = renderer.skinAttributes = [ ] collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source')) screen.renderer.append(renderer) def process_applet(widget, context): try: codeText = widget.text.strip() widgetType = widget.attrib.get('type') code = compile(codeText, "skin applet", "exec") except Exception, ex: raise SkinError("applet failed to compile: " + str(ex)) if widgetType == "onLayoutFinish": screen.onLayoutFinish.append(code) else: raise SkinError("applet type '%s' unknown!" % widgetType) def process_elabel(widget, context): w = additionalWidget() w.widget = eLabel w.skinAttributes = [ ] collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',)) screen.additionalWidgets.append(w) def process_epixmap(widget, context): w = additionalWidget() w.widget = ePixmap w.skinAttributes = [ ] collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',)) screen.additionalWidgets.append(w) def process_screen(widget, context): for w in widget.getchildren(): conditional = w.attrib.get('conditional') if conditional and not [i for i in conditional.split(",") if i in screen.keys()]: continue p = processors.get(w.tag, process_none) try: p(w, context) except SkinError, e: print "[Skin] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e def process_panel(widget, context): n = widget.attrib.get('name') if n: try: s = dom_screens[n] except KeyError: print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name) else: process_screen(s[0], context) layout = widget.attrib.get('layout') if layout == 'stack': cc = SkinContextStack else: cc = SkinContext try: c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font')) except Exception, ex: raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) ) process_screen(widget, c) processors = { None: process_none, "widget": process_widget, "applet": process_applet, "eLabel": process_elabel, "ePixmap": process_epixmap, "panel": process_panel } try: context.x = 0 # reset offsets, all components are relative to screen context.y = 0 # coordinates. 
process_screen(myscreen, context) except Exception, e: print "[Skin] SKIN ERROR in %s:" % name, e from Components.GUIComponent import GUIComponent nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)] assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components)) # This may look pointless, but it unbinds 'screen' from the nested scope. A better # solution is to avoid the nested scope above and use the context object to pass # things around. screen = None visited_components = None
isslayne/enigma2
skin.py
Python
gpl-2.0
35,736
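# A minimal, self-contained sketch of the layout algebra used by SkinContext
# above: named positions ("top", "bottom", "left", "right") carve a child
# rectangle off one edge of the remaining parent area and shrink what is left.
# Plain ints only -- the eSize/ePoint wrappers and coordinate parsing are
# omitted, so this is an illustration, not the enigma2 implementation.

class LayoutBox(object):
    def __init__(self, x, y, w, h):
        self.x, self.y, self.w, self.h = x, y, w, h

    def take(self, edge, span):
        """Reserve `span` pixels on `edge`, return (pos, size) of the slice."""
        if edge == "top":
            pos, size = (self.x, self.y), (self.w, span)
            self.y += span
            self.h -= span
        elif edge == "bottom":
            pos, size = (self.x, self.y + self.h - span), (self.w, span)
            self.h -= span
        elif edge == "left":
            pos, size = (self.x, self.y), (span, self.h)
            self.x += span
            self.w -= span
        elif edge == "right":
            pos, size = (self.x + self.w - span, self.y), (span, self.h)
            self.w -= span
        return pos, size

box = LayoutBox(0, 0, 1280, 720)
print(box.take("top", 80))    # ((0, 0), (1280, 80)); 640 px of height remains
print(box.take("left", 300))  # ((0, 80), (300, 640)); 980 px of width remains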
#!/usr/bin/env python # -*- coding: utf-8 -*- # # PROJETO LAVAGEM A SECO # # MAIN # # Felipe Bandeira da Silva # 26 jul 15 # import logging import tornado.escape import tornado.ioloop import tornado.web import tornado.options import tornado.websocket import tornado.httpserver import os.path from tornado.concurrent import Future from tornado import gen from tornado.options import define, options, parse_command_line import socket import fcntl import struct import random define("port", default=8888, help="run on the given port", type=int) define("debug", default=False, help="run in debug mode") import multiprocessing import controle import time import os import signal import subprocess import sys from platform import uname #NAVEGADOR = 'epiphany' NAVEGADOR = 'midori -e Fullscreen -a' # A pagina HTML contém informações interessantes e que devem ser # apresentadas ao usuário. Quanto menor o tempo maior o processamento # por parte do cliente ou dependendo do caso pelo servidor. TEMPO_MS_ATUALIZACAO_HTML = 500 # Via websocket é possível mais um cliente conectado e todos devem # receber as mensagens do servidor, bem como enviar. # clientes do websocket clients = [] # tarefa para atualizacao do pagina html queue_joyx = multiprocessing.Queue() queue_joyy = multiprocessing.Queue() queue_joyz = multiprocessing.Queue() # anemometro queue_velocidade = multiprocessing.Queue() queue_direcao = multiprocessing.Queue() queue_distancia = multiprocessing.Queue() # usado para o controle da página pelo joystick queue_joy_botoes = multiprocessing.Queue() #class NavegadorWEB(multiprocessing.Process): # def __init__(self): # multiprocessing.Process.__init__(self) # # self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \ # shell=True, preexec_fn=os.setsid) # # def run(self): # while True: # time.sleep(0.01) def inicia_navegador(): navegador = subprocess.Popen([NAVEGADOR+' 192.168.42.1:8888'], \ stdout=subprocess.PIPE, \ shell=True, preexec_fn=os.setsid) def fecha_navegador(): processos = subprocess.Popen(['pgrep', NAVEGADOR], stdout=subprocess.PIPE) print 'PID dos processos', processos.stdout for pid in processos.stdout: os.kill(int(pid), signal.SIGTERM) try: time.sleep(3) os.kill(int(pid), 0) print u'erro: o processo %d ainda existe' % pid except OSError as ex: continue def get_ip_address(): # Informa o endereço IP da primeira conexão funcionando # visto em: # http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: ifname = 'eth0' return socket.inet_ntoa(fcntl.ioctl( \ s.fileno(), \ 0x8915, # SIOCGIFADDR \ struct.pack('256s', ifname[:15]) \ )[20:24]) except: try: ifname = 'wlan0' return socket.inet_ntoa(fcntl.ioctl( \ s.fileno(), \ 0x8915, # SIOCGIFADDR \ struct.pack('256s', ifname[:15]) \ )[20:24]) except: return "127.0.0.1" def get_ip_address_interface(ifname): # Informa o endereço de IP de uma rede <ifname> # visto em: # http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: return socket.inet_ntoa(fcntl.ioctl( \ s.fileno(), \ 0x8915, # SIOCGIFADDR \ struct.pack('256s', ifname[:15]) \ )[20:24]) except: return "0.0.0.0" class MainHandler(tornado.web.RequestHandler): # Atende ao GET e POST do cliente def get(self): # é possível via argumento renderizar a página html com # informações interessantes, os comentários devem ter o mesmo # nome da variável da página 
self.render("index.html", title="LAVAGEM A SECO", \ ip_host=get_ip_address()+":"+str(options.port), \ msg_status="LIGADO") class WebSocketHandler(tornado.websocket.WebSocketHandler): # Todo cliente se encarrega de conectar-se ao servidor websocket. # Quando existe uma nova conexão é salvo qual cliente foi. def open(self): print 'tornado: websocket: aviso: nova conexão de um cliente' clients.append(self) self.write_message("connected") # Quando um cliente envia uma mensagem, esta é a função responsável # por ler e aqui deve ficar a chamada dos get das filas(queue) def on_message(self, message): print 'tornado: websocket: aviso: nova mensagem: %s' % message q = self.application.settings.get('queue') q.put(message) # Para evitar envios de informações a clientes que não existem mais # é necessário retirá-los da lista def on_close(self): print 'tornado: websocket: aviso: conexão finalizada/perdida' clients.remove(self) fecha_navegador() inicia_navegador() def envia_cmd_websocket(cmd, arg): # Facilita o trabalho repetitivo de envia mensagem para todo os clientes # Envia um comando e seu argumento para todos os clientes for c in clients: c.write_message(cmd+";"+arg) def tarefa_atualizacao_html(): # Esta função tem uma chamada periódica, responsável por atualizar os # elementos atualizáveis na página html envia_cmd_websocket("lan", get_ip_address()) envia_cmd_websocket("random", str(random.randint(0,1000))) # para envia algo é necessário que fila tenha algo if not queue_joyx.empty(): resultado = queue_joyx.get() envia_cmd_websocket("joyx", str(resultado)[:6]) if not queue_joyy.empty(): resultado = queue_joyy.get() envia_cmd_websocket("joyy", str(resultado)[:6]) if not queue_joyz.empty(): resultado = queue_joyz.get() envia_cmd_websocket("joyz", str(resultado)[:6]) if not queue_joy_botoes.empty(): resultado = queue_joy_botoes.get() envia_cmd_websocket("b", str(resultado)) if not queue_velocidade.empty(): resultado = queue_velocidade.get() envia_cmd_websocket("v", str(resultado)) if not queue_direcao.empty(): resultado = queue_direcao.get() envia_cmd_websocket("d", str(resultado)) if not queue_distancia.empty(): resultado = queue_distancia.get() envia_cmd_websocket("x", str(resultado)[:6]) def main(): print u"Iniciando o servidor Tornado" fecha_navegador() tarefa_controle = multiprocessing.Queue() # esse loop ler os dados do joystick e envia para o lavos # sem ele, nenhuma resposta do Joystick é atendida. controle_loop = controle.ControleLavagem(tarefa_controle, \ queue_joyx, \ queue_joyy, \ queue_joyz, \ queue_joy_botoes, \ queue_velocidade, \ queue_direcao, \ queue_distancia) controle_loop.daemon = True controle_loop.start() # espera um pouco para que a tarefa esteja realmente pronta # sincronismo é mais interessante? 
time.sleep(1) tarefa_controle.put("Testando Tarefa :)") parse_command_line() app = tornado.web.Application( [ (r"/", MainHandler), (r"/ws", WebSocketHandler) ], cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__", template_path=os.path.join(os.path.dirname(__file__), "templates"), static_path=os.path.join(os.path.dirname(__file__), "static"), xsrf_cookies=True, debug=options.debug, autoreload=True, queue=tarefa_controle, ) # porta que o servidor irá usar app.listen(options.port) # carrega o servidor mas não inicia main_loop = tornado.ioloop.IOLoop.instance() # Aqui será a principal tarefa do lavagem, leitura e acionamento tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\ TEMPO_MS_ATUALIZACAO_HTML, \ io_loop = main_loop) print u"aviso: tornado: start" tarefa_atualizacao_html_loop.start() inicia_navegador() # o loop do servidor deve ser o último, já que não um daemon main_loop.start() if __name__ == "__main__": main()
lamotriz/lavagem-a-seco
src/main.py
Python
gpl-2.0
8,904
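# The server above keeps a module-level `clients` list and pushes state to
# every connected browser from a tornado PeriodicCallback. A stripped-down
# sketch of that broadcast pattern (handler and route names here are
# illustrative, not the project's own):

import tornado.ioloop
import tornado.web
import tornado.websocket

clients = []


class BroadcastSocket(tornado.websocket.WebSocketHandler):
    def open(self):
        clients.append(self)

    def on_close(self):
        clients.remove(self)


def broadcast(cmd, arg):
    # same "cmd;arg" wire format used by envia_cmd_websocket above
    for c in clients:
        c.write_message(cmd + ";" + arg)


if __name__ == "__main__":
    app = tornado.web.Application([(r"/ws", BroadcastSocket)])
    app.listen(8888)
    loop = tornado.ioloop.IOLoop.instance()
    tornado.ioloop.PeriodicCallback(lambda: broadcast("tick", "ok"), 500).start()
    loop.start()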
# coding: utf-8 # In[1]: import os from shutil import copyfile import subprocess from save_embedded_graph27 import main_binary as embed_main from spearmint_ghsom import main as ghsom_main import numpy as np import pickle from time import time def save_obj(obj, name): with open(name + '.pkl', 'wb') as f: pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL) def load_obj(name): with open(name + '.pkl', 'rb') as f: return pickle.load(f) #root dir os.chdir("C:\Miniconda3\Jupyter\GHSOM_simplex_dsd") #save directory dir = os.path.abspath("parameter_tests_edges") #number of times to repeat num_repeats = 30 #number of nodes in the graph N = 64 #make save directory if not os.path.isdir(dir): os.mkdir(dir) #change to dir os.chdir(dir) #network file names -- output of network generator network = "network.dat" first_level = "community.dat" #community labels labels = 'firstlevelcommunity' #mixing factors mu = 0.1 num_edges_ls = [256, 512, 1024] parameter_settings = [0.5, 0.6, 0.7, 0.8, 0.9, 1][::-1] overall_nmi_scores = np.zeros((len(num_edges_ls), len(parameter_settings))) for i in range(len(num_edges_ls)): #number of edges num_edges = num_edges_ls[i] #create directory dir_string = os.path.join(dir, str(num_edges)) if not os.path.isdir(dir_string): os.mkdir(dir_string) #change working directory os.chdir(dir_string) for j in range(len(parameter_settings)): #setting fo e_sg p = parameter_settings[j] #ghsom parameters params = {'w': 0.0001, 'eta': 0.0001, 'sigma': 1, 'e_sg': p, 'e_en': 0.8} #create directory dir_string_p = os.path.join(dir_string, str(p)) if not os.path.isdir(dir_string_p): os.mkdir(dir_string_p) #change working directory os.chdir(dir_string_p) if os.path.isfile('nmi_scores.csv'): print 'already completed {}/{}, loading scores and continuing'.format(k1, p) nmi_scores = np.genfromtxt('nmi_scores.csv', delimiter=',') overall_nmi_scores[i,j] = np.mean(nmi_scores, axis=0) continue #copy executable ex = "benchmark.exe" if not os.path.isfile(ex): source = "C:\\Users\\davem\\Documents\\PhD\\Benchmark Graph Generators\\binary_networks\\benchmark.exe" copyfile(source, ex) #record NMI scores if not os.path.isfile('nmi_scores.pkl'): print 'creating new nmi scores array' nmi_scores = np.zeros(num_repeats) else: print 'loading nmi score progress' nmi_scores = load_obj('nmi_scores') #record running times if not os.path.isfile('running_times.pkl'): print 'creating new running time array' running_times = np.zeros(num_repeats) else: print 'loading running time progress' running_times = load_obj('running_times') print #generate networks for r in range(1, num_repeats+1): #number of communities num_communities = np.random.randint(1,5) #number of nodes in micro community minc = np.floor(float(N) / num_communities) maxc = np.ceil(float(N) / num_communities) #average number of edges k = float(num_edges) / N #max number of edges maxk = 2 * k #make benchmark parameter file filename = "benchmark_flags_{}_{}_{}.dat".format(num_edges,p,r) if not os.path.isfile(filename): print 'number of edges: {}'.format(num_edges) print 'number of communities: {}'.format(num_communities) print '-N {} -k {} -maxk {} -minc {} -maxc {} -mu {}'.format(N, k, maxk, minc, maxc, mu) with open(filename,"w") as f: f.write("-N {} -k {} -maxk {} -minc {} -maxc {} -mu {}".format(N, k, maxk, minc, maxc, mu)) print 'written flag file: {}'.format(filename) #cmd strings change_dir_cmd = "cd {}".format(dir_string_p) generate_network_cmd = "benchmark -f {}".format(filename) #output of cmd output_file = open("cmd_output.out", 'w') network_rename = 
"{}_{}".format(r,network) first_level_rename = "{}_{}".format(r,first_level) gml_filename = 'embedded_network_{}.gml'.format(r) if not os.path.isfile(network_rename): process = subprocess.Popen(change_dir_cmd + " && " + generate_network_cmd, stdout=output_file, stderr=output_file, shell=True) process.wait() print 'generated graph {}'.format(r) os.rename(network, network_rename) os.rename(first_level, first_level_rename) print 'renamed graph {}'.format(r) if not os.path.isfile(gml_filename): ##embed graph embed_main(network_rename, first_level_rename) print 'embedded graph {} as {} in {}'.format(r, gml_filename, os.getcwd()) ##score for this network if not np.all(nmi_scores[r-1]): start_time = time() print 'starting ghsom for: {}/{}/{}'.format(num_edges, p, gml_filename) nmi_score, communities_detected = ghsom_main(params, gml_filename, labels) nmi_scores[r-1] = nmi_score running_time = time() - start_time print 'running time of algorithm: {}'.format(running_time) running_times[r-1] = running_time #save save_obj(nmi_scores, 'nmi_scores') save_obj(running_times, 'running_times') print 'saved nmi score for network {}: {}'.format(gml_filename, nmi_score) print ##output nmi scores to csv file print 'writing nmi scores and running times to file' np.savetxt('nmi_scores.csv',nmi_scores,delimiter=',') np.savetxt('running_times.csv',running_times,delimiter=',') print #odd to overall list overall_nmi_scores[i,j] = np.mean(nmi_scores, axis=0) print 'DONE' print 'OVERALL NMI SCORES' print overall_nmi_scores # In[3]: for scores in overall_nmi_scores: print scores idx = np.argsort(scores)[::-1] print parameter_settings[idx[0]]
DavidMcDonald1993/ghsom
parameter_tests_edges.py
Python
gpl-2.0
7,042
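# The benchmark flags in the script above are derived rather than hand-picked:
# community sizes split N evenly across a random community count, and the
# degree parameters follow from the requested edge count. That arithmetic in
# isolation:

import numpy as np

N = 64                                       # nodes, as in the script
num_edges = 512                              # one of num_edges_ls
num_communities = np.random.randint(1, 5)

minc = np.floor(float(N) / num_communities)  # smallest community size
maxc = np.ceil(float(N) / num_communities)   # largest community size
k = float(num_edges) / N                     # average degree handed to -k
maxk = 2 * k                                 # cap handed to -maxk

print('-N {} -k {} -maxk {} -minc {} -maxc {}'.format(N, k, maxk, minc, maxc))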
# -*- coding: utf-8 -*-
from .base import *

# local-memory cache for development (LocMemCache is per-process,
# not an actual memcached backend)
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
    }
}
davegri/Sikumia
sikumim/settings/development.py
Python
gpl-2.0
162
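# With the LocMemCache backend configured above, the regular Django cache API
# works unchanged; values simply live in process memory and vanish on restart.
# A brief usage sketch (assumes DJANGO_SETTINGS_MODULE points at these
# settings):

from django.core.cache import cache

cache.set('greeting', 'hello', timeout=30)   # keep for 30 seconds
assert cache.get('greeting') == 'hello'
cache.delete('greeting')
assert cache.get('greeting', 'missing') == 'missing'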
from unittest import TestCase

from pyage.core import inject
from pyage_forams.solutions.foram import Foram


class TestForam(TestCase):
    def test_step(self):
        inject.config = "pyage_forams.conf.dummy_conf"
        foram = Foram(10)
        # foram.step()
maciek123/pyage-forams
pyage_forams/solutions/test_foram.py
Python
gpl-2.0
263
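# The test above assigns a config module path to `inject.config` before
# constructing the object under test, so injection is wired up at
# construction time. A toy rendition of that "configure, then construct"
# pattern with a hypothetical injector (pyage's real wiring is more involved):

class Injector(object):
    config = None

inject = Injector()


class Agent(object):
    def __init__(self, energy):
        assert inject.config is not None, "set inject.config before constructing"
        self.energy = energy

inject.config = "myproject.conf.dummy_conf"  # hypothetical module path
agent = Agent(10)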
#! /usr/bin/env python # -*- coding: UTF8 -*- # Este arquivo é parte do programa Carinhas # Copyright 2013-2014 Carlo Oliveira <[email protected]>, # `Labase <http://labase.selfip.org/>`__; `GPL <http://is.gd/3Udt>`__. # # Carinhas é um software livre; você pode redistribuí-lo e/ou # modificá-lo dentro dos termos da Licença Pública Geral GNU como # publicada pela Fundação do Software Livre (FSF); na versão 2 da # Licença. # # Este programa é distribuído na esperança de que possa ser útil, # mas SEM NENHUMA GARANTIA; sem uma garantia implícita de ADEQUAÇÃO # a qualquer MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a # Licença Pública Geral GNU para maiores detalhes. # # Você deve ter recebido uma cópia da Licença Pública Geral GNU # junto com este programa, se não, escreva para a Fundação do Software # Livre(FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ ############################################################ SuperPython - Teste de Funcionalidade Web ############################################################ Verifica a funcionalidade do servidor web. """ __author__ = 'carlo' import unittest import sys import bottle import os import sys import os project_server = os.path.dirname(os.path.abspath(__file__)) project_server = os.path.join(project_server, '../src/') # print(project_server) sys.path.insert(0, project_server) # make sure the default templates directory is known to Bottle templates_dir = os.path.join(project_server, 'server/views/') # print(templates_dir) if templates_dir not in bottle.TEMPLATE_PATH: bottle.TEMPLATE_PATH.insert(0, templates_dir) if sys.version_info[0] == 2: from mock import MagicMock, patch else: from unittest.mock import MagicMock, patch, ANY from webtest import TestApp from server.control import application as appbottle import server.modelo_redis as cs import server.control as ct class FunctionalWebTest(unittest.TestCase): def setUp(self): cs.DBF = '/tmp/redis_test.db' pass def test_default_page(self): """ test_default_page """ app = TestApp(appbottle) response = app.get('/static/index.html') self.assertEqual('200 OK', response.status) self.assertTrue('<title>Jogo Eica - Cadastro</title>' in response.text, response.text[:1000]) def test_default_redirect(self): """test_default_redirect """ app = TestApp(appbottle) response = app.get('/') self.assertEqual('302 Found', response.status) def test_register(self): """test_register """ # app = TestApp(appbottle) # response = app.get('/static/register?doc_id="10000001"&module=projeto2222') rec_id, response = self._get_id('3333') self.assertEqual('200 OK', response.status) self.assertTrue(rec_id in response, str(response)) # rec_id = str(response).split('ver = main("')[1].split('e0cb4e39e071")')[0] + 'e0cb4e39e071' expected_record = "{'module': 'projeto2222', 'user': 'projeto2222-lastcodename', 'idade': '00015'," received_record = cs.DRECORD.get(rec_id) assert expected_record in str(received_record),\ "{}: {}".format(rec_id, received_record) def _get_id(self, ref_id='e0cb4e39e071', url='/static/register?doc_id="10000001"&module=projeto2222'): """test_store """ app = TestApp(appbottle) user, idade, ano, sexo = 'projeto2222-lastcodename', '00015', '0009', 'outro' user_data = dict(doc_id=ref_id, user=user, idade=idade, ano=ano, sexo=sexo) response = app.get(url, params=user_data) return str(response).split('ver = main("')[1].split('")')[0], response def test_store(self): """test_store """ app = TestApp(appbottle) # response = app.get('/static/register?doc_id="10000001"&module=projeto2222') # rec_id = 
str(response).split('ver = main("')[1].split('e0cb4e39e071")')[0] + 'e0cb4e39e071' rec_id, _ = self._get_id() response = app.post('/record/store', self._pontua(rec_id)) self.assertEqual('200 OK', response.status) self.assertTrue('", "tempo": "20' in response, str(response)) # self.assertTrue('{"module": "projeto2222", "jogada": [{"carta": "2222",' in str(response), str(response)) expected_record = "{'module': 'projeto2222', 'user': 'projeto2222-lastcodename', 'idade': '00015'," received_record = str(response) assert expected_record.replace("'", '"') in received_record,\ "{}: {}".format(rec_id, received_record) def _pontua(self, ref_id): ct.LAST = ref_id jogada = {"doc_id": ref_id, "carta": 2222, "casa": 2222, "move": 2222, "ponto": 2222, "tempo": 2222, "valor": 2222} return jogada def test_pontos(self): rec_id, response = self._get_id() app = TestApp(appbottle) app.post('/record/store', self._pontua(rec_id)) ct.LAST = rec_id response = app.get('/pontos') self.assertEqual('200 OK', response.status) self.assertTrue('projeto2222-lastcodename' in response, str(response)) self.assertTrue('<h3>Idade: 10 Genero: outro Ano Escolar: 9</h3>' in response, str(response)) self.assertTrue('<td><span>2222<span></td>' in response, str(response)) if __name__ == '__main__': unittest.main()
labase/eica
tests/testwebfunctionaldb.py
Python
gpl-2.0
5,489
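# The functional tests above drive the Bottle application in-process through
# WebTest's TestApp, asserting on status strings such as '200 OK' and
# '302 Found' instead of opening real sockets. The same idea in a minimal,
# self-contained form:

import bottle
from webtest import TestApp

app = bottle.Bottle()


@app.get('/hello')
def hello():
    return {'msg': 'hi'}


test_app = TestApp(app)
response = test_app.get('/hello')
assert response.status == '200 OK'
assert response.json['msg'] == 'hi'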
import codecs


def funct(f_name):
    """Remove leading and trailing whitespace from file."""
    f_read = codecs.open(f_name, 'r')
    f_lines = f_read.readlines()
    out_lines = map(str.strip, f_lines)
    f_read.close()
    while True:
        o_write = raw_input("Create new file (c) or overwrite existing (o): ")
        if o_write.lower() == 'o':
            # f_name stays the same
            break
        elif o_write.lower() == 'c':
            f_name = raw_input("What is new file name? ")
            break
    f_write = codecs.open(f_name, 'w')
    for line in out_lines:
        f_write.write(line + '\n')
    f_write.close()
    print '"{}" has been written with no leading or trailing \
whitespace.'.format(f_name)


def funct_comp(f_name):
    """Remove leading and trailing whitespace from file w/ comprehension."""
    f_read = codecs.open(f_name, 'r')
    f_lines = f_read.readlines()
    print f_lines
    # out_lines = map(str.strip, f_lines)
    out_lines = [line.strip() for line in f_lines]
    print out_lines
    f_read.close()
    while True:
        o_write = raw_input("Create new file (c) or overwrite existing (o): ")
        if o_write.lower() == 'o':
            # f_name stays the same
            break
        elif o_write.lower() == 'c':
            f_name = raw_input("What is new file name? ")
            break
    f_write = codecs.open(f_name, 'w')
    for line in out_lines:
        f_write.write(line + '\n')
    f_write.close()
    print '"{}" has been written with no leading or trailing \
whitespace.'.format(f_name)
AmandaMoen/AmandaMoen
students/MichelleRascati/funct.py
Python
gpl-2.0
1,535
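# funct() and funct_comp() above do the same strip-and-rewrite job; the same
# idea in a compact variant that uses context managers so the files are closed
# even on error (still Python 2 style, matching the module above):

import codecs


def strip_file(f_name):
    """Strip leading/trailing whitespace from every line of f_name in place."""
    with codecs.open(f_name, 'r') as f_read:
        out_lines = [line.strip() for line in f_read]
    with codecs.open(f_name, 'w') as f_write:
        for line in out_lines:
            f_write.write(line + '\n')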
# Copyright (c) Meta Platforms, Inc. and affiliates.
# Copyright (c) Mercurial Contributors.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

from testutil.dott import feature, sh, testtmp  # noqa: F401


feature.require(["symlink"])

# https://bz.mercurial-scm.org/1438

sh % "hg init repo"
sh % "cd repo"

sh % "ln -s foo link"
sh % "hg add link"
sh % "hg ci -mbad link"
sh % "hg rm link"
sh % "hg ci -mok"
sh % "hg diff -g -r '0:1'" > "bad.patch"

sh % "hg up 0" == "1 files updated, 0 files merged, 0 files removed, 0 files unresolved"
sh % "hg import --no-commit bad.patch" == "applying bad.patch"

sh % "hg status" == r"""
    R link
    ? bad.patch"""
facebookexperimental/eden
eden/scm/tests/test-issue1438-t.py
Python
gpl-2.0
789
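# The test above is written in a small DSL where `sh % "cmd"` runs a command
# and `== "text"` asserts on its output. A toy sketch of how such operators
# can be built (this is not the actual testutil.dott implementation):

import subprocess


class ShellResult(object):
    def __init__(self, output):
        self.output = output

    def __eq__(self, expected):
        assert self.output.strip() == expected.strip(), self.output
        return True


class Shell(object):
    def __mod__(self, command):
        out = subprocess.check_output(command, shell=True)
        return ShellResult(out.decode('utf-8', 'replace'))


sh = Shell()
sh % "echo hello" == "hello"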
import pytest

from cfme.cloud.provider.azure import AzureProvider
from cfme.markers.env_markers.provider import ONE_PER_CATEGORY
from cfme.networks.views import BalancerView
from cfme.networks.views import CloudNetworkView
from cfme.networks.views import FloatingIpView
from cfme.networks.views import NetworkPortView
from cfme.networks.views import NetworkRouterView
from cfme.networks.views import SecurityGroupView
from cfme.networks.views import SubnetView
from cfme.utils.appliance.implementations.ui import navigate_to

pytestmark = [
    pytest.mark.usefixtures('setup_provider'),
    pytest.mark.provider([AzureProvider], selector=ONE_PER_CATEGORY, scope='module')
]

network_collections = [
    'network_providers',
    'cloud_networks',
    'network_subnets',
    'network_ports',
    'network_security_groups',
    'network_routers',
    'network_floating_ips'
]

network_test_items = [
    ("Cloud Networks", CloudNetworkView),
    ("Cloud Subnets", SubnetView),
    ("Network Routers", NetworkRouterView),
    ("Security Groups", SecurityGroupView),
    ("Floating IPs", FloatingIpView),
    ("Network Ports", NetworkPortView),
    ("Load Balancers", BalancerView)
]


def child_visibility(appliance, network_provider, relationship, view):
    network_provider_view = navigate_to(network_provider, 'Details')
    if network_provider_view.entities.relationships.get_text_of(relationship) == "0":
        pytest.skip("There are no relationships for {}".format(relationship))
    network_provider_view.entities.relationships.click_at(relationship)
    relationship_view = appliance.browser.create_view(view)
    try:
        if relationship != "Floating IPs":
            assert relationship_view.entities.entity_names
        else:
            assert relationship_view.entities.entity_ids
        actual_visibility = True
    except AssertionError:
        actual_visibility = False
    return actual_visibility


@pytest.mark.parametrize("relationship,view", network_test_items,
                         ids=[rel[0] for rel in network_test_items])
def test_tagvis_network_provider_children(provider, appliance, request, relationship, view, tag,
                                          user_restricted):
    """
    Polarion:
        assignee: anikifor
        initialEstimate: 1/8h
        casecomponent: Tagging
    """
    collection = appliance.collections.network_providers.filter({'provider': provider})
    network_provider = collection.all()[0]
    network_provider.add_tag(tag=tag)
    request.addfinalizer(lambda: network_provider.remove_tag(tag=tag))
    actual_visibility = child_visibility(appliance, network_provider, relationship, view)
    assert actual_visibility
    with user_restricted:
        actual_visibility = child_visibility(appliance, network_provider, relationship, view)
        assert not actual_visibility


@pytest.fixture(params=network_collections, scope='module')
def entity(request, appliance):
    collection_name = request.param
    item_collection = getattr(appliance.collections, collection_name)
    items = item_collection.all()
    if items:
        return items[0]
    else:
        pytest.skip("No content found for test")


@pytest.mark.parametrize('visibility', [True, False], ids=['visible', 'notVisible'])
def test_network_tagvis(check_item_visibility, entity, visibility):
    """ Tests that a network provider and its items honor tag visibility

    Prerequisites:
        Catalog, tag, role, group and restricted user should be created

    Steps:
        1. As admin add tag
        2. Login as restricted user, item is visible for user
        3. As admin remove tag
        4. Login as restricted user, item is not visible for user

    Polarion:
        assignee: anikifor
        initialEstimate: 1/4h
        casecomponent: Tagging
    """
    check_item_visibility(entity, visibility)
RedHatQE/cfme_tests
cfme/tests/networks/test_tag_tagvis.py
Python
gpl-2.0
3,856
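# The (label, view-class) pairs above feed pytest.mark.parametrize, reusing
# the human-readable label as the test id so failures name the UI view rather
# than an index. The same pattern in isolation:

import pytest

items = [
    ("squares", lambda x: x * x),
    ("doubles", lambda x: x + x),
]


@pytest.mark.parametrize("label,func", items, ids=[i[0] for i in items])
def test_pairs(label, func):
    assert callable(func)
    assert func(2) == 4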
import unittest
from mock import Mock, patch
from expyrimenter import Executor
from expyrimenter.runnable import Runnable
from subprocess import CalledProcessError
from concurrent.futures import ThreadPoolExecutor
import re


class TestExecutor(unittest.TestCase):
    output = 'TestExecutor output'
    outputs = ['TestExecutor 1', 'TestExecutor 2']

    def test_runnable_output(self):
        executor = Executor()
        with patch.object(Runnable, 'run', return_value=TestExecutor.output):
            executor.run(Runnable())
            executor.wait()
        results = executor.results
        self.assertEqual(1, len(results))
        self.assertEqual(TestExecutor.output, results[0])

    def test_runnable_outputs(self):
        executor = Executor()
        runnable = Runnable()
        with patch.object(Runnable, 'run', side_effect=TestExecutor.outputs):
            executor.run(runnable)
            executor.run(runnable)
            executor.wait()
        results = executor.results
        self.assertListEqual(TestExecutor.outputs, results)

    def test_function_output(self):
        executor = Executor()
        executor.run_function(background_function)
        executor.wait()
        output = executor.results[0]
        self.assertEqual(TestExecutor.output, output)

    def test_function_outputs(self):
        executor = Executor()
        runnable = Runnable()
        with patch.object(Runnable, 'run', side_effect=TestExecutor.outputs):
            executor.run(runnable)
            executor.run(runnable)
            executor.wait()
        results = executor.results
        self.assertListEqual(TestExecutor.outputs, results)

    def test_against_runnable_memory_leak(self):
        executor = Executor()
        with patch.object(Runnable, 'run'):
            executor.run(Runnable())
            executor.wait()
        self.assertEqual(0, len(executor._future_runnables))

    def test_against_function_memory_leak(self):
        executor = Executor()
        executor.run_function(background_function)
        executor.wait()
        self.assertEqual(0, len(executor._function_titles))

    def test_if_shutdown_shutdowns_executor(self):
        executor = Executor()
        executor._executor = Mock()
        executor.shutdown()
        executor._executor.shutdown.called_once_with()

    def test_if_shutdown_clears_function_resources(self):
        executor = Executor()
        executor._function_titles = Mock()
        executor.shutdown()
        executor._function_titles.clear.assert_called_once_with()

    def test_if_shutdown_clears_runnable_resources(self):
        executor = Executor()
        executor._future_runnables = Mock()
        executor.shutdown()
        executor._future_runnables.clear.assert_called_once_with()

    def test_exception_logging(self):
        executor = Executor()
        executor._log = Mock()
        with patch.object(Runnable, 'run', side_effect=Exception):
            executor.run(Runnable)
            executor.wait()
        self.assertEqual(1, executor._log.error.call_count)

    @patch.object(ThreadPoolExecutor, '__init__', return_value=None)
    def test_specified_max_workers(self, pool_mock):
        max = 42
        Executor(max)
        pool_mock.assert_called_once_with(42)

    def test_calledprocesserror_logging(self):
        executor = Executor()
        executor._log = Mock()
        exception = CalledProcessError(returncode=1, cmd='command')
        with patch.object(Runnable, 'run', side_effect=exception):
            executor.run(Runnable)
            executor.wait()
        self.assertEqual(1, executor._log.error.call_count)

    def test_if_logged_title_is_hidden_if_it_equals_command(self):
        command = 'command'
        runnable = Runnable()
        runnable.title = command
        exception = CalledProcessError(returncode=1, cmd=command)
        runnable.run = Mock(side_effect=exception)
        executor = Executor()
        executor._log = Mock()
        executor.run(runnable)
        executor.wait()
        executor._log.error.assert_called_once_with(Matcher(has_not_title))

    def test_logged_title_when_it_differs_from_command(self):
        command, title = 'command', 'title'
        runnable = Runnable()
        runnable.title = title
        exception = CalledProcessError(returncode=1, cmd=command)
        runnable.run = Mock(side_effect=exception)
        executor = Executor()
        executor._log = Mock()
        executor.run(runnable)
        executor.wait()
        executor._log.error.assert_called_once_with(Matcher(has_title))


def has_title(msg):
    return re.match("(?ims).*Title", msg) is not None


def has_not_title(msg):
    return re.match("(?ims).*Title", msg) is None


class Matcher:
    def __init__(self, compare):
        self.compare = compare

    def __eq__(self, msg):
        return self.compare(msg)


def background_function():
    return TestExecutor.output


if __name__ == '__main__':
    unittest.main()
cemsbr/expyrimenter
tests/test_executor.py
Python
gpl-3.0
4,989
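# Several tests above lean on mock's side_effect semantics: a list makes
# successive calls return successive values, while an exception instance (or
# class) makes the call raise. Both behaviours in miniature:

from mock import Mock

m = Mock(side_effect=['first', 'second'])
assert m() == 'first'
assert m() == 'second'

boom = Mock(side_effect=ValueError('no more'))
try:
    boom()
except ValueError:
    pass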
from django.contrib import admin

# Register your models here.
from .models import (
    Environment, EnvironmentAdmin,
    Component, ComponentAdmin,
    Environment_property, Environment_propertyAdmin,
    Component_attribute, Component_attributeAdmin,
)

admin.site.register(Environment, EnvironmentAdmin)
admin.site.register(Component, ComponentAdmin)
admin.site.register(Environment_property, Environment_propertyAdmin)
admin.site.register(Component_attribute, Component_attributeAdmin)
wdq007/supreme-garbanzo
fkkenv/fukoku/env/admin.py
Python
gpl-3.0
461
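# Each admin.site.register() call above pairs a model with an explicit
# ModelAdmin class. A sketch of what one such pairing typically looks like
# inside an app's models.py/admin.py (the field here is illustrative; the
# project's real definitions live in its .models module):

from django.contrib import admin
from django.db import models


class Environment(models.Model):
    name = models.CharField(max_length=100)


class EnvironmentAdmin(admin.ModelAdmin):
    list_display = ('name',)
    search_fields = ('name',)


admin.site.register(Environment, EnvironmentAdmin)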
# This file is part of HDL Checker.
#
# Copyright (c) 2015 - 2019 suoto (Andre Souto)
#
# HDL Checker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HDL Checker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HDL Checker. If not, see <http://www.gnu.org/licenses/>.
"HDL Checker installation script"

import setuptools  # type: ignore

import versioneer

LONG_DESCRIPTION = open("README.md", "rb").read().decode(encoding='utf8', errors='replace')

CLASSIFIERS = """\
Development Status :: 5 - Production/Stable
Environment :: Console
Intended Audience :: Developers
License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Operating System :: Microsoft :: Windows
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Topic :: Software Development
Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)
Topic :: Text Editors :: Integrated Development Environments (IDE)
"""

setuptools.setup(
    name='hdl_checker',
    version=versioneer.get_version(),
    description='HDL code checker',
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    author='Andre Souto',
    author_email='[email protected]',
    url='https://github.com/suoto/hdl_checker',
    license='GPLv3',
    keywords='VHDL Verilog SystemVerilog linter LSP language server protocol vimhdl vim-hdl',
    platforms='any',
    packages=setuptools.find_packages(),
    install_requires=[
        'argcomplete',
        'argparse',
        'backports.functools_lru_cache; python_version<"3.2"',
        'bottle>=0.12.9',
        'enum34>=1.1.6; python_version<"3.3"',
        'future>=0.14.0',
        'futures; python_version<"3.2"',
        'prettytable>=0.7.2',
        'pygls==0.9.1',
        'requests>=2.20.0',
        'six>=1.10.0',
        'tabulate>=0.8.5',
        'typing>=3.7.4',
        'waitress>=0.9.0',
    ],
    cmdclass=versioneer.get_cmdclass(),
    entry_points={
        'console_scripts': ['hdl_checker=hdl_checker.server:main', ]
    },
    classifiers=CLASSIFIERS.splitlines(),
)
suoto/hdlcc
setup.py
Python
gpl-3.0
3,321
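# Requirements above such as 'futures; python_version<"3.2"' use PEP 508
# environment markers, so pip only pulls backports where they are needed.
# The third-party `packaging` library evaluates the same marker syntax:

from packaging.markers import Marker

print(Marker('python_version < "3.2"').evaluate())  # False on any modern CPython
print(Marker('os_name == "posix"').evaluate())      # True on Linux and macOS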
#
# -*- coding: utf-8 -*-
#
# pyllage
#
# Copyright (C) 2013 barisumog at gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

import html.parser


class PyllageParser(html.parser.HTMLParser):

    def __init__(self):
        super().__init__()
        self.counter = 1
        self.stack = {1: {"tag": "", "attrs": "", "data": []}}

    def handle_previous_tag(self):
        """Checks whether previously handled tag was significant."""
        previous_tag = self.stack[self.counter]
        if not (previous_tag["attrs"] or previous_tag["data"]):
            del self.stack[self.counter]
            self.counter -= 1

    def handle_starttag(self, tag, attrs):
        self.handle_previous_tag()
        self.counter += 1
        attrs_string = " | ".join("{}={}".format(*attr) for attr in attrs)
        self.stack[self.counter] = {"tag": tag, "attrs": attrs_string, "data": []}

    def handle_data(self, data):
        data = data.strip()
        if data:
            self.stack[self.counter]["data"].append(data)

    def handle_entityref(self, name):
        self.stack[self.counter]["data"].append(self.unescape("&{};".format(name)))

    def handle_charref(self, name):
        self.stack[self.counter]["data"].append(self.unescape("&#{};".format(name)))

    def freeze_data(self):
        """Converts all data lists into string."""
        self.handle_previous_tag()
        for key in self.stack:
            self.stack[key]["data"] = "".join(self.stack[key]["data"])


def parse(html):
    """Instantiate a parser to process html, return the stack."""
    parser = PyllageParser()
    parser.feed(html)
    parser.freeze_data()
    return parser.stack
barisumog/pyllage
pyllage/parser.py
Python
gpl-3.0
2,264
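# Usage of the parser above: feed() walks the markup, freeze_data() joins the
# buffered text fragments, and parse() returns the numbered tag stack. For
# example (assuming parse from the module above is in scope):

stack = parse('<div class="box">Hello</div>')
for key in sorted(stack):
    print(key, stack[key])
# -> 1 {'tag': 'div', 'attrs': 'class=box', 'data': 'Hello'}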
# Copyright 2012, Tim Bielawa <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import datetime import signal import sys import termios import time import tty from os import ( getpgrp, isatty, tcgetpgrp, ) from ansible.errors import AnsibleError from ansible.module_utils._text import to_text, to_native from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.six import PY3 from ansible.plugins.action import ActionBase from ansible.utils.display import Display display = Display() try: import curses # Nest the try except since curses.error is not available if curses did not import try: curses.setupterm() HAS_CURSES = True except curses.error: HAS_CURSES = False except ImportError: HAS_CURSES = False if HAS_CURSES: MOVE_TO_BOL = curses.tigetstr('cr') CLEAR_TO_EOL = curses.tigetstr('el') else: MOVE_TO_BOL = b'\r' CLEAR_TO_EOL = b'\x1b[K' class AnsibleTimeoutExceeded(Exception): pass def timeout_handler(signum, frame): raise AnsibleTimeoutExceeded def clear_line(stdout): stdout.write(b'\x1b[%s' % MOVE_TO_BOL) stdout.write(b'\x1b[%s' % CLEAR_TO_EOL) def is_interactive(fd=None): if fd is None: return False if isatty(fd): # Compare the current process group to the process group associated # with terminal of the given file descriptor to determine if the process # is running in the background. return getpgrp() == tcgetpgrp(fd) else: return False class ActionModule(ActionBase): ''' pauses execution for a length or time, or until input is received ''' BYPASS_HOST_LOOP = True _VALID_ARGS = frozenset(('echo', 'minutes', 'prompt', 'seconds')) def run(self, tmp=None, task_vars=None): ''' run the pause action module ''' if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect duration_unit = 'minutes' prompt = None seconds = None echo = True echo_prompt = '' result.update(dict( changed=False, rc=0, stderr='', stdout='', start=None, stop=None, delta=None, echo=echo )) # Should keystrokes be echoed to stdout? if 'echo' in self._task.args: try: echo = boolean(self._task.args['echo']) except TypeError as e: result['failed'] = True result['msg'] = to_native(e) return result # Add a note saying the output is hidden if echo is disabled if not echo: echo_prompt = ' (output is hidden)' # Is 'prompt' a key in 'args'? if 'prompt' in self._task.args: prompt = "[%s]\n%s%s:" % (self._task.get_name().strip(), self._task.args['prompt'], echo_prompt) else: # If no custom prompt is specified, set a default prompt prompt = "[%s]\n%s%s:" % (self._task.get_name().strip(), 'Press enter to continue, Ctrl+C to interrupt', echo_prompt) # Are 'minutes' or 'seconds' keys that exist in 'args'? 
        # Keep the reported value of 'echo' in sync with the parsed argument;
        # the earlier result.update() captured only the default.
        result['echo'] = echo

        if 'minutes' in self._task.args or 'seconds' in self._task.args:
            try:
                if 'minutes' in self._task.args:
                    # The time() command operates in seconds so we need to
                    # recalculate for minutes=X values.
                    seconds = int(self._task.args['minutes']) * 60
                else:
                    seconds = int(self._task.args['seconds'])
                    duration_unit = 'seconds'

            except ValueError as e:
                result['failed'] = True
                result['msg'] = u"non-integer value given for prompt duration:\n%s" % to_text(e)
                return result

        ########################################################################
        # Begin the hard work!

        start = time.time()
        result['start'] = to_text(datetime.datetime.now())
        result['user_input'] = b''

        stdin_fd = None
        old_settings = None
        try:
            if seconds is not None:
                if seconds < 1:
                    seconds = 1

                # setup the alarm handler
                signal.signal(signal.SIGALRM, timeout_handler)
                signal.alarm(seconds)

                # show the timer and control prompts
                display.display("Pausing for %d seconds%s" % (seconds, echo_prompt))
                display.display("(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)\r")

                # show the prompt specified in the task
                if 'prompt' in self._task.args:
                    display.display(prompt)
            else:
                display.display(prompt)

            # save the attributes on the existing (duped) stdin so
            # that we can restore them later after we set raw mode
            stdin_fd = None
            stdout_fd = None
            try:
                if PY3:
                    stdin = self._connection._new_stdin.buffer
                    stdout = sys.stdout.buffer
                else:
                    stdin = self._connection._new_stdin
                    stdout = sys.stdout
                stdin_fd = stdin.fileno()
                stdout_fd = stdout.fileno()
            except (ValueError, AttributeError):
                # ValueError: someone is using a closed file descriptor as stdin
                # AttributeError: someone is using a null file descriptor as stdin on windoze
                stdin = None

            interactive = is_interactive(stdin_fd)

            if interactive:
                # grab actual Ctrl+C sequence
                try:
                    intr = termios.tcgetattr(stdin_fd)[6][termios.VINTR]
                except Exception:
                    # unsupported/not present, use default
                    intr = b'\x03'  # value for Ctrl+C

                # get backspace sequences
                try:
                    backspace = termios.tcgetattr(stdin_fd)[6][termios.VERASE]
                except Exception:
                    backspace = [b'\x7f', b'\x08']

                old_settings = termios.tcgetattr(stdin_fd)
                tty.setraw(stdin_fd)

                # Only set stdout to raw mode if it is a TTY. This is needed when redirecting
                # stdout to a file since a file cannot be set to raw mode.
                if isatty(stdout_fd):
                    tty.setraw(stdout_fd)

                # Only echo input if no timeout is specified
                if not seconds and echo:
                    new_settings = termios.tcgetattr(stdin_fd)
                    new_settings[3] = new_settings[3] | termios.ECHO
                    termios.tcsetattr(stdin_fd, termios.TCSANOW, new_settings)

                # flush the buffer to make sure no previous key presses
                # are read in below
                termios.tcflush(stdin, termios.TCIFLUSH)

            while True:
                if not interactive:
                    if seconds is None:
                        display.warning("Not waiting for response to prompt as stdin is not interactive")
                    if seconds is not None:
                        # Give the signal handler enough time to timeout
                        time.sleep(seconds + 1)
                    break

                try:
                    key_pressed = stdin.read(1)

                    if key_pressed == intr:  # value for Ctrl+C
                        clear_line(stdout)
                        raise KeyboardInterrupt

                    # read key presses and act accordingly
                    if key_pressed in (b'\r', b'\n'):
                        clear_line(stdout)
                        break
                    elif key_pressed in backspace:
                        # delete a character if backspace is pressed
                        result['user_input'] = result['user_input'][:-1]
                        clear_line(stdout)
                        if echo:
                            stdout.write(result['user_input'])
                        stdout.flush()
                    else:
                        result['user_input'] += key_pressed

                except KeyboardInterrupt:
                    signal.alarm(0)
                    display.display("Press 'C' to continue the play or 'A' to abort \r")
                    if self._c_or_a(stdin):
                        clear_line(stdout)
                        break

                    clear_line(stdout)

                    raise AnsibleError('user requested abort!')

        except AnsibleTimeoutExceeded:
            # this is the exception we expect when the alarm signal
            # fires, so we simply ignore it to move into the cleanup
            pass
        finally:
            # cleanup and save some information
            # restore the old settings on the duped stdin's file descriptor
            if stdin_fd is not None and old_settings is not None and isatty(stdin_fd):
                termios.tcsetattr(stdin_fd, termios.TCSADRAIN, old_settings)

        duration = time.time() - start
        result['stop'] = to_text(datetime.datetime.now())
        result['delta'] = int(duration)

        if duration_unit == 'minutes':
            duration = round(duration / 60.0, 2)
        else:
            duration = round(duration, 2)
        result['stdout'] = "Paused for %s %s" % (duration, duration_unit)
        result['user_input'] = to_text(result['user_input'], errors='surrogate_or_strict')
        return result

    def _c_or_a(self, stdin):
        while True:
            key_pressed = stdin.read(1)
            if key_pressed.lower() == b'a':
                return False
            elif key_pressed.lower() == b'c':
                return True
2ndQuadrant/ansible
lib/ansible/plugins/action/pause.py
Python
gpl-3.0
10,780
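The interactive core of the plugin above is the classic raw-mode terminal pattern: save the termios settings, switch stdin to raw mode, read single keypresses, and always restore in a finally block. A standalone sketch of that pattern (my code, not Ansible's; POSIX-only, run in a real terminal rather than a pipe):

# Minimal raw-mode keypress reader mirroring the pause plugin's approach.
import sys
import termios
import tty

def read_one_key():
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)  # save the cooked-mode settings
    try:
        tty.setraw(fd)                    # keypresses now arrive unbuffered
        return sys.stdin.read(1)          # blocks until one key is pressed
    finally:
        # always restore the terminal, as the plugin does in its finally block
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)

if __name__ == "__main__":
    print("press any key...")
    print("got %r" % read_one_key())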
import os
import unittest

from vsg.rules import package
from vsg import vhdlFile
from vsg.tests import utils

sTestDir = os.path.dirname(__file__)

lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_001_test_input.vhd'))

dIndentMap = utils.read_indent_file()

lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_001_test_input.fixed.vhd'), lExpected)


class test_package_rule(unittest.TestCase):

    def setUp(self):
        self.oFile = vhdlFile.vhdlFile(lFile)
        self.assertIsNone(eError)

        self.oFile.set_indent_map(dIndentMap)

    def test_rule_001(self):
        oRule = package.rule_001()
        self.assertTrue(oRule)
        self.assertEqual(oRule.name, 'package')
        self.assertEqual(oRule.identifier, '001')

        lExpected = [6]
        oRule.analyze(self.oFile)
        self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))

    def test_fix_rule_001(self):
        oRule = package.rule_001()

        oRule.fix(self.oFile)

        lActual = self.oFile.get_lines()

        self.assertEqual(lExpected, lActual)

        oRule.analyze(self.oFile)
        self.assertEqual(oRule.violations, [])
jeremiah-c-leary/vhdl-style-guide
vsg/tests/package/test_rule_001.py
Python
gpl-3.0
1,229
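For reference, this rule test can be exercised on its own; a minimal runner sketch, assuming the vsg package is importable from the current environment:

# Load and run just this rule's tests, equivalent to
# `python -m unittest vsg.tests.package.test_rule_001 -v`.
import unittest

suite = unittest.defaultTestLoader.loadTestsFromName('vsg.tests.package.test_rule_001')
unittest.TextTestRunner(verbosity=2).run(suite)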
import requests import xml.etree.ElementTree as ET from time import sleep class QualysAPI: """Class to simplify the making and handling of API calls to the Qualys platform Class Members ============= server : String : The FQDN of the API server (with https:// prefix) user : String : The username of an API user in the subscription password : String : The password of the API user proxy : String : The FQDN of the proxy server to be used for connections (with https:// prefix) debug : Boolean : If True, will output debug information to the console during member function execution enableProxy : Boolean : If True will force connections via the proxy defined in the 'proxy' class member callCount : Integer : The number of API calls made during the life of the API object Class Methods ============= __init__(svr, usr, passwd, proxy, enableProxy, debug) Called when an object of type QualysAPI is created svr : String : The FQDN of the API server (with https:// prefix). Default value = "" usr : String : The username of an API user in the subscription. Default value = "" passwd : String : The password of the API user. Default value = "" proxy : String : The FQDN of the proxy server to be used for connections (with https:// prefix) Default value = "" enableProxy : Boolean : If True, will force connections made via the proxy defined in the 'proxy' class member Default value = False debug : Boolean : If True, will output debug information to the console during member function execution Default value = False makeCall(url, payload, headers, retryCount) Make a Qualys API call and return the response in XML format as an ElementTree.Element object url : String : The full URL of the API request, including any URL encoded parameters NO DEFAULT VALUE, REQUIRED PARAMETER payload : String : The payload (body) of the API request Default value = "" headers : Dict : HTTP Request headers to be sent in the API call Default value = None retryCount : Integer : The number of times this call has been attempted. 
                               Used in rate and concurrency limit handling, not intended for use by users
                               Default value = 0

    Example :
        api = QualysAPI(svr='https://qualysapi.qualys.com', usr='username', passwd='password',
                        proxy='https://proxy.internal', enableProxy=True, debug=False)

        fullurl = '%s/full/path/to/api/call' % api.server

        api.makeCall(url=fullurl, payload='', headers={'X-Requested-With': 'python3'})
    """

    server: str
    user: str
    password: str
    proxy: str
    debug: bool
    enableProxy: bool
    callCount: int
    headers = {}
    sess: requests.Session

    def __init__(self, svr="", usr="", passwd="", proxy="", enableProxy=False, debug=False):
        # Set all member variables from the values passed in when the object is created
        self.server = svr
        self.user = usr
        self.password = passwd
        self.proxy = proxy
        self.enableProxy = enableProxy
        self.debug = debug
        self.callCount = 0

        # Create a session object with the requests library
        self.sess = requests.session()
        # Set the authentication credentials for the session to be the (username, password) tuple
        self.sess.auth = (self.user, self.password)
        # Add a default X-Requested-With header (most API calls require it, it doesn't hurt to have it in all calls)
        self.headers = {'X-Requested-With': 'python3/requests'}

    def makeCall(self, url, payload="", headers=None, retryCount=0):
        # Get the headers from our own session object
        rheaders = self.sess.headers
        # If extra headers were passed in, copy each of them into the session
        # headers so they are included in the request
        if headers is not None:
            for h in headers.keys():
                rheaders[h] = headers[h]

        # Create a Request object using the requests library
        r = requests.Request('POST', url, data=payload, headers=rheaders)
        # Prepare the request for sending
        prepped_req = self.sess.prepare_request(r)

        # If the proxy is enabled, send via the proxy; otherwise send direct
        if self.enableProxy:
            resp = self.sess.send(prepped_req, proxies={'https': self.proxy})
        else:
            resp = self.sess.send(prepped_req)

        if self.debug:
            print("QualysAPI.makeCall: Headers...")
            print("%s" % str(resp.headers))

        # Handle concurrency limit failures
        if 'X-Concurrency-Limit-Limit' in resp.headers.keys() and 'X-Concurrency-Limit-Running' in resp.headers.keys():
            climit = int(resp.headers['X-Concurrency-Limit-Limit'])
            crun = int(resp.headers['X-Concurrency-Limit-Running'])
            # If crun > climit then we have hit the concurrency limit.  We then wait for a number of seconds
            # depending on how many retry attempts there have been
            if crun > climit:
                print("QualysAPI.makeCall: Concurrency limit hit. %s/%s running calls" % (crun, climit))
                retryCount = retryCount + 1
                if retryCount > 15:
                    print("QualysAPI.makeCall: Retry count > 15, waiting 60 seconds")
                    waittime = 60
                elif retryCount > 5:
                    print("QualysAPI.makeCall: Retry count > 5, waiting 30 seconds")
                    waittime = 30
                else:
                    print("QualysAPI.makeCall: Waiting 15 seconds")
                    waittime = 15

                # Sleep here
                sleep(waittime)
                print("QualysAPI.makeCall: Retrying (retryCount = %s)" % str(retryCount))
                # Retry with a self-referential call and return its result directly.
                # makeCall returns a parsed ElementTree.Element, not a requests
                # response, so the retried result must not fall through to the
                # header checks below.
                return self.makeCall(url=url, payload=payload, headers=headers, retryCount=retryCount)

        # Handle rate limit failures
        if 'X-RateLimit-ToWait-Sec' in resp.headers.keys():
            # Header values are strings, so convert before comparing
            if int(resp.headers['X-RateLimit-ToWait-Sec']) > 0:
                # A value > 0 means we have to wait some time, so first we
                # increment the retryCount
                retryCount = retryCount + 1
                # Get the number of seconds to wait from the response header.  Add to this a number of seconds
                # depending on how many times we have already tried this call
                waittime = int(resp.headers['X-RateLimit-ToWait-Sec'])
                print("QualysAPI.makeCall: Rate limit reached, suggested wait time: %s seconds" % waittime)
                if retryCount > 15:
                    print("QualysAPI.makeCall: Retry Count > 15, adding 60 seconds to wait time")
                    waittime = waittime + 60
                elif retryCount > 5:
                    print("QualysAPI.makeCall: Retry Count > 5, adding 30 seconds to wait time")
                    waittime = waittime + 30

                # Sleep here
                sleep(waittime)
                print("QualysAPI.makeCall: Retrying (retryCount = %s)" % str(retryCount))
                # As above, return the retried (already parsed) result directly
                return self.makeCall(url=url, payload=payload, headers=headers, retryCount=retryCount)

        # Increment the API call count (failed calls are not included in the count)
        self.callCount = self.callCount + 1

        # Return the response as an ElementTree XML object
        return ET.fromstring(resp.text)
Qualys/community
scan-blackout/QualysAPI.py
Python
gpl-3.0
8,803
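Since makeCall returns a parsed xml.etree.ElementTree.Element, callers query the XML directly. A hedged sketch of typical use; the endpoint path and element names follow the Qualys VM v2 scan-list API but should be treated as illustrative rather than a documented contract:

# Illustrative only: the endpoint path and XML element names vary per API.
api = QualysAPI(svr='https://qualysapi.qualys.com', usr='username', passwd='password')
fullurl = '%s/api/2.0/fo/scan/?action=list' % api.server
root = api.makeCall(url=fullurl, headers={'X-Requested-With': 'python3/requests'})
for ref in root.findall('.//SCAN/REF'):
    print(ref.text)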
# -*- coding: utf-8 -*-

__author__ = 'Patrick Michl'
__email__ = '[email protected]'
__license__ = 'GPLv3'

import nemoa
import numpy

class Links:
    """Class to unify common ann link attributes."""

    params = {}

    def __init__(self):
        pass

    @staticmethod
    def energy(dSrc, dTgt, src, tgt, links, calc='mean'):
        """Return link energy as numpy array.

        Note: the 'calc' argument is accepted for interface compatibility
        but is not currently used by this implementation.
        """

        if src['class'] == 'gauss':
            M = - links['A'] * links['W'] \
                / numpy.sqrt(numpy.exp(src['lvar'])).T
        elif src['class'] == 'sigmoid':
            M = - links['A'] * links['W']
        else:
            raise ValueError('unsupported unit class')

        return numpy.einsum('ij,ik,jk->ijk', dSrc, dTgt, M)

    @staticmethod
    def get_updates(data, model):
        """Return weight updates of a link layer."""

        D = numpy.dot(data[0].T, data[1]) / float(data[1].size)
        M = numpy.dot(model[0].T, model[1]) / float(data[1].size)

        return { 'W': D - M }

    @staticmethod
    def get_updates_delta(data, delta):

        return { 'W': -numpy.dot(data.T, delta) / float(data.size) }
fishroot/nemoa
nemoa/system/commons/links.py
Python
gpl-3.0
1,111
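A quick shape check for the static methods above; the array sizes are arbitrary and the snippet is a sketch, not part of nemoa:

# Verify the shapes produced by Links.energy and Links.get_updates.
import numpy
from nemoa.system.commons.links import Links

rng = numpy.random.default_rng(0)
n, i, k = 10, 4, 3                    # samples, source units, target units
dSrc, dTgt = rng.random((n, i)), rng.random((n, k))
src = {'class': 'sigmoid'}
links = {'A': numpy.ones((i, k)), 'W': rng.random((i, k))}

E = Links.energy(dSrc, dTgt, src, None, links)
print(E.shape)                        # (10, 4, 3): one energy per sample and link

updates = Links.get_updates((dSrc, dTgt), (dSrc, dTgt))
print(updates['W'].shape)             # (4, 3): all zeros here since data == model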
#!/usr/bin/env python3
# Note: the original script mixed Python 2 print statements with Python 3
# print calls; this version is normalized to Python 3, which also requires
# encoding the pattern text before hashing and piping it to the subprocess.

import glob
import os.path
import re
import hashlib

from bs4 import BeautifulSoup
from subprocess import Popen, PIPE, STDOUT

root = "/home/alex/tidalcycles.github.io/_site/"

dnmatcher = re.compile(r'^\s*d[0-9]\s*(\$\s*)?')
crmatcherpre = re.compile(r'^[\s\n\r]*')
crmatcherpost = re.compile(r'[\s\n\r]*$')
sizematcher = re.compile(r'\bsize\b')

outpath = "../patterns/"

for fn in glob.glob(os.path.join(root, "*.html")):
    soup = BeautifulSoup(open(fn), 'lxml')
    patterns = soup.find_all("div", "render")
    if len(patterns) > 0:
        print(fn + " (" + str(len(patterns)) + ")")
        for pattern in patterns:
            code = pattern.get_text()
            code = crmatcherpre.sub('', code)
            code = crmatcherpost.sub('', code)
            # hashlib requires bytes under Python 3
            digest = hashlib.md5(code.encode('utf-8')).hexdigest()
            code = sizematcher.sub('Sound.Tidal.Context.size', code)
            outfn = outpath + digest + ".mp3"
            if not os.path.exists(outfn):
                print("building outfn: " + outfn)
                print("digest:" + digest)
                print("code >>" + code + "<<")
                code = dnmatcher.sub('', code)
                p = Popen(["./runpattern", outfn], stdout=PIPE, stdin=PIPE, stderr=STDOUT)
                # communicate() on a bytes pipe takes bytes in and gives bytes out
                tidalout = p.communicate(input=code.encode('utf-8'))[0]
                print(tidalout.decode('utf-8', errors='replace'))
                if p.returncode == 0:
                    print("worked> " + outfn)
                else:
                    print("did not work.")
tidalcycles/tidalcycles.github.io
bin/build_examples.py
Python
gpl-3.0
1,514
# This file is part of Codetrawl # Copyright (C) 2015 Nathaniel Smith <[email protected]> # See file LICENSE.txt for license information. """Usage: codetrawl.dump PATTERN FILE [FILE...] where PATTERN is a Python format string like "{raw_url}", with allowed keys: - service - query - repo - path - raw_url - content """ import sys import docopt from .read import read_matches if __name__ == "__main__": args = docopt.docopt(__doc__) for match in read_matches(args["FILE"]): sys.stdout.write(args["PATTERN"].format(**match)) sys.stdout.write("\n")
njsmith/codetrawl
codetrawl/dump.py
Python
gpl-3.0
587
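A programmatic equivalent of the CLI above, using the same read_matches helper; the input file name is hypothetical:

# Print selected fields from a saved codetrawl results file.
from codetrawl.read import read_matches

for match in read_matches(["github_results.jsonl"]):
    print("{service} {repo} {path}".format(**match))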
# -*- coding: utf-8 -*-

# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.

# Standard library imports :

import platform

# Third party imports :

from PyQt5.QtGui import QIcon, QFont, QFontDatabase
from PyQt5.QtCore import QSize


class StyleDB(object):
    def __init__(self):

        # ---- frame

        self.frame = 22
        self.HLine = 52
        self.VLine = 53
        self.sideBarWidth = 275

        # ----- colors

        self.red = '#C83737'
        self.lightgray = '#E6E6E6'
        self.rain = '#0000CC'
        self.snow = '0.7'
        self.wlvl = '#0000CC'  # '#000099'

        if platform.system() == 'Windows':
            self.font1 = QFont('Segoe UI', 11)  # Calibri, Cambria
            self.font_console = QFont('Segoe UI', 9)
            self.font_menubar = QFont('Segoe UI', 10)
        elif platform.system() == 'Linux':
            self.font1 = QFont('Ubuntu', 11)
            self.font_console = QFont('Ubuntu', 9)
            self.font_menubar = QFont('Ubuntu', 10)
        else:
            # Fallback so the font attributes are always defined on other
            # platforms (e.g. macOS); Qt substitutes a close family.
            self.font1 = QFont('Helvetica', 11)
            self.font_console = QFont('Helvetica', 9)
            self.font_menubar = QFont('Helvetica', 10)

#        database = QFontDatabase()
#        print database.families()

        if platform.system() == 'Windows':
            self.fontfamily = "Segoe UI"  # "Cambria" #"Calibri" #"Segoe UI""
        elif platform.system() == 'Linux':
            self.fontfamily = "Ubuntu"
        else:
            self.fontfamily = "Helvetica"

#        self.fontSize1.setPointSize(11)

        # 17 = QtGui.QFrame.Box | QtGui.QFrame.Plain
        # 22 = QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain
        # 20 = QtGui.QFrame.HLine | QtGui.QFrame.Plain
        # 52 = QtGui.QFrame.HLine | QtGui.QFrame.Sunken
        # 53 = QtGui.QFrame.VLine | QtGui.QFrame.Sunken
jnsebgosselin/WHAT
gwhat/common/styles.py
Python
gpl-3.0
1,792
import unittest import wire class TestSQLString(unittest.TestCase): def setUp(self): '''Sets up the test case''' self.sql = wire.SQLString def test_pragma(self): '''Tests the PRAGMA SQL generation''' self.assertEqual(self.sql.pragma("INTEGRITY_CHECK(10)"), "PRAGMA INTEGRITY_CHECK(10)") self.assertEqual(self.sql.checkIntegrity(5), "PRAGMA INTEGRITY_CHECK(5)") def test_createTable(self): '''Tests the CREATE TABLE SQL generation''' table_outputs = ["CREATE TABLE test (id INT NOT NULL,username VARCHAR(255) DEFAULT 'default_user')", "CREATE TABLE test (username VARCHAR(255) DEFAULT 'default_user',id INT NOT NULL)"] temp_table_outputs = ["CREATE TEMPORARY TABLE test_temp (value REAL DEFAULT 0.0,time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)", "CREATE TEMPORARY TABLE test_temp (time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,value REAL DEFAULT 0.0)"] self.assertIn(self.sql.createTable("test", False, id = "INT", username = ["VARCHAR(255)", "'default_user'"]), table_outputs) self.assertIn(self.sql.createTable("test_temp", True, value = ["REAL", 0.0], time = ["TIMESTAMP", "CURRENT_TIMESTAMP"]), temp_table_outputs) # include a Temp table test (False --> True) def test_dropTable(self): '''Tests the DROP TABLE SQL generation''' self.assertEqual(self.sql.dropTable("table_drop"), "DROP TABLE table_drop") self.assertEqual(self.sql.dropTable("some_other_table"), "DROP TABLE some_other_table") def test_renameTable(self): '''Tests the ALTER TABLE RENAME SQL generation''' self.assertEqual(self.sql.rename("orig_table", "new_table"), "ALTER TABLE orig_table RENAME TO new_table") if __name__ == '__main__': unittest.main()
panchr/wire
wire/tests/sqlstring_test.py
Python
gpl-3.0
1,665
### # Copyright (c) 2004-2005, Kevin Murphy # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### import SOAP import supybot.utils as utils from supybot.commands import * import supybot.callbacks as callbacks class UrbanDict(callbacks.Plugin): threaded = True server = SOAP.SOAPProxy('http://api.urbandictionary.com/soap') def _licenseCheck(self, irc): license = self.registryValue('licenseKey') if not license: irc.error('You must have a free UrbanDictionary API license key ' 'in order to use this command. You can get one at ' '<http://www.urbandictionary.com/api.php>. Once you ' 'have one, you can set it with the command ' '"config supybot.plugins.UrbanDict.licenseKey <key>".', Raise=True) return license def urbandict(self, irc, msg, args, words): """<phrase> Returns the definition and usage of <phrase> from UrbanDictionary.com. """ license = self._licenseCheck(irc) definitions = self.server.lookup(license, ' '.join(words)) if not len(definitions): irc.error('No definition found.', Raise=True) word = definitions[0].word definitions = ['%s (%s)' % (d.definition, d.example) for d in definitions] irc.reply(utils.web.htmlToText('%s: %s' % (word, '; '.join(definitions)))) urbandict = wrap(urbandict, [many('something')]) def _define(self, irc, getDefinition, license): definition = getDefinition(license) word = definition.word definitions = ['%s (%s)' % (definition.definition, definition.example)] irc.reply(utils.web.htmlToText('%s: %s' % (word, '; '.join(definitions)))) def daily(self, irc, msg, args): """takes no arguments Returns the definition and usage of the daily phrase from UrbanDictionary.com. """ license = self._licenseCheck(irc) self._define(irc, self.server.get_daily_definition, license) daily = wrap(daily) def random(self, irc, msg, args): """takes no arguments Returns the definition and usage of a random phrase from UrbanDictionary.com. """ license = self._licenseCheck(irc) self._define(irc, self.server.get_random_definition, license) random = wrap(random) Class = UrbanDict # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
kg-bot/SupyBot
plugins/UrbanDict/plugin.py
Python
gpl-3.0
4,100