Dataset schema (column: type, observed range or classes):

  repo_name      string, length 5 to 100
  path           string, length 4 to 299
  copies         string, 990 distinct values
  size           string, length 4 to 7
  content        string, length 666 to 1.03M
  license        string, 15 distinct values
  hash           int64, -9,223,351,895,964,839,000 to 9,223,297,778B (B = billion, i.e. about 9.2e18)
  line_mean      float64, 3.17 to 100
  line_max       int64, 7 to 1k
  alpha_frac     float64, 0.25 to 0.98
  autogenerated  bool, 1 class
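For orientation, here is a minimal sketch of how rows with this schema could be consumed, assuming the dump is served through the Hugging Face `datasets` library; the dataset path used below is hypothetical.

# A minimal sketch, assuming the rows are served via the Hugging Face
# `datasets` library; the path "user/python-code-corpus" is hypothetical.
from datasets import load_dataset

ds = load_dataset("user/python-code-corpus", split="train")  # hypothetical path

# Each row carries the columns listed in the schema above.
for row in ds.select(range(3)):
    print(row["repo_name"], row["path"], row["license"], row["size"])

# Example filter: drop autogenerated files and mostly non-alphabetic content.
clean = ds.filter(lambda r: not r["autogenerated"] and r["alpha_frac"] > 0.5)
print(len(clean), "rows kept")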
repo_name: aplicatii-romanesti/allinclusive-kodi-pi
path: .kodi/addons/plugin.video.movie4k/plugintools.py
copies: 1
size: 19027
content:
# -*- coding: utf-8 -*- #--------------------------------------------------------------------------- # Plugin Tools v1.0.8 #--------------------------------------------------------------------------- # License: GPL (http://www.gnu.org/licenses/gpl-3.0.html) # Based on code from youtube, parsedom and pelisalacarta addons # Author: # Jesús # [email protected] # http://www.mimediacenter.info/plugintools #--------------------------------------------------------------------------- # Changelog: # 1.0.0 # - First release # 1.0.1 # - If find_single_match can't find anything, it returns an empty string # - Remove addon id from this module, so it remains clean # 1.0.2 # - Added parameter on "add_item" to say that item is playable # 1.0.3 # - Added direct play # - Fixed bug when video isPlayable=True # 1.0.4 # - Added get_temp_path, get_runtime_path, get_data_path # - Added get_setting, set_setting, open_settings_dialog and get_localized_string # - Added keyboard_input # - Added message # 1.0.5 # - Added read_body_and_headers for advanced http handling # - Added show_picture for picture addons support # - Added optional parameters "title" and "hidden" to keyboard_input # 1.0.6 # - Added fanart, show, episode and infolabels to add_item # 1.0.7 # - Added set_view function # 1.0.8 # - Added selector #--------------------------------------------------------------------------- import xbmc import xbmcplugin import xbmcaddon import xbmcgui import urllib import urllib2 import re import sys import os import time import socket from StringIO import StringIO import gzip module_log_enabled = False http_debug_log_enabled = False LIST = "list" THUMBNAIL = "thumbnail" MOVIES = "movies" TV_SHOWS = "tvshows" SEASONS = "seasons" EPISODES = "episodes" OTHER = "other" # Suggested view codes for each type from different skins (initial list thanks to xbmcswift2 library) ALL_VIEW_CODES = { 'list': { 'skin.confluence': 50, # List 'skin.aeon.nox': 50, # List 'skin.droid': 50, # List 'skin.quartz': 50, # List 'skin.re-touched': 50, # List }, 'thumbnail': { 'skin.confluence': 500, # Thumbnail 'skin.aeon.nox': 500, # Wall 'skin.droid': 51, # Big icons 'skin.quartz': 51, # Big icons 'skin.re-touched': 500, #Thumbnail }, 'movies': { 'skin.confluence': 500, # 500 Thumbnail # 515 Media Info 3 'skin.aeon.nox': 500, # Wall 'skin.droid': 51, # Big icons 'skin.quartz': 52, # Media info 'skin.re-touched': 500, #Thumbnail }, 'tvshows': { 'skin.confluence': 500, # Thumbnail 515, # Media Info 3 'skin.aeon.nox': 500, # Wall 'skin.droid': 51, # Big icons 'skin.quartz': 52, # Media info 'skin.re-touched': 500, #Thumbnail }, 'seasons': { 'skin.confluence': 50, # List 'skin.aeon.nox': 50, # List 'skin.droid': 50, # List 'skin.quartz': 52, # Media info 'skin.re-touched': 50, # List }, 'episodes': { 'skin.confluence': 504, # Media Info 'skin.aeon.nox': 518, # Infopanel 'skin.droid': 50, # List 'skin.quartz': 52, # Media info 'skin.re-touched': 550, # Wide }, } # Write something on XBMC log def log(message): xbmc.log(message) # Write this module messages on XBMC log def _log(message): if module_log_enabled: xbmc.log("plugintools."+message) # Parse XBMC params - based on script.module.parsedom addon def get_params(): _log("get_params") param_string = sys.argv[2] _log("get_params "+str(param_string)) commands = {} if param_string: split_commands = param_string[param_string.find('?') + 1:].split('&') for command in split_commands: _log("get_params command="+str(command)) if len(command) > 0: if "=" in command: split_command = command.split('=') key = 
split_command[0] value = urllib.unquote_plus(split_command[1]) commands[key] = value else: commands[command] = "" _log("get_params "+repr(commands)) return commands # Fetch text content from an URL def read(url): _log("read "+url) f = urllib2.urlopen(url) data = f.read() f.close() return data def read_body_and_headers(url, post=None, headers=[], follow_redirects=False, timeout=None): _log("read_body_and_headers "+url) if post is not None: _log("read_body_and_headers post="+post) if len(headers)==0: headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:18.0) Gecko/20100101 Firefox/18.0"]) # Start cookie lib ficherocookies = os.path.join( get_data_path(), 'cookies.dat' ) _log("read_body_and_headers cookies_file="+ficherocookies) cj = None ClientCookie = None cookielib = None # Let's see if cookielib is available try: _log("read_body_and_headers importing cookielib") import cookielib except ImportError: _log("read_body_and_headers cookielib no disponible") # If importing cookielib fails # let's try ClientCookie try: _log("read_body_and_headers importing ClientCookie") import ClientCookie except ImportError: _log("read_body_and_headers ClientCookie not available") # ClientCookie isn't available either urlopen = urllib2.urlopen Request = urllib2.Request else: _log("read_body_and_headers ClientCookie available") # imported ClientCookie urlopen = ClientCookie.urlopen Request = ClientCookie.Request cj = ClientCookie.MozillaCookieJar() else: _log("read_body_and_headers cookielib available") # importing cookielib worked # proxy_handler = urllib2.ProxyHandler({'http':'217.12.25.160:80'}) # opener = urllib2.build_opener(proxy_handler) # urlopen = opener.open urlopen = urllib2.urlopen Request = urllib2.Request cj = cookielib.MozillaCookieJar() # This is a subclass of FileCookieJar # that has useful load and save methods if cj is not None: # we successfully imported # one of the two cookie handling modules _log("read_body_and_headers Cookies enabled") if os.path.isfile(ficherocookies): _log("read_body_and_headers Reading cookie file") # if we have a cookie file already saved # then load the cookies into the Cookie Jar try: cj.load(ficherocookies) except: _log("read_body_and_headers Wrong cookie file, deleting...") os.remove(ficherocookies) # Now we need to get our Cookie Jar # installed in the opener; # for fetching URLs if cookielib is not None: _log("read_body_and_headers opener using urllib2 (cookielib)") # if we use cookielib # then we get the HTTPCookieProcessor # and install the opener in urllib2 if not follow_redirects: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=http_debug_log_enabled),urllib2.HTTPCookieProcessor(cj),NoRedirectHandler()) else: opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=http_debug_log_enabled),urllib2.HTTPCookieProcessor(cj)) urllib2.install_opener(opener) else: _log("read_body_and_headers opener using ClientCookie") # if we use ClientCookie # then we get the HTTPCookieProcessor # and install the opener in ClientCookie opener = ClientCookie.build_opener(ClientCookie.HTTPCookieProcessor(cj)) ClientCookie.install_opener(opener) # ------------------------------------------------- # Cookies instaladas, lanza la petición # ------------------------------------------------- # Contador inicio = time.clock() # Diccionario para las cabeceras txheaders = {} # Construye el request if post is None: _log("read_body_and_headers GET request") else: _log("read_body_and_headers POST request") # Añade las cabeceras 
_log("read_body_and_headers ---------------------------") for header in headers: _log("read_body_and_headers header %s=%s" % (str(header[0]),str(header[1])) ) txheaders[header[0]]=header[1] _log("read_body_and_headers ---------------------------") req = Request(url, post, txheaders) if timeout is None: handle=urlopen(req) else: #Disponible en python 2.6 en adelante --> handle = urlopen(req, timeout=timeout) #Para todas las versiones: try: import socket deftimeout = socket.getdefaulttimeout() socket.setdefaulttimeout(timeout) handle=urlopen(req) socket.setdefaulttimeout(deftimeout) except: import sys for line in sys.exc_info(): _log( "%s" % line ) # Actualiza el almacén de cookies cj.save(ficherocookies) # Lee los datos y cierra if handle.info().get('Content-Encoding') == 'gzip': buf = StringIO( handle.read()) f = gzip.GzipFile(fileobj=buf) data = f.read() else: data=handle.read() info = handle.info() _log("read_body_and_headers Response") returnheaders=[] _log("read_body_and_headers ---------------------------") for header in info: _log("read_body_and_headers "+header+"="+info[header]) returnheaders.append([header,info[header]]) handle.close() _log("read_body_and_headers ---------------------------") ''' # Lanza la petición try: response = urllib2.urlopen(req) # Si falla la repite sustituyendo caracteres especiales except: req = urllib2.Request(url.replace(" ","%20")) # Añade las cabeceras for header in headers: req.add_header(header[0],header[1]) response = urllib2.urlopen(req) ''' # Tiempo transcurrido fin = time.clock() _log("read_body_and_headers Downloaded in %d seconds " % (fin-inicio+1)) _log("read_body_and_headers body="+data) return data,returnheaders class NoRedirectHandler(urllib2.HTTPRedirectHandler): def http_error_302(self, req, fp, code, msg, headers): infourl = urllib.addinfourl(fp, headers, req.get_full_url()) infourl.status = code infourl.code = code return infourl http_error_300 = http_error_302 http_error_301 = http_error_302 http_error_303 = http_error_302 http_error_307 = http_error_302 # Parse string and extracts multiple matches using regular expressions def find_multiple_matches(text,pattern): _log("find_multiple_matches pattern="+pattern) matches = re.findall(pattern,text,re.DOTALL) return matches # Parse string and extracts first match as a string def find_single_match(text,pattern): _log("find_single_match pattern="+pattern) result = "" try: matches = re.findall(pattern,text, flags=re.DOTALL) result = matches[0] except: result = "" return result def add_item( action="" , title="" , plot="" , url="" , thumbnail="" , fanart="" , show="" , episode="" , extra="", page="", info_labels = None, isPlayable = False , folder=True ): _log("add_item action=["+action+"] title=["+title+"] url=["+url+"] thumbnail=["+thumbnail+"] fanart=["+fanart+"] show=["+show+"] episode=["+episode+"] extra=["+extra+"] page=["+page+"] isPlayable=["+str(isPlayable)+"] folder=["+str(folder)+"]") listitem = xbmcgui.ListItem( title, iconImage="DefaultVideo.png", thumbnailImage=thumbnail ) if info_labels is None: info_labels = { "Title" : title, "FileName" : title, "Plot" : plot } listitem.setInfo( "video", info_labels ) if fanart!="": listitem.setProperty('fanart_image',fanart) xbmcplugin.setPluginFanart(int(sys.argv[1]), fanart) if url.startswith("plugin://"): itemurl = url listitem.setProperty('IsPlayable', 'true') xbmcplugin.addDirectoryItem( handle=int(sys.argv[1]), url=itemurl, listitem=listitem, isFolder=folder) elif isPlayable: listitem.setProperty("Video", "true") 
listitem.setProperty('IsPlayable', 'true') itemurl = '%s?action=%s&title=%s&url=%s&thumbnail=%s&plot=%s&extra=%s&page=%s' % ( sys.argv[ 0 ] , action , urllib.quote_plus( title ) , urllib.quote_plus(url) , urllib.quote_plus( thumbnail ) , urllib.quote_plus( plot ) , urllib.quote_plus( extra ) , urllib.quote_plus( page )) xbmcplugin.addDirectoryItem( handle=int(sys.argv[1]), url=itemurl, listitem=listitem, isFolder=folder) else: itemurl = '%s?action=%s&title=%s&url=%s&thumbnail=%s&plot=%s&extra=%s&page=%s' % ( sys.argv[ 0 ] , action , urllib.quote_plus( title ) , urllib.quote_plus(url) , urllib.quote_plus( thumbnail ) , urllib.quote_plus( plot ) , urllib.quote_plus( extra ) , urllib.quote_plus( page )) xbmcplugin.addDirectoryItem( handle=int(sys.argv[1]), url=itemurl, listitem=listitem, isFolder=folder) def close_item_list(): _log("close_item_list") xbmcplugin.endOfDirectory(handle=int(sys.argv[1]), succeeded=True) def play_resolved_url(url): _log("play_resolved_url ["+url+"]") listitem = xbmcgui.ListItem(path=url) listitem.setProperty('IsPlayable', 'true') return xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, listitem) def direct_play(url): _log("direct_play ["+url+"]") title = "" try: xlistitem = xbmcgui.ListItem( title, iconImage="DefaultVideo.png", path=url) except: xlistitem = xbmcgui.ListItem( title, iconImage="DefaultVideo.png", ) xlistitem.setInfo( "video", { "Title": title } ) playlist = xbmc.PlayList( xbmc.PLAYLIST_VIDEO ) playlist.clear() playlist.add( url, xlistitem ) player_type = xbmc.PLAYER_CORE_AUTO xbmcPlayer = xbmc.Player( player_type ) xbmcPlayer.play(playlist) def show_picture(url): local_folder = os.path.join(get_data_path(),"images") if not os.path.exists(local_folder): try: os.mkdir(local_folder) except: pass local_file = os.path.join(local_folder,"temp.jpg") # Download picture urllib.urlretrieve(url, local_file) # Show picture xbmc.executebuiltin( "SlideShow("+local_folder+")" ) def get_temp_path(): _log("get_temp_path") dev = xbmc.translatePath( "special://temp/" ) _log("get_temp_path ->'"+str(dev)+"'") return dev def get_runtime_path(): _log("get_runtime_path") dev = xbmc.translatePath( __settings__.getAddonInfo('Path') ) _log("get_runtime_path ->'"+str(dev)+"'") return dev def get_data_path(): _log("get_data_path") dev = xbmc.translatePath( __settings__.getAddonInfo('Profile') ) # Parche para XBMC4XBOX if not os.path.exists(dev): os.makedirs(dev) _log("get_data_path ->'"+str(dev)+"'") return dev def get_setting(name): _log("get_setting name='"+name+"'") dev = __settings__.getSetting( name ) _log("get_setting ->'"+str(dev)+"'") return dev def set_setting(name,value): _log("set_setting name='"+name+"','"+value+"'") __settings__.setSetting( name,value ) def open_settings_dialog(): _log("open_settings_dialog") __settings__.openSettings() def get_localized_string(code): _log("get_localized_string code="+str(code)) dev = __language__(code) try: dev = dev.encode("utf-8") except: pass _log("get_localized_string ->'"+dev+"'") return dev def keyboard_input(default_text="", title="", hidden=False): _log("keyboard_input default_text='"+default_text+"'") keyboard = xbmc.Keyboard(default_text,title,hidden) keyboard.doModal() if (keyboard.isConfirmed()): tecleado = keyboard.getText() else: tecleado = "" _log("keyboard_input ->'"+tecleado+"'") return tecleado def message(text1, text2="", text3=""): _log("message text1='"+text1+"', text2='"+text2+"', text3='"+text3+"'") if text3=="": xbmcgui.Dialog().ok( text1 , text2 ) elif text2=="": xbmcgui.Dialog().ok( "" , text1 ) else: 
xbmcgui.Dialog().ok( text1 , text2 , text3 ) def message_yes_no(text1, text2="", text3=""): _log("message_yes_no text1='"+text1+"', text2='"+text2+"', text3='"+text3+"'") if text3=="": yes_pressed = xbmcgui.Dialog().yesno( text1 , text2 ) elif text2=="": yes_pressed = xbmcgui.Dialog().yesno( "" , text1 ) else: yes_pressed = xbmcgui.Dialog().yesno( text1 , text2 , text3 ) return yes_pressed def selector(option_list,title="Select one"): _log("selector title='"+title+"', options="+repr(option_list)) dia = xbmcgui.Dialog() selection = dia.select(title,option_list) return selection def set_view(view_mode, view_code=0): _log("set_view view_mode='"+view_mode+"', view_code="+str(view_code)) # Set the content for extended library views if needed if view_mode==MOVIES: _log("set_view content is movies") xbmcplugin.setContent( int(sys.argv[1]) ,"movies" ) elif view_mode==TV_SHOWS: _log("set_view content is tvshows") xbmcplugin.setContent( int(sys.argv[1]) ,"tvshows" ) elif view_mode==SEASONS: _log("set_view content is seasons") xbmcplugin.setContent( int(sys.argv[1]) ,"seasons" ) elif view_mode==EPISODES: _log("set_view content is episodes") xbmcplugin.setContent( int(sys.argv[1]) ,"episodes" ) # Reads skin name skin_name = xbmc.getSkinDir() _log("set_view skin_name='"+skin_name+"'") try: if view_code==0: _log("set_view view mode is "+view_mode) view_codes = ALL_VIEW_CODES.get(view_mode) view_code = view_codes.get(skin_name) _log("set_view view code for "+view_mode+" in "+skin_name+" is "+str(view_code)) xbmc.executebuiltin("Container.SetViewMode("+str(view_code)+")") else: _log("set_view view code forced to "+str(view_code)) xbmc.executebuiltin("Container.SetViewMode("+str(view_code)+")") except: _log("Unable to find view code for view mode "+str(view_mode)+" and skin "+skin_name) f = open( os.path.join( os.path.dirname(__file__) , "addon.xml") ) data = f.read() f.close() addon_id = find_single_match(data,'id="([^"]+)"') if addon_id=="": addon_id = find_single_match(data,"id='([^']+)'") __settings__ = xbmcaddon.Addon(id=addon_id) __language__ = __settings__.getLocalizedString
license: apache-2.0
hash: 4,476,613,958,263,648,000
line_mean: 31.51453
line_max: 286
alpha_frac: 0.594816
autogenerated: false
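The plugintools module above hand-rolls its query-string parsing in get_params (split on '&' and '=', then urllib.unquote_plus). Here is a standalone sketch of that same step, with urllib.parse.parse_qsl as the Python 3 counterpart of the urllib/urllib2 calls in the addon:

# Sketch of the parsing plugintools.get_params performs on sys.argv[2];
# parse_qsl replaces the manual split/unquote_plus loop.
from urllib.parse import parse_qsl

def get_params(param_string):
    """Parse a Kodi plugin URL query string into a dict."""
    # Kodi hands the plugin something like "?action=play&url=http%3A%2F%2F..."
    query = param_string[param_string.find('?') + 1:]
    return dict(parse_qsl(query, keep_blank_values=True))

print(get_params("?action=play&title=Example&url=http%3A%2F%2Fhost%2Fv.mp4"))
# -> {'action': 'play', 'title': 'Example', 'url': 'http://host/v.mp4'}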
repo_name: m-kuhn/QGIS
path: python/plugins/db_manager/db_plugins/oracle/plugin.py
copies: 6
size: 22780
content:
# -*- coding: utf-8 -*- """ /*************************************************************************** Name : DB Manager Description : Database manager plugin for QGIS (Oracle) Date : Aug 27, 2014 copyright : (C) 2014 by Médéric RIBREUX email : [email protected] The content of this file is based on - PG_Manager by Martin Dobias <[email protected]> (GPLv2 license) - DB Manager by Giuseppe Sucameli <[email protected]> (GPLv2 license) ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ """ from builtins import str from builtins import range # this will disable the dbplugin if the connector raise an ImportError from .connector import OracleDBConnector from qgis.PyQt.QtCore import Qt, QCoreApplication from qgis.PyQt.QtGui import QIcon, QKeySequence from qgis.PyQt.QtWidgets import QAction, QApplication, QMessageBox from qgis.core import QgsApplication, QgsVectorLayer, NULL, QgsSettings from ..plugin import ConnectionError, InvalidDataException, DBPlugin, \ Database, Schema, Table, VectorTable, TableField, TableConstraint, \ TableIndex, TableTrigger from qgis.core import QgsCredentials def classFactory(): return OracleDBPlugin class OracleDBPlugin(DBPlugin): @classmethod def icon(self): return QgsApplication.getThemeIcon("/mIconOracle.svg") @classmethod def typeName(self): return 'oracle' @classmethod def typeNameString(self): return QCoreApplication.translate('db_manager', 'Oracle Spatial') @classmethod def providerName(self): return 'oracle' @classmethod def connectionSettingsKey(self): return '/Oracle/connections' def connectToUri(self, uri): self.db = self.databasesFactory(self, uri) if self.db: return True return False def databasesFactory(self, connection, uri): return ORDatabase(connection, uri) def connect(self, parent=None): conn_name = self.connectionName() settings = QgsSettings() settings.beginGroup(u"/{0}/{1}".format( self.connectionSettingsKey(), conn_name)) if not settings.contains("database"): # non-existent entry? 
raise InvalidDataException( self.tr('There is no defined database connection "{0}".'.format( conn_name))) from qgis.core import QgsDataSourceUri uri = QgsDataSourceUri() settingsList = ["host", "port", "database", "username", "password"] host, port, database, username, password = [ settings.value(x, "", type=str) for x in settingsList] # get all of the connexion options useEstimatedMetadata = settings.value( "estimatedMetadata", False, type=bool) uri.setParam('userTablesOnly', str( settings.value("userTablesOnly", False, type=bool))) uri.setParam('geometryColumnsOnly', str( settings.value("geometryColumnsOnly", False, type=bool))) uri.setParam('allowGeometrylessTables', str( settings.value("allowGeometrylessTables", False, type=bool))) uri.setParam('onlyExistingTypes', str( settings.value("onlyExistingTypes", False, type=bool))) uri.setParam('includeGeoAttributes', str( settings.value("includeGeoAttributes", False, type=bool))) settings.endGroup() uri.setConnection(host, port, database, username, password) uri.setUseEstimatedMetadata(useEstimatedMetadata) err = u"" try: return self.connectToUri(uri) except ConnectionError as e: err = str(e) # ask for valid credentials max_attempts = 3 for i in range(max_attempts): (ok, username, password) = QgsCredentials.instance().get( uri.connectionInfo(False), username, password, err) if not ok: return False uri.setConnection(host, port, database, username, password) try: self.connectToUri(uri) except ConnectionError as e: if i == max_attempts - 1: # failed the last attempt raise e err = str(e) continue QgsCredentials.instance().put( uri.connectionInfo(False), username, password) return True return False class ORDatabase(Database): def __init__(self, connection, uri): self.connName = connection.connectionName() Database.__init__(self, connection, uri) def connectorsFactory(self, uri): return OracleDBConnector(uri, self.connName) def dataTablesFactory(self, row, db, schema=None): return ORTable(row, db, schema) def vectorTablesFactory(self, row, db, schema=None): return ORVectorTable(row, db, schema) def info(self): from .info_model import ORDatabaseInfo return ORDatabaseInfo(self) def schemasFactory(self, row, db): return ORSchema(row, db) def columnUniqueValuesModel(self, col, table, limit=10): l = u"" if limit: l = u"WHERE ROWNUM < {:d}".format(limit) con = self.database().connector # Prevent geometry column show tableName = table.replace(u'"', u"").split(u".") if len(tableName) == 0: tableName = [None, tableName[0]] colName = col.replace(u'"', u"").split(u".")[-1] if con.isGeometryColumn(tableName, colName): return None query = u"SELECT DISTINCT {} FROM {} {}".format(col, table, l) return self.sqlResultModel(query, self) def sqlResultModel(self, sql, parent): from .data_model import ORSqlResultModel return ORSqlResultModel(self, sql, parent) def sqlResultModelAsync(self, sql, parent): from .data_model import ORSqlResultModelAsync return ORSqlResultModelAsync(self, sql, parent) def toSqlLayer(self, sql, geomCol, uniqueCol, layerName=u"QueryLayer", layerType=None, avoidSelectById=False, filter=""): uri = self.uri() con = self.database().connector uri.setDataSource(u"", u"({}\n)".format( sql), geomCol, filter, uniqueCol.strip(u'"')) if avoidSelectById: uri.disableSelectAtId(True) provider = self.dbplugin().providerName() vlayer = QgsVectorLayer(uri.uri(False), layerName, provider) # handling undetermined geometry type if not vlayer.isValid(): wkbType, srid = con.getTableMainGeomType( u"({}\n)".format(sql), geomCol) uri.setWkbType(wkbType) if srid: 
uri.setSrid(str(srid)) vlayer = QgsVectorLayer(uri.uri(False), layerName, provider) return vlayer def registerDatabaseActions(self, mainWindow): action = QAction(QApplication.translate( "DBManagerPlugin", "&Re-connect"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Database"), self.reconnectActionSlot) if self.schemas(): action = QAction(QApplication.translate( "DBManagerPlugin", "&Create Schema…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Schema"), self.createSchemaActionSlot) action = QAction(QApplication.translate( "DBManagerPlugin", "&Delete (Empty) Schema…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Schema"), self.deleteSchemaActionSlot) action = QAction(QApplication.translate( "DBManagerPlugin", "Delete Selected Item"), self) mainWindow.registerAction(action, None, self.deleteActionSlot) action.setShortcuts(QKeySequence.Delete) action = QAction(QgsApplication.getThemeIcon("/mActionCreateTable.svg"), QApplication.translate( "DBManagerPlugin", "&Create Table…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Table"), self.createTableActionSlot) action = QAction(QgsApplication.getThemeIcon("/mActionEditTable.svg"), QApplication.translate( "DBManagerPlugin", "&Edit Table…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Table"), self.editTableActionSlot) action = QAction(QgsApplication.getThemeIcon("/mActionDeleteTable.svg"), QApplication.translate( "DBManagerPlugin", "&Delete Table/View…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Table"), self.deleteTableActionSlot) action = QAction(QApplication.translate( "DBManagerPlugin", "&Empty Table…"), self) mainWindow.registerAction(action, QApplication.translate( "DBManagerPlugin", "&Table"), self.emptyTableActionSlot) def supportsComment(self): return False class ORSchema(Schema): def __init__(self, row, db): Schema.__init__(self, db) # self.oid, self.name, self.owner, self.perms, self.comment = row self.name = row[0] class ORTable(Table): def __init__(self, row, db, schema=None): Table.__init__(self, db, schema) self.name, self.owner, isView = row self.estimatedRowCount = None self.objectType = None self.isView = False self.isMaterializedView = False if isView == 1: self.isView = True self.creationDate = None self.modificationDate = None def getDates(self): """Grab the creation/modification dates of the table""" self.creationDate, self.modificationDate = ( self.database().connector.getTableDates((self.schemaName(), self.name))) def refreshRowEstimation(self): """Use ALL_ALL_TABLE to get an estimation of rows""" if self.isView: self.estimatedRowCount = 0 self.estimatedRowCount = ( self.database().connector.getTableRowEstimation( (self.schemaName(), self.name))) def getType(self): """Grab the type of object for the table""" self.objectType = self.database().connector.getTableType( (self.schemaName(), self.name)) def getComment(self): """Grab the general comment of the table/view""" self.comment = self.database().connector.getTableComment( (self.schemaName(), self.name), self.objectType) def getDefinition(self): return self.database().connector.getDefinition( (self.schemaName(), self.name), self.objectType) def getMViewInfo(self): if self.objectType == u"MATERIALIZED VIEW": return self.database().connector.getMViewInfo( (self.schemaName(), self.name)) else: return None def runAction(self, action): 
action = str(action) if action.startswith("rows/"): if action == "rows/recount": self.refreshRowCount() return True elif action.startswith("index/"): parts = action.split('/') index_name = parts[1] index_action = parts[2] msg = QApplication.translate( "DBManagerPlugin", "Do you want to {} index {}?".format( index_action, index_name)) QApplication.restoreOverrideCursor() try: if QMessageBox.question( None, QApplication.translate( "DBManagerPlugin", "Table Index"), msg, QMessageBox.Yes | QMessageBox.No) == QMessageBox.No: return False finally: QApplication.setOverrideCursor(Qt.WaitCursor) if index_action == "rebuild": self.aboutToChange.emit() self.database().connector.rebuildTableIndex( (self.schemaName(), self.name), index_name) self.refreshIndexes() return True elif action.startswith(u"mview/"): if action == "mview/refresh": self.aboutToChange.emit() self.database().connector.refreshMView( (self.schemaName(), self.name)) return True return Table.runAction(self, action) def tableFieldsFactory(self, row, table): return ORTableField(row, table) def tableConstraintsFactory(self, row, table): return ORTableConstraint(row, table) def tableIndexesFactory(self, row, table): return ORTableIndex(row, table) def tableTriggersFactory(self, row, table): return ORTableTrigger(row, table) def info(self): from .info_model import ORTableInfo return ORTableInfo(self) def tableDataModel(self, parent): from .data_model import ORTableDataModel return ORTableDataModel(self, parent) def getValidQgisUniqueFields(self, onlyOne=False): """ list of fields valid to load the table as layer in Qgis canvas. Qgis automatically search for a valid unique field, so it's needed only for queries and views. """ ret = [] # add the pk pkcols = [x for x in self.fields() if x.primaryKey] if len(pkcols) == 1: ret.append(pkcols[0]) # then add integer fields with an unique index indexes = self.indexes() if indexes is not None: for idx in indexes: if idx.isUnique and len(idx.columns) == 1: fld = idx.fields()[idx.columns[0]] if (fld.dataType == u"NUMBER" and not fld.modifier and fld.notNull and fld not in ret): ret.append(fld) # and finally append the other suitable fields for fld in self.fields(): if (fld.dataType == u"NUMBER" and not fld.modifier and fld.notNull and fld not in ret): ret.append(fld) if onlyOne: return ret[0] if len(ret) > 0 else None return ret def uri(self): uri = self.database().uri() schema = self.schemaName() if self.schemaName() else '' geomCol = self.geomColumn if self.type in [ Table.VectorType, Table.RasterType] else "" uniqueCol = self.getValidQgisUniqueFields( True) if self.isView else None uri.setDataSource(schema, self.name, geomCol if geomCol else None, None, uniqueCol.name if uniqueCol else "") # Handle geographic table if geomCol: uri.setWkbType(self.wkbType) uri.setSrid(str(self.srid)) return uri class ORVectorTable(ORTable, VectorTable): def __init__(self, row, db, schema=None): ORTable.__init__(self, row[0:3], db, schema) VectorTable.__init__(self, db, schema) self.geomColumn, self.geomType, self.wkbType, self.geomDim, \ self.srid = row[-7:-2] def info(self): from .info_model import ORVectorTableInfo return ORVectorTableInfo(self) def runAction(self, action): if action.startswith("extent/"): if action == "extent/update": self.aboutToChange.emit() self.updateExtent() return True if ORTable.runAction(self, action): return True return VectorTable.runAction(self, action) def canUpdateMetadata(self): return self.database().connector.canUpdateMetadata((self.schemaName(), self.name)) def 
updateExtent(self): self.database().connector.updateMetadata( (self.schemaName(), self.name), self.geomColumn, extent=self.extent) self.refreshTableEstimatedExtent() self.refresh() def hasSpatialIndex(self, geom_column=None): geom_column = geom_column if geom_column else self.geomColumn for idx in self.indexes(): if geom_column == idx.column: return True return False class ORTableField(TableField): def __init__(self, row, table): """ build fields information from query and find primary key """ TableField.__init__(self, table) self.num, self.name, self.dataType, self.charMaxLen, \ self.modifier, self.notNull, self.hasDefault, \ self.default, typeStr, self.comment = row self.primaryKey = False self.num = int(self.num) if self.charMaxLen == NULL: self.charMaxLen = None else: self.charMaxLen = int(self.charMaxLen) if self.modifier == NULL: self.modifier = None else: self.modifier = int(self.modifier) if self.notNull.upper() == u"Y": self.notNull = False else: self.notNull = True if self.comment == NULL: self.comment = u"" # find out whether fields are part of primary key for con in self.table().constraints(): if con.type == ORTableConstraint.TypePrimaryKey and self.name == con.column: self.primaryKey = True break def type2String(self): if (u"TIMESTAMP" in self.dataType or self.dataType in [u"DATE", u"SDO_GEOMETRY", u"BINARY_FLOAT", u"BINARY_DOUBLE"]): return u"{}".format(self.dataType) if self.charMaxLen in [None, -1]: return u"{}".format(self.dataType) elif self.modifier in [None, -1, 0]: return u"{}({})".format(self.dataType, self.charMaxLen) return u"{}({},{})".format(self.dataType, self.charMaxLen, self.modifier) def update(self, new_name, new_type_str=None, new_not_null=None, new_default_str=None): self.table().aboutToChange.emit() if self.name == new_name: new_name = None if self.type2String() == new_type_str: new_type_str = None if self.notNull == new_not_null: new_not_null = None if self.default2String() == new_default_str: new_default_str = None ret = self.table().database().connector.updateTableColumn( (self.table().schemaName(), self.table().name), self.name, new_name, new_type_str, new_not_null, new_default_str) # When changing a field, refresh also constraints and # indexes. 
if ret is not False: self.table().refreshFields() self.table().refreshConstraints() self.table().refreshIndexes() return ret class ORTableConstraint(TableConstraint): TypeCheck, TypeForeignKey, TypePrimaryKey, \ TypeUnique, TypeUnknown = list(range(5)) types = {"c": TypeCheck, "r": TypeForeignKey, "p": TypePrimaryKey, "u": TypeUnique} def __init__(self, row, table): """ build constraints info from query """ TableConstraint.__init__(self, table) self.name, constr_type_str, self.column, self.validated, \ self.generated, self.status = row[0:6] constr_type_str = constr_type_str.lower() if constr_type_str in ORTableConstraint.types: self.type = ORTableConstraint.types[constr_type_str] else: self.type = ORTableConstraint.TypeUnknown if row[6] == NULL: self.checkSource = u"" else: self.checkSource = row[6] if row[8] == NULL: self.foreignTable = u"" else: self.foreignTable = row[8] if row[7] == NULL: self.foreignOnDelete = u"" else: self.foreignOnDelete = row[7] if row[9] == NULL: self.foreignKey = u"" else: self.foreignKey = row[9] def type2String(self): if self.type == ORTableConstraint.TypeCheck: return QApplication.translate("DBManagerPlugin", "Check") if self.type == ORTableConstraint.TypePrimaryKey: return QApplication.translate("DBManagerPlugin", "Primary key") if self.type == ORTableConstraint.TypeForeignKey: return QApplication.translate("DBManagerPlugin", "Foreign key") if self.type == ORTableConstraint.TypeUnique: return QApplication.translate("DBManagerPlugin", "Unique") return QApplication.translate("DBManagerPlugin", 'Unknown') def fields(self): """ Hack to make edit dialog box work """ fields = self.table().fields() field = None for fld in fields: if fld.name == self.column: field = fld cols = {} cols[0] = field return cols class ORTableIndex(TableIndex): def __init__(self, row, table): TableIndex.__init__(self, table) self.name, self.column, self.indexType, self.status, \ self.analyzed, self.compression, self.isUnique = row def fields(self): """ Hack to make edit dialog box work """ self.table().refreshFields() fields = self.table().fields() field = None for fld in fields: if fld.name == self.column: field = fld cols = {} cols[0] = field return cols class ORTableTrigger(TableTrigger): def __init__(self, row, table): TableTrigger.__init__(self, table) self.name, self.event, self.type, self.enabled = row
license: gpl-2.0
hash: -6,012,125,030,114,924,000
line_mean: 34.187017
line_max: 125
alpha_frac: 0.579548
autogenerated: false
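OracleDBPlugin.connect() above wraps the connection attempt in a credential-retry loop: it tries the stored credentials first, then re-prompts through QgsCredentials for up to three attempts. A minimal sketch of that pattern, with placeholder callables rather than the QGIS API:

# Sketch of the retry pattern in OracleDBPlugin.connect(); `do_connect`
# and `prompt_user` are placeholder callables, not the QGIS API.
def connect_with_retry(do_connect, prompt_user, username, password,
                       max_attempts=3):
    try:
        return do_connect(username, password)  # stored credentials first
    except ConnectionError as e:
        err = str(e)
    for i in range(max_attempts):
        ok, username, password = prompt_user(username, password, err)
        if not ok:
            return None  # user cancelled the credentials dialog
        try:
            result = do_connect(username, password)
        except ConnectionError as e:
            if i == max_attempts - 1:
                raise  # last attempt failed: propagate the error
            err = str(e)
            continue
        # the real code persists the accepted credentials here via
        # QgsCredentials.instance().put(...)
        return result
    return None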
repo_name: miqlar/PyFME
path: src/pyfme/utils/trimmer.py
copies: 5
size: 8158
content:
# -*- coding: utf-8 -*- """ Python Flight Mechanics Engine (PyFME). Copyright (c) AeroPython Development Team. Distributed under the terms of the MIT License. Trimmer ------- This module solves the problem of calculating the values of the state and control vectors that satisfy the state equations of the aircraft at the given condition. This cannot be done analytically because of the very complex functional dependence on the aerodynamic data. Instead, it must be done with a numerical algorithm which iteratively adjusts the independent variables until some solution criterion is met. """ from copy import deepcopy from warnings import warn from math import sqrt, sin, cos, tan, atan import numpy as np from scipy.optimize import least_squares from pyfme.utils.coordinates import wind2body from pyfme.models.constants import GRAVITY def steady_state_flight_trimmer(aircraft, system, env, TAS, controls_0, controls2trim=None, gamma=0.0, turn_rate=0.0, verbose=0): """Finds a combination of values of the state and control variables that correspond to a steady-state flight condition. Steady-state aircraft flight can be defined as a condition in which all of the motion variables are constant or zero. That is, the linear and angular velocity components are constant (or zero), thus all acceleration components are zero. Parameters ---------- aircraft : Aircraft Plane to be trimmed. system : System System for aircraft trimming. env : Environment Environment with the models for wind, atmosphere and gravity. TAS : float True Air Speed (m/s). controls_0 : dict Initial value guess for each control. If the control is not in `controls2trim` or `controls2trim` is `None` the control is considered fixed to that value during the trimming process. controls2trim : list, optional List with controls to be trimmed. If not given, no control is considered fixed. gamma : float, optional Flight path angle (rad). turn_rate : float, optional Turn rate, d(psi)/dt (rad/s). verbose : {0, 1, 2}, optional Level of algorithm's verbosity: * 0 (default) : work silently. * 1 : display a termination report. * 2 : display progress during iterations (not supported by 'lm' method). Returns ------- aircraft : Aircraft Trimmed plane. system : System Trimmed system. env : Environment Trimmed environment (gravity in body axis). results : dict Relevant parameters calculated during the aircraft trimming, including least square results. Notes ----- See section 3.4 in [1] for the algorithm description. See section 2.5 in [1] for the definition of steady-state flight condition. References ---------- .. [1] Stevens, BL and Lewis, FL, "Aircraft Control and Simulation", Wiley-lnterscience. """ # Creating a copy of these objects in order to not modify any attribute # inside this funciton. trimmed_ac = deepcopy(aircraft) trimmed_sys = deepcopy(system) trimmed_env = deepcopy(env) trimmed_ac.TAS = TAS trimmed_ac.Mach = aircraft.TAS / env.a trimmed_ac.q_inf = 0.5 * trimmed_env.rho * aircraft.TAS ** 2 # Update environment trimmed_env.update(trimmed_sys) # Check if every necessary control for the aircraft is given in controls_0. for ac_control in trimmed_ac.controls: if ac_control not in controls_0: raise ValueError("Control {} not given in controls_0: {}".format( ac_control, controls_0)) trimmed_ac.controls = controls_0 # If controls2trim is not given, trim for every control. 
if controls2trim is None: controls2trim = list(controls_0.keys()) # TODO: try to look for a good initialization method for alpha & beta initial_guess = [0.05 * np.sign(turn_rate), # alpha 0.001 * np.sign(turn_rate)] # beta for control in controls2trim: initial_guess.append(controls_0[control]) args = (trimmed_sys, trimmed_ac, trimmed_env, controls2trim, gamma, turn_rate) lower_bounds = [-0.5, -0.25] # Alpha and beta upper bounds. upper_bounds = [+0.5, +0.25] # Alpha and beta lower bounds. for ii in controls2trim: lower_bounds.append(aircraft.control_limits[ii][0]) upper_bounds.append(aircraft.control_limits[ii][1]) bounds = (lower_bounds, upper_bounds) results = least_squares(trimming_cost_func, x0=initial_guess, args=args, verbose=verbose, bounds=bounds) fun = results['fun'] cost = results['cost'] if cost > 1e-7 or any(abs(fun) > 1e-3): warn("Trim process did not converge", RuntimeWarning) trimmed_sys.set_initial_state_vector() results = {'alpha': trimmed_ac.alpha, 'beta': trimmed_ac.beta, 'u': trimmed_sys.u, 'v': trimmed_sys.v, 'w': trimmed_sys.w, 'p': trimmed_sys.p, 'q': trimmed_sys.q, 'r': trimmed_sys.r, 'theta': trimmed_sys.theta, 'phi': trimmed_sys.phi, 'ls_opt': results} for control in controls2trim: results[control] = trimmed_ac.controls[control] return trimmed_ac, trimmed_sys, trimmed_env, results def turn_coord_cons(turn_rate, alpha, beta, TAS, gamma=0): """Calculates phi for coordinated turn. """ g0 = GRAVITY G = turn_rate * TAS / g0 if abs(gamma) < 1e-8: phi = G * cos(beta) / (cos(alpha) - G * sin(alpha) * sin(beta)) phi = atan(phi) else: a = 1 - G * tan(alpha) * sin(beta) b = sin(gamma) / cos(beta) c = 1 + G ** 2 * cos(beta) ** 2 sq = sqrt(c * (1 - b ** 2) + G ** 2 * sin(beta) ** 2) num = (a - b ** 2) + b * tan(alpha) * sq den = a ** 2 - b ** 2 * (1 + c * tan(alpha) ** 2) phi = atan(G * cos(beta) / cos(alpha) * num / den) return phi def turn_coord_cons_horizontal_and_small_beta(turn_rate, alpha, TAS): """Calculates phi for coordinated turn given that gamma is equal to zero and beta is small (beta << 1). """ g0 = GRAVITY G = turn_rate * TAS / g0 phi = G / cos(alpha) phi = atan(phi) return phi def rate_of_climb_cons(gamma, alpha, beta, phi): """Calculates theta for the given ROC, wind angles, and roll angle. 
""" a = cos(alpha) * cos(beta) b = sin(phi) * sin(beta) + cos(phi) * sin(alpha) * cos(beta) sq = sqrt(a ** 2 - sin(gamma) ** 2 + b ** 2) theta = (a * b + sin(gamma) * sq) / (a ** 2 - sin(gamma) ** 2) theta = atan(theta) return theta def trimming_cost_func(trimmed_params, system, ac, env, controls2trim, gamma, turn_rate): """Function to optimize """ alpha = trimmed_params[0] beta = trimmed_params[1] new_controls = {} for ii, control in enumerate(controls2trim): new_controls[control] = trimmed_params[ii + 2] # Choose coordinated turn constrain equation: if abs(turn_rate) < 1e-8: phi = 0 else: phi = turn_coord_cons(turn_rate, alpha, beta, ac.TAS, gamma) system.euler_angles[2] = phi # Rate of climb constrain theta = rate_of_climb_cons(gamma, alpha, beta, phi) system.euler_angles[1] = theta # w = turn_rate * k_h # k_h = sin(theta) i_b + sin(phi) * cos(theta) j_b + cos(theta) * sin(phi) # w = p * i_b + q * j_b + r * k_b p = - turn_rate * sin(theta) q = turn_rate * sin(phi) * cos(theta) r = turn_rate * cos(theta) * sin(phi) system.vel_ang = np.array([p, q, r]) system.vel_body = wind2body((ac.TAS, 0, 0), alpha=alpha, beta=beta) env.update(system) ac.update(new_controls, system, env) forces, moments = ac.calculate_forces_and_moments() vel = np.concatenate((system.vel_body[:], system.vel_ang[:])) output = system.lamceq(0, vel, ac.mass, ac.inertia, forces, moments) return output
license: mit
hash: 7,314,654,031,948,047,000
line_mean: 33.133891
line_max: 79
alpha_frac: 0.617308
autogenerated: false
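As a quick numeric check of the coordinated-turn constraint in turn_coord_cons_horizontal_and_small_beta above (level flight, small sideslip), the bank angle reduces to phi = atan(G / cos(alpha)) with G = turn_rate * TAS / g0:

# Numeric check of the small-beta, horizontal coordinated-turn constraint.
# 9.80665 stands in for pyfme's GRAVITY constant.
from math import atan, cos, degrees, radians

def coordinated_bank(turn_rate, alpha, TAS, g0=9.80665):
    G = turn_rate * TAS / g0
    return atan(G / cos(alpha))

# Standard-rate turn (3 deg/s) at TAS = 60 m/s and 4 deg angle of attack:
phi = coordinated_bank(radians(3.0), radians(4.0), 60.0)
print(f"bank angle = {degrees(phi):.1f} deg")  # about 17.8 deg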
repo_name: vericred/vericred-python
path: test/test_providers_api.py
copies: 1
size: 10307
content:
# coding: utf-8 """ Vericred API Vericred's API allows you to search for Health Plans that a specific doctor accepts. ## Getting Started Visit our [Developer Portal](https://developers.vericred.com) to create an account. Once you have created an account, you can create one Application for Production and another for our Sandbox (select the appropriate Plan when you create the Application). ## SDKs Our API follows standard REST conventions, so you can use any HTTP client to integrate with us. You will likely find it easier to use one of our [autogenerated SDKs](https://github.com/vericred/?query=vericred-), which we make available for several common programming languages. ## Authentication To authenticate, pass the API Key you created in the Developer Portal as a `Vericred-Api-Key` header. `curl -H 'Vericred-Api-Key: YOUR_KEY' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"` ## Versioning Vericred's API default to the latest version. However, if you need a specific version, you can request it with an `Accept-Version` header. The current version is `v3`. Previous versions are `v1` and `v2`. `curl -H 'Vericred-Api-Key: YOUR_KEY' -H 'Accept-Version: v2' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"` ## Pagination Endpoints that accept `page` and `per_page` parameters are paginated. They expose four additional fields that contain data about your position in the response, namely `Total`, `Per-Page`, `Link`, and `Page` as described in [RFC-5988](https://tools.ietf.org/html/rfc5988). For example, to display 5 results per page and view the second page of a `GET` to `/networks`, your final request would be `GET /networks?....page=2&per_page=5`. ## Sideloading When we return multiple levels of an object graph (e.g. `Provider`s and their `State`s we sideload the associated data. In this example, we would provide an Array of `State`s and a `state_id` for each provider. This is done primarily to reduce the payload size since many of the `Provider`s will share a `State` ``` { providers: [{ id: 1, state_id: 1}, { id: 2, state_id: 1 }], states: [{ id: 1, code: 'NY' }] } ``` If you need the second level of the object graph, you can just match the corresponding id. ## Selecting specific data All endpoints allow you to specify which fields you would like to return. This allows you to limit the response to contain only the data you need. For example, let's take a request that returns the following JSON by default ``` { provider: { id: 1, name: 'John', phone: '1234567890', field_we_dont_care_about: 'value_we_dont_care_about' }, states: [{ id: 1, name: 'New York', code: 'NY', field_we_dont_care_about: 'value_we_dont_care_about' }] } ``` To limit our results to only return the fields we care about, we specify the `select` query string parameter for the corresponding fields in the JSON document. In this case, we want to select `name` and `phone` from the `provider` key, so we would add the parameters `select=provider.name,provider.phone`. We also want the `name` and `code` from the `states` key, so we would add the parameters `select=states.name,states.code`. The id field of each document is always returned whether or not it is requested. 
Our final request would be `GET /providers/12345?select=provider.name,provider.phone,states.name,states.code` The response would be ``` { provider: { id: 1, name: 'John', phone: '1234567890' }, states: [{ id: 1, name: 'New York', code: 'NY' }] } ``` ## Benefits summary format Benefit cost-share strings are formatted to capture: * Network tiers * Compound or conditional cost-share * Limits on the cost-share * Benefit-specific maximum out-of-pocket costs **Example #1** As an example, we would represent [this Summary of Benefits &amp; Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/33602TX0780032.pdf) as: * **Hospital stay facility fees**: - Network Provider: `$400 copay/admit plus 20% coinsurance` - Out-of-Network Provider: `$1,500 copay/admit plus 50% coinsurance` - Vericred's format for this benefit: `In-Network: $400 before deductible then 20% after deductible / Out-of-Network: $1,500 before deductible then 50% after deductible` * **Rehabilitation services:** - Network Provider: `20% coinsurance` - Out-of-Network Provider: `50% coinsurance` - Limitations & Exceptions: `35 visit maximum per benefit period combined with Chiropractic care.` - Vericred's format for this benefit: `In-Network: 20% after deductible / Out-of-Network: 50% after deductible | limit: 35 visit(s) per Benefit Period` **Example #2** In [this other Summary of Benefits &amp; Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/40733CA0110568.pdf), the **specialty_drugs** cost-share has a maximum out-of-pocket for in-network pharmacies. * **Specialty drugs:** - Network Provider: `40% coinsurance up to a $500 maximum for up to a 30 day supply` - Out-of-Network Provider `Not covered` - Vericred's format for this benefit: `In-Network: 40% after deductible, up to $500 per script / Out-of-Network: 100%` **BNF** Here's a description of the benefits summary string, represented as a context-free grammar: ``` root ::= coverage coverage ::= (simple_coverage | tiered_coverage) (space pipe space coverage_modifier)? tiered_coverage ::= tier (space slash space tier)* tier ::= tier_name colon space (tier_coverage | not_applicable) tier_coverage ::= simple_coverage (space (then | or | and) space simple_coverage)* tier_limitation? simple_coverage ::= (pre_coverage_limitation space)? coverage_amount (space post_coverage_limitation)? (comma? space coverage_condition)? coverage_modifier ::= limit_condition colon space (((simple_coverage | simple_limitation) (semicolon space see_carrier_documentation)?) | see_carrier_documentation | waived_if_admitted | shared_across_tiers) waived_if_admitted ::= ("copay" space)? "waived if admitted" simple_limitation ::= pre_coverage_limitation space "copay applies" tier_name ::= "In-Network-Tier-2" | "Out-of-Network" | "In-Network" limit_condition ::= "limit" | "condition" tier_limitation ::= comma space "up to" space (currency | (integer space time_unit plural?)) (space post_coverage_limitation)? coverage_amount ::= currency | unlimited | included | unknown | percentage | (digits space (treatment_unit | time_unit) plural?) pre_coverage_limitation ::= first space digits space time_unit plural? post_coverage_limitation ::= (((then space currency) | "per condition") space)? "per" space (treatment_unit | (integer space time_unit) | time_unit) plural? coverage_condition ::= ("before deductible" | "after deductible" | "penalty" | allowance | "in-state" | "out-of-state") (space allowance)? allowance ::= upto_allowance | after_allowance upto_allowance ::= "up to" space (currency space)? 
"allowance" after_allowance ::= "after" space (currency space)? "allowance" see_carrier_documentation ::= "see carrier documentation for more information" shared_across_tiers ::= "shared across all tiers" unknown ::= "unknown" unlimited ::= /[uU]nlimited/ included ::= /[iI]ncluded in [mM]edical/ time_unit ::= /[hH]our/ | (((/[cC]alendar/ | /[cC]ontract/) space)? /[yY]ear/) | /[mM]onth/ | /[dD]ay/ | /[wW]eek/ | /[vV]isit/ | /[lL]ifetime/ | ((((/[bB]enefit/ plural?) | /[eE]ligibility/) space)? /[pP]eriod/) treatment_unit ::= /[pP]erson/ | /[gG]roup/ | /[cC]ondition/ | /[sS]cript/ | /[vV]isit/ | /[eE]xam/ | /[iI]tem/ | /[sS]tay/ | /[tT]reatment/ | /[aA]dmission/ | /[eE]pisode/ comma ::= "," colon ::= ":" semicolon ::= ";" pipe ::= "|" slash ::= "/" plural ::= "(s)" | "s" then ::= "then" | ("," space) | space or ::= "or" and ::= "and" not_applicable ::= "Not Applicable" | "N/A" | "NA" first ::= "first" currency ::= "$" number percentage ::= number "%" number ::= float | integer float ::= digits "." digits integer ::= /[0-9]/+ (comma_int | under_int)* comma_int ::= ("," /[0-9]/*3) !"_" under_int ::= ("_" /[0-9]/*3) !"," digits ::= /[0-9]/+ ("_" /[0-9]/+)* space ::= /[ \t]/+ ``` OpenAPI spec version: 1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import import os import sys import unittest import vericred_client from vericred_client.rest import ApiException from vericred_client.apis.providers_api import ProvidersApi class TestProvidersApi(unittest.TestCase): """ ProvidersApi unit test stubs """ def setUp(self): self.api = vericred_client.apis.providers_api.ProvidersApi() def tearDown(self): pass def test_get_provider(self): """ Test case for get_provider Find a Provider """ pass def test_get_providers(self): """ Test case for get_providers Find Providers """ pass def test_get_providers_0(self): """ Test case for get_providers_0 Find Providers """ pass if __name__ == '__main__': unittest.main()
license: apache-2.0
hash: -2,225,444,863,731,263,500
line_mean: 37.602996
line_max: 228
alpha_frac: 0.649365
autogenerated: false
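The long docstring above documents the raw HTTP conventions that the generated SDK wraps. A minimal sketch of the same request pattern with the `requests` library, reusing the header names and query parameters from the docstring (the API key is a placeholder):

# Sketch of the raw request pattern described in the docstring: API-key
# header, pinned version, and page/per_page pagination. YOUR_KEY is a
# placeholder.
import requests

resp = requests.get(
    "https://api.vericred.com/providers",
    headers={"Vericred-Api-Key": "YOUR_KEY", "Accept-Version": "v3"},
    params={"search_term": "Foo", "zip_code": "11215",
            "page": 2, "per_page": 5},
)
resp.raise_for_status()
# Pagination metadata comes back in response headers (RFC-5988 style).
print(resp.headers.get("Total"), resp.headers.get("Per-Page"))
for provider in resp.json().get("providers", []):
    print(provider["id"])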
repo_name: Yuudachimoe/HikariChun-RedBot
path: lib/youtube_dl/extractor/abc.py
copies: 24
size: 6210
content:
from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( ExtractorError, js_to_json, int_or_none, parse_iso8601, ) class ABCIE(InfoExtractor): IE_NAME = 'abc.net.au' _VALID_URL = r'https?://(?:www\.)?abc\.net\.au/news/(?:[^/]+/){1,2}(?P<id>\d+)' _TESTS = [{ 'url': 'http://www.abc.net.au/news/2014-11-05/australia-to-staff-ebola-treatment-centre-in-sierra-leone/5868334', 'md5': 'cb3dd03b18455a661071ee1e28344d9f', 'info_dict': { 'id': '5868334', 'ext': 'mp4', 'title': 'Australia to help staff Ebola treatment centre in Sierra Leone', 'description': 'md5:809ad29c67a05f54eb41f2a105693a67', }, 'skip': 'this video has expired', }, { 'url': 'http://www.abc.net.au/news/2015-08-17/warren-entsch-introduces-same-sex-marriage-bill/6702326', 'md5': 'db2a5369238b51f9811ad815b69dc086', 'info_dict': { 'id': 'NvqvPeNZsHU', 'ext': 'mp4', 'upload_date': '20150816', 'uploader': 'ABC News (Australia)', 'description': 'Government backbencher Warren Entsch introduces a cross-party sponsored bill to legalise same-sex marriage, saying the bill is designed to promote "an inclusive Australia, not a divided one.". Read more here: http://ab.co/1Mwc6ef', 'uploader_id': 'NewsOnABC', 'title': 'Marriage Equality: Warren Entsch introduces same sex marriage bill', }, 'add_ie': ['Youtube'], 'skip': 'Not accessible from Travis CI server', }, { 'url': 'http://www.abc.net.au/news/2015-10-23/nab-lifts-interest-rates-following-westpac-and-cba/6880080', 'md5': 'b96eee7c9edf4fc5a358a0252881cc1f', 'info_dict': { 'id': '6880080', 'ext': 'mp3', 'title': 'NAB lifts interest rates, following Westpac and CBA', 'description': 'md5:f13d8edc81e462fce4a0437c7dc04728', }, }, { 'url': 'http://www.abc.net.au/news/2015-10-19/6866214', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) mobj = re.search( r'inline(?P<type>Video|Audio|YouTube)Data\.push\((?P<json_data>[^)]+)\);', webpage) if mobj is None: expired = self._html_search_regex(r'(?s)class="expired-(?:video|audio)".+?<span>(.+?)</span>', webpage, 'expired', None) if expired: raise ExtractorError('%s said: %s' % (self.IE_NAME, expired), expected=True) raise ExtractorError('Unable to extract video urls') urls_info = self._parse_json( mobj.group('json_data'), video_id, transform_source=js_to_json) if not isinstance(urls_info, list): urls_info = [urls_info] if mobj.group('type') == 'YouTube': return self.playlist_result([ self.url_result(url_info['url']) for url_info in urls_info]) formats = [{ 'url': url_info['url'], 'vcodec': url_info.get('codec') if mobj.group('type') == 'Video' else 'none', 'width': int_or_none(url_info.get('width')), 'height': int_or_none(url_info.get('height')), 'tbr': int_or_none(url_info.get('bitrate')), 'filesize': int_or_none(url_info.get('filesize')), } for url_info in urls_info] self._sort_formats(formats) return { 'id': video_id, 'title': self._og_search_title(webpage), 'formats': formats, 'description': self._og_search_description(webpage), 'thumbnail': self._og_search_thumbnail(webpage), } class ABCIViewIE(InfoExtractor): IE_NAME = 'abc.net.au:iview' _VALID_URL = r'https?://iview\.abc\.net\.au/programs/[^/]+/(?P<id>[^/?#]+)' # ABC iview programs are normally available for 14 days only. 
_TESTS = [{ 'url': 'http://iview.abc.net.au/programs/diaries-of-a-broken-mind/ZX9735A001S00', 'md5': 'cde42d728b3b7c2b32b1b94b4a548afc', 'info_dict': { 'id': 'ZX9735A001S00', 'ext': 'mp4', 'title': 'Diaries Of A Broken Mind', 'description': 'md5:7de3903874b7a1be279fe6b68718fc9e', 'upload_date': '20161010', 'uploader_id': 'abc2', 'timestamp': 1476064920, }, 'skip': 'Video gone', }] def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) video_params = self._parse_json(self._search_regex( r'videoParams\s*=\s*({.+?});', webpage, 'video params'), video_id) title = video_params.get('title') or video_params['seriesTitle'] stream = next(s for s in video_params['playlist'] if s.get('type') == 'program') formats = self._extract_akamai_formats(stream['hds-unmetered'], video_id) self._sort_formats(formats) subtitles = {} src_vtt = stream.get('captions', {}).get('src-vtt') if src_vtt: subtitles['en'] = [{ 'url': src_vtt, 'ext': 'vtt', }] return { 'id': video_id, 'title': title, 'description': self._html_search_meta(['og:description', 'twitter:description'], webpage), 'thumbnail': self._html_search_meta(['og:image', 'twitter:image:src'], webpage), 'duration': int_or_none(video_params.get('eventDuration')), 'timestamp': parse_iso8601(video_params.get('pubDate'), ' '), 'series': video_params.get('seriesTitle'), 'series_id': video_params.get('seriesHouseNumber') or video_id[:7], 'episode_number': int_or_none(self._html_search_meta('episodeNumber', webpage, default=None)), 'episode': self._html_search_meta('episode_title', webpage, default=None), 'uploader_id': video_params.get('channel'), 'formats': formats, 'subtitles': subtitles, }
license: gpl-3.0
hash: 7,543,708,028,692,215,000
line_mean: 39.855263
line_max: 259
alpha_frac: 0.561192
autogenerated: false
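The core of ABCIE._real_extract above is locating the inline{Video,Audio,YouTube}Data.push(...) payload in the page HTML and decoding it. A standalone sketch of that step; json.loads stands in for youtube-dl's js_to_json and _parse_json helpers, so it only accepts strictly valid JSON:

# Standalone sketch of the payload extraction in ABCIE._real_extract;
# json.loads replaces js_to_json + _parse_json, so the payload must be
# strict JSON here.
import json
import re

webpage = ('<script>inlineVideoData.push({"url": "http://example.com/v.mp4", '
           '"width": 1024, "height": 576, "bitrate": 1500});</script>')

mobj = re.search(
    r'inline(?P<type>Video|Audio|YouTube)Data\.push\((?P<json_data>[^)]+)\);',
    webpage)
if mobj:
    urls_info = json.loads(mobj.group('json_data'))
    if not isinstance(urls_info, list):
        urls_info = [urls_info]
    for url_info in urls_info:
        print(mobj.group('type'), url_info['url'], url_info.get('bitrate'))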
repo_name: xianjunzhengbackup/Cloud-Native-Python
path: env/lib/python3.5/site-packages/pip/_vendor/distlib/metadata.py
copies: 335
size: 38833
content:
# -*- coding: utf-8 -*- # # Copyright (C) 2012 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """Implementation of the Metadata for Python packages PEPs. Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). """ from __future__ import unicode_literals import codecs from email import message_from_file import json import logging import re from . import DistlibException, __version__ from .compat import StringIO, string_types, text_type from .markers import interpret from .util import extract_by_key, get_extras from .version import get_scheme, PEP440_VERSION_RE logger = logging.getLogger(__name__) class MetadataMissingError(DistlibException): """A required metadata is missing""" class MetadataConflictError(DistlibException): """Attempt to read or write metadata fields that are conflictual.""" class MetadataUnrecognizedVersionError(DistlibException): """Unknown metadata version number.""" class MetadataInvalidError(DistlibException): """A metadata value is invalid""" # public API of this module __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] # Encoding used for the PKG-INFO files PKG_INFO_ENCODING = 'utf-8' # preferred version. Hopefully will be changed # to 1.2 once PEP 345 is supported everywhere PKG_INFO_PREFERRED_VERSION = '1.1' _LINE_PREFIX_1_2 = re.compile('\n \|') _LINE_PREFIX_PRE_1_2 = re.compile('\n ') _241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'License') _314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes', 'Provides', 'Requires') _314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', 'Download-URL') _345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Requires-External') _345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Obsoletes-Dist', 'Requires-External', 'Maintainer', 'Maintainer-email', 'Project-URL') _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Requires-External', 'Private-Version', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', 'Provides-Extra') _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') _ALL_FIELDS = set() _ALL_FIELDS.update(_241_FIELDS) _ALL_FIELDS.update(_314_FIELDS) _ALL_FIELDS.update(_345_FIELDS) _ALL_FIELDS.update(_426_FIELDS) EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') def _version2fieldlist(version): if version == '1.0': return _241_FIELDS elif version == '1.1': return _314_FIELDS elif version == '1.2': return _345_FIELDS elif version == '2.0': return _426_FIELDS raise MetadataUnrecognizedVersionError(version) def _best_version(fields): """Detect the best version depending on the fields used.""" def _has_marker(keys, markers): for marker in markers: if marker in keys: return True 
return False keys = [] for key, value in fields.items(): if value in ([], 'UNKNOWN', None): continue keys.append(key) possible_versions = ['1.0', '1.1', '1.2', '2.0'] # first let's try to see if a field is not part of one of the versions for key in keys: if key not in _241_FIELDS and '1.0' in possible_versions: possible_versions.remove('1.0') if key not in _314_FIELDS and '1.1' in possible_versions: possible_versions.remove('1.1') if key not in _345_FIELDS and '1.2' in possible_versions: possible_versions.remove('1.2') if key not in _426_FIELDS and '2.0' in possible_versions: possible_versions.remove('2.0') # possible_versions now contains only the qualifying versions if len(possible_versions) == 1: return possible_versions[0] # found! elif len(possible_versions) == 0: raise MetadataConflictError('Unknown metadata set') # let's see if a unique marker is found is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1: raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields') # we have the choice, 1.0, or 1.2, or 2.0 # - 1.0 has a broken Summary field but works with all tools # - 1.1 is to be avoided # - 1.2 fixes Summary but has little adoption # - 2.0 adds more features and is very new if not is_1_1 and not is_1_2 and not is_2_0: # we couldn't find any specific marker if PKG_INFO_PREFERRED_VERSION in possible_versions: return PKG_INFO_PREFERRED_VERSION if is_1_1: return '1.1' if is_1_2: return '1.2' return '2.0' _ATTR2FIELD = { 'metadata_version': 'Metadata-Version', 'name': 'Name', 'version': 'Version', 'platform': 'Platform', 'supported_platform': 'Supported-Platform', 'summary': 'Summary', 'description': 'Description', 'keywords': 'Keywords', 'home_page': 'Home-page', 'author': 'Author', 'author_email': 'Author-email', 'maintainer': 'Maintainer', 'maintainer_email': 'Maintainer-email', 'license': 'License', 'classifier': 'Classifier', 'download_url': 'Download-URL', 'obsoletes_dist': 'Obsoletes-Dist', 'provides_dist': 'Provides-Dist', 'requires_dist': 'Requires-Dist', 'setup_requires_dist': 'Setup-Requires-Dist', 'requires_python': 'Requires-Python', 'requires_external': 'Requires-External', 'requires': 'Requires', 'provides': 'Provides', 'obsoletes': 'Obsoletes', 'project_url': 'Project-URL', 'private_version': 'Private-Version', 'obsoleted_by': 'Obsoleted-By', 'extension': 'Extension', 'provides_extra': 'Provides-Extra', } _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') _VERSIONS_FIELDS = ('Requires-Python',) _VERSION_FIELDS = ('Version',) _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', 'Requires', 'Provides', 'Obsoletes-Dist', 'Provides-Dist', 'Requires-Dist', 'Requires-External', 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', 'Provides-Extra', 'Extension') _LISTTUPLEFIELDS = ('Project-URL',) _ELEMENTSFIELD = ('Keywords',) _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') _MISSING = object() _FILESAFE = re.compile('[^A-Za-z0-9.]+') def _get_name_and_version(name, version, for_filename=False): """Return the distribution name with version. If for_filename is true, return a filename-escaped form.""" if for_filename: # For both name and version any runs of non-alphanumeric or '.' # characters are replaced with a single '-'. Additionally any # spaces in the version string become '.'
name = _FILESAFE.sub('-', name) version = _FILESAFE.sub('-', version.replace(' ', '.')) return '%s-%s' % (name, version) class LegacyMetadata(object): """The legacy metadata of a release. Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can instantiate the class with one of these arguments (or none): - *path*, the path to a metadata file - *fileobj* give a file-like object with metadata as content - *mapping* is a dict-like object - *scheme* is a version scheme name """ # TODO document the mapping API and UNKNOWN default key def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._fields = {} self.requires_files = [] self._dependencies = None self.scheme = scheme if path is not None: self.read(path) elif fileobj is not None: self.read_file(fileobj) elif mapping is not None: self.update(mapping) self.set_metadata_version() def set_metadata_version(self): self._fields['Metadata-Version'] = _best_version(self._fields) def _write_field(self, fileobj, name, value): fileobj.write('%s: %s\n' % (name, value)) def __getitem__(self, name): return self.get(name) def __setitem__(self, name, value): return self.set(name, value) def __delitem__(self, name): field_name = self._convert_name(name) try: del self._fields[field_name] except KeyError: raise KeyError(name) def __contains__(self, name): return (name in self._fields or self._convert_name(name) in self._fields) def _convert_name(self, name): if name in _ALL_FIELDS: return name name = name.replace('-', '_').lower() return _ATTR2FIELD.get(name, name) def _default_value(self, name): if name in _LISTFIELDS or name in _ELEMENTSFIELD: return [] return 'UNKNOWN' def _remove_line_prefix(self, value): if self.metadata_version in ('1.0', '1.1'): return _LINE_PREFIX_PRE_1_2.sub('\n', value) else: return _LINE_PREFIX_1_2.sub('\n', value) def __getattr__(self, name): if name in _ATTR2FIELD: return self[name] raise AttributeError(name) # # Public API # # dependencies = property(_get_dependencies, _set_dependencies) def get_fullname(self, filesafe=False): """Return the distribution name with version. 
If filesafe is true, return a filename-escaped form.""" return _get_name_and_version(self['Name'], self['Version'], filesafe) def is_field(self, name): """return True if name is a valid metadata key""" name = self._convert_name(name) return name in _ALL_FIELDS def is_multi_field(self, name): name = self._convert_name(name) return name in _LISTFIELDS def read(self, filepath): """Read the metadata values from a file path.""" fp = codecs.open(filepath, 'r', encoding='utf-8') try: self.read_file(fp) finally: fp.close() def read_file(self, fileob): """Read the metadata values from a file object.""" msg = message_from_file(fileob) self._fields['Metadata-Version'] = msg['metadata-version'] # When reading, get all the fields we can for field in _ALL_FIELDS: if field not in msg: continue if field in _LISTFIELDS: # we can have multiple lines values = msg.get_all(field) if field in _LISTTUPLEFIELDS and values is not None: values = [tuple(value.split(',')) for value in values] self.set(field, values) else: # single line value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) self.set_metadata_version() def write(self, filepath, skip_unknown=False): """Write the metadata fields to filepath.""" fp = codecs.open(filepath, 'w', encoding='utf-8') try: self.write_file(fp, skip_unknown) finally: fp.close() def write_file(self, fileobject, skip_unknown=False): """Write the PKG-INFO format data to a file object.""" self.set_metadata_version() for field in _version2fieldlist(self['Metadata-Version']): values = self.get(field) if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): continue if field in _ELEMENTSFIELD: self._write_field(fileobject, field, ','.join(values)) continue if field not in _LISTFIELDS: if field == 'Description': if self.metadata_version in ('1.0', '1.1'): values = values.replace('\n', '\n ') else: values = values.replace('\n', '\n |') values = [values] if field in _LISTTUPLEFIELDS: values = [','.join(value) for value in values] for value in values: self._write_field(fileobject, field, value) def update(self, other=None, **kwargs): """Set metadata values from the given iterable `other` and kwargs. Behavior is like `dict.update`: If `other` has a ``keys`` method, they are looped over and ``self[key]`` is assigned ``other[key]``. Else, ``other`` is an iterable of ``(key, value)`` iterables. Keys that don't match a metadata field or that have an empty value are dropped. 
""" def _set(key, value): if key in _ATTR2FIELD and value: self.set(self._convert_name(key), value) if not other: # other is None or empty container pass elif hasattr(other, 'keys'): for k in other.keys(): _set(k, other[k]) else: for k, v in other: _set(k, v) if kwargs: for k, v in kwargs.items(): _set(k, v) def set(self, name, value): """Control then set a metadata field.""" name = self._convert_name(name) if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [v.strip() for v in value.split(',')] else: value = [] elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [value] else: value = [] if logger.isEnabledFor(logging.WARNING): project_name = self['Name'] scheme = get_scheme(self.scheme) if name in _PREDICATE_FIELDS and value is not None: for v in value: # check that the values are valid if not scheme.is_valid_matcher(v.split(';')[0]): logger.warning( "'%s': '%s' is not valid (field '%s')", project_name, v, name) # FIXME this rejects UNKNOWN, is that right? elif name in _VERSIONS_FIELDS and value is not None: if not scheme.is_valid_constraint_list(value): logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) elif name in _VERSION_FIELDS and value is not None: if not scheme.is_valid_version(value): logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) if name in _UNICODEFIELDS: if name == 'Description': value = self._remove_line_prefix(value) self._fields[name] = value def get(self, name, default=_MISSING): """Get a metadata field.""" name = self._convert_name(name) if name not in self._fields: if default is _MISSING: default = self._default_value(name) return default if name in _UNICODEFIELDS: value = self._fields[name] return value elif name in _LISTFIELDS: value = self._fields[name] if value is None: return [] res = [] for val in value: if name not in _LISTTUPLEFIELDS: res.append(val) else: # That's for Project-URL res.append((val[0], val[1])) return res elif name in _ELEMENTSFIELD: value = self._fields[name] if isinstance(value, string_types): return value.split(',') return self._fields[name] def check(self, strict=False): """Check if the metadata is compliant. If strict is True then raise if no Name or Version are provided""" self.set_metadata_version() # XXX should check the versions (if the file was loaded) missing, warnings = [], [] for attr in ('Name', 'Version'): # required by PEP 345 if attr not in self: missing.append(attr) if strict and missing != []: msg = 'missing required metadata: %s' % ', '.join(missing) raise MetadataMissingError(msg) for attr in ('Home-page', 'Author'): if attr not in self: missing.append(attr) # checking metadata 1.2 (XXX needs to check 1.1, 1.0) if self['Metadata-Version'] != '1.2': return missing, warnings scheme = get_scheme(self.scheme) def are_valid_constraints(value): for v in value: if not scheme.is_valid_matcher(v.split(';')[0]): return False return True for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), (_VERSIONS_FIELDS, scheme.is_valid_constraint_list), (_VERSION_FIELDS, scheme.is_valid_version)): for field in fields: value = self.get(field, None) if value is not None and not controller(value): warnings.append("Wrong value for '%s': %s" % (field, value)) return missing, warnings def todict(self, skip_missing=False): """Return fields as a dict. 
Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page). """ self.set_metadata_version() mapping_1_0 = ( ('metadata_version', 'Metadata-Version'), ('name', 'Name'), ('version', 'Version'), ('summary', 'Summary'), ('home_page', 'Home-page'), ('author', 'Author'), ('author_email', 'Author-email'), ('license', 'License'), ('description', 'Description'), ('keywords', 'Keywords'), ('platform', 'Platform'), ('classifiers', 'Classifier'), ('download_url', 'Download-URL'), ) data = {} for key, field_name in mapping_1_0: if not skip_missing or field_name in self._fields: data[key] = self[field_name] if self['Metadata-Version'] == '1.2': mapping_1_2 = ( ('requires_dist', 'Requires-Dist'), ('requires_python', 'Requires-Python'), ('requires_external', 'Requires-External'), ('provides_dist', 'Provides-Dist'), ('obsoletes_dist', 'Obsoletes-Dist'), ('project_url', 'Project-URL'), ('maintainer', 'Maintainer'), ('maintainer_email', 'Maintainer-email'), ) for key, field_name in mapping_1_2: if not skip_missing or field_name in self._fields: if key != 'project_url': data[key] = self[field_name] else: data[key] = [','.join(u) for u in self[field_name]] elif self['Metadata-Version'] == '1.1': mapping_1_1 = ( ('provides', 'Provides'), ('requires', 'Requires'), ('obsoletes', 'Obsoletes'), ) for key, field_name in mapping_1_1: if not skip_missing or field_name in self._fields: data[key] = self[field_name] return data def add_requirements(self, requirements): if self['Metadata-Version'] == '1.1': # we can't have 1.1 metadata *and* Setuptools requires for field in ('Obsoletes', 'Requires', 'Provides'): if field in self: del self[field] self['Requires-Dist'] += requirements # Mapping API # TODO could add iter* variants def keys(self): return list(_version2fieldlist(self['Metadata-Version'])) def __iter__(self): for key in self.keys(): yield key def values(self): return [self[key] for key in self.keys()] def items(self): return [(key, self[key]) for key in self.keys()] def __repr__(self): return '<%s %s %s>' % (self.__class__.__name__, self.name, self.version) METADATA_FILENAME = 'pydist.json' WHEEL_METADATA_FILENAME = 'metadata.json' class Metadata(object): """ The metadata of a release. This implementation uses 2.0 (JSON) metadata where possible. If not possible, it wraps a LegacyMetadata instance which handles the key-value metadata format. 
""" METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$') NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) VERSION_MATCHER = PEP440_VERSION_RE SUMMARY_MATCHER = re.compile('.{1,2047}') METADATA_VERSION = '2.0' GENERATOR = 'distlib (%s)' % __version__ MANDATORY_KEYS = { 'name': (), 'version': (), 'summary': ('legacy',), } INDEX_KEYS = ('name version license summary description author ' 'author_email keywords platform home_page classifiers ' 'download_url') DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' 'dev_requires provides meta_requires obsoleted_by ' 'supports_environments') SYNTAX_VALIDATORS = { 'metadata_version': (METADATA_VERSION_MATCHER, ()), 'name': (NAME_MATCHER, ('legacy',)), 'version': (VERSION_MATCHER, ('legacy',)), 'summary': (SUMMARY_MATCHER, ('legacy',)), } __slots__ = ('_legacy', '_data', 'scheme') def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._legacy = None self._data = None self.scheme = scheme #import pdb; pdb.set_trace() if mapping is not None: try: self._validate_mapping(mapping, scheme) self._data = mapping except MetadataUnrecognizedVersionError: self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) self.validate() else: data = None if path: with open(path, 'rb') as f: data = f.read() elif fileobj: data = fileobj.read() if data is None: # Initialised with no args - to be added self._data = { 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } else: if not isinstance(data, text_type): data = data.decode('utf-8') try: self._data = json.loads(data) self._validate_mapping(self._data, scheme) except ValueError: # Note: MetadataUnrecognizedVersionError does not # inherit from ValueError (it's a DistlibException, # which should not inherit from ValueError). 
# The ValueError comes from the json.load - if that # succeeds and we get a validation error, we want # that to propagate self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme) self.validate() common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) none_list = (None, list) none_dict = (None, dict) mapped_keys = { 'run_requires': ('Requires-Dist', list), 'build_requires': ('Setup-Requires-Dist', list), 'dev_requires': none_list, 'test_requires': none_list, 'meta_requires': none_list, 'extras': ('Provides-Extra', list), 'modules': none_list, 'namespaces': none_list, 'exports': none_dict, 'commands': none_dict, 'classifiers': ('Classifier', list), 'source_url': ('Download-URL', None), 'metadata_version': ('Metadata-Version', None), } del none_list, none_dict def __getattribute__(self, key): common = object.__getattribute__(self, 'common_keys') mapped = object.__getattribute__(self, 'mapped_keys') if key in mapped: lk, maker = mapped[key] if self._legacy: if lk is None: result = None if maker is None else maker() else: result = self._legacy.get(lk) else: value = None if maker is None else maker() if key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): result = self._data.get(key, value) else: # special cases for PEP 459 sentinel = object() result = sentinel d = self._data.get('extensions') if d: if key == 'commands': result = d.get('python.commands', value) elif key == 'classifiers': d = d.get('python.details') if d: result = d.get(key, value) else: d = d.get('python.exports') if not d: d = self._data.get('python.exports') if d: result = d.get(key, value) if result is sentinel: result = value elif key not in common: result = object.__getattribute__(self, key) elif self._legacy: result = self._legacy.get(key) else: result = self._data.get(key) return result def _validate_value(self, key, value, scheme=None): if key in self.SYNTAX_VALIDATORS: pattern, exclusions = self.SYNTAX_VALIDATORS[key] if (scheme or self.scheme) not in exclusions: m = pattern.match(value) if not m: raise MetadataInvalidError("'%s' is an invalid value for " "the '%s' property" % (value, key)) def __setattr__(self, key, value): self._validate_value(key, value) common = object.__getattribute__(self, 'common_keys') mapped = object.__getattribute__(self, 'mapped_keys') if key in mapped: lk, _ = mapped[key] if self._legacy: if lk is None: raise NotImplementedError self._legacy[lk] = value elif key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): self._data[key] = value else: # special cases for PEP 459 d = self._data.setdefault('extensions', {}) if key == 'commands': d['python.commands'] = value elif key == 'classifiers': d = d.setdefault('python.details', {}) d[key] = value else: d = d.setdefault('python.exports', {}) d[key] = value elif key not in common: object.__setattr__(self, key, value) else: if key == 'keywords': if isinstance(value, string_types): value = value.strip() if value: value = value.split() else: value = [] if self._legacy: self._legacy[key] = value else: self._data[key] = value @property def name_and_version(self): return _get_name_and_version(self.name, self.version, True) @property def provides(self): if self._legacy: result = self._legacy['Provides-Dist'] else: result = self._data.setdefault('provides', []) s = '%s (%s)' % (self.name, self.version) if s not in result: result.append(s) return result @provides.setter def provides(self, value): if self._legacy: self._legacy['Provides-Dist'] = value else: self._data['provides'] = 
value def get_requirements(self, reqts, extras=None, env=None): """ Base method to get dependencies, given a set of extras to satisfy and an optional environment context. :param reqts: A list of sometimes-wanted dependencies, perhaps dependent on extras and environment. :param extras: A list of optional components being requested. :param env: An optional environment for marker evaluation. """ if self._legacy: result = reqts else: result = [] extras = get_extras(extras or [], self.extras) for d in reqts: if 'extra' not in d and 'environment' not in d: # unconditional include = True else: if 'extra' not in d: # Not extra-dependent - only environment-dependent include = True else: include = d.get('extra') in extras if include: # Not excluded because of extras, check environment marker = d.get('environment') if marker: include = interpret(marker, env) if include: result.extend(d['requires']) for key in ('build', 'dev', 'test'): e = ':%s:' % key if e in extras: extras.remove(e) # A recursive call, but it should terminate since 'test' # has been removed from the extras reqts = self._data.get('%s_requires' % key, []) result.extend(self.get_requirements(reqts, extras=extras, env=env)) return result @property def dictionary(self): if self._legacy: return self._from_legacy() return self._data @property def dependencies(self): if self._legacy: raise NotImplementedError else: return extract_by_key(self._data, self.DEPENDENCY_KEYS) @dependencies.setter def dependencies(self, value): if self._legacy: raise NotImplementedError else: self._data.update(value) def _validate_mapping(self, mapping, scheme): if mapping.get('metadata_version') != self.METADATA_VERSION: raise MetadataUnrecognizedVersionError() missing = [] for key, exclusions in self.MANDATORY_KEYS.items(): if key not in mapping: if scheme not in exclusions: missing.append(key) if missing: msg = 'Missing metadata items: %s' % ', '.join(missing) raise MetadataMissingError(msg) for k, v in mapping.items(): self._validate_value(k, v, scheme) def validate(self): if self._legacy: missing, warnings = self._legacy.check(True) if missing or warnings: logger.warning('Metadata: missing: %s, warnings: %s', missing, warnings) else: self._validate_mapping(self._data, self.scheme) def todict(self): if self._legacy: return self._legacy.todict(True) else: result = extract_by_key(self._data, self.INDEX_KEYS) return result def _from_legacy(self): assert self._legacy and not self._data result = { 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } lmd = self._legacy.todict(True) # skip missing ones for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'): if k in lmd: if k == 'classifier': nk = 'classifiers' else: nk = k result[nk] = lmd[k] kw = lmd.get('Keywords', []) if kw == ['']: kw = [] result['keywords'] = kw keys = (('requires_dist', 'run_requires'), ('setup_requires_dist', 'build_requires')) for ok, nk in keys: if ok in lmd and lmd[ok]: result[nk] = [{'requires': lmd[ok]}] result['provides'] = self.provides author = {} maintainer = {} return result LEGACY_MAPPING = { 'name': 'Name', 'version': 'Version', 'license': 'License', 'summary': 'Summary', 'description': 'Description', 'classifiers': 'Classifier', } def _to_legacy(self): def process_entries(entries): reqts = set() for e in entries: extra = e.get('extra') env = e.get('environment') rlist = e['requires'] for r in rlist: if not env and not extra: reqts.add(r) else: marker = '' if extra: marker = 'extra == "%s"' % extra if env: if marker: marker = '(%s) and 
%s' % (env, marker) else: marker = env reqts.add(';'.join((r, marker))) return reqts assert self._data and not self._legacy result = LegacyMetadata() nmd = self._data for nk, ok in self.LEGACY_MAPPING.items(): if nk in nmd: result[ok] = nmd[nk] r1 = process_entries(self.run_requires + self.meta_requires) r2 = process_entries(self.build_requires + self.dev_requires) if self.extras: result['Provides-Extra'] = sorted(self.extras) result['Requires-Dist'] = sorted(r1) result['Setup-Requires-Dist'] = sorted(r2) # TODO: other fields such as contacts return result def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): if [path, fileobj].count(None) != 1: raise ValueError('Exactly one of path and fileobj is needed') self.validate() if legacy: if self._legacy: legacy_md = self._legacy else: legacy_md = self._to_legacy() if path: legacy_md.write(path, skip_unknown=skip_unknown) else: legacy_md.write_file(fileobj, skip_unknown=skip_unknown) else: if self._legacy: d = self._from_legacy() else: d = self._data if fileobj: json.dump(d, fileobj, ensure_ascii=True, indent=2, sort_keys=True) else: with codecs.open(path, 'w', 'utf-8') as f: json.dump(d, f, ensure_ascii=True, indent=2, sort_keys=True) def add_requirements(self, requirements): if self._legacy: self._legacy.add_requirements(requirements) else: run_requires = self._data.setdefault('run_requires', []) always = None for entry in run_requires: if 'environment' not in entry and 'extra' not in entry: always = entry break if always is None: always = { 'requires': requirements } run_requires.insert(0, always) else: rset = set(always['requires']) | set(requirements) always['requires'] = sorted(rset) def __repr__(self): name = self.name or '(no name)' version = self.version or 'no version' return '<%s %s %s (%s)>' % (self.__class__.__name__, self.metadata_version, name, version)
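# --- Editor's illustrative sketch (not part of the distlib source above) ---
# A minimal, hedged example of driving the LegacyMetadata class defined
# above: populate it from a mapping, let _best_version() pick the metadata
# version, and serialize it in PKG-INFO key/value form. The package name and
# field values are hypothetical, and io.StringIO stands in for the module's
# .compat.StringIO.
from io import StringIO

md = LegacyMetadata(mapping={
    'name': 'example-dist',       # stored under the 'Name' field
    'version': '0.1.0',           # stored under 'Version'
    'summary': 'A demo package',  # stored under 'Summary'
})
# Only 1.0-era fields are set and no version-specific marker is present, so
# _best_version() falls back to PKG_INFO_PREFERRED_VERSION ('1.1').
assert md['Metadata-Version'] == '1.1'

buf = StringIO()
md.write_file(buf, skip_unknown=True)  # writes 'Field: value' lines
print(buf.getvalue())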
mit
-134,777,574,496,205,780
35.360487
84
0.521489
false
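# --- Editor's illustrative sketch (hypothetical values, appended after the
# distlib record above) ---
# The JSON-backed Metadata class from the same module accepts a 2.0 mapping
# directly; get_requirements() then resolves run_requires entries against
# the requested extras.
pydist = {
    'metadata_version': '2.0',
    'name': 'example-dist',
    'version': '0.1.0',
    'summary': 'A demo package',
    'extras': ['test'],
    'run_requires': [
        {'requires': ['requests (>=2.0)']},       # unconditional dependency
        {'requires': ['mock'], 'extra': 'test'},  # only when [test] is requested
    ],
}
md = Metadata(mapping=pydist)
print(md.name_and_version)                                    # example-dist-0.1.0
print(md.get_requirements(md.run_requires))                   # ['requests (>=2.0)']
print(md.get_requirements(md.run_requires, extras=['test']))  # adds 'mock'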
Elettronik/SickRage
lib/hachoir_metadata/metadata.py
54
9258
# -*- coding: utf-8 -*- from hachoir_core.compatibility import any, sorted from hachoir_core.endian import endian_name from hachoir_core.tools import makePrintable, makeUnicode from hachoir_core.dict import Dict from hachoir_core.error import error, HACHOIR_ERRORS from hachoir_core.i18n import _ from hachoir_core.log import Logger from hachoir_metadata.metadata_item import ( MIN_PRIORITY, MAX_PRIORITY, QUALITY_NORMAL) from hachoir_metadata.register import registerAllItems extractors = {} class Metadata(Logger): header = u"Metadata" def __init__(self, parent, quality=QUALITY_NORMAL): assert isinstance(self.header, unicode) # Limit to 0.0 .. 1.0 if parent: quality = parent.quality else: quality = min(max(0.0, quality), 1.0) object.__init__(self) object.__setattr__(self, "_Metadata__data", {}) object.__setattr__(self, "quality", quality) header = self.__class__.header object.__setattr__(self, "_Metadata__header", header) registerAllItems(self) def _logger(self): pass def __setattr__(self, key, value): """ Add a new value to data with name 'key'. Skip duplicates. """ # Invalid key? if key not in self.__data: raise KeyError(_("%s has no metadata '%s'") % (self.__class__.__name__, key)) # Skip duplicates self.__data[key].add(value) def setHeader(self, text): object.__setattr__(self, "header", text) def getItems(self, key): try: return self.__data[key] except LookupError: raise ValueError("Metadata has no value '%s'" % key) def getItem(self, key, index): try: return self.getItems(key)[index] except (LookupError, ValueError): return None def has(self, key): return 1 <= len(self.getItems(key)) def get(self, key, default=None, index=0): """ Read first value of tag with name 'key'. >>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.get('duration') datetime.timedelta(0, 2300) >>> a.get('author', u'Anonymous') u'Anonymous' """ item = self.getItem(key, index) if item is None: if default is None: raise ValueError("Metadata has no value '%s' (index %s)" % (key, index)) else: return default return item.value def getValues(self, key): try: data = self.__data[key] except LookupError: raise ValueError("Metadata has no value '%s'" % key) return [ item.value for item in data ] def getText(self, key, default=None, index=0): """ Read first value, as unicode string, of tag with name 'key'. >>> from datetime import timedelta >>> a = RootMetadata() >>> a.duration = timedelta(seconds=2300) >>> a.getText('duration') u'38 min 20 sec' >>> a.getText('titre', u'Unknown') u'Unknown' """ item = self.getItem(key, index) if item is not None: return item.text else: return default def register(self, data): assert data.key not in self.__data data.metadata = self self.__data[data.key] = data def __iter__(self): return self.__data.itervalues() def __str__(self): r""" Create a multi-line ASCII string (end of line is "\n") which represents all datas. >>> a = RootMetadata() >>> a.author = "haypo" >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print a Metadata: - Author: haypo - Copyright: \xa9 Hachoir @see __unicode__() and exportPlaintext() """ text = self.exportPlaintext() return "\n".join( makePrintable(line, "ASCII") for line in text ) def __unicode__(self): r""" Create a multi-line Unicode string (end of line is "\n") which represents all datas. 
>>> a = RootMetadata() >>> a.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(unicode(a)) u'Metadata:\n- Copyright: \xa9 Hachoir' @see __str__() and exportPlaintext() """ return "\n".join(self.exportPlaintext()) def exportPlaintext(self, priority=None, human=True, line_prefix=u"- ", title=None): r""" Convert metadata to a multi-line Unicode string and skip data with priority lower than the specified priority. Default priority is Metadata.MAX_PRIORITY. If the human flag is True, data keys are translated to more human-readable names (e.g. "bit_rate" becomes "Bit rate") which may be translated using gettext. If priority is too small, the metadata is empty and so None is returned. >>> print RootMetadata().exportPlaintext() None >>> meta = RootMetadata() >>> meta.copyright = unicode("© Hachoir", "UTF-8") >>> print repr(meta.exportPlaintext()) [u'Metadata:', u'- Copyright: \xa9 Hachoir'] @see __str__() and __unicode__() """ if priority is not None: priority = max(priority, MIN_PRIORITY) priority = min(priority, MAX_PRIORITY) else: priority = MAX_PRIORITY if not title: title = self.header text = ["%s:" % title] for data in sorted(self): if priority < data.priority: break if not data.values: continue if human: title = data.description else: title = data.key for item in data.values: if human: value = item.text else: value = makeUnicode(item.value) text.append("%s%s: %s" % (line_prefix, title, value)) if 1 < len(text): return text else: return None def __nonzero__(self): return any(item for item in self.__data.itervalues()) class RootMetadata(Metadata): def __init__(self, quality=QUALITY_NORMAL): Metadata.__init__(self, None, quality) class MultipleMetadata(RootMetadata): header = _("Common") def __init__(self, quality=QUALITY_NORMAL): RootMetadata.__init__(self, quality) object.__setattr__(self, "_MultipleMetadata__groups", Dict()) object.__setattr__(self, "_MultipleMetadata__key_counter", {}) def __contains__(self, key): return key in self.__groups def __getitem__(self, key): return self.__groups[key] def iterGroups(self): return self.__groups.itervalues() def __nonzero__(self): if RootMetadata.__nonzero__(self): return True return any(bool(group) for group in self.__groups) def addGroup(self, key, metadata, header=None): """ Add a new group (metadata of a sub-document). Returns False if the group is skipped, True if it has been added. """ if not metadata: self.warning("Skip empty group %s" % key) return False if key.endswith("[]"): key = key[:-2] if key in self.__key_counter: self.__key_counter[key] += 1 else: self.__key_counter[key] = 1 key += "[%u]" % self.__key_counter[key] if header: metadata.setHeader(header) self.__groups.append(key, metadata) return True def exportPlaintext(self, priority=None, human=True, line_prefix=u"- "): common = Metadata.exportPlaintext(self, priority, human, line_prefix) if common: text = common else: text = [] for key, metadata in self.__groups.iteritems(): if not human: title = key else: title = None value = metadata.exportPlaintext(priority, human, line_prefix, title=title) if value: text.extend(value) if len(text): return text else: return None def registerExtractor(parser, extractor): assert parser not in extractors assert issubclass(extractor, RootMetadata) extractors[parser] = extractor def extractMetadata(parser, quality=QUALITY_NORMAL): """ Create a Metadata class from a parser. Returns None if no metadata extractor exists for the parser class.
""" try: extractor = extractors[parser.__class__] except KeyError: return None metadata = extractor(quality) try: metadata.extract(parser) except HACHOIR_ERRORS, err: error("Error during metadata extraction: %s" % unicode(err)) return None except Exception, err: error("Error during metadata extraction: %s" % unicode(err)) return None if metadata: metadata.mime_type = parser.mime_type metadata.endian = endian_name[parser.endian] return metadata
gpl-3.0
4,867,206,861,105,352,000
30.372881
89
0.559589
false
cloudbase/neutron-virtualbox
neutron/plugins/ml2/plugin.py
1
65394
# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib from eventlet import greenthread from oslo_concurrency import lockutils from oslo_config import cfg from oslo_db import exception as os_db_exception from oslo_serialization import jsonutils from oslo_utils import excutils from oslo_utils import importutils from sqlalchemy import exc as sql_exc from sqlalchemy.orm import exc as sa_exc from neutron.agent import securitygroups_rpc as sg_rpc from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api from neutron.api.rpc.handlers import dhcp_rpc from neutron.api.rpc.handlers import dvr_rpc from neutron.api.rpc.handlers import metadata_rpc from neutron.api.rpc.handlers import securitygroups_rpc from neutron.api.v2 import attributes from neutron.common import constants as const from neutron.common import exceptions as exc from neutron.common import ipv6_utils from neutron.common import rpc as n_rpc from neutron.common import topics from neutron.common import utils from neutron.db import agents_db from neutron.db import agentschedulers_db from neutron.db import allowedaddresspairs_db as addr_pair_db from neutron.db import api as db_api from neutron.db import db_base_plugin_v2 from neutron.db import dvr_mac_db from neutron.db import external_net_db from neutron.db import extradhcpopt_db from neutron.db import models_v2 from neutron.db import quota_db # noqa from neutron.db import securitygroups_rpc_base as sg_db_rpc from neutron.extensions import allowedaddresspairs as addr_pair from neutron.extensions import extra_dhcp_opt as edo_ext from neutron.extensions import l3agentscheduler from neutron.extensions import portbindings from neutron.extensions import providernet as provider from neutron.i18n import _LE, _LI, _LW from neutron import manager from neutron.openstack.common import log from neutron.openstack.common import uuidutils from neutron.plugins.common import constants as service_constants from neutron.plugins.ml2.common import exceptions as ml2_exc from neutron.plugins.ml2 import config # noqa from neutron.plugins.ml2 import db from neutron.plugins.ml2 import driver_api as api from neutron.plugins.ml2 import driver_context from neutron.plugins.ml2 import managers from neutron.plugins.ml2 import models from neutron.plugins.ml2 import rpc LOG = log.getLogger(__name__) MAX_BIND_TRIES = 10 # REVISIT(rkukura): Move this and other network_type constants to # providernet.py? TYPE_MULTI_SEGMENT = 'multi-segment' class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2, dvr_mac_db.DVRDbMixin, external_net_db.External_net_db_mixin, sg_db_rpc.SecurityGroupServerRpcMixin, agentschedulers_db.DhcpAgentSchedulerDbMixin, addr_pair_db.AllowedAddressPairsMixin, extradhcpopt_db.ExtraDhcpOptMixin): """Implement the Neutron L2 abstractions using modules. Ml2Plugin is a Neutron plugin based on separately extensible sets of network types and mechanisms for connecting to networks of those types. 
The network types and mechanisms are implemented as drivers loaded via Python entry points. Networks can be made up of multiple segments (not yet fully implemented). """ # This attribute specifies whether the plugin supports or not # bulk/pagination/sorting operations. Name mangling is used in # order to ensure it is qualified by class __native_bulk_support = True __native_pagination_support = True __native_sorting_support = True # List of supported extensions _supported_extension_aliases = ["provider", "external-net", "binding", "quotas", "security-group", "agent", "dhcp_agent_scheduler", "multi-provider", "allowed-address-pairs", "extra_dhcp_opt"] @property def supported_extension_aliases(self): if not hasattr(self, '_aliases'): aliases = self._supported_extension_aliases[:] aliases += self.extension_manager.extension_aliases() sg_rpc.disable_security_group_extension_by_config(aliases) self._aliases = aliases return self._aliases def __init__(self): # First load drivers, then initialize DB, then initialize drivers self.type_manager = managers.TypeManager() self.extension_manager = managers.ExtensionManager() self.mechanism_manager = managers.MechanismManager() super(Ml2Plugin, self).__init__() self.type_manager.initialize() self.extension_manager.initialize() self.mechanism_manager.initialize() self._setup_rpc() # REVISIT(rkukura): Use stevedore for these? self.network_scheduler = importutils.import_object( cfg.CONF.network_scheduler_driver ) self.start_periodic_dhcp_agent_status_check() LOG.info(_LI("Modular L2 Plugin initialization complete")) def _setup_rpc(self): self.notifier = rpc.AgentNotifierApi(topics.AGENT) self.agent_notifiers[const.AGENT_TYPE_DHCP] = ( dhcp_rpc_agent_api.DhcpAgentNotifyAPI() ) def start_rpc_listeners(self): self.endpoints = [rpc.RpcCallbacks(self.notifier, self.type_manager), securitygroups_rpc.SecurityGroupServerRpcCallback(), dvr_rpc.DVRServerRpcCallback(), dhcp_rpc.DhcpRpcCallback(), agents_db.AgentExtRpcCallback(), metadata_rpc.MetadataRpcCallback()] self.topic = topics.PLUGIN self.conn = n_rpc.create_connection(new=True) self.conn.create_consumer(self.topic, self.endpoints, fanout=False) return self.conn.consume_in_threads() def _filter_nets_provider(self, context, networks, filters): return [network for network in networks if self.type_manager.network_matches_filters(network, filters) ] def _notify_l3_agent_new_port(self, context, port): if not port: return # Whenever a DVR serviceable port comes up on a # node, it has to be communicated to the L3 Plugin # and agent for creating the respective namespaces. 
if (utils.is_dvr_serviced(port['device_owner'])): l3plugin = manager.NeutronManager.get_service_plugins().get( service_constants.L3_ROUTER_NAT) if (utils.is_extension_supported( l3plugin, const.L3_DISTRIBUTED_EXT_ALIAS)): l3plugin.dvr_update_router_addvm(context, port) def _get_host_port_if_changed(self, mech_context, attrs): binding = mech_context._binding host = attrs and attrs.get(portbindings.HOST_ID) if (attributes.is_attr_set(host) and binding.host != host): return mech_context.current def _check_mac_update_allowed(self, orig_port, port, binding): unplugged_types = (portbindings.VIF_TYPE_BINDING_FAILED, portbindings.VIF_TYPE_UNBOUND) new_mac = port.get('mac_address') mac_change = (new_mac is not None and orig_port['mac_address'] != new_mac) if (mac_change and binding.vif_type not in unplugged_types): raise exc.PortBound(port_id=orig_port['id'], vif_type=binding.vif_type, old_mac=orig_port['mac_address'], new_mac=port['mac_address']) return mac_change def _process_port_binding(self, mech_context, attrs): session = mech_context._plugin_context.session binding = mech_context._binding port = mech_context.current port_id = port['id'] changes = False host = attrs and attrs.get(portbindings.HOST_ID) original_host = binding.host if (attributes.is_attr_set(host) and original_host != host): binding.host = host changes = True vnic_type = attrs and attrs.get(portbindings.VNIC_TYPE) if (attributes.is_attr_set(vnic_type) and binding.vnic_type != vnic_type): binding.vnic_type = vnic_type changes = True # treat None as clear of profile. profile = None if attrs and portbindings.PROFILE in attrs: profile = attrs.get(portbindings.PROFILE) or {} if profile not in (None, attributes.ATTR_NOT_SPECIFIED, self._get_profile(binding)): binding.profile = jsonutils.dumps(profile) if len(binding.profile) > models.BINDING_PROFILE_LEN: msg = _("binding:profile value too large") raise exc.InvalidInput(error_message=msg) changes = True # Unbind the port if needed. if changes: binding.vif_type = portbindings.VIF_TYPE_UNBOUND binding.vif_details = '' db.clear_binding_levels(session, port_id, original_host) mech_context._clear_binding_levels() if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: binding.vif_type = portbindings.VIF_TYPE_DISTRIBUTED binding.vif_details = '' db.clear_binding_levels(session, port_id, original_host) mech_context._clear_binding_levels() binding.host = '' self._update_port_dict_binding(port, binding) return changes def _bind_port_if_needed(self, context, allow_notify=False, need_notify=False): plugin_context = context._plugin_context port_id = context._port['id'] # Since the mechanism driver bind_port() calls must be made # outside a DB transaction locking the port state, it is # possible (but unlikely) that the port's state could change # concurrently while these calls are being made. If another # thread or process succeeds in binding the port before this # thread commits its results, the already committed results are # used. If attributes such as binding:host_id, # binding:profile, or binding:vnic_type are updated # concurrently, this loop retries binding using the new # values. count = 0 while True: # First, determine whether it is necessary and possible to # bind the port. binding = context._binding if (binding.vif_type not in [portbindings.VIF_TYPE_UNBOUND, portbindings.VIF_TYPE_BINDING_FAILED] or not binding.host): # We either don't need to bind the port, or can't, so # notify if needed and return. 
if allow_notify and need_notify: self._notify_port_updated(context) return context # Limit binding attempts to avoid any possibility of # infinite looping and to ensure an error is logged # instead. This does not need to be tunable because no # more than a couple attempts should ever be required in # normal operation. Log at info level if not 1st attempt. count += 1 if count > MAX_BIND_TRIES: LOG.error(_LE("Failed to commit binding results for %(port)s " "after %(max)s tries"), {'port': port_id, 'max': MAX_BIND_TRIES}) return context if count > 1: greenthread.sleep(0) # yield LOG.info(_LI("Attempt %(count)s to bind port %(port)s"), {'count': count, 'port': port_id}) # The port isn't already bound and the necessary # information is available, so attempt to bind the port. bind_context = self._bind_port(context) # Now try to commit result of attempting to bind the port. new_context, did_commit = self._commit_port_binding( plugin_context, port_id, binding, bind_context) if not new_context: # The port has been deleted concurrently, so just # return the unbound result from the initial # transaction that completed before the deletion. LOG.debug("Port %s has been deleted concurrently", port_id) return context context = new_context if (context._binding.vif_type == portbindings.VIF_TYPE_BINDING_FAILED): return context # Need to notify if we succeed and our results were # committed. need_notify |= did_commit def _bind_port(self, orig_context): # Construct a new PortContext from the one from the previous # transaction. port = orig_context._port orig_binding = orig_context._binding new_binding = models.PortBinding( host=orig_binding.host, vnic_type=orig_binding.vnic_type, profile=orig_binding.profile, vif_type=portbindings.VIF_TYPE_UNBOUND, vif_details='' ) self._update_port_dict_binding(port, new_binding) new_context = driver_context.PortContext( self, orig_context._plugin_context, port, orig_context._network_context._network, new_binding, None) # Attempt to bind the port and return the context with the # result. self.mechanism_manager.bind_port(new_context) return new_context def _commit_port_binding(self, plugin_context, port_id, orig_binding, new_context): session = plugin_context.session new_binding = new_context._binding # After we've attempted to bind the port, we begin a # transaction, get the current port state, and decide whether # to commit the binding results. # # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock wait # timeout' errors. with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): # Get the current port state and build a new PortContext # reflecting this state as original state for subsequent # mechanism driver update_port_*commit() calls. port_db, cur_binding = db.get_locked_port_and_binding(session, port_id) if not port_db: # The port has been deleted concurrently. return (None, None) oport = self._make_port_dict(port_db) port = self._make_port_dict(port_db) network = new_context.network.current if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: # REVISIT(rkukura): The PortBinding instance from the # ml2_port_bindings table, returned as cur_binding # from db.get_locked_port_and_binding() above, is # currently not used for DVR distributed ports, and is # replaced here with the DVRPortBinding instance from # the ml2_dvr_port_bindings table specific to the host # on which the distributed port is being bound. 
It # would be possible to optimize this code to avoid # fetching the PortBinding instance in the DVR case, # and even to avoid creating the unused entry in the # ml2_port_bindings table. But the upcoming resolution # for bug 1367391 will eliminate the # ml2_dvr_port_bindings table, use the # ml2_port_bindings table to store non-host-specific # fields for both distributed and non-distributed # ports, and introduce a new ml2_port_binding_hosts # table for the fields that need to be host-specific # in the distributed case. Since the PortBinding # instance will then be needed, it does not make sense # to optimize this code to avoid fetching it. cur_binding = db.get_dvr_port_binding_by_host( session, port_id, orig_binding.host) cur_context = driver_context.PortContext( self, plugin_context, port, network, cur_binding, None, original_port=oport) # Commit our binding results only if port has not been # successfully bound concurrently by another thread or # process and no binding inputs have been changed. commit = ((cur_binding.vif_type in [portbindings.VIF_TYPE_UNBOUND, portbindings.VIF_TYPE_BINDING_FAILED]) and orig_binding.host == cur_binding.host and orig_binding.vnic_type == cur_binding.vnic_type and orig_binding.profile == cur_binding.profile) if commit: # Update the port's binding state with our binding # results. cur_binding.vif_type = new_binding.vif_type cur_binding.vif_details = new_binding.vif_details db.clear_binding_levels(session, port_id, cur_binding.host) db.set_binding_levels(session, new_context._binding_levels) cur_context._binding_levels = new_context._binding_levels # Update PortContext's port dictionary to reflect the # updated binding state. self._update_port_dict_binding(port, cur_binding) # Update the port status if requested by the bound driver. if (new_context._binding_levels and new_context._new_port_status): port_db.status = new_context._new_port_status port['status'] = new_context._new_port_status # Call the mechanism driver precommit methods, commit # the results, and call the postcommit methods. self.mechanism_manager.update_port_precommit(cur_context) if commit: self.mechanism_manager.update_port_postcommit(cur_context) # Continue, using the port state as of the transaction that # just finished, whether that transaction committed new # results or discovered concurrent port state changes. return (cur_context, commit) def _update_port_dict_binding(self, port, binding): port[portbindings.HOST_ID] = binding.host port[portbindings.VNIC_TYPE] = binding.vnic_type port[portbindings.PROFILE] = self._get_profile(binding) port[portbindings.VIF_TYPE] = binding.vif_type port[portbindings.VIF_DETAILS] = self._get_vif_details(binding) def _get_vif_details(self, binding): if binding.vif_details: try: return jsonutils.loads(binding.vif_details) except Exception: LOG.error(_LE("Serialized vif_details DB value '%(value)s' " "for port %(port)s is invalid"), {'value': binding.vif_details, 'port': binding.port_id}) return {} def _get_profile(self, binding): if binding.profile: try: return jsonutils.loads(binding.profile) except Exception: LOG.error(_LE("Serialized profile DB value '%(value)s' for " "port %(port)s is invalid"), {'value': binding.profile, 'port': binding.port_id}) return {} def _ml2_extend_port_dict_binding(self, port_res, port_db): # None when called during unit tests for other plugins. 
if port_db.port_binding: self._update_port_dict_binding(port_res, port_db.port_binding) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_ml2_extend_port_dict_binding']) # Register extend dict methods for network and port resources. # Each mechanism driver that supports extend attribute for the resources # can add those attribute to the result. db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.NETWORKS, ['_ml2_md_extend_network_dict']) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_ml2_md_extend_port_dict']) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.SUBNETS, ['_ml2_md_extend_subnet_dict']) def _ml2_md_extend_network_dict(self, result, netdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_network_dict(session, netdb, result) def _ml2_md_extend_port_dict(self, result, portdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_port_dict(session, portdb, result) def _ml2_md_extend_subnet_dict(self, result, subnetdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_subnet_dict( session, subnetdb, result) # Note - The following hook methods have "ml2" in their names so # that they are not called twice during unit tests due to global # registration of hooks in portbindings_db.py used by other # plugins. def _ml2_port_model_hook(self, context, original_model, query): query = query.outerjoin(models.PortBinding, (original_model.id == models.PortBinding.port_id)) return query def _ml2_port_result_filter_hook(self, query, filters): values = filters and filters.get(portbindings.HOST_ID, []) if not values: return query return query.filter(models.PortBinding.host.in_(values)) db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook( models_v2.Port, "ml2_port_bindings", '_ml2_port_model_hook', None, '_ml2_port_result_filter_hook') def _notify_port_updated(self, mech_context): port = mech_context._port segment = mech_context.bottom_bound_segment if not segment: # REVISIT(rkukura): This should notify agent to unplug port network = mech_context.network.current LOG.warning(_LW("In _notify_port_updated(), no bound segment for " "port %(port_id)s on network %(network_id)s"), {'port_id': port['id'], 'network_id': network['id']}) return self.notifier.port_update(mech_context._plugin_context, port, segment[api.NETWORK_TYPE], segment[api.SEGMENTATION_ID], segment[api.PHYSICAL_NETWORK]) def _delete_objects(self, context, resource, objects): delete_op = getattr(self, 'delete_%s' % resource) for obj in objects: try: delete_op(context, obj['result']['id']) except KeyError: LOG.exception(_LE("Could not find %s to delete."), resource) except Exception: LOG.exception(_LE("Could not delete %(res)s %(id)s."), {'res': resource, 'id': obj['result']['id']}) def _create_bulk_ml2(self, resource, context, request_items): objects = [] collection = "%ss" % resource items = request_items[collection] try: with context.session.begin(subtransactions=True): obj_creator = getattr(self, '_create_%s_db' % resource) for item in items: attrs = item[resource] result, mech_context = obj_creator(context, item) objects.append({'mech_context': mech_context, 'result': result, 'attributes': attrs}) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("An exception occurred while creating " "the %(resource)s:%(item)s"), 
{'resource': resource, 'item': item}) try: postcommit_op = getattr(self.mechanism_manager, 'create_%s_postcommit' % resource) for obj in objects: postcommit_op(obj['mech_context']) return objects except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): resource_ids = [res['result']['id'] for res in objects] LOG.exception(_LE("mechanism_manager.create_%(res)s" "_postcommit failed for %(res)s: " "'%(failed_id)s'. Deleting " "%(res)ss %(resource_ids)s"), {'res': resource, 'failed_id': obj['result']['id'], 'resource_ids': ', '.join(resource_ids)}) self._delete_objects(context, resource, objects) def _create_network_db(self, context, network): net_data = network[attributes.NETWORK] tenant_id = self._get_tenant_id_for_create(context, net_data) session = context.session with session.begin(subtransactions=True): self._ensure_default_security_group(context, tenant_id) result = super(Ml2Plugin, self).create_network(context, network) self.extension_manager.process_create_network(context, net_data, result) self._process_l3_create(context, result, net_data) net_data['id'] = result['id'] self.type_manager.create_network_segments(context, net_data, tenant_id) self.type_manager.extend_network_dict_provider(context, result) mech_context = driver_context.NetworkContext(self, context, result) self.mechanism_manager.create_network_precommit(mech_context) return result, mech_context def create_network(self, context, network): result, mech_context = self._create_network_db(context, network) try: self.mechanism_manager.create_network_postcommit(mech_context) except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): LOG.error(_LE("mechanism_manager.create_network_postcommit " "failed, deleting network '%s'"), result['id']) self.delete_network(context, result['id']) return result def create_network_bulk(self, context, networks): objects = self._create_bulk_ml2(attributes.NETWORK, context, networks) return [obj['result'] for obj in objects] def update_network(self, context, id, network): provider._raise_if_updates_provider_attributes(network['network']) session = context.session with session.begin(subtransactions=True): original_network = super(Ml2Plugin, self).get_network(context, id) updated_network = super(Ml2Plugin, self).update_network(context, id, network) self.extension_manager.process_update_network(context, network, updated_network) self._process_l3_update(context, updated_network, network['network']) self.type_manager.extend_network_dict_provider(context, updated_network) mech_context = driver_context.NetworkContext( self, context, updated_network, original_network=original_network) self.mechanism_manager.update_network_precommit(mech_context) # TODO(apech) - handle errors raised by update_network, potentially # by re-calling update_network with the previous attributes. For # now the error is propogated to the caller, which is expected to # either undo/retry the operation or delete the resource. 
self.mechanism_manager.update_network_postcommit(mech_context) return updated_network def get_network(self, context, id, fields=None): session = context.session with session.begin(subtransactions=True): result = super(Ml2Plugin, self).get_network(context, id, None) self.type_manager.extend_network_dict_provider(context, result) return self._fields(result, fields) def get_networks(self, context, filters=None, fields=None, sorts=None, limit=None, marker=None, page_reverse=False): session = context.session with session.begin(subtransactions=True): nets = super(Ml2Plugin, self).get_networks(context, filters, None, sorts, limit, marker, page_reverse) for net in nets: self.type_manager.extend_network_dict_provider(context, net) nets = self._filter_nets_provider(context, nets, filters) nets = self._filter_nets_l3(context, nets, filters) return [self._fields(net, fields) for net in nets] def _delete_ports(self, context, ports): for port in ports: try: self.delete_port(context, port.id) except exc.PortNotFound: # concurrent port deletion can be performed by # release_dhcp_port caused by concurrent subnet_delete LOG.info(_LI("Port %s was deleted concurrently"), port.id) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("Exception auto-deleting port %s"), port.id) def _delete_subnets(self, context, subnets): for subnet in subnets: try: self.delete_subnet(context, subnet.id) except exc.SubnetNotFound: LOG.info(_LI("Subnet %s was deleted concurrently"), subnet.id) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("Exception auto-deleting subnet %s"), subnet.id) def delete_network(self, context, id): # REVISIT(rkukura) The super(Ml2Plugin, self).delete_network() # function is not used because it auto-deletes ports and # subnets from the DB without invoking the derived class's # delete_port() or delete_subnet(), preventing mechanism # drivers from being called. This approach should be revisited # when the API layer is reworked during icehouse. LOG.debug("Deleting network %s", id) session = context.session while True: try: # REVISIT: Serialize this operation with a semaphore # to prevent deadlock waiting to acquire a DB lock # held by another thread in the same process, leading # to 'lock wait timeout' errors. # # Process L3 first, since, depending on the L3 plugin, it may # involve locking the db-access semaphore, sending RPC # notifications, and/or calling delete_port on this plugin. # Additionally, a rollback may not be enough to undo the # deletion of a floating IP with certain L3 backends. self._process_l3_delete(context, id) # Using query().with_lockmode isn't necessary. Foreign-key # constraints prevent deletion if concurrent creation happens. with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): # Get ports to auto-delete. ports = (session.query(models_v2.Port). enable_eagerloads(False). filter_by(network_id=id).all()) LOG.debug("Ports to auto-delete: %s", ports) only_auto_del = all(p.device_owner in db_base_plugin_v2. AUTO_DELETE_PORT_OWNERS for p in ports) if not only_auto_del: LOG.debug("Tenant-owned ports exist") raise exc.NetworkInUse(net_id=id) # Get subnets to auto-delete. subnets = (session.query(models_v2.Subnet). enable_eagerloads(False). 
filter_by(network_id=id).all()) LOG.debug("Subnets to auto-delete: %s", subnets) if not (ports or subnets): network = self.get_network(context, id) mech_context = driver_context.NetworkContext(self, context, network) self.mechanism_manager.delete_network_precommit( mech_context) self.type_manager.release_network_segments(session, id) record = self._get_network(context, id) LOG.debug("Deleting network record %s", record) session.delete(record) # The segment records are deleted via cascade from the # network record, so explicit removal is not necessary. LOG.debug("Committing transaction") break except os_db_exception.DBError as e: with excutils.save_and_reraise_exception() as ctxt: if isinstance(e.inner_exception, sql_exc.IntegrityError): ctxt.reraise = False LOG.warning(_LW("A concurrent port creation has " "occurred")) continue self._delete_ports(context, ports) self._delete_subnets(context, subnets) try: self.mechanism_manager.delete_network_postcommit(mech_context) except ml2_exc.MechanismDriverError: # TODO(apech) - One or more mechanism driver failed to # delete the network. Ideally we'd notify the caller of # the fact that an error occurred. LOG.error(_LE("mechanism_manager.delete_network_postcommit" " failed")) self.notifier.network_delete(context, id) def _create_subnet_db(self, context, subnet): session = context.session with session.begin(subtransactions=True): result = super(Ml2Plugin, self).create_subnet(context, subnet) self.extension_manager.process_create_subnet(context, subnet, result) mech_context = driver_context.SubnetContext(self, context, result) self.mechanism_manager.create_subnet_precommit(mech_context) return result, mech_context def create_subnet(self, context, subnet): result, mech_context = self._create_subnet_db(context, subnet) try: self.mechanism_manager.create_subnet_postcommit(mech_context) except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): LOG.error(_LE("mechanism_manager.create_subnet_postcommit " "failed, deleting subnet '%s'"), result['id']) self.delete_subnet(context, result['id']) return result def create_subnet_bulk(self, context, subnets): objects = self._create_bulk_ml2(attributes.SUBNET, context, subnets) return [obj['result'] for obj in objects] def update_subnet(self, context, id, subnet): session = context.session with session.begin(subtransactions=True): original_subnet = super(Ml2Plugin, self).get_subnet(context, id) updated_subnet = super(Ml2Plugin, self).update_subnet( context, id, subnet) self.extension_manager.process_update_subnet(context, subnet, updated_subnet) mech_context = driver_context.SubnetContext( self, context, updated_subnet, original_subnet=original_subnet) self.mechanism_manager.update_subnet_precommit(mech_context) # TODO(apech) - handle errors raised by update_subnet, potentially # by re-calling update_subnet with the previous attributes. For # now the error is propogated to the caller, which is expected to # either undo/retry the operation or delete the resource. self.mechanism_manager.update_subnet_postcommit(mech_context) return updated_subnet def delete_subnet(self, context, id): # REVISIT(rkukura) The super(Ml2Plugin, self).delete_subnet() # function is not used because it deallocates the subnet's addresses # from ports in the DB without invoking the derived class's # update_port(), preventing mechanism drivers from being called. # This approach should be revisited when the API layer is reworked # during icehouse. 
LOG.debug("Deleting subnet %s", id) session = context.session while True: # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock # wait timeout' errors. with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): record = self._get_subnet(context, id) subnet = self._make_subnet_dict(record, None) qry_allocated = (session.query(models_v2.IPAllocation). filter_by(subnet_id=id). join(models_v2.Port)) is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet) # Remove network owned ports, and delete IP allocations # for IPv6 addresses which were automatically generated # via SLAAC if not is_auto_addr_subnet: qry_allocated = ( qry_allocated.filter(models_v2.Port.device_owner. in_(db_base_plugin_v2.AUTO_DELETE_PORT_OWNERS))) allocated = qry_allocated.all() # Delete all the IPAllocation that can be auto-deleted if allocated: map(session.delete, allocated) LOG.debug("Ports to auto-deallocate: %s", allocated) # Check if there are more IP allocations, unless # is_auto_address_subnet is True. In that case the check is # unnecessary. This additional check not only would be wasteful # for this class of subnet, but is also error-prone since when # the isolation level is set to READ COMMITTED allocations made # concurrently will be returned by this query if not is_auto_addr_subnet: if self._subnet_check_ip_allocations(context, id): LOG.debug("Found IP allocations on subnet %s, " "cannot delete", id) raise exc.SubnetInUse(subnet_id=id) # If allocated is None, then all the IPAllocation were # correctly deleted during the previous pass. if not allocated: mech_context = driver_context.SubnetContext(self, context, subnet) self.mechanism_manager.delete_subnet_precommit( mech_context) LOG.debug("Deleting subnet record") session.delete(record) LOG.debug("Committing transaction") break for a in allocated: if a.port_id: # calling update_port() for each allocation to remove the # IP from the port and call the MechanismDrivers data = {'port': {'fixed_ips': [{'subnet_id': ip.subnet_id, 'ip_address': ip.ip_address} for ip in a.ports.fixed_ips if ip.subnet_id != id]}} try: self.update_port(context, a.port_id, data) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("Exception deleting fixed_ip " "from port %s"), a.port_id) try: self.mechanism_manager.delete_subnet_postcommit(mech_context) except ml2_exc.MechanismDriverError: # TODO(apech) - One or more mechanism driver failed to # delete the subnet. Ideally we'd notify the caller of # the fact that an error occurred. 
LOG.error(_LE("mechanism_manager.delete_subnet_postcommit failed")) def _create_port_db(self, context, port): attrs = port[attributes.PORT] attrs['status'] = const.PORT_STATUS_DOWN session = context.session with session.begin(subtransactions=True): self._ensure_default_security_group_on_port(context, port) sgids = self._get_security_groups_on_port(context, port) dhcp_opts = port['port'].get(edo_ext.EXTRADHCPOPTS, []) result = super(Ml2Plugin, self).create_port(context, port) self.extension_manager.process_create_port(context, attrs, result) self._process_port_create_security_group(context, result, sgids) network = self.get_network(context, result['network_id']) binding = db.add_port_binding(session, result['id']) mech_context = driver_context.PortContext(self, context, result, network, binding, None) self._process_port_binding(mech_context, attrs) result[addr_pair.ADDRESS_PAIRS] = ( self._process_create_allowed_address_pairs( context, result, attrs.get(addr_pair.ADDRESS_PAIRS))) self._process_port_create_extra_dhcp_opts(context, result, dhcp_opts) self.mechanism_manager.create_port_precommit(mech_context) return result, mech_context def create_port(self, context, port): attrs = port['port'] result, mech_context = self._create_port_db(context, port) new_host_port = self._get_host_port_if_changed(mech_context, attrs) self._notify_l3_agent_new_port(context, new_host_port) try: self.mechanism_manager.create_port_postcommit(mech_context) except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): LOG.error(_LE("mechanism_manager.create_port_postcommit " "failed, deleting port '%s'"), result['id']) self.delete_port(context, result['id']) # REVISIT(rkukura): Is there any point in calling this before # a binding has been successfully established? self.notify_security_groups_member_updated(context, result) try: bound_context = self._bind_port_if_needed(mech_context) except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): LOG.error(_LE("_bind_port_if_needed " "failed, deleting port '%s'"), result['id']) self.delete_port(context, result['id']) return bound_context._port def create_port_bulk(self, context, ports): objects = self._create_bulk_ml2(attributes.PORT, context, ports) # REVISIT(rkukura): Is there any point in calling this before # a binding has been successfully established? results = [obj['result'] for obj in objects] self.notify_security_groups_member_updated_bulk(context, results) for obj in objects: attrs = obj['attributes'] if attrs and attrs.get(portbindings.HOST_ID): new_host_port = self._get_host_port_if_changed( obj['mech_context'], attrs) self._notify_l3_agent_new_port(context, new_host_port) try: for obj in objects: obj['bound_context'] = self._bind_port_if_needed( obj['mech_context']) return [obj['bound_context']._port for obj in objects] except ml2_exc.MechanismDriverError: with excutils.save_and_reraise_exception(): resource_ids = [res['result']['id'] for res in objects] LOG.error(_LE("_bind_port_if_needed failed. 
" "Deleting all ports from create bulk '%s'"), resource_ids) self._delete_objects(context, 'port', objects) def update_port(self, context, id, port): attrs = port['port'] need_port_update_notify = False l3plugin = manager.NeutronManager.get_service_plugins().get( service_constants.L3_ROUTER_NAT) is_dvr_enabled = utils.is_extension_supported( l3plugin, const.L3_DISTRIBUTED_EXT_ALIAS) session = context.session # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock wait # timeout' errors. with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): port_db, binding = db.get_locked_port_and_binding(session, id) if not port_db: raise exc.PortNotFound(port_id=id) mac_address_updated = self._check_mac_update_allowed( port_db, port, binding) need_port_update_notify |= mac_address_updated original_port = self._make_port_dict(port_db) updated_port = super(Ml2Plugin, self).update_port(context, id, port) self.extension_manager.process_update_port(context, attrs, updated_port) if addr_pair.ADDRESS_PAIRS in port['port']: need_port_update_notify |= ( self.update_address_pairs_on_port(context, id, port, original_port, updated_port)) need_port_update_notify |= self.update_security_group_on_port( context, id, port, original_port, updated_port) network = self.get_network(context, original_port['network_id']) need_port_update_notify |= self._update_extra_dhcp_opts_on_port( context, id, port, updated_port) levels = db.get_binding_levels(session, id, binding.host) mech_context = driver_context.PortContext( self, context, updated_port, network, binding, levels, original_port=original_port) new_host_port = self._get_host_port_if_changed(mech_context, attrs) need_port_update_notify |= self._process_port_binding( mech_context, attrs) self.mechanism_manager.update_port_precommit(mech_context) # Notifications must be sent after the above transaction is complete if mac_address_updated and l3plugin and is_dvr_enabled: # NOTE: "add" actually does a 'replace' operation l3plugin.dvr_vmarp_table_update(context, updated_port, "add") self._notify_l3_agent_new_port(context, new_host_port) # TODO(apech) - handle errors raised by update_port, potentially # by re-calling update_port with the previous attributes. For # now the error is propogated to the caller, which is expected to # either undo/retry the operation or delete the resource. 
self.mechanism_manager.update_port_postcommit(mech_context) need_port_update_notify |= self.is_security_group_member_updated( context, original_port, updated_port) if original_port['admin_state_up'] != updated_port['admin_state_up']: need_port_update_notify = True bound_context = self._bind_port_if_needed( mech_context, allow_notify=True, need_notify=need_port_update_notify) return bound_context._port def _process_dvr_port_binding(self, mech_context, context, attrs): session = mech_context._plugin_context.session binding = mech_context._binding port = mech_context.current port_id = port['id'] if binding.vif_type != portbindings.VIF_TYPE_UNBOUND: binding.vif_details = '' binding.vif_type = portbindings.VIF_TYPE_UNBOUND if binding.host: db.clear_binding_levels(session, port_id, binding.host) binding.host = '' self._update_port_dict_binding(port, binding) binding.host = attrs and attrs.get(portbindings.HOST_ID) binding.router_id = attrs and attrs.get('device_id') def update_dvr_port_binding(self, context, id, port): attrs = port['port'] host = attrs and attrs.get(portbindings.HOST_ID) host_set = attributes.is_attr_set(host) if not host_set: LOG.error(_LE("No Host supplied to bind DVR Port %s"), id) return session = context.session binding = db.get_dvr_port_binding_by_host(session, id, host) device_id = attrs and attrs.get('device_id') router_id = binding and binding.get('router_id') update_required = (not binding or binding.vif_type == portbindings.VIF_TYPE_BINDING_FAILED or router_id != device_id) if update_required: with session.begin(subtransactions=True): try: orig_port = super(Ml2Plugin, self).get_port(context, id) except exc.PortNotFound: LOG.debug("DVR Port %s has been deleted concurrently", id) return if not binding: binding = db.ensure_dvr_port_binding( session, id, host, router_id=device_id) network = self.get_network(context, orig_port['network_id']) levels = db.get_binding_levels(session, id, host) mech_context = driver_context.PortContext(self, context, orig_port, network, binding, levels, original_port=orig_port) self._process_dvr_port_binding(mech_context, context, attrs) self._bind_port_if_needed(mech_context) def delete_port(self, context, id, l3_port_check=True): LOG.debug("Deleting port %s", id) removed_routers = [] l3plugin = manager.NeutronManager.get_service_plugins().get( service_constants.L3_ROUTER_NAT) is_dvr_enabled = utils.is_extension_supported( l3plugin, const.L3_DISTRIBUTED_EXT_ALIAS) if l3plugin and l3_port_check: l3plugin.prevent_l3_port_deletion(context, id) session = context.session # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock wait # timeout' errors. 
with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): port_db, binding = db.get_locked_port_and_binding(session, id) if not port_db: LOG.debug("The port '%s' was deleted", id) return port = self._make_port_dict(port_db) network = self.get_network(context, port['network_id']) bound_mech_contexts = [] device_owner = port['device_owner'] if device_owner == const.DEVICE_OWNER_DVR_INTERFACE: bindings = db.get_dvr_port_bindings(context.session, id) for bind in bindings: levels = db.get_binding_levels(context.session, id, bind.host) mech_context = driver_context.PortContext( self, context, port, network, bind, levels) self.mechanism_manager.delete_port_precommit(mech_context) bound_mech_contexts.append(mech_context) else: levels = db.get_binding_levels(context.session, id, binding.host) mech_context = driver_context.PortContext( self, context, port, network, binding, levels) if is_dvr_enabled and utils.is_dvr_serviced(device_owner): removed_routers = l3plugin.dvr_deletens_if_no_port( context, id) self.mechanism_manager.delete_port_precommit(mech_context) bound_mech_contexts.append(mech_context) if l3plugin: router_ids = l3plugin.disassociate_floatingips( context, id, do_notify=False) LOG.debug("Calling delete_port for %(port_id)s owned by %(owner)s", {"port_id": id, "owner": device_owner}) super(Ml2Plugin, self).delete_port(context, id) # now that we've left db transaction, we are safe to notify if l3plugin: if is_dvr_enabled: l3plugin.dvr_vmarp_table_update(context, port, "del") l3plugin.notify_routers_updated(context, router_ids) for router in removed_routers: try: l3plugin.remove_router_from_l3_agent( context, router['agent_id'], router['router_id']) except l3agentscheduler.RouterNotHostedByL3Agent: # router may have been removed by another process LOG.debug("Router %(id)s not hosted by L3 agent %(agent)s", {'id': router['router_id'], 'agent': router['agent_id']}) try: # Note that DVR Interface ports will have bindings on # multiple hosts, and so will have multiple mech_contexts, # while other ports typically have just one. for mech_context in bound_mech_contexts: self.mechanism_manager.delete_port_postcommit(mech_context) except ml2_exc.MechanismDriverError: # TODO(apech) - One or more mechanism driver failed to # delete the port. Ideally we'd notify the caller of the # fact that an error occurred. LOG.error(_LE("mechanism_manager.delete_port_postcommit failed for" " port %s"), id) self.notify_security_groups_member_updated(context, port) def get_bound_port_context(self, plugin_context, port_id, host=None): session = plugin_context.session with session.begin(subtransactions=True): try: port_db = (session.query(models_v2.Port). enable_eagerloads(False). filter(models_v2.Port.id.startswith(port_id)). 
one()) except sa_exc.NoResultFound: LOG.debug("No ports have port_id starting with %s", port_id) return except sa_exc.MultipleResultsFound: LOG.error(_LE("Multiple ports have port_id starting with %s"), port_id) return port = self._make_port_dict(port_db) network = self.get_network(plugin_context, port['network_id']) if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: binding = db.get_dvr_port_binding_by_host( session, port['id'], host) if not binding: LOG.error(_LE("Binding info for DVR port %s not found"), port_id) return None levels = db.get_binding_levels(session, port_db.id, host) port_context = driver_context.PortContext( self, plugin_context, port, network, binding, levels) else: # since eager loads are disabled in port_db query # related attribute port_binding could disappear in # concurrent port deletion. # It's not an error condition. binding = port_db.port_binding if not binding: LOG.info(_LI("Binding info for port %s was not found, " "it might have been deleted already."), port_id) return levels = db.get_binding_levels(session, port_db.id, port_db.port_binding.host) port_context = driver_context.PortContext( self, plugin_context, port, network, binding, levels) return self._bind_port_if_needed(port_context) def update_port_status(self, context, port_id, status, host=None): """ Returns port_id (non-truncated uuid) if the port exists. Otherwise returns None. """ updated = False session = context.session # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock wait # timeout' errors. with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): port = db.get_port(session, port_id) if not port: LOG.warning(_LW("Port %(port)s updated up by agent not found"), {'port': port_id}) return None if (port.status != status and port['device_owner'] != const.DEVICE_OWNER_DVR_INTERFACE): original_port = self._make_port_dict(port) port.status = status updated_port = self._make_port_dict(port) network = self.get_network(context, original_port['network_id']) levels = db.get_binding_levels(session, port_id, port.port_binding.host) mech_context = driver_context.PortContext( self, context, updated_port, network, port.port_binding, levels, original_port=original_port) self.mechanism_manager.update_port_precommit(mech_context) updated = True elif port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: binding = db.get_dvr_port_binding_by_host( session, port['id'], host) if not binding: return binding['status'] = status binding.update(binding) updated = True if (updated and port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE): with contextlib.nested(lockutils.lock('db-access'), session.begin(subtransactions=True)): port = db.get_port(session, port_id) if not port: LOG.warning(_LW("Port %s not found during update"), port_id) return original_port = self._make_port_dict(port) network = self.get_network(context, original_port['network_id']) port.status = db.generate_dvr_port_status(session, port['id']) updated_port = self._make_port_dict(port) levels = db.get_binding_levels(session, port_id, host) mech_context = (driver_context.PortContext( self, context, updated_port, network, binding, levels, original_port=original_port)) self.mechanism_manager.update_port_precommit(mech_context) if updated: self.mechanism_manager.update_port_postcommit(mech_context) if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: db.delete_dvr_port_binding_if_stale(session, 
binding) return port['id'] def port_bound_to_host(self, context, port_id, host): port = db.get_port(context.session, port_id) if not port: LOG.debug("No Port match for: %s", port_id) return False if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: bindings = db.get_dvr_port_bindings(context.session, port_id) for b in bindings: if b.host == host: return True LOG.debug("No binding found for DVR port %s", port['id']) return False else: port_host = db.get_port_binding_host(port_id) return (port_host == host) def get_ports_from_devices(self, devices): port_ids_to_devices = dict((self._device_to_port_id(device), device) for device in devices) port_ids = port_ids_to_devices.keys() ports = db.get_ports_and_sgs(port_ids) for port in ports: # map back to original requested id port_id = next((port_id for port_id in port_ids if port['id'].startswith(port_id)), None) port['device'] = port_ids_to_devices.get(port_id) return ports def _device_to_port_id(self, device): # REVISIT(rkukura): Consider calling into MechanismDrivers to # process device names, or having MechanismDrivers supply list # of device prefixes to strip. if device.startswith(const.TAP_DEVICE_PREFIX): return device[len(const.TAP_DEVICE_PREFIX):] else: # REVISIT(irenab): Consider calling into bound MD to # handle the get_device_details RPC, then remove the 'else' clause if not uuidutils.is_uuid_like(device): port = db.get_port_from_device_mac(device) if port: return port.id return device
apache-2.0
-4,031,988,639,562,894,300
47.368343
79
0.572423
false
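The ML2 plugin code above applies one control-flow pattern to networks, subnets, and ports alike: persist the change and run the drivers' *_precommit hooks inside the DB transaction, run *_postcommit afterwards, and delete the freshly created resource if postcommit raises MechanismDriverError. A minimal self-contained sketch of that pattern follows; it is illustrative only, not Neutron code, and InMemoryDb and MiniPlugin are invented names.

import uuid

class MechanismDriverError(Exception):
    """Stand-in for ml2_exc.MechanismDriverError."""

class InMemoryDb(object):
    """Toy replacement for the SQLAlchemy session used above."""
    def __init__(self):
        self.networks = {}

    def insert(self, net):
        net = dict(net, id=str(uuid.uuid4()))
        self.networks[net['id']] = net
        return net

    def delete(self, net_id):
        self.networks.pop(net_id, None)

class MiniPlugin(object):
    def __init__(self, db, drivers):
        self.db = db
        self.drivers = drivers

    def create_network(self, net):
        # Phase 1: DB write plus precommit hooks ("inside" the transaction).
        record = self.db.insert(net)
        for driver in self.drivers:
            driver.create_network_precommit(record)
        # Phase 2: postcommit hooks; on failure, undo the create and re-raise,
        # mirroring the delete_network() call in create_network() above.
        try:
            for driver in self.drivers:
                driver.create_network_postcommit(record)
        except MechanismDriverError:
            self.db.delete(record['id'])
            raise
        return record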
kmarius/qutebrowser
tests/end2end/fixtures/webserver.py
4
6473
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2015-2018 Florian Bruhin (The Compiler) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Fixtures for the server webserver.""" import re import sys import json import os.path from http import HTTPStatus import attr import pytest from PyQt5.QtCore import pyqtSignal, QUrl from end2end.fixtures import testprocess from qutebrowser.utils import utils class Request(testprocess.Line): """A parsed line from the flask log output. Attributes: verb/path/status: Parsed from the log output. """ def __init__(self, data): super().__init__(data) try: parsed = json.loads(data) except ValueError: raise testprocess.InvalidLine(data) assert isinstance(parsed, dict) assert set(parsed.keys()) == {'path', 'verb', 'status'} self.verb = parsed['verb'] path = parsed['path'] self.path = '/' if path == '/' else path.rstrip('/') self.status = parsed['status'] self._check_status() def _check_status(self): """Check if the http status is what we expected.""" path_to_statuses = { '/favicon.ico': [HTTPStatus.NOT_FOUND], '/does-not-exist': [HTTPStatus.NOT_FOUND], '/does-not-exist-2': [HTTPStatus.NOT_FOUND], '/404': [HTTPStatus.NOT_FOUND], '/redirect-later': [HTTPStatus.FOUND], '/redirect-self': [HTTPStatus.FOUND], '/redirect-to': [HTTPStatus.FOUND], '/relative-redirect': [HTTPStatus.FOUND], '/absolute-redirect': [HTTPStatus.FOUND], '/cookies/set': [HTTPStatus.FOUND], '/500-inline': [HTTPStatus.INTERNAL_SERVER_ERROR], '/500': [HTTPStatus.INTERNAL_SERVER_ERROR], } for i in range(15): path_to_statuses['/redirect/{}'.format(i)] = [HTTPStatus.FOUND] for suffix in ['', '1', '2', '3', '4', '5', '6']: key = '/basic-auth/user{}/password{}'.format(suffix, suffix) path_to_statuses[key] = [HTTPStatus.UNAUTHORIZED, HTTPStatus.OK] default_statuses = [HTTPStatus.OK, HTTPStatus.NOT_MODIFIED] sanitized = QUrl('http://localhost' + self.path).path() # Remove ?foo expected_statuses = path_to_statuses.get(sanitized, default_statuses) if self.status not in expected_statuses: raise AssertionError( "{} loaded with status {} but expected {}".format( sanitized, self.status, ' / '.join(repr(e) for e in expected_statuses))) def __eq__(self, other): return NotImplemented @attr.s(frozen=True, cmp=False, hash=True) class ExpectedRequest: """Class to compare expected requests easily.""" verb = attr.ib() path = attr.ib() @classmethod def from_request(cls, request): """Create an ExpectedRequest from a Request.""" return cls(request.verb, request.path) def __eq__(self, other): if isinstance(other, (Request, ExpectedRequest)): return self.verb == other.verb and self.path == other.path else: return NotImplemented class WebserverProcess(testprocess.Process): """Abstraction over a running Flask server process. Reads the log from its stdout and parses it. Signals: new_request: Emitted when there's a new request received. 
""" new_request = pyqtSignal(Request) Request = Request # So it can be used from the fixture easily. ExpectedRequest = ExpectedRequest KEYS = ['verb', 'path'] def __init__(self, request, script, parent=None): super().__init__(request, parent) self._script = script self.port = utils.random_port() self.new_data.connect(self.new_request) def get_requests(self): """Get the requests to the server during this test.""" requests = self._get_data() return [r for r in requests if r.path != '/favicon.ico'] def _parse_line(self, line): self._log(line) started_re = re.compile(r' \* Running on https?://127\.0\.0\.1:{}/ ' r'\(Press CTRL\+C to quit\)'.format(self.port)) if started_re.fullmatch(line): self.ready.emit() return None return Request(line) def _executable_args(self): if hasattr(sys, 'frozen'): executable = os.path.join(os.path.dirname(sys.executable), self._script) args = [] else: executable = sys.executable py_file = os.path.join(os.path.dirname(__file__), self._script + '.py') args = [py_file] return executable, args def _default_args(self): return [str(self.port)] @pytest.fixture(scope='session', autouse=True) def server(qapp, request): """Fixture for an server object which ensures clean setup/teardown.""" server = WebserverProcess(request, 'webserver_sub') server.start() yield server server.terminate() @pytest.fixture(autouse=True) def server_per_test(server, request): """Fixture to clean server request list after each test.""" request.node._server_log = server.captured_log server.before_test() yield server.after_test() @pytest.fixture def ssl_server(request, qapp): """Fixture for a webserver with a self-signed SSL certificate. This needs to be explicitly used in a test, and overwrites the server log used in that test. """ server = WebserverProcess(request, 'webserver_sub_ssl') request.node._server_log = server.captured_log server.start() yield server server.after_test() server.terminate()
gpl-3.0
5,028,778,831,955,920,000
30.730392
79
0.616098
false
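Given the fixtures above, an end-to-end test only needs to accept the session-scoped server fixture and can then assert on the logged requests through ExpectedRequest, whose __eq__ deliberately ignores the HTTP status. A hypothetical test sketch; the URL path and whatever drives the request are assumptions:

def test_page_is_requested(server):
    url = 'http://localhost:{}/data/hello.txt'.format(server.port)
    # ... point qutebrowser or any HTTP client at `url` here ...
    expected = server.ExpectedRequest('GET', '/data/hello.txt')
    assert expected in server.get_requests()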
xissy/titanium-mobile-sdk
android/run.py
1
1625
import os, subprocess, types, sys, re def check_output_for_error(output, match, error_in_first_match): success = re.findall(match, output) if len(success) > 0: if (error_in_first_match): print "[ERROR] %s" % success[0] sys.exit(1) else: return True else: return False def check_and_print_err(err, warning_regex): errored = False for line in err.splitlines(): warning_match = None if warning_regex != None: warning_match = re.search(warning_regex, line) if warning_match != None: sys.stderr.write("[WARN] %s\n" % line) else: errored = True sys.stderr.write("[ERROR] %s\n" % line) sys.stderr.flush() return errored def run(args, ignore_error=False, debug=True, ignore_output=False, warning_regex=None, return_error=False, return_process=False): if debug: print "[DEBUG] %s" % (subprocess.list2cmdline(args)) sys.stdout.flush() if ignore_output: subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE).wait() return None process = subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE) (so, se) = process.communicate() if type(se) != types.NoneType and len(se) > 0: if not ignore_error: err = str(se) if 'adb' in args[0] and ' bytes in ' in err: # adb emits data about compile into stderr so we ignore it in special case pass else: if (check_and_print_err(err, warning_regex)): if return_process: return (None, process) else: return None if return_error: if return_process: return so, se, process else: return so, se elif return_process: return so, process else: return so
apache-2.0
5,110,268,308,287,892,000
26.083333
129
0.68
false
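run() above changes its return shape depending on return_error and return_process, which is easy to misuse at call sites. The three single-flag shapes, assuming the module is importable as run and using adb purely as an example command:

import run

# Default: stdout only (stderr is scanned and echoed as [WARN]/[ERROR]).
out = run.run(['adb', 'devices'])

# return_error=True: stdout plus raw stderr for the caller to inspect.
out, err = run.run(['adb', 'devices'], return_error=True)

# return_process=True: stdout plus the Popen object, e.g. for returncode.
out, process = run.run(['adb', 'devices'], return_process=True)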
s20121035/rk3288_android5.1_repo
external/clang/bindings/python/tests/cindex/test_cdb.py
38
4306
from clang.cindex import CompilationDatabase from clang.cindex import CompilationDatabaseError from clang.cindex import CompileCommands from clang.cindex import CompileCommand import os import gc kInputsDir = os.path.join(os.path.dirname(__file__), 'INPUTS') def test_create_fail(): """Check we fail loading a database with an assertion""" path = os.path.dirname(__file__) try: cdb = CompilationDatabase.fromDirectory(path) except CompilationDatabaseError as e: assert e.cdb_error == CompilationDatabaseError.ERROR_CANNOTLOADDATABASE else: assert False def test_create(): """Check we can load a compilation database""" cdb = CompilationDatabase.fromDirectory(kInputsDir) def test_lookup_fail(): """Check file lookup failure""" cdb = CompilationDatabase.fromDirectory(kInputsDir) assert cdb.getCompileCommands('file_do_not_exist.cpp') == None def test_lookup_succeed(): """Check we get some results if the file exists in the db""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project.cpp') assert len(cmds) != 0 def test_all_compilecommand(): """Check we get all results from the db""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getAllCompileCommands() assert len(cmds) == 3 expected = [ { 'wd': '/home/john.doe/MyProjectA', 'line': ['clang++', '-o', 'project2.o', '-c', '/home/john.doe/MyProject/project2.cpp']}, { 'wd': '/home/john.doe/MyProjectB', 'line': ['clang++', '-DFEATURE=1', '-o', 'project2-feature.o', '-c', '/home/john.doe/MyProject/project2.cpp']}, { 'wd': '/home/john.doe/MyProject', 'line': ['clang++', '-o', 'project.o', '-c', '/home/john.doe/MyProject/project.cpp']} ] for i in range(len(cmds)): assert cmds[i].directory == expected[i]['wd'] for arg, exp in zip(cmds[i].arguments, expected[i]['line']): assert arg == exp def test_1_compilecommand(): """Check file with single compile command""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project.cpp') assert len(cmds) == 1 assert cmds[0].directory == '/home/john.doe/MyProject' expected = [ 'clang++', '-o', 'project.o', '-c', '/home/john.doe/MyProject/project.cpp'] for arg, exp in zip(cmds[0].arguments, expected): assert arg == exp def test_2_compilecommand(): """Check file with 2 compile commands""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project2.cpp') assert len(cmds) == 2 expected = [ { 'wd': '/home/john.doe/MyProjectA', 'line': ['clang++', '-o', 'project2.o', '-c', '/home/john.doe/MyProject/project2.cpp']}, { 'wd': '/home/john.doe/MyProjectB', 'line': ['clang++', '-DFEATURE=1', '-o', 'project2-feature.o', '-c', '/home/john.doe/MyProject/project2.cpp']} ] for i in range(len(cmds)): assert cmds[i].directory == expected[i]['wd'] for arg, exp in zip(cmds[i].arguments, expected[i]['line']): assert arg == exp def test_compilecommand_iterator_stops(): """Check that iterator stops after the correct number of elements""" cdb = CompilationDatabase.fromDirectory(kInputsDir) count = 0 for cmd in cdb.getCompileCommands('/home/john.doe/MyProject/project2.cpp'): count += 1 assert count <= 2 def test_compilationDB_references(): """Ensure CompilationsCommands are independent of the database""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project.cpp') del cdb gc.collect() workingdir = cmds[0].directory def test_compilationCommands_references(): """Ensure CompilationsCommand keeps a reference 
to CompilationCommands""" cdb = CompilationDatabase.fromDirectory(kInputsDir) cmds = cdb.getCompileCommands('/home/john.doe/MyProject/project.cpp') del cdb cmd0 = cmds[0] del cmds gc.collect() workingdir = cmd0.directory
gpl-3.0
3,942,392,631,702,334,500
38.145455
79
0.643753
false
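Outside the test suite, the same bindings boil down to three calls: load a database from a directory, look up the commands for one file, or iterate every command. A minimal usage sketch; the build-tree path is a placeholder and must contain a compile_commands.json:

from clang.cindex import CompilationDatabase

cdb = CompilationDatabase.fromDirectory('/path/to/build')

# Commands for a single translation unit:
cmds = cdb.getCompileCommands('/path/to/src/main.cpp')

# Or walk the whole database:
for cmd in cdb.getAllCompileCommands():
    print(cmd.directory, list(cmd.arguments))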
dtrip/weevely3
modules/audit/phpconf.py
15
6310
from core.vectors import PhpCode, ShellCmd, ModuleExec, Os from core.module import Module from core import messages from core import modules import re class Phpconf(Module): """Audit PHP configuration.""" def init(self): self.register_info( { 'author': [ 'Emilio Pinna' ], 'license': 'GPLv3' } ) def _check_user(self): user = ModuleExec('system_info', [ '-info', 'whoami' ]).load_result_or_run('whoami') if not user: return messages.module_audit_phpconf.error result = user if 'win' in self.os_type: result += ': ' + messages.module_audit_phpconf.user_win_admin elif user == 'root': result += ': ' + messages.module_audit_phpconf.user_nix_root return result def _check_openbasedir(self): open_basedir = ModuleExec('system_info', [ '-info', 'open_basedir' ]).load_result_or_run('open_basedir') if not open_basedir: return messages.module_audit_phpconf.basedir_unrestricted dir_sep = ModuleExec('system_info', [ '-info', 'dir_sep' ]).load_result_or_run('dir_sep') if not self.os_type or not dir_sep: return messages.module_audit_phpconf.error path_sep = ':' if 'win' in self.os_type else ';' paths = open_basedir.split(path_sep) result = '' for path in paths: result += path + ': ' if not path.endswith(dir_sep): result += ' ' + messages.module_audit_phpconf.basedir_no_slash elif path == '.': result += ' ' + messages.module_audit_phpconf.basedir_dot result += '\n' return result[-2:] def _check_features(self): features = [ 'expose_php', 'file_uploads', 'register_globals', 'allow_url_fopen', 'display_errors', 'enable_dl', 'safe_mode', 'magic_quotes_gpc', 'allow_url_include', 'session.use_trans_sid' ] feat_found = PhpCode("""foreach ( Array("${ '", "'.join(features) }") as $f) if((bool)ini_get($f)) print($f. "\n");""").run( { 'features' : features } ) result = [] if feat_found: for feat in feat_found.split('\n'): feat_msg = 'feat_' + re.sub('[^a-zA-Z_]', '_', feat) if hasattr(messages.module_audit_phpconf, feat_msg): result.append((feat, getattr(messages.module_audit_phpconf, feat_msg))) return result def _check_classes(self): classes = [ 'splFileObject', 'COM', 'Java' ] class_found = PhpCode("""foreach ( Array("${ '", "'.join(classes) }") as $f) if((bool)class_exists($f)) print($f. "\n");""").run( { 'classes' : classes } ) result = [] if class_found: for class_name in class_found.split('\n'): class_msg = 'class_' + re.sub('[^a-zA-Z_]', '_', class_name) if hasattr(messages.module_audit_phpconf, class_msg): result.append((class_name, getattr(messages.module_audit_phpconf, class_msg))) return result def _check_functions(self): functions = { 'info' : [ 'apache_get_modules', 'apache_get_version', 'apache_getenv', 'get_loaded_extensions', 'phpinfo', 'phpversion', ], 'files' : [ 'chgrp', 'chmod', 'chown', 'copy', 'link', 'mkdir', 'rename', 'rmdir', 'symlink', 'touch', 'unlink', 'posix_mkfifo' ], 'log' : [ 'openlog', 'syslog', 'debugger_off', 'debugger_on', 'closelog' ], 'proc_execution' : [ 'exec', 'passthru', 'pcntl_exec', 'popen', 'proc_open', 'shell_exec', 'system', 'dotnet_load' ], 'proc_manipulation' : [ 'apache_child_terminate', 'apache_note', 'apache_setenv', 'dl', 'proc_close', 'proc_get_status', 'proc_terminate', 'proc_nice', 'putenv', 'virtual' 'posix_kill', 'posix_setpgid', 'posix_setsid', 'posix_setuid', 'runkit_function_rename' ] } result = [] for ftype, flist in functions.items(): func_found = PhpCode("""foreach ( Array("${ '", "'.join(functions) }") as $f) if(function_exists($f)&&is_callable($f)) print($f. 
"\n");""").run( { 'functions' : flist } ) if func_found: for func_name in func_found.split('\n'): type_msg = 'func_' + re.sub('[^a-zA-Z_]', '_', ftype) if hasattr(messages.module_audit_phpconf, type_msg): result.append((func_name, getattr(messages.module_audit_phpconf, type_msg))) return result def run(self): self.os_type = ModuleExec('system_info', [ '-info', 'os' ]).load_result_or_run('os') self.php_version = ModuleExec('system_info', [ '-info', 'php_version' ]).load_result_or_run('php_version') results = [ ( 'Operating System', self.os_type if self.os_type else 'Undetected' ), ( 'PHP version', self.php_version if self.php_version else 'Undetected' ), ( 'User', self._check_user() ), ( 'open_basedir', self._check_openbasedir() ) ] + self._check_features() + self._check_classes() + self._check_functions() return results
gpl-3.0
40,510,162,782,217,810
31.193878
156
0.454834
false
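Each check above maps a detected PHP name onto a message attribute by prefixing it and squashing non-identifier characters with re.sub('[^a-zA-Z_]', '_', name). That normalization in isolation (message_key is a local name for this sketch):

import re

def message_key(prefix, name):
    # 'session.use_trans_sid' -> 'feat_session_use_trans_sid'
    return prefix + re.sub('[^a-zA-Z_]', '_', name)

assert message_key('feat_', 'session.use_trans_sid') == 'feat_session_use_trans_sid'
assert message_key('class_', 'splFileObject') == 'class_splFileObject'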
tima/ansible
lib/ansible/modules/network/netvisor/pn_show.py
72
5460
#!/usr/bin/python """ PN CLI show commands """ # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: pn_show author: "Pluribus Networks (@amitsi)" version_added: "2.2" short_description: Run show commands on nvOS device. description: - Execute show command in the nodes and returns the results read from the device. options: pn_cliusername: description: - Provide login username if user is not root. required: False pn_clipassword: description: - Provide login password if user is not root. required: False pn_cliswitch: description: - Target switch(es) to run the cli on. required: False pn_command: description: - The C(pn_command) takes a CLI show command as value. required: true pn_parameters: description: - Display output using a specific parameter. Use 'all' to display possible output. List of comma separated parameters. pn_options: description: - Specify formatting options. """ EXAMPLES = """ - name: run the vlan-show command pn_show: pn_command: 'vlan-show' pn_parameters: id,scope,ports pn_options: 'layout vertical' - name: run the vlag-show command pn_show: pn_command: 'vlag-show' pn_parameters: 'id,name,cluster,mode' pn_options: 'no-show-headers' - name: run the cluster-show command pn_show: pn_command: 'cluster-show' """ RETURN = """ command: description: The CLI command run on the target node(s). returned: always type: str stdout: description: The set of responses from the show command. returned: always type: list stderr: description: The set of error responses from the show command. returned: on error type: list changed: description: Indicates whether the CLI caused any change on the target. returned: always(False) type: bool """ import shlex def pn_cli(module): """ This method is to generate the cli portion to launch the Netvisor cli. It parses the username, password, switch parameters from module. :param module: The Ansible module to fetch username, password and switch :return: returns the cli string for further processing """ username = module.params['pn_cliusername'] password = module.params['pn_clipassword'] cliswitch = module.params['pn_cliswitch'] if username and password: cli = '/usr/bin/cli --quiet --user %s:%s ' % (username, password) else: cli = '/usr/bin/cli --quiet ' if cliswitch: if cliswitch == 'local': cli += ' switch-local ' else: cli += ' switch ' + cliswitch return cli def run_cli(module, cli): """ This method executes the cli command on the target node(s) and returns the output. The module then exits based on the output. :param cli: the complete cli string to be executed on the target node(s). 
:param module: The Ansible module to fetch command """ cliswitch = module.params['pn_cliswitch'] command = module.params['pn_command'] cmd = shlex.split(cli) # 'out' contains the output # 'err' contains the error messages result, out, err = module.run_command(cmd) print_cli = cli.split(cliswitch)[1] # Response in JSON format if result != 0: module.exit_json( command=print_cli, msg='%s: ' % command, stderr=err.strip(), changed=False ) if out: module.exit_json( command=print_cli, msg='%s: ' % command, stdout=out.strip(), changed=False ) else: module.exit_json( command=cli, msg='%s: Nothing to display!!!' % command, changed=False ) def main(): """ This section is for arguments parsing """ module = AnsibleModule( argument_spec=dict( pn_cliusername=dict(required=True, type='str'), pn_clipassword=dict(required=True, type='str', no_log=True), pn_cliswitch=dict(required=False, type='str'), pn_command=dict(required=True, type='str'), pn_parameters=dict(default='all', type='str'), pn_options=dict(type='str') ) ) # Accessing the arguments command = module.params['pn_command'] parameters = module.params['pn_parameters'] options = module.params['pn_options'] # Building the CLI command string cli = pn_cli(module) cli += ' %s format %s ' % (command, parameters) if options: cli += options run_cli(module, cli) # AnsibleModule boilerplate from ansible.module_utils.basic import AnsibleModule if __name__ == '__main__': main()
gpl-3.0
-1,967,902,714,839,049,700
26.437186
80
0.64011
false
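pn_cli() and main() above build the final CLI string in two steps; condensed into one function for illustration. build_cli is local to this sketch and simply mirrors the logic shown above:

def build_cli(username, password, cliswitch, command, parameters='all', options=None):
    if username and password:
        cli = '/usr/bin/cli --quiet --user %s:%s ' % (username, password)
    else:
        cli = '/usr/bin/cli --quiet '
    if cliswitch:
        cli += ' switch-local ' if cliswitch == 'local' else ' switch ' + cliswitch
    cli += ' %s format %s ' % (command, parameters)
    if options:
        cli += options
    return cli

# build_cli('admin', 'secret', 'local', 'vlan-show', 'id,scope,ports')
# yields the string that run_cli() then hands to shlex.split().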
webmakin/scrapy
tests/test_utils_signal.py
121
2741
from testfixtures import LogCapture from twisted.trial import unittest from twisted.python.failure import Failure from twisted.internet import defer, reactor from pydispatch import dispatcher from scrapy.utils.signal import send_catch_log, send_catch_log_deferred class SendCatchLogTest(unittest.TestCase): @defer.inlineCallbacks def test_send_catch_log(self): test_signal = object() handlers_called = set() dispatcher.connect(self.error_handler, signal=test_signal) dispatcher.connect(self.ok_handler, signal=test_signal) with LogCapture() as l: result = yield defer.maybeDeferred( self._get_result, test_signal, arg='test', handlers_called=handlers_called ) assert self.error_handler in handlers_called assert self.ok_handler in handlers_called self.assertEqual(len(l.records), 1) record = l.records[0] self.assertIn('error_handler', record.getMessage()) self.assertEqual(record.levelname, 'ERROR') self.assertEqual(result[0][0], self.error_handler) self.assert_(isinstance(result[0][1], Failure)) self.assertEqual(result[1], (self.ok_handler, "OK")) dispatcher.disconnect(self.error_handler, signal=test_signal) dispatcher.disconnect(self.ok_handler, signal=test_signal) def _get_result(self, signal, *a, **kw): return send_catch_log(signal, *a, **kw) def error_handler(self, arg, handlers_called): handlers_called.add(self.error_handler) a = 1/0 def ok_handler(self, arg, handlers_called): handlers_called.add(self.ok_handler) assert arg == 'test' return "OK" class SendCatchLogDeferredTest(SendCatchLogTest): def _get_result(self, signal, *a, **kw): return send_catch_log_deferred(signal, *a, **kw) class SendCatchLogDeferredTest2(SendCatchLogTest): def ok_handler(self, arg, handlers_called): handlers_called.add(self.ok_handler) assert arg == 'test' d = defer.Deferred() reactor.callLater(0, d.callback, "OK") return d def _get_result(self, signal, *a, **kw): return send_catch_log_deferred(signal, *a, **kw) class SendCatchLogTest2(unittest.TestCase): def test_error_logged_if_deferred_not_supported(self): test_signal = object() test_handler = lambda: defer.Deferred() dispatcher.connect(test_handler, test_signal) with LogCapture() as l: send_catch_log(test_signal) self.assertEqual(len(l.records), 1) self.assertIn("Cannot return deferreds from signal handler", str(l)) dispatcher.disconnect(test_handler, test_signal)
bsd-3-clause
-6,279,461,906,484,631,000
33.696203
76
0.660343
false
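The behavior exercised above, outside the harness: send_catch_log returns a list of (handler, result) pairs, and a raising handler is logged and turned into a Failure rather than propagating. A minimal sketch:

from pydispatch import dispatcher
from scrapy.utils.signal import send_catch_log

my_signal = object()

def shout(arg):
    return arg.upper()

dispatcher.connect(shout, signal=my_signal)
results = send_catch_log(my_signal, arg='ok')
# results == [(shout, 'OK')]; a handler that raised would instead
# contribute (handler, <Failure>) and emit an ERROR log record.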
veger/ansible
test/units/conftest.py
37
1052
"""Monkey patch os._exit when running under coverage so we don't lose coverage data in forks, such as with `pytest --boxed`.""" import gc import os try: import coverage except ImportError: coverage = None try: test = coverage.Coverage except AttributeError: coverage = None def pytest_configure(): if not coverage: return coverage_instances = [] for obj in gc.get_objects(): if isinstance(obj, coverage.Coverage): coverage_instances.append(obj) if not coverage_instances: coverage_config = os.environ.get('_ANSIBLE_COVERAGE_CONFIG') if not coverage_config: return cov = coverage.Coverage(config_file=coverage_config) coverage_instances.append(cov) else: cov = None os_exit = os._exit def coverage_exit(*args, **kwargs): for instance in coverage_instances: instance.stop() instance.save() os_exit(*args, **kwargs) os._exit = coverage_exit if cov: cov.start()
gpl-3.0
2,484,419,202,556,002,000
20.469388
127
0.620722
false
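The essential trick in the conftest above, reduced to its core: a forked child that calls os._exit() skips atexit handlers, so coverage has to be stopped and saved by hand first. A standalone sketch assuming a single Coverage instance:

import os
import coverage

cov = coverage.Coverage()
cov.start()

_real_exit = os._exit

def _exit_saving_coverage(status):
    # os._exit() bypasses atexit, so flush coverage data explicitly.
    cov.stop()
    cov.save()
    _real_exit(status)

os._exit = _exit_saving_coverage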
nave91/dbt
test/integration/014_hook_tests/test_model_hooks_bq.py
1
4613
from nose.plugins.attrib import attr from test.integration.base import DBTIntegrationTest MODEL_PRE_HOOK = """ insert into {{this.schema}}.on_model_hook ( state, target_name, target_schema, target_type, target_threads, run_started_at, invocation_id ) VALUES ( 'start', '{{ target.name }}', '{{ target.schema }}', '{{ target.type }}', {{ target.threads }}, '{{ run_started_at }}', '{{ invocation_id }}' ) """ MODEL_POST_HOOK = """ insert into {{this.schema}}.on_model_hook ( state, target_name, target_schema, target_type, target_threads, run_started_at, invocation_id ) VALUES ( 'end', '{{ target.name }}', '{{ target.schema }}', '{{ target.type }}', {{ target.threads }}, '{{ run_started_at }}', '{{ invocation_id }}' ) """ class TestBigqueryPrePostModelHooks(DBTIntegrationTest): def setUp(self): DBTIntegrationTest.setUp(self) self.use_profile('bigquery') self.use_default_project() self.run_sql_file("test/integration/014_hook_tests/seed_model_bigquery.sql") self.fields = [ 'state', 'target_name', 'target_schema', 'target_threads', 'target_type', 'run_started_at', 'invocation_id' ] @property def schema(self): return "model_hooks_014" @property def profile_config(self): profile = self.bigquery_profile() profile['test']['outputs']['default2']['threads'] = 3 return profile @property def project_config(self): return { 'macro-paths': ['test/integration/014_hook_tests/macros'], 'models': { 'test': { 'pre-hook': [MODEL_PRE_HOOK], 'post-hook':[MODEL_POST_HOOK] } } } @property def models(self): return "test/integration/014_hook_tests/models" def get_ctx_vars(self, state): field_list = ", ".join(self.fields) query = "select {field_list} from `{schema}.on_model_hook` where state = '{state}'".format(field_list=field_list, schema=self.unique_schema(), state=state) vals = self.run_sql(query, fetch='all') self.assertFalse(len(vals) == 0, 'nothing inserted into hooks table') self.assertFalse(len(vals) > 1, 'too many rows in hooks table') ctx = dict(zip(self.fields, vals[0])) return ctx def check_hooks(self, state): ctx = self.get_ctx_vars(state) self.assertEqual(ctx['state'], state) self.assertEqual(ctx['target_name'], 'default2') self.assertEqual(ctx['target_schema'], self.unique_schema()) self.assertEqual(ctx['target_threads'], 3) self.assertEqual(ctx['target_type'], 'bigquery') self.assertTrue(ctx['run_started_at'] is not None and len(ctx['run_started_at']) > 0, 'run_started_at was not set') self.assertTrue(ctx['invocation_id'] is not None and len(ctx['invocation_id']) > 0, 'invocation_id was not set') @attr(type='bigquery') def test_pre_and_post_model_hooks(self): self.run_dbt(['run']) self.check_hooks('start') self.check_hooks('end') class TestBigqueryPrePostModelHooksOnSeeds(DBTIntegrationTest): def setUp(self): DBTIntegrationTest.setUp(self) self.use_profile('bigquery') self.use_default_project() @property def schema(self): return "model_hooks_014" @property def models(self): return "test/integration/014_hook_tests/seed-models-bq" @property def project_config(self): return { 'data-paths': ['test/integration/014_hook_tests/data'], 'models': {}, 'seeds': { 'post-hook': [ 'insert into {{ this }} (a, b, c) VALUES (10, 11, 12)', ] } } @attr(type='bigquery') def test_hooks_on_seeds(self): res = self.run_dbt(['seed']) self.assertEqual(len(res), 1, 'Expected exactly one item') res = self.run_dbt(['test']) self.assertEqual(len(res), 1, 'Expected exactly one item') result = self.run_sql( 'select a, b, c from `{schema}`.`example_seed` where a = 10', fetch='all' ) self.assertFalse(len(result) == 0, 'nothing inserted 
into table by hook') self.assertFalse(len(result) > 1, 'too many rows in table')
apache-2.0
5,494,990,036,721,369,000
28.570513
163
0.555821
false
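get_ctx_vars() above turns the single fetched row into a dict by zipping the selected field list with the row tuple. The same idiom in isolation:

fields = ['state', 'target_name', 'target_threads']
row = ('start', 'default2', 3)
ctx = dict(zip(fields, row))
assert ctx == {'state': 'start', 'target_name': 'default2', 'target_threads': 3}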
niksolaz/GeoJS
venv/lib/python2.7/site-packages/pip/commands/install.py
61
15982
from __future__ import absolute_import import logging import operator import os import tempfile import shutil import warnings from pip.req import InstallRequirement, RequirementSet, parse_requirements from pip.locations import build_prefix, virtualenv_no_global, distutils_scheme from pip.basecommand import Command from pip.index import PackageFinder from pip.exceptions import ( InstallationError, CommandError, PreviousBuildDirError, ) from pip import cmdoptions from pip.utils.build import BuildDirectory from pip.utils.deprecation import RemovedInPip7Warning, RemovedInPip8Warning logger = logging.getLogger(__name__) class InstallCommand(Command): """ Install packages from: - PyPI (and other indexes) using requirement specifiers. - VCS project urls. - Local project directories. - Local or remote source archives. pip also supports installing from "requirements files", which provide an easy way to specify a whole environment to be installed. """ name = 'install' usage = """ %prog [options] <requirement specifier> [package-index-options] ... %prog [options] -r <requirements file> [package-index-options] ... %prog [options] [-e] <vcs project url> ... %prog [options] [-e] <local project path> ... %prog [options] <archive url/path> ...""" summary = 'Install packages.' def __init__(self, *args, **kw): super(InstallCommand, self).__init__(*args, **kw) cmd_opts = self.cmd_opts cmd_opts.add_option(cmdoptions.editable.make()) cmd_opts.add_option(cmdoptions.requirements.make()) cmd_opts.add_option(cmdoptions.build_dir.make()) cmd_opts.add_option( '-t', '--target', dest='target_dir', metavar='dir', default=None, help='Install packages into <dir>. ' 'By default this will not replace existing files/folders in ' '<dir>. Use --upgrade to replace existing packages in <dir> ' 'with new versions.' ) cmd_opts.add_option( '-d', '--download', '--download-dir', '--download-directory', dest='download_dir', metavar='dir', default=None, help=("Download packages into <dir> instead of installing them, " "regardless of what's already installed."), ) cmd_opts.add_option(cmdoptions.download_cache.make()) cmd_opts.add_option(cmdoptions.src.make()) cmd_opts.add_option( '-U', '--upgrade', dest='upgrade', action='store_true', help='Upgrade all specified packages to the newest available ' 'version. This process is recursive regardless of whether ' 'a dependency is already satisfied.' ) cmd_opts.add_option( '--force-reinstall', dest='force_reinstall', action='store_true', help='When upgrading, reinstall all packages even if they are ' 'already up-to-date.') cmd_opts.add_option( '-I', '--ignore-installed', dest='ignore_installed', action='store_true', help='Ignore the installed packages (reinstalling instead).') cmd_opts.add_option(cmdoptions.no_deps.make()) cmd_opts.add_option( '--no-install', dest='no_install', action='store_true', help="DEPRECATED. Download and unpack all packages, but don't " "actually install them." ) cmd_opts.add_option( '--no-download', dest='no_download', action="store_true", help="DEPRECATED. Don't download any packages, just install the " "ones already downloaded (completes an install run with " "--no-install).") cmd_opts.add_option(cmdoptions.install_options.make()) cmd_opts.add_option(cmdoptions.global_options.make()) cmd_opts.add_option( '--user', dest='use_user_site', action='store_true', help='Install using the user scheme.') cmd_opts.add_option( '--egg', dest='as_egg', action='store_true', help="Install packages as eggs, not 'flat', like pip normally " "does. 
This option is not about installing *from* eggs. " "(WARNING: Because this option overrides pip's normal install" " logic, requirements files may not behave as expected.)") cmd_opts.add_option( '--root', dest='root_path', metavar='dir', default=None, help="Install everything relative to this alternate root " "directory.") cmd_opts.add_option( "--compile", action="store_true", dest="compile", default=True, help="Compile py files to pyc", ) cmd_opts.add_option( "--no-compile", action="store_false", dest="compile", help="Do not compile py files to pyc", ) cmd_opts.add_option(cmdoptions.use_wheel.make()) cmd_opts.add_option(cmdoptions.no_use_wheel.make()) cmd_opts.add_option( '--pre', action='store_true', default=False, help="Include pre-release and development versions. By default, " "pip only finds stable versions.") cmd_opts.add_option(cmdoptions.no_clean.make()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, self.parser, ) self.parser.insert_option_group(0, index_opts) self.parser.insert_option_group(0, cmd_opts) def _build_package_finder(self, options, index_urls, session): """ Create a package finder appropriate to this install command. This method is meant to be overridden by subclasses, not called directly. """ return PackageFinder( find_links=options.find_links, index_urls=index_urls, use_wheel=options.use_wheel, allow_external=options.allow_external, allow_unverified=options.allow_unverified, allow_all_external=options.allow_all_external, trusted_hosts=options.trusted_hosts, allow_all_prereleases=options.pre, process_dependency_links=options.process_dependency_links, session=session, ) def run(self, options, args): if ( options.no_install or options.no_download ): warnings.warn( "--no-install and --no-download are deprecated. " "See https://github.com/pypa/pip/issues/906.", RemovedInPip7Warning, ) # If we have --no-install or --no-download and no --build we use the # legacy static build dir if (options.build_dir is None and (options.no_install or options.no_download)): options.build_dir = build_prefix if options.download_dir: options.no_install = True options.ignore_installed = True if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) options.src_dir = os.path.abspath(options.src_dir) install_options = options.install_options or [] if options.use_user_site: if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv." ) install_options.append('--user') temp_target_dir = None if options.target_dir: options.ignore_installed = True temp_target_dir = tempfile.mkdtemp() options.target_dir = os.path.abspath(options.target_dir) if (os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir)): raise CommandError( "Target path exists but is not a directory, will not " "continue." ) install_options.append('--home=' + temp_target_dir) global_options = options.global_options or [] index_urls = [options.index_url] + options.extra_index_urls if options.no_index: logger.info('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] if options.use_mirrors: warnings.warn( "--use-mirrors has been deprecated and will be removed in the " "future. Explicit uses of --index-url and/or --extra-index-url" " is suggested.", RemovedInPip7Warning, ) if options.mirrors: warnings.warn( "--mirrors has been deprecated and will be removed in the " "future. 
Explicit uses of --index-url and/or --extra-index-url" " is suggested.", RemovedInPip7Warning, ) index_urls += options.mirrors if options.download_cache: warnings.warn( "--download-cache has been deprecated and will be removed in " "the future. Pip now automatically uses and configures its " "cache.", RemovedInPip8Warning, ) with self._build_session(options) as session: finder = self._build_package_finder(options, index_urls, session) build_delete = (not (options.no_clean or options.build_dir)) with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=options.download_dir, upgrade=options.upgrade, as_egg=options.as_egg, ignore_installed=options.ignore_installed, ignore_dependencies=options.ignore_dependencies, force_reinstall=options.force_reinstall, use_user_site=options.use_user_site, target_dir=temp_target_dir, session=session, pycompile=options.compile, isolated=options.isolated_mode, ) for name in args: requirement_set.add_requirement( InstallRequirement.from_line( name, None, isolated=options.isolated_mode, ) ) for name in options.editables: requirement_set.add_requirement( InstallRequirement.from_editable( name, default_vcs=options.default_vcs, isolated=options.isolated_mode, ) ) for filename in options.requirements: for req in parse_requirements( filename, finder=finder, options=options, session=session): requirement_set.add_requirement(req) if not requirement_set.has_requirements: opts = {'name': self.name} if options.find_links: msg = ('You must give at least one requirement to ' '%(name)s (maybe you meant "pip %(name)s ' '%(links)s"?)' % dict(opts, links=' '.join(options.find_links))) else: msg = ('You must give at least one requirement ' 'to %(name)s (see "pip help %(name)s")' % opts) logger.warning(msg) return try: if not options.no_download: requirement_set.prepare_files(finder) else: requirement_set.locate_files() if not options.no_install: requirement_set.install( install_options, global_options, root=options.root_path, ) reqs = sorted( requirement_set.successfully_installed, key=operator.attrgetter('name')) items = [] for req in reqs: item = req.name try: if hasattr(req, 'installed_version'): if req.installed_version: item += '-' + req.installed_version except Exception: pass items.append(item) installed = ' '.join(items) if installed: logger.info('Successfully installed %s', installed) else: downloaded = ' '.join([ req.name for req in requirement_set.successfully_downloaded ]) if downloaded: logger.info( 'Successfully downloaded %s', downloaded ) except PreviousBuildDirError: options.no_clean = True raise finally: # Clean up if ((not options.no_clean) and ((not options.no_install) or options.download_dir)): requirement_set.cleanup_files() if options.target_dir: if not os.path.exists(options.target_dir): os.makedirs(options.target_dir) lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] for item in os.listdir(lib_dir): target_item_dir = os.path.join(options.target_dir, item) if os.path.exists(target_item_dir): if not options.upgrade: logger.warning( 'Target directory %s already exists. Specify ' '--upgrade to force replacement.', target_item_dir ) continue if os.path.islink(target_item_dir): logger.warning( 'Target directory %s already exists and is ' 'a link. 
Pip will not automatically replace ' 'links, please remove if replacement is ' 'desired.', target_item_dir ) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move( os.path.join(lib_dir, item), target_item_dir ) shutil.rmtree(temp_target_dir) return requirement_set
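# ---------------------------------------------------------------------------
# Minimal sketch, kept as comments: the docstring of _build_package_finder
# above says the method is meant to be overridden by subclasses. Assuming the
# surrounding class is pip's InstallCommand (its "class" line falls outside
# this excerpt), a hypothetical subclass could forward tweaked options:
#
#   class PreReleaseInstallCommand(InstallCommand):
#       def _build_package_finder(self, options, index_urls, session):
#           options.pre = True  # always include pre-release versions
#           return super(PreReleaseInstallCommand, self)._build_package_finder(
#               options, index_urls, session)
# ---------------------------------------------------------------------------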
mit
2,464,007,097,171,582,000
37.143198
79
0.504005
false
progamer001/Testchat
auth.py
1
9013
# coding=UTF-8

# Tornado modules.
import tornado.web
import tornado.escape
import tornado.auth  # needed for the GoogleMixin used below

# Import application modules.
from base import BaseHandler

# General modules.
import logging


class LoginHandler(BaseHandler, tornado.auth.GoogleMixin):
    """
    Handler for logins with Google Open ID / OAuth
    http://www.tornadoweb.org/documentation/auth.html#google
    """
    @tornado.web.asynchronous
    def get(self):
        if self.get_argument("openid.mode", None):
            self.get_authenticated_user(self.async_callback(self._on_auth))
            return
        elif self.get_argument("start_google_oauth", None):
            # Set users attributes to ask for.
            ax_attrs = ['name', 'email', 'language', 'username']
            self.authenticate_redirect(ax_attrs=ax_attrs)
        elif self.get_argument("start_direct_auth", None):
            # Get form inputs.
            try:
                user = dict()
                user["name"] = self.get_argument("name", default="")
                user["pass_login"] = self.get_argument("pass_login", default="")
                user["password"] = ""
            except:
                # Send an error back to the client and stop; falling through
                # here would touch an undefined `user`.
                content = "<p>There was an input error. Fill in all fields!</p>"
                self.render_default("index.html", content=content)
                return
            # If the user has not filled in all fields.
            if not user["pass_login"] or not user["name"]:
                content = ('<h2>2. Direct Login</h2>' +
                           '<p>Fill in both fields!</p>' +
                           '<form class="form-inline" action="/login" method="get"> ' +
                           '<input type="hidden" name="start_direct_auth" value="1">' +
                           '<input class="form-control" type="text" name="name" placeholder="Your Name" value="' + str(user["name"]) + '"> ' +
                           '<input class="form-control" type="password" name="pass_login" placeholder="Your Password" value="' + str(user["pass_login"]) + '"> ' +
                           '<input type="submit" class="btn btn-default" value="Sign in">' +
                           '</form>')
                self.render_default("index.html", content=content)
            # All data given. Log user in!
            else:
                self._on_auth(user)
        elif self.get_argument("start_registration", None):
            # Get form inputs.
            try:
                user = dict()
                user["name"] = self.get_argument("name", default="")
                user["password"] = self.get_argument("password", default="")
                user["pass_login"] = ""
            except:
                # Send an error back to the client and stop here as well.
                content = "<p>There was an input error. Fill in all fields!</p>"
                self.render_default("index.html", content=content)
                return
            # If the user has not filled in all fields.
            if not user["password"] or not user["name"]:
                content = ('<h2>3. Registration</h2>' +
                           '<form class="form-inline" action="/login" method="get"> ' +
                           '<input type="hidden" name="start_registration" value="1">' +
                           '<input class="form-control" type="text" name="name" placeholder="Your Name"> ' +
                           '<input class="form-control" type="password" name="password" placeholder="Your Password"> ' +
                           '<input type="submit" class="btn btn-default" value="Register">' +
                           '</form>')
                self.render_default("index.html", content=content)
            # All data given. Log user in!
            else:
                self._on_auth(user)
        else:
            # Logins.
            content = '<div class="page-header"><h1>Login</h1></div>'
            content += ('<h2>1. Google Login</h2>' +
                        '<form action="/login" method="get">' +
                        '<input type="hidden" name="start_google_oauth" value="1">' +
                        '<input type="submit" class="btn" value="Sign in with Google">' +
                        '</form>')
            content += ('<h2>2. Direct Login</h2>' +
                        '<form class="form-inline" action="/login" method="get"> ' +
                        '<input type="hidden" name="start_direct_auth" value="1">' +
                        '<input class="form-control" type="text" name="name" placeholder="Your Name"> ' +
                        '<input class="form-control" type="password" name="pass_login" placeholder="Your Password"> ' +
                        '<input type="submit" class="btn btn-default" value="Sign in">' +
                        '</form>')
            content += ('<h2>3. Registration</h2>' +
                        '<form class="form-inline" action="/login" method="get"> ' +
                        '<input type="hidden" name="start_registration" value="1">' +
                        '<input class="form-control" type="text" name="name" placeholder="Your Name"> ' +
                        '<input class="form-control" type="password" name="password" placeholder="Your Password"> ' +
                        '<input type="submit" class="btn btn-default" value="Register">' +
                        '</form>')
            self.render_default("index.html", content=content)

    def _on_auth(self, user):
        """
        Callback for third party authentication (last step).
        """
        if not user:
            content = ('<div class="page-header"><h1>Login</h1></div>' +
                       '<div class="alert alert-error">' +
                       '<button class="close" data-dismiss="alert">×</button>' +
                       '<h3>Authentication failed</h3>' +
                       '<p>This might be due to a problem in Tornado\'s GoogleMixin.</p>' +
                       '</div>')
            self.render_default("index.html", content=content)
            return None

        # @todo: Validate user data.
        # Save user when authentication was successful.
        def on_user_find(result, user=user):
            #@todo: We should check if email is given even though we can assume.
            if result == "null" or not result:
                # If user does not exist, create a new entry.
                # self.application.client.set("user:" + user["email"], tornado.escape.json_encode(user))
                self.application.client.set("user:" + user["name"], tornado.escape.json_encode(user))
            else:
                dbuser = tornado.escape.json_decode(result)
                # If try to register
                if user["password"] != "":
                    content = ('<h2>Login</h2>' +
                               '<p>Username taken!</p>' +
                               '<form class="form-inline" action="/login" method="get"> ' +
                               '<input type="hidden" name="start_registration" value="1">' +
                               '<input class="form-control" type="text" name="name" placeholder="Your Name"> ' +
                               '<input class="form-control" type="password" name="password" placeholder="Your Password"> ' +
                               '<input type="submit" class="btn btn-default" value="Register">' +
                               '</form>')
                    self.render_default("index.html", content=content)
                    return None
                # If try to login
                if user["pass_login"] != dbuser.get("password"):
                    content = ('<h2>Login</h2>' +
                               '<p>Password incorrect!</p>' +
                               '<form class="form-inline" action="/login" method="get"> ' +
                               '<input type="hidden" name="start_direct_auth" value="1">' +
                               '<input class="form-control" type="text" name="name" placeholder="Your Name"> ' +
                               '<input class="form-control" type="password" name="pass_login" placeholder="Your Password"> ' +
                               '<input type="submit" class="btn btn-default" value="Login">' +
                               '</form>')
                    self.render_default("index.html", content=content)
                    return None
                # dbuser.update(user)
                # user = dbuser
                # self.application.client.set("user:" + user["email"], tornado.escape.json_encode(user))
                # self.application.client.set("user:" + user["name"], tornado.escape.json_encode(user))
            # Save user id in cookie.
            # self.set_secure_cookie("user", user["email"])
            self.set_cookie("user", user["name"])
            # self.application.usernames[user["email"]] = user.get("name") or user["email"]
            self.application.usernames[user["name"]] = user.get("name")  # or user["email"]
            # Closed client connection
            if self.request.connection.stream.closed():
                logging.warning("Waiter disappeared")
                return
            self.redirect("/")

        # dbuser = self.application.client.get("user:" + user["email"], on_user_find)
        dbuser = self.application.client.get("user:" + user["name"], on_user_find)


class LogoutHandler(BaseHandler):
    def get(self):
        self.clear_cookie('user')
        self.redirect("/")
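# ---------------------------------------------------------------------------
# Minimal wiring sketch: how these handlers might be mounted in a Tornado
# application. The cookie_secret value and port are placeholder assumptions,
# and the real chat application must still provide the `client` and
# `usernames` attributes that the handlers above rely on.
if __name__ == "__main__":
    import tornado.ioloop

    app = tornado.web.Application(
        [(r"/login", LoginHandler), (r"/logout", LogoutHandler)],
        cookie_secret="CHANGE_ME",  # placeholder assumption
    )
    app.listen(8888)  # placeholder port
    tornado.ioloop.IOLoop.instance().start()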
mit
2,882,877,958,796,473,300
49.629213
151
0.525078
false
kbrebanov/ansible
contrib/inventory/nsot.py
117
9825
#!/usr/bin/env python
'''
nsot
====

Ansible Dynamic Inventory to pull hosts from NSoT, a flexible CMDB by Dropbox

Features
--------

* Define host groups in the form of NSoT device attribute criteria
* All parameters defined by the spec as of 2015-09-05 are supported.

  + ``--list``: Returns JSON hash of host groups -> hosts and top-level
    ``_meta`` -> ``hostvars`` which correspond to all device attributes.

    Group vars can be specified in the YAML configuration, noted below.

  + ``--host <hostname>``: Returns JSON hash where every item is a device
    attribute.

* In addition to all attributes assigned to the resource being returned, the
  script will also append ``site_id`` and ``id`` as facts to utilize.


Configuration
-------------

Since it'd be annoying and failure-prone to guess where your configuration
file is, use ``NSOT_INVENTORY_CONFIG`` to specify the path to it.

This file should adhere to the YAML spec. Each top-level variable must be the
desired Ansible group name, hashed with a single 'query' item to define the
NSoT attribute query.

Queries follow the normal NSoT query syntax, `shown here`_

.. _shown here: https://github.com/dropbox/pynsot#set-queries

.. code:: yaml

   routers:
     query: 'deviceType=ROUTER'
     vars:
       a: b
       c: d

   juniper_fw:
     query: 'deviceType=FIREWALL manufacturer=JUNIPER'

   not_f10:
     query: '-manufacturer=FORCE10'

The inventory will automatically use your ``.pynsotrc`` like normal pynsot
from the CLI would, so make sure that's configured appropriately.

.. note::

    Attributes I'm showing above are influenced by ones that the Trigger
    project likes. As is the spirit of NSoT, use whichever attributes work
    best for your workflow.

If the config file is blank or absent, the following default groups will be
created:

* ``routers``: deviceType=ROUTER
* ``switches``: deviceType=SWITCH
* ``firewalls``: deviceType=FIREWALL

These are likely not useful for everyone so please use the configuration. :)

.. note::

    By default, resources will only be returned for what your default site is
    set for in your ``~/.pynsotrc``.

    If you want to specify a site, add an extra key under the group for
    ``site: n``.

Output Examples
---------------

Here are some examples shown from just calling the command directly::

   $ NSOT_INVENTORY_CONFIG=$PWD/test.yaml ansible_nsot --list | jq '.'
   {
     "routers": {
       "hosts": [
         "test1.example.com"
       ],
       "vars": {
         "cool_level": "very",
         "group": "routers"
       }
     },
     "firewalls": {
       "hosts": [
         "test2.example.com"
       ],
       "vars": {
         "cool_level": "enough",
         "group": "firewalls"
       }
     },
     "_meta": {
       "hostvars": {
         "test2.example.com": {
           "make": "SRX",
           "site_id": 1,
           "id": 108
         },
         "test1.example.com": {
           "make": "MX80",
           "site_id": 1,
           "id": 107
         }
       }
     },
     "rtr_and_fw": {
       "hosts": [
         "test1.example.com",
         "test2.example.com"
       ],
       "vars": {}
     }
   }
   $ NSOT_INVENTORY_CONFIG=$PWD/test.yaml ansible_nsot --host test1 | jq '.'
   {
      "make": "MX80",
      "site_id": 1,
      "id": 107
   }

'''

from __future__ import print_function
import sys
import os
import pkg_resources
import argparse
import json
import yaml
from textwrap import dedent
from pynsot.client import get_api_client
from pynsot.app import HttpServerError
from click.exceptions import UsageError

from six import string_types


def warning(*objs):
    print("WARNING: ", *objs, file=sys.stderr)


class NSoTInventory(object):
    '''NSoT Client object for gathering inventory'''

    def __init__(self):
        self.config = dict()
        config_env = os.environ.get('NSOT_INVENTORY_CONFIG')
        if config_env:
            config_file = os.path.abspath(config_env)
            try:
                # It is the open(), not os.path.abspath(), that raises
                # IOError for a non-existent file, so guard the open itself.
                f = open(config_file)
            except IOError:  # If file non-existent, use default config
                self._config_default()
            except Exception as e:
                sys.exit('%s\n' % e)
            else:
                with f:
                    try:
                        self.config.update(yaml.safe_load(f))
                    except TypeError:  # If empty file, use default config
                        warning('Empty config file')
                        self._config_default()
                    except Exception as e:
                        sys.exit('%s\n' % e)
        else:  # Use defaults if env var missing
            self._config_default()
        self.groups = self.config.keys()
        self.client = get_api_client()
        self._meta = {'hostvars': dict()}

    def _config_default(self):
        default_yaml = '''
        ---
        routers:
          query: deviceType=ROUTER
        switches:
          query: deviceType=SWITCH
        firewalls:
          query: deviceType=FIREWALL
        '''
        self.config = yaml.safe_load(dedent(default_yaml))

    def do_list(self):
        '''Direct callback for when ``--list`` is provided

        Relies on the configuration generated from init to run
        _inventory_group()
        '''
        inventory = dict()
        for group, contents in self.config.items():
            group_response = self._inventory_group(group, contents)
            inventory.update(group_response)
        inventory.update({'_meta': self._meta})
        return json.dumps(inventory)

    def do_host(self, host):
        return json.dumps(self._hostvars(host))

    def _hostvars(self, host):
        '''Return dictionary of all device attributes

        Depending on number of devices in NSoT, could be rather slow since
        this has to request every device resource to filter through
        '''
        device = [i for i in self.client.devices.get()
                  if host in i['hostname']][0]
        attributes = device['attributes']
        attributes.update({'site_id': device['site_id'], 'id': device['id']})
        return attributes

    def _inventory_group(self, group, contents):
        '''Takes a group and returns inventory for it as dict

        :param group: Group name
        :type group: str
        :param contents: The contents of the group's YAML config
        :type contents: dict

        contents param should look like::

            {
              'query': 'xx',
              'vars': {'a': 'b'}
            }

        Will return something like::

            {
              group: {
                hosts: [],
                vars: {},
              }
            }
        '''
        query = contents.get('query')
        hostvars = contents.get('vars', dict())
        site = contents.get('site', dict())
        obj = {group: dict()}
        obj[group]['hosts'] = []
        obj[group]['vars'] = hostvars
        try:
            assert isinstance(query, string_types)
        except AssertionError:
            sys.exit('ERR: Group queries must be a single string\n'
                     ' Group: %s\n'
                     ' Query: %s\n' % (group, query)
                     )
        try:
            if site:
                site = self.client.sites(site)
                devices = site.devices.query.get(query=query)
            else:
                devices = self.client.devices.query.get(query=query)
        except HttpServerError as e:
            if '500' in str(e.response):
                _site = 'Correct site id?'
                _attr = 'Queried attributes actually exist?'
                questions = _site + '\n' + _attr
                sys.exit('ERR: 500 from server.\n%s' % questions)
            else:
                raise
        except UsageError:
            sys.exit('ERR: Could not connect to server. Running?')

        # Would do a list comprehension here, but would like to save code/time
        # and also acquire attributes in this step
        for host in devices:
            # Iterate through each device that matches query, assign hostname
            # to the group's hosts array and then use this single iteration as
            # a chance to update self._meta which will be used in the final
            # return
            hostname = host['hostname']
            obj[group]['hosts'].append(hostname)
            attributes = host['attributes']
            attributes.update({'site_id': host['site_id'], 'id': host['id']})
            self._meta['hostvars'].update({hostname: attributes})

        return obj


def parse_args():
    desc = __doc__.splitlines()[4]  # Just to avoid being redundant

    # Establish parser with options and error out if no action provided
    parser = argparse.ArgumentParser(
        description=desc,
        conflict_handler='resolve',
    )

    # Arguments
    #
    # Currently accepting (--list | -l) and (--host | -h)
    # These must not be allowed together
    parser.add_argument(
        '--list', '-l',
        help='Print JSON object containing hosts to STDOUT',
        action='store_true',
        dest='list_',  # Avoiding syntax highlighting for list
    )

    parser.add_argument(
        '--host', '-h',
        help='Print JSON object containing hostvars for <host>',
        action='store',
    )
    args = parser.parse_args()

    if not args.list_ and not args.host:  # Require at least one option
        parser.exit(status=1, message='No action requested')

    if args.list_ and args.host:  # Do not allow multiple options
        parser.exit(status=1, message='Too many actions requested')

    return args


def main():
    '''Set up argument handling and callback routing'''
    args = parse_args()
    client = NSoTInventory()

    # Callback condition
    if args.list_:
        print(client.do_list())
    elif args.host:
        print(client.do_host(args.host))


if __name__ == '__main__':
    main()
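# ---------------------------------------------------------------------------
# Usage sketch (assumes a reachable NSoT server and a configured ~/.pynsotrc;
# the YAML path below is a placeholder). The flags come straight from the
# argparse setup above and the env var from the module docstring:
#
#   $ export NSOT_INVENTORY_CONFIG=$PWD/groups.yaml
#   $ ./nsot.py --list                    # groups -> hosts plus _meta
#   $ ./nsot.py --host test1.example.com  # one host's attribute hash
#
# Programmatic use follows the same pair of entry points, e.g.
# print(NSoTInventory().do_list()) once the environment variable is set.
# ---------------------------------------------------------------------------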
gpl-3.0
6,341,694,139,145,834,000
27.644315
79
0.575573
false
Andrew-McNab-UK/DIRAC
docs/source/conf.py
4
9724
# -*- coding: utf-8 -*- # # DiracDocs documentation build configuration file, created by # sphinx-quickstart on Sun Apr 25 17:34:37 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import datetime import os import sys import subprocess sys.path.insert(0, ".") try: import fakeEnvironment except ImportError: pass try: import fakeEnv except ImportError: pass diracRelease = os.environ.get( 'DIRACVERSION', 'integration' ) if os.environ.get('READTHEDOCS') == 'True': diracRelease = os.path.basename( os.path.abspath( "../../" ) ) if diracRelease.startswith("rel-"): diracRelease = diracRelease[4:] print 'conf.py: %s as DIRACVERSION' % diracRelease #............................................................................... # configuration # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. if os.environ.get('READTHEDOCS') == 'True': sys.path.append(os.path.abspath('.')) diracPath = os.path.abspath( os.path.join( os.getcwd(), "../..") ) print "DiracPath",diracPath buildfolder ="_build" try: os.mkdir( os.path.abspath( "../"+buildfolder) ) except: pass ##We need to have the DIRAC module somewhere, or we cannot import it, as readtheDocs clones the repo into something based on the branchname if not os.path.exists( "../../DIRAC" ): diracLink = os.path.abspath( os.path.join( os.getcwd() , "../" , buildfolder, "DIRAC" ) ) print "DiracLink",diracLink if not os.path.exists( diracLink ): RES = subprocess.check_output( ["ln","-s", diracPath, diracLink ] ) diracPath = os.path.abspath( os.path.join( diracLink, ".." ) ) sys.path.insert(0, diracPath) for path in sys.path: os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '')+":"+path ## this is not working at the moment because the DIRAC folder is not found by the buildScriptsDOC script # print "Pythonpath",os.environ['PYTHONPATH'] # buildCommand = os.path.join( os.getcwd() , "../Tools/buildScriptsDOC.py" ) # scriptdir = os.path.abspath(os.path.join( os.getcwd() , "../", buildfolder, "scripts" )) # try: # os.mkdir( scriptdir ) # except: # pass # print "command", buildCommand # code = subprocess.Popen( ["python", buildCommand, scriptdir ], env = os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # stdout , err = code.communicate() # print "script",stdout # print "script",err os.environ["DIRAC"] = diracPath print "DIRAC ENVIRON", os.environ["DIRAC"] ##singlehtml build needs too much memory, so we need to create less code documentation buildtype = "limited" if any("singlehtml" in arg for arg in sys.argv ) else "full" print "Chosing build type:", buildtype buildCommand =os.path.join( os.getcwd() , "../Tools/MakeDoc.py" ) code = subprocess.Popen( ["python",buildCommand, buildtype], env = os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout , err = code.communicate() print "code",stdout print "code",err # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.intersphinx', 'sphinx.ext.napoleon', 'sphinx.ext.graphviz', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'DIRAC' copyright = u'%s, DIRAC Project' % datetime.datetime.utcnow().year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '' # The full version, including alpha/beta/rc tags. release = diracRelease # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%H:%M %d/%m/%Y %Z' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. #ADRI: Ignore old stuff that is not included in the compilation exclude_trees = [ 'AdministratorGuide/Configuration/ConfigurationReference' ] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'nature' html_style = 'dirac.css' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = { # 'sidebarbgcolor':'#D5E2F2' #} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". html_title = "DIRAC Documentation" # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = '_static/DIRAC-logo.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = '_static/favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%d/%m/%Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'DiracDocsdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'DiracDocs.tex', u'DIRAC Documentation', u'DIRAC Project.', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True ## link with the python standard library docs intersphinx_mapping = { 'python': ('https://docs.python.org/2.7', None), } #............................................................................... #EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
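# ---------------------------------------------------------------------------
# Build sketch (directory names are assumptions based on this file living in
# docs/source): the HTML documentation would typically be produced from the
# docs directory with
#
#   sphinx-build -b html source _build/html
#
# while the memory-limited "singlehtml" code-doc variant selected above is
# triggered by
#
#   sphinx-build -b singlehtml source _build/singlehtml
# ---------------------------------------------------------------------------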
gpl-3.0
-8,202,472,333,667,071,000
32.881533
141
0.686857
false
ganeti/ganeti
lib/hooksmaster.py
1
10785
# # # Copyright (C) 2006, 2007, 2011, 2012 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Module implementing the logic for running hooks. """ from ganeti import constants from ganeti import errors from ganeti import utils from ganeti import compat from ganeti import pathutils def _RpcResultsToHooksResults(rpc_results): """Function to convert RPC results to the format expected by HooksMaster. @type rpc_results: dict(node: L{rpc.RpcResult}) @param rpc_results: RPC results @rtype: dict(node: (fail_msg, offline, hooks_results)) @return: RPC results unpacked according to the format expected by L({hooksmaster.HooksMaster} """ return dict((node, (rpc_res.fail_msg, rpc_res.offline, rpc_res.payload)) for (node, rpc_res) in rpc_results.items()) class HooksMaster(object): def __init__(self, opcode, hooks_path, nodes, hooks_execution_fn, hooks_results_adapt_fn, build_env_fn, prepare_post_nodes_fn, log_fn, htype=None, cluster_name=None, master_name=None): """Base class for hooks masters. This class invokes the execution of hooks according to the behaviour specified by its parameters. @type opcode: string @param opcode: opcode of the operation to which the hooks are tied @type hooks_path: string @param hooks_path: prefix of the hooks directories @type nodes: 2-tuple of lists @param nodes: 2-tuple of lists containing nodes on which pre-hooks must be run and nodes on which post-hooks must be run @type hooks_execution_fn: function that accepts the following parameters: (node_list, hooks_path, phase, environment) @param hooks_execution_fn: function that will execute the hooks; can be None, indicating that no conversion is necessary. 
@type hooks_results_adapt_fn: function @param hooks_results_adapt_fn: function that will adapt the return value of hooks_execution_fn to the format expected by RunPhase @type build_env_fn: function that returns a dictionary having strings as keys @param build_env_fn: function that builds the environment for the hooks @type prepare_post_nodes_fn: function that take a list of node UUIDs and returns a list of node UUIDs @param prepare_post_nodes_fn: function that is invoked right before executing post hooks and can change the list of node UUIDs to run the post hooks on @type log_fn: function that accepts a string @param log_fn: logging function @type htype: string or None @param htype: None or one of L{constants.HTYPE_CLUSTER}, L{constants.HTYPE_NODE}, L{constants.HTYPE_INSTANCE} @type cluster_name: string @param cluster_name: name of the cluster @type master_name: string @param master_name: name of the master """ self.opcode = opcode self.hooks_path = hooks_path self.hooks_execution_fn = hooks_execution_fn self.hooks_results_adapt_fn = hooks_results_adapt_fn self.build_env_fn = build_env_fn self.prepare_post_nodes_fn = prepare_post_nodes_fn self.log_fn = log_fn self.htype = htype self.cluster_name = cluster_name self.master_name = master_name self.pre_env = self._BuildEnv(constants.HOOKS_PHASE_PRE) (self.pre_nodes, self.post_nodes) = nodes def _BuildEnv(self, phase): """Compute the environment and the target nodes. Based on the opcode and the current node list, this builds the environment for the hooks and the target node list for the run. """ if phase == constants.HOOKS_PHASE_PRE: prefix = "GANETI_" elif phase == constants.HOOKS_PHASE_POST: prefix = "GANETI_POST_" else: raise AssertionError("Unknown phase '%s'" % phase) env = {} if self.hooks_path is not None: phase_env = self.build_env_fn() if phase_env: assert not compat.any(key.upper().startswith(prefix) for key in phase_env) env.update(("%s%s" % (prefix, key), value) for (key, value) in phase_env.items()) if phase == constants.HOOKS_PHASE_PRE: assert compat.all((key.startswith("GANETI_") and not key.startswith("GANETI_POST_")) for key in env) elif phase == constants.HOOKS_PHASE_POST: assert compat.all(key.startswith("GANETI_POST_") for key in env) assert isinstance(self.pre_env, dict) # Merge with pre-phase environment assert not compat.any(key.startswith("GANETI_POST_") for key in self.pre_env) env.update(self.pre_env) else: raise AssertionError("Unknown phase '%s'" % phase) return env def _RunWrapper(self, node_list, hpath, phase, phase_env): """Simple wrapper over self.callfn. This method fixes the environment before executing the hooks. """ env = { "PATH": constants.HOOKS_PATH, "GANETI_HOOKS_VERSION": constants.HOOKS_VERSION, "GANETI_OP_CODE": self.opcode, "GANETI_DATA_DIR": pathutils.DATA_DIR, "GANETI_HOOKS_PHASE": phase, "GANETI_HOOKS_PATH": hpath, } if self.htype: env["GANETI_OBJECT_TYPE"] = self.htype if self.cluster_name is not None: env["GANETI_CLUSTER"] = self.cluster_name if self.master_name is not None: env["GANETI_MASTER"] = self.master_name if phase_env: env = utils.algo.JoinDisjointDicts(env, phase_env) # Convert everything to strings env = dict([(str(key), str(val)) for key, val in env.items()]) assert compat.all(key == "PATH" or key.startswith("GANETI_") for key in env) return self.hooks_execution_fn(node_list, hpath, phase, env) def RunPhase(self, phase, node_names=None): """Run all the scripts for a phase. This is the main function of the HookMaster. 
It executes self.hooks_execution_fn, and after running self.hooks_results_adapt_fn on its results it expects them to be in the form {node_name: (fail_msg, [(script, result, output), ...]}). @param phase: one of L{constants.HOOKS_PHASE_POST} or L{constants.HOOKS_PHASE_PRE}; it denotes the hooks phase @param node_names: overrides the predefined list of nodes for the given phase @return: the processed results of the hooks multi-node rpc call @raise errors.HooksFailure: on communication failure to the nodes @raise errors.HooksAbort: on failure of one of the hooks """ if phase == constants.HOOKS_PHASE_PRE: if node_names is None: node_names = self.pre_nodes env = self.pre_env elif phase == constants.HOOKS_PHASE_POST: if node_names is None: node_names = self.post_nodes if node_names is not None and self.prepare_post_nodes_fn is not None: node_names = frozenset(self.prepare_post_nodes_fn(list(node_names))) env = self._BuildEnv(phase) else: raise AssertionError("Unknown phase '%s'" % phase) if not node_names: # empty node list, we should not attempt to run this as either # we're in the cluster init phase and the rpc client part can't # even attempt to run, or this LU doesn't do hooks at all return results = self._RunWrapper(node_names, self.hooks_path, phase, env) if not results: msg = "Communication Failure" if phase == constants.HOOKS_PHASE_PRE: raise errors.HooksFailure(msg) else: self.log_fn(msg) return results converted_res = results if self.hooks_results_adapt_fn: converted_res = self.hooks_results_adapt_fn(results) errs = [] for node_name, (fail_msg, offline, hooks_results) in converted_res.items(): if offline: continue if fail_msg: self.log_fn("Communication failure to node %s: %s", node_name, fail_msg) continue for script, hkr, output in hooks_results: if hkr == constants.HKR_FAIL: if phase == constants.HOOKS_PHASE_PRE: errs.append((node_name, script, output)) else: if not output: output = "(no output)" self.log_fn("On %s script %s failed, output: %s" % (node_name, script, output)) if errs and phase == constants.HOOKS_PHASE_PRE: raise errors.HooksAbort(errs) return results def RunConfigUpdate(self): """Run the special configuration update hook This is a special hook that runs only on the master after each top-level LI if the configuration has been updated. """ phase = constants.HOOKS_PHASE_POST hpath = constants.HOOKS_NAME_CFGUPDATE nodes = [self.master_name] self._RunWrapper(nodes, hpath, phase, self.pre_env) @staticmethod def BuildFromLu(hooks_execution_fn, lu): if lu.HPATH is None: nodes = (None, None) else: hooks_nodes = lu.BuildHooksNodes() if len(hooks_nodes) != 2: raise errors.ProgrammerError( "LogicalUnit.BuildHooksNodes must return a 2-tuple") nodes = (frozenset(hooks_nodes[0]), frozenset(hooks_nodes[1])) master_name = cluster_name = None if lu.cfg: master_name = lu.cfg.GetMasterNodeName() cluster_name = lu.cfg.GetClusterName() return HooksMaster(lu.op.OP_ID, lu.HPATH, nodes, hooks_execution_fn, _RpcResultsToHooksResults, lu.BuildHooksEnv, lu.PreparePostHookNodes, lu.LogWarning, lu.HTYPE, cluster_name, master_name)
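# ---------------------------------------------------------------------------
# Construction sketch, kept as comments: every callable below is a stand-in,
# not Ganeti's real RPC machinery; the argument order follows __init__ above.
# The execution function must return {node: (fail_msg, offline, results)}.
#
#   def run_hooks(node_list, hpath, phase, env):
#       # Pretend every node ran its hooks successfully.
#       return dict((node, ("", False, [])) for node in node_list)
#
#   hm = HooksMaster("OP_EXAMPLE", "example-hooks",
#                    (frozenset(["node1"]), frozenset(["node1"])),
#                    run_hooks, None, dict, None, lambda *args: None,
#                    htype=None, cluster_name="cluster1",
#                    master_name="node1")
#   hm.RunPhase(constants.HOOKS_PHASE_PRE)
# ---------------------------------------------------------------------------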
bsd-2-clause
1,631,890,072,393,134,000
35.808874
80
0.671025
false
ChanChiChoi/scikit-learn
sklearn/covariance/tests/test_covariance.py
142
11068
# Author: Alexandre Gramfort <[email protected]> # Gael Varoquaux <[email protected]> # Virgile Fritsch <[email protected]> # # License: BSD 3 clause import numpy as np from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_warns from sklearn import datasets from sklearn.covariance import empirical_covariance, EmpiricalCovariance, \ ShrunkCovariance, shrunk_covariance, \ LedoitWolf, ledoit_wolf, ledoit_wolf_shrinkage, OAS, oas X = datasets.load_diabetes().data X_1d = X[:, 0] n_samples, n_features = X.shape def test_covariance(): # Tests Covariance module on a simple dataset. # test covariance fit from data cov = EmpiricalCovariance() cov.fit(X) emp_cov = empirical_covariance(X) assert_array_almost_equal(emp_cov, cov.covariance_, 4) assert_almost_equal(cov.error_norm(emp_cov), 0) assert_almost_equal( cov.error_norm(emp_cov, norm='spectral'), 0) assert_almost_equal( cov.error_norm(emp_cov, norm='frobenius'), 0) assert_almost_equal( cov.error_norm(emp_cov, scaling=False), 0) assert_almost_equal( cov.error_norm(emp_cov, squared=False), 0) assert_raises(NotImplementedError, cov.error_norm, emp_cov, norm='foo') # Mahalanobis distances computation test mahal_dist = cov.mahalanobis(X) print(np.amin(mahal_dist), np.amax(mahal_dist)) assert(np.amin(mahal_dist) > 0) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) cov = EmpiricalCovariance() cov.fit(X_1d) assert_array_almost_equal(empirical_covariance(X_1d), cov.covariance_, 4) assert_almost_equal(cov.error_norm(empirical_covariance(X_1d)), 0) assert_almost_equal( cov.error_norm(empirical_covariance(X_1d), norm='spectral'), 0) # test with one sample # FIXME I don't know what this test does X_1sample = np.arange(5) cov = EmpiricalCovariance() assert_warns(UserWarning, cov.fit, X_1sample) assert_array_almost_equal(cov.covariance_, np.zeros(shape=(5, 5), dtype=np.float64)) # test integer type X_integer = np.asarray([[0, 1], [1, 0]]) result = np.asarray([[0.25, -0.25], [-0.25, 0.25]]) assert_array_almost_equal(empirical_covariance(X_integer), result) # test centered case cov = EmpiricalCovariance(assume_centered=True) cov.fit(X) assert_array_equal(cov.location_, np.zeros(X.shape[1])) def test_shrunk_covariance(): # Tests ShrunkCovariance module on a simple dataset. # compare shrunk covariance obtained from data and from MLE estimate cov = ShrunkCovariance(shrinkage=0.5) cov.fit(X) assert_array_almost_equal( shrunk_covariance(empirical_covariance(X), shrinkage=0.5), cov.covariance_, 4) # same test with shrinkage not provided cov = ShrunkCovariance() cov.fit(X) assert_array_almost_equal( shrunk_covariance(empirical_covariance(X)), cov.covariance_, 4) # same test with shrinkage = 0 (<==> empirical_covariance) cov = ShrunkCovariance(shrinkage=0.) cov.fit(X) assert_array_almost_equal(empirical_covariance(X), cov.covariance_, 4) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) cov = ShrunkCovariance(shrinkage=0.3) cov.fit(X_1d) assert_array_almost_equal(empirical_covariance(X_1d), cov.covariance_, 4) # test shrinkage coeff on a simple data set (without saving precision) cov = ShrunkCovariance(shrinkage=0.5, store_precision=False) cov.fit(X) assert(cov.precision_ is None) def test_ledoit_wolf(): # Tests LedoitWolf module on a simple dataset. 
# test shrinkage coeff on a simple data set X_centered = X - X.mean(axis=0) lw = LedoitWolf(assume_centered=True) lw.fit(X_centered) shrinkage_ = lw.shrinkage_ score_ = lw.score(X_centered) assert_almost_equal(ledoit_wolf_shrinkage(X_centered, assume_centered=True), shrinkage_) assert_almost_equal(ledoit_wolf_shrinkage(X_centered, assume_centered=True, block_size=6), shrinkage_) # compare shrunk covariance obtained from data and from MLE estimate lw_cov_from_mle, lw_shinkrage_from_mle = ledoit_wolf(X_centered, assume_centered=True) assert_array_almost_equal(lw_cov_from_mle, lw.covariance_, 4) assert_almost_equal(lw_shinkrage_from_mle, lw.shrinkage_) # compare estimates given by LW and ShrunkCovariance scov = ShrunkCovariance(shrinkage=lw.shrinkage_, assume_centered=True) scov.fit(X_centered) assert_array_almost_equal(scov.covariance_, lw.covariance_, 4) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) lw = LedoitWolf(assume_centered=True) lw.fit(X_1d) lw_cov_from_mle, lw_shinkrage_from_mle = ledoit_wolf(X_1d, assume_centered=True) assert_array_almost_equal(lw_cov_from_mle, lw.covariance_, 4) assert_almost_equal(lw_shinkrage_from_mle, lw.shrinkage_) assert_array_almost_equal((X_1d ** 2).sum() / n_samples, lw.covariance_, 4) # test shrinkage coeff on a simple data set (without saving precision) lw = LedoitWolf(store_precision=False, assume_centered=True) lw.fit(X_centered) assert_almost_equal(lw.score(X_centered), score_, 4) assert(lw.precision_ is None) # Same tests without assuming centered data # test shrinkage coeff on a simple data set lw = LedoitWolf() lw.fit(X) assert_almost_equal(lw.shrinkage_, shrinkage_, 4) assert_almost_equal(lw.shrinkage_, ledoit_wolf_shrinkage(X)) assert_almost_equal(lw.shrinkage_, ledoit_wolf(X)[1]) assert_almost_equal(lw.score(X), score_, 4) # compare shrunk covariance obtained from data and from MLE estimate lw_cov_from_mle, lw_shinkrage_from_mle = ledoit_wolf(X) assert_array_almost_equal(lw_cov_from_mle, lw.covariance_, 4) assert_almost_equal(lw_shinkrage_from_mle, lw.shrinkage_) # compare estimates given by LW and ShrunkCovariance scov = ShrunkCovariance(shrinkage=lw.shrinkage_) scov.fit(X) assert_array_almost_equal(scov.covariance_, lw.covariance_, 4) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) lw = LedoitWolf() lw.fit(X_1d) lw_cov_from_mle, lw_shinkrage_from_mle = ledoit_wolf(X_1d) assert_array_almost_equal(lw_cov_from_mle, lw.covariance_, 4) assert_almost_equal(lw_shinkrage_from_mle, lw.shrinkage_) assert_array_almost_equal(empirical_covariance(X_1d), lw.covariance_, 4) # test with one sample # FIXME I don't know what this test does X_1sample = np.arange(5) lw = LedoitWolf() assert_warns(UserWarning, lw.fit, X_1sample) assert_array_almost_equal(lw.covariance_, np.zeros(shape=(5, 5), dtype=np.float64)) # test shrinkage coeff on a simple data set (without saving precision) lw = LedoitWolf(store_precision=False) lw.fit(X) assert_almost_equal(lw.score(X), score_, 4) assert(lw.precision_ is None) def test_ledoit_wolf_large(): # test that ledoit_wolf doesn't error on data that is wider than block_size rng = np.random.RandomState(0) # use a number of features that is larger than the block-size X = rng.normal(size=(10, 20)) lw = LedoitWolf(block_size=10).fit(X) # check that covariance is about diagonal (random normal noise) assert_almost_equal(lw.covariance_, np.eye(20), 0) cov = lw.covariance_ # check that the result is consistent with not splitting data into blocks. 
lw = LedoitWolf(block_size=25).fit(X) assert_almost_equal(lw.covariance_, cov) def test_oas(): # Tests OAS module on a simple dataset. # test shrinkage coeff on a simple data set X_centered = X - X.mean(axis=0) oa = OAS(assume_centered=True) oa.fit(X_centered) shrinkage_ = oa.shrinkage_ score_ = oa.score(X_centered) # compare shrunk covariance obtained from data and from MLE estimate oa_cov_from_mle, oa_shinkrage_from_mle = oas(X_centered, assume_centered=True) assert_array_almost_equal(oa_cov_from_mle, oa.covariance_, 4) assert_almost_equal(oa_shinkrage_from_mle, oa.shrinkage_) # compare estimates given by OAS and ShrunkCovariance scov = ShrunkCovariance(shrinkage=oa.shrinkage_, assume_centered=True) scov.fit(X_centered) assert_array_almost_equal(scov.covariance_, oa.covariance_, 4) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) oa = OAS(assume_centered=True) oa.fit(X_1d) oa_cov_from_mle, oa_shinkrage_from_mle = oas(X_1d, assume_centered=True) assert_array_almost_equal(oa_cov_from_mle, oa.covariance_, 4) assert_almost_equal(oa_shinkrage_from_mle, oa.shrinkage_) assert_array_almost_equal((X_1d ** 2).sum() / n_samples, oa.covariance_, 4) # test shrinkage coeff on a simple data set (without saving precision) oa = OAS(store_precision=False, assume_centered=True) oa.fit(X_centered) assert_almost_equal(oa.score(X_centered), score_, 4) assert(oa.precision_ is None) # Same tests without assuming centered data-------------------------------- # test shrinkage coeff on a simple data set oa = OAS() oa.fit(X) assert_almost_equal(oa.shrinkage_, shrinkage_, 4) assert_almost_equal(oa.score(X), score_, 4) # compare shrunk covariance obtained from data and from MLE estimate oa_cov_from_mle, oa_shinkrage_from_mle = oas(X) assert_array_almost_equal(oa_cov_from_mle, oa.covariance_, 4) assert_almost_equal(oa_shinkrage_from_mle, oa.shrinkage_) # compare estimates given by OAS and ShrunkCovariance scov = ShrunkCovariance(shrinkage=oa.shrinkage_) scov.fit(X) assert_array_almost_equal(scov.covariance_, oa.covariance_, 4) # test with n_features = 1 X_1d = X[:, 0].reshape((-1, 1)) oa = OAS() oa.fit(X_1d) oa_cov_from_mle, oa_shinkrage_from_mle = oas(X_1d) assert_array_almost_equal(oa_cov_from_mle, oa.covariance_, 4) assert_almost_equal(oa_shinkrage_from_mle, oa.shrinkage_) assert_array_almost_equal(empirical_covariance(X_1d), oa.covariance_, 4) # test with one sample # FIXME I don't know what this test does X_1sample = np.arange(5) oa = OAS() assert_warns(UserWarning, oa.fit, X_1sample) assert_array_almost_equal(oa.covariance_, np.zeros(shape=(5, 5), dtype=np.float64)) # test shrinkage coeff on a simple data set (without saving precision) oa = OAS(store_precision=False) oa.fit(X) assert_almost_equal(oa.score(X), score_, 4) assert(oa.precision_ is None)
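# ---------------------------------------------------------------------------
# Usage sketch (not part of the test suite): the estimators exercised above
# choose their shrinkage intensity automatically when fit. The synthetic data
# below is an illustrative assumption, not a sklearn fixture.
if __name__ == "__main__":
    rng = np.random.RandomState(42)
    X_demo = rng.multivariate_normal(np.zeros(5), np.eye(5), size=200)
    print("LedoitWolf shrinkage:", LedoitWolf().fit(X_demo).shrinkage_)
    print("OAS shrinkage:", OAS().fit(X_demo).shrinkage_)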
bsd-3-clause
-259,602,035,515,206,050
39.542125
79
0.66245
false
rupak0577/ginga
ginga/web/pgw/Plot.py
3
4306
# # Plot.py -- Plotting widget canvas wrapper. # # Copyright (c) Eric R. Jeschke. All rights reserved. # This is open-source software licensed under a BSD license. # Please see the file LICENSE.txt for details. # from io import BytesIO from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas from ginga.web.pgw import Widgets class PlotWidget(Widgets.Canvas): """ This class implements the server-side backend of the surface for a web-based plot viewer. It uses a web socket to connect to an HTML5 canvas with javascript callbacks in a web browser on the client. The viewer is created separately on the backend and connects to this surface via the set_viewer() method. """ def __init__(self, plot, width=500, height=500): super(PlotWidget, self).__init__(width=width, height=height) self.widget = FigureCanvas(plot.get_figure()) self.logger = plot.logger self._configured = False self.refresh_delay = 0.010 self.set_plot(plot) def set_plot(self, plot): self.logger.debug("set_plot called") self.plot = plot self._dispatch_event_table = { "activate": self.ignore_event, "setbounds": self.map_event_cb, "mousedown": self.ignore_event, "mouseup": self.ignore_event, "mousemove": self.ignore_event, "mouseout": self.ignore_event, "mouseover": self.ignore_event, "mousewheel": self.ignore_event, "wheel": self.ignore_event, "click": self.ignore_event, "dblclick": self.ignore_event, "keydown": self.ignore_event, "keyup": self.ignore_event, "keypress": self.ignore_event, "resize": self.resize_event, "focus": self.ignore_event, "focusout": self.ignore_event, "blur": self.ignore_event, "drop": self.ignore_event, "paste": self.ignore_event, # Hammer.js events "pinch": self.ignore_event, "pinchstart": self.ignore_event, "pinchend": self.ignore_event, "rotate": self.ignore_event, "rotatestart": self.ignore_event, "rotateend": self.ignore_event, "tap": self.ignore_event, "pan": self.ignore_event, "panstart": self.ignore_event, "panend": self.ignore_event, "swipe": self.ignore_event, } self.plot.add_callback('draw-canvas', self.draw_cb) self.add_timer('refresh', self.refresh_cb) def get_plot(self): return self.plot def ignore_event(self, event): pass def refresh_cb(self): app = self.get_app() app.do_operation('refresh_canvas', id=self.id) self.reset_timer('refresh', self.refresh_delay) def get_rgb_buffer(self, plot): buf = BytesIO() fig = plot.get_figure() fig.canvas.print_figure(buf, format='png') wd, ht = self.width, self.height return (wd, ht, buf.getvalue()) def draw_cb(self, plot): self.logger.debug("getting RGB buffer") wd, ht, buf = self.get_rgb_buffer(plot) #self.logger.debug("clear_rect") #self.clear_rect(0, 0, wd, ht) self.logger.debug("drawing %dx%d image" % (wd, ht)) self.draw_image(buf, 0, 0, wd, ht) self.reset_timer('refresh', self.refresh_delay) def configure_window(self, wd, ht): self.logger.debug("canvas resized to %dx%d" % (wd, ht)) fig = self.plot.get_figure() fig.set_size_inches(float(wd) / fig.dpi, float(ht) / fig.dpi) def map_event_cb(self, event): wd, ht = event.width, event.height self.configure_window(wd, ht) self.plot.draw() def resize_event(self, event): wd, ht = event.x, event.y self.configure_window(wd, ht) self.plot.draw() def _cb_redirect(self, event): method = self._dispatch_event_table[event.type] try: method(event) except Exception as e: self.logger.error("error redirecting '%s' event: %s" % ( event.type, str(e))) # TODO: dump traceback to debug log #END
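# ---------------------------------------------------------------------------
# Construction sketch, kept as comments (hedged: PlotWidget only needs a plot
# object exposing get_figure(), a logger, and a 'draw-canvas' callback; the
# Plot class from ginga.util.plots and the log helper are assumptions about
# the surrounding ginga package):
#
#   from ginga.misc import log
#   from ginga.util import plots
#
#   logger = log.get_logger("plot-example", log_stderr=True)
#   plot = plots.Plot(logger=logger, width=500, height=500)
#   widget = PlotWidget(plot, width=500, height=500)
# ---------------------------------------------------------------------------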
bsd-3-clause
-1,079,178,573,254,554,400
30.896296
75
0.584301
false
arifsetiawan/edx-platform
lms/djangoapps/courseware/tests/test_navigation.py
28
11973
""" This test file will run through some LMS test scenarios regarding access and navigation of the LMS """ import time from mock import patch from nose.plugins.attrib import attr from django.conf import settings from django.core.urlresolvers import reverse from django.test.utils import override_settings from courseware.tests.helpers import LoginEnrollmentTestCase from courseware.tests.factories import GlobalStaffFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.django import modulestore @attr('shard_1') class TestNavigation(ModuleStoreTestCase, LoginEnrollmentTestCase): """ Check that navigation state is saved properly. """ STUDENT_INFO = [('[email protected]', 'foo'), ('[email protected]', 'foo')] def setUp(self): super(TestNavigation, self).setUp() self.test_course = CourseFactory.create() self.course = CourseFactory.create() self.chapter0 = ItemFactory.create(parent=self.course, display_name='Overview') self.chapter9 = ItemFactory.create(parent=self.course, display_name='factory_chapter') self.section0 = ItemFactory.create(parent=self.chapter0, display_name='Welcome') self.section9 = ItemFactory.create(parent=self.chapter9, display_name='factory_section') self.unit0 = ItemFactory.create(parent=self.section0, display_name='New Unit') self.chapterchrome = ItemFactory.create(parent=self.course, display_name='Chrome') self.chromelesssection = ItemFactory.create(parent=self.chapterchrome, display_name='chromeless', chrome='none') self.accordionsection = ItemFactory.create(parent=self.chapterchrome, display_name='accordion', chrome='accordion') self.tabssection = ItemFactory.create(parent=self.chapterchrome, display_name='tabs', chrome='tabs') self.defaultchromesection = ItemFactory.create( parent=self.chapterchrome, display_name='defaultchrome', ) self.fullchromesection = ItemFactory.create(parent=self.chapterchrome, display_name='fullchrome', chrome='accordion,tabs') self.tabtest = ItemFactory.create(parent=self.chapterchrome, display_name='progress_tab', default_tab='progress') # Create student accounts and activate them. 
for i in range(len(self.STUDENT_INFO)): email, password = self.STUDENT_INFO[i] username = 'u{0}'.format(i) self.create_account(username, email, password) self.activate_user(email) self.staff_user = GlobalStaffFactory() def assertTabActive(self, tabname, response): ''' Check if the progress tab is active in the tab set ''' for line in response.content.split('\n'): if tabname in line and 'active' in line: return raise AssertionError("assertTabActive failed: {} not active".format(tabname)) def assertTabInactive(self, tabname, response): ''' Check if the progress tab is active in the tab set ''' for line in response.content.split('\n'): if tabname in line and 'active' in line: raise AssertionError("assertTabInactive failed: " + tabname + " active") return def test_chrome_settings(self): ''' Test settings for disabling and modifying navigation chrome in the courseware: - Accordion enabled, or disabled - Navigation tabs enabled, disabled, or redirected ''' email, password = self.STUDENT_INFO[0] self.login(email, password) self.enroll(self.course, True) test_data = ( ('tabs', False, True), ('none', False, False), ('fullchrome', True, True), ('accordion', True, False), ('fullchrome', True, True) ) for (displayname, accordion, tabs) in test_data: response = self.client.get(reverse('courseware_section', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'Chrome', 'section': displayname, })) self.assertEquals('open_close_accordion' in response.content, accordion) self.assertEquals('course-tabs' in response.content, tabs) self.assertTabInactive('progress', response) self.assertTabActive('courseware', response) response = self.client.get(reverse('courseware_section', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'Chrome', 'section': 'progress_tab', })) self.assertTabActive('progress', response) self.assertTabInactive('courseware', response) @override_settings(SESSION_INACTIVITY_TIMEOUT_IN_SECONDS=1) def test_inactive_session_timeout(self): """ Verify that an inactive session times out and redirects to the login page """ email, password = self.STUDENT_INFO[0] self.login(email, password) # make sure we can access courseware immediately resp = self.client.get(reverse('dashboard')) self.assertEquals(resp.status_code, 200) # then wait a bit and see if we get timed out time.sleep(2) resp = self.client.get(reverse('dashboard')) # re-request, and we should get a redirect to login page self.assertRedirects(resp, settings.LOGIN_REDIRECT_URL + '?next=' + reverse('dashboard')) def test_redirects_first_time(self): """ Verify that the first time we click on the courseware tab we are redirected to the 'Welcome' section. """ email, password = self.STUDENT_INFO[0] self.login(email, password) self.enroll(self.course, True) self.enroll(self.test_course, True) resp = self.client.get(reverse('courseware', kwargs={'course_id': self.course.id.to_deprecated_string()})) self.assertRedirects(resp, reverse( 'courseware_section', kwargs={'course_id': self.course.id.to_deprecated_string(), 'chapter': 'Overview', 'section': 'Welcome'})) def test_redirects_second_time(self): """ Verify the accordion remembers we've already visited the Welcome section and redirects correpondingly. 
""" email, password = self.STUDENT_INFO[0] self.login(email, password) self.enroll(self.course, True) self.enroll(self.test_course, True) self.client.get(reverse('courseware_section', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'Overview', 'section': 'Welcome', })) resp = self.client.get(reverse('courseware', kwargs={'course_id': self.course.id.to_deprecated_string()})) redirect_url = reverse( 'courseware_chapter', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'Overview' } ) self.assertRedirects(resp, redirect_url) def test_accordion_state(self): """ Verify the accordion remembers which chapter you were last viewing. """ email, password = self.STUDENT_INFO[0] self.login(email, password) self.enroll(self.course, True) self.enroll(self.test_course, True) # Now we directly navigate to a section in a chapter other than 'Overview'. url = reverse( 'courseware_section', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'factory_chapter', 'section': 'factory_section' } ) self.assert_request_status_code(200, url) # And now hitting the courseware tab should redirect to 'factory_chapter' url = reverse( 'courseware', kwargs={'course_id': self.course.id.to_deprecated_string()} ) resp = self.client.get(url) redirect_url = reverse( 'courseware_chapter', kwargs={ 'course_id': self.course.id.to_deprecated_string(), 'chapter': 'factory_chapter', } ) self.assertRedirects(resp, redirect_url) def test_incomplete_course(self): email = self.staff_user.email password = "test" self.login(email, password) self.enroll(self.test_course, True) test_course_id = self.test_course.id.to_deprecated_string() url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) self.assert_request_status_code(200, url) section = ItemFactory.create( parent_location=self.test_course.location, display_name='New Section' ) url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) self.assert_request_status_code(200, url) subsection = ItemFactory.create( parent_location=section.location, display_name='New Subsection' ) url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) self.assert_request_status_code(200, url) ItemFactory.create( parent_location=subsection.location, display_name='New Unit' ) url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) self.assert_request_status_code(302, url) def test_proctoring_js_includes(self): """ Make sure that proctoring JS does not get included on courseware pages if either the FEATURE flag is turned off or the course is not proctored enabled """ email, password = self.STUDENT_INFO[0] self.login(email, password) self.enroll(self.test_course, True) test_course_id = self.test_course.id.to_deprecated_string() with patch.dict(settings.FEATURES, {'ENABLE_PROCTORED_EXAMS': False}): url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) resp = self.client.get(url) self.assertNotContains(resp, '/static/js/lms-proctoring.js') with patch.dict(settings.FEATURES, {'ENABLE_PROCTORED_EXAMS': True}): url = reverse( 'courseware', kwargs={'course_id': test_course_id} ) resp = self.client.get(url) self.assertNotContains(resp, '/static/js/lms-proctoring.js') # now set up a course which is proctored enabled self.test_course.enable_proctored_exams = True self.test_course.save() modulestore().update_item(self.test_course, self.user.id) resp = self.client.get(url) self.assertContains(resp, '/static/js/lms-proctoring.js')
agpl-3.0
8,853,915,571,294,456,000
37.375
98
0.56903
false
pangweishen/rt-thread
tools/keil.py
20
13161
# # File : keil.py # This file is part of RT-Thread RTOS # COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Change Logs: # Date Author Notes # 2015-01-20 Bernard Add copyright information # import os import sys import string import xml.etree.ElementTree as etree from xml.etree.ElementTree import SubElement from utils import _make_path_relative from utils import xml_indent fs_encoding = sys.getfilesystemencoding() def _get_filetype(fn): if fn.rfind('.cpp') != -1 or fn.rfind('.cxx') != -1: return 8 if fn.rfind('.c') != -1 or fn.rfind('.C') != -1: return 1 # assemble file type if fn.rfind('.s') != -1 or fn.rfind('.S') != -1: return 2 # header type if fn.rfind('.h') != -1: return 5 if fn.rfind('.lib') != -1: return 4 # other filetype return 5 def MDK4AddGroupForFN(ProjectFiles, parent, name, filename, project_path): group = SubElement(parent, 'Group') group_name = SubElement(group, 'GroupName') group_name.text = name name = os.path.basename(filename) path = os.path.dirname (filename) basename = os.path.basename(path) path = _make_path_relative(project_path, path) path = os.path.join(path, name) files = SubElement(group, 'Files') file = SubElement(files, 'File') file_name = SubElement(file, 'FileName') name = os.path.basename(path) if name.find('.cpp') != -1: obj_name = name.replace('.cpp', '.o') elif name.find('.c') != -1: obj_name = name.replace('.c', '.o') elif name.find('.s') != -1: obj_name = name.replace('.s', '.o') elif name.find('.S') != -1: obj_name = name.replace('.s', '.o') else: obj_name = name if ProjectFiles.count(obj_name): name = basename + '_' + name ProjectFiles.append(obj_name) file_name.text = name.decode(fs_encoding) file_type = SubElement(file, 'FileType') file_type.text = '%d' % _get_filetype(name) file_path = SubElement(file, 'FilePath') file_path.text = path.decode(fs_encoding) def MDK4AddLibToGroup(ProjectFiles, group, name, filename, project_path): name = os.path.basename(filename) path = os.path.dirname (filename) basename = os.path.basename(path) path = _make_path_relative(project_path, path) path = os.path.join(path, name) files = SubElement(group, 'Files') file = SubElement(files, 'File') file_name = SubElement(file, 'FileName') name = os.path.basename(path) if name.find('.cpp') != -1: obj_name = name.replace('.cpp', '.o') elif name.find('.c') != -1: obj_name = name.replace('.c', '.o') elif name.find('.s') != -1: obj_name = name.replace('.s', '.o') elif name.find('.S') != -1: obj_name = name.replace('.s', '.o') else: obj_name = name if ProjectFiles.count(obj_name): name = basename + '_' + name ProjectFiles.append(obj_name) file_name.text = name.decode(fs_encoding) file_type = SubElement(file, 'FileType') file_type.text = '%d' % _get_filetype(name) file_path = SubElement(file, 'FilePath') file_path.text = path.decode(fs_encoding) def MDK4AddGroup(ProjectFiles, 
parent, name, files, project_path): # don't add an empty group if len(files) == 0: return group = SubElement(parent, 'Group') group_name = SubElement(group, 'GroupName') group_name.text = name for f in files: fn = f.rfile() name = fn.name path = os.path.dirname(fn.abspath) basename = os.path.basename(path) path = _make_path_relative(project_path, path) path = os.path.join(path, name) files = SubElement(group, 'Files') file = SubElement(files, 'File') file_name = SubElement(file, 'FileName') name = os.path.basename(path) if name.find('.cpp') != -1: obj_name = name.replace('.cpp', '.o') elif name.find('.c') != -1: obj_name = name.replace('.c', '.o') elif name.find('.s') != -1: obj_name = name.replace('.s', '.o') elif name.find('.S') != -1: obj_name = name.replace('.s', '.o') if ProjectFiles.count(obj_name): name = basename + '_' + name ProjectFiles.append(obj_name) file_name.text = name.decode(fs_encoding) file_type = SubElement(file, 'FileType') file_type.text = '%d' % _get_filetype(name) file_path = SubElement(file, 'FilePath') file_path.text = path.decode(fs_encoding) return group # The common part of making MDK4/5 project def MDK45Project(tree, target, script): project_path = os.path.dirname(os.path.abspath(target)) root = tree.getroot() out = file(target, 'wb') out.write('<?xml version="1.0" encoding="UTF-8" standalone="no" ?>\n') CPPPATH = [] CPPDEFINES = [] LINKFLAGS = '' CCFLAGS = '' ProjectFiles = [] # add group groups = tree.find('Targets/Target/Groups') if groups is None: groups = SubElement(tree.find('Targets/Target'), 'Groups') groups.clear() # clean old groups for group in script: group_tree = MDK4AddGroup(ProjectFiles, groups, group['name'], group['src'], project_path) # for local CPPPATH/CPPDEFINES if (group_tree != None) and (group.has_key('LOCAL_CPPPATH') or group.has_key('LOCAL_CCFLAGS')): GroupOption = SubElement(group_tree, 'GroupOption') GroupArmAds = SubElement(GroupOption, 'GroupArmAds') Cads = SubElement(GroupArmAds, 'Cads') VariousControls = SubElement(Cads, 'VariousControls') MiscControls = SubElement(VariousControls, 'MiscControls') if group.has_key('LOCAL_CCFLAGS'): MiscControls.text = group['LOCAL_CCFLAGS'] else: MiscControls.text = ' ' Define = SubElement(VariousControls, 'Define') if group.has_key('LOCAL_CPPDEFINES'): Define.text = ', '.join(set(group['LOCAL_CPPDEFINES'])) else: Define.text = ' ' Undefine = SubElement(VariousControls, 'Undefine') Undefine.text = ' ' IncludePath = SubElement(VariousControls, 'IncludePath') if group.has_key('LOCAL_CPPPATH'): IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in group['LOCAL_CPPPATH']]) else: IncludePath.text = ' ' # get each include path if group.has_key('CPPPATH') and group['CPPPATH']: if CPPPATH: CPPPATH += group['CPPPATH'] else: CPPPATH += group['CPPPATH'] # get each group's definitions if group.has_key('CPPDEFINES') and group['CPPDEFINES']: if CPPDEFINES: CPPDEFINES += group['CPPDEFINES'] else: CPPDEFINES += group['CPPDEFINES'] # get each group's link flags if group.has_key('LINKFLAGS') and group['LINKFLAGS']: if LINKFLAGS: LINKFLAGS += ' ' + group['LINKFLAGS'] else: LINKFLAGS += group['LINKFLAGS'] if group.has_key('LIBS') and group['LIBS']: for item in group['LIBS']: lib_path = '' for path_item in group['LIBPATH']: full_path = os.path.join(path_item, item + '.lib') if os.path.isfile(full_path): # has this library lib_path = full_path if lib_path != '': if (group_tree != None): MDK4AddLibToGroup(ProjectFiles, group_tree, group['name'], lib_path, project_path) else: 
MDK4AddGroupForFN(ProjectFiles, groups, group['name'], lib_path, project_path) # write include path, definitions and link flags IncludePath = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/IncludePath') IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in CPPPATH]) Define = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/Define') Define.text = ', '.join(set(CPPDEFINES)) Misc = tree.find('Targets/Target/TargetOption/TargetArmAds/LDads/Misc') Misc.text = LINKFLAGS xml_indent(root) out.write(etree.tostring(root, encoding='utf-8')) out.close() def MDK4Project(target, script): template_tree = etree.parse('template.uvproj') MDK45Project(template_tree, target, script) # remove project.uvopt file project_uvopt = os.path.abspath(target).replace('uvproj', 'uvopt') if os.path.isfile(project_uvopt): os.unlink(project_uvopt) # copy uvopt file if os.path.exists('template.uvopt'): import shutil shutil.copy2('template.uvopt', 'project.uvopt') def MDK5Project(target, script): template_tree = etree.parse('template.uvprojx') MDK45Project(template_tree, target, script) # remove project.uvopt file project_uvopt = os.path.abspath(target).replace('uvprojx', 'uvoptx') if os.path.isfile(project_uvopt): os.unlink(project_uvopt) # copy uvopt file if os.path.exists('template.uvoptx'): import shutil shutil.copy2('template.uvoptx', 'project.uvoptx') def MDKProject(target, script): template = file('template.Uv2', "rb") lines = template.readlines() project = file(target, "wb") project_path = os.path.dirname(os.path.abspath(target)) line_index = 5 # write group for group in script: lines.insert(line_index, 'Group (%s)\r\n' % group['name']) line_index += 1 lines.insert(line_index, '\r\n') line_index += 1 # write file ProjectFiles = [] CPPPATH = [] CPPDEFINES = [] LINKFLAGS = '' CCFLAGS = '' # number of groups group_index = 1 for group in script: # print group['name'] # get each include path if group.has_key('CPPPATH') and group['CPPPATH']: if CPPPATH: CPPPATH += group['CPPPATH'] else: CPPPATH += group['CPPPATH'] # get each group's definitions if group.has_key('CPPDEFINES') and group['CPPDEFINES']: if CPPDEFINES: CPPDEFINES += ';' + group['CPPDEFINES'] else: CPPDEFINES += group['CPPDEFINES'] # get each group's link flags if group.has_key('LINKFLAGS') and group['LINKFLAGS']: if LINKFLAGS: LINKFLAGS += ' ' + group['LINKFLAGS'] else: LINKFLAGS += group['LINKFLAGS'] # generate file items for node in group['src']: fn = node.rfile() name = fn.name path = os.path.dirname(fn.abspath) basename = os.path.basename(path) path = _make_path_relative(project_path, path) path = os.path.join(path, name) if ProjectFiles.count(name): name = basename + '_' + name ProjectFiles.append(name) lines.insert(line_index, 'File %d,%d,<%s><%s>\r\n' % (group_index, _get_filetype(name), path, name)) line_index += 1 group_index = group_index + 1 lines.insert(line_index, '\r\n') line_index += 1 # remove repeat path paths = set() for path in CPPPATH: inc = _make_path_relative(project_path, os.path.normpath(path)) paths.add(inc) #.replace('\\', '/') paths = [i for i in paths] CPPPATH = string.join(paths, ';') definitions = [i for i in set(CPPDEFINES)] CPPDEFINES = string.join(definitions, ', ') while line_index < len(lines): if lines[line_index].startswith(' ADSCINCD '): lines[line_index] = ' ADSCINCD (' + CPPPATH + ')\r\n' if lines[line_index].startswith(' ADSLDMC ('): lines[line_index] = ' ADSLDMC (' + LINKFLAGS + ')\r\n' if lines[line_index].startswith(' ADSCDEFN 
('): lines[line_index] = ' ADSCDEFN (' + CPPDEFINES + ')\r\n' line_index += 1 # write project for line in lines: project.write(line) project.close()
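Each of the three group helpers above derives a Keil object-file name from the source name with chained str.replace calls; note that the '.S' branch calls name.replace('.s', '.o'), which is case-sensitive and therefore leaves uppercase '.S' names unchanged. A hedged sketch of the intended mapping as a standalone helper (not part of keil.py), using os.path.splitext so the extension case is handled explicitly:

import os

def object_name(source_name):
    """Map a source file name to the .o name the toolchain will produce.

    Handles .cpp/.cxx/.c/.s/.S uniformly, avoiding the case-sensitive
    str.replace pitfall in the helpers above.
    """
    base, ext = os.path.splitext(source_name)
    if ext.lower() in ('.cpp', '.cxx', '.c', '.s'):
        return base + '.o'
    return source_name

# object_name('startup.S')    -> 'startup.o'
# object_name('rtthread.lib') -> 'rtthread.lib'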
gpl-2.0
-6,143,641,134,354,828,000
32.403553
133
0.583238
false
Scarygami/gae-gcs-push2deploy-secrets
lib/werkzeug/testsuite/urls.py
83
14595
# -*- coding: utf-8 -*- """ werkzeug.testsuite.urls ~~~~~~~~~~~~~~~~~~~~~~~ URL helper tests. :copyright: (c) 2013 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import unittest from werkzeug.testsuite import WerkzeugTestCase from werkzeug.datastructures import OrderedMultiDict from werkzeug import urls from werkzeug._compat import text_type, NativeStringIO, BytesIO class URLsTestCase(WerkzeugTestCase): def test_replace(self): url = urls.url_parse('http://de.wikipedia.org/wiki/Troll') self.assert_strict_equal(url.replace(query='foo=bar'), urls.url_parse('http://de.wikipedia.org/wiki/Troll?foo=bar')) self.assert_strict_equal(url.replace(scheme='https'), urls.url_parse('https://de.wikipedia.org/wiki/Troll')) def test_quoting(self): self.assert_strict_equal(urls.url_quote(u'\xf6\xe4\xfc'), '%C3%B6%C3%A4%C3%BC') self.assert_strict_equal(urls.url_unquote(urls.url_quote(u'#%="\xf6')), u'#%="\xf6') self.assert_strict_equal(urls.url_quote_plus('foo bar'), 'foo+bar') self.assert_strict_equal(urls.url_unquote_plus('foo+bar'), u'foo bar') self.assert_strict_equal(urls.url_quote_plus('foo+bar'), 'foo%2Bbar') self.assert_strict_equal(urls.url_unquote_plus('foo%2Bbar'), u'foo+bar') self.assert_strict_equal(urls.url_encode({b'a': None, b'b': b'foo bar'}), 'b=foo+bar') self.assert_strict_equal(urls.url_encode({u'a': None, u'b': u'foo bar'}), 'b=foo+bar') self.assert_strict_equal(urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)'), 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)') self.assert_strict_equal(urls.url_quote_plus(42), '42') self.assert_strict_equal(urls.url_quote(b'\xff'), '%FF') def test_bytes_unquoting(self): self.assert_strict_equal(urls.url_unquote(urls.url_quote( u'#%="\xf6', charset='latin1'), charset=None), b'#%="\xf6') def test_url_decoding(self): x = urls.url_decode(b'foo=42&bar=23&uni=H%C3%A4nsel') self.assert_strict_equal(x['foo'], u'42') self.assert_strict_equal(x['bar'], u'23') self.assert_strict_equal(x['uni'], u'Hänsel') x = urls.url_decode(b'foo=42;bar=23;uni=H%C3%A4nsel', separator=b';') self.assert_strict_equal(x['foo'], u'42') self.assert_strict_equal(x['bar'], u'23') self.assert_strict_equal(x['uni'], u'Hänsel') x = urls.url_decode(b'%C3%9Ch=H%C3%A4nsel', decode_keys=True) self.assert_strict_equal(x[u'Üh'], u'Hänsel') def test_url_bytes_decoding(self): x = urls.url_decode(b'foo=42&bar=23&uni=H%C3%A4nsel', charset=None) self.assert_strict_equal(x[b'foo'], b'42') self.assert_strict_equal(x[b'bar'], b'23') self.assert_strict_equal(x[b'uni'], u'Hänsel'.encode('utf-8')) def test_streamed_url_decoding(self): item1 = u'a' * 100000 item2 = u'b' * 400 string = ('a=%s&b=%s&c=%s' % (item1, item2, item2)).encode('ascii') gen = urls.url_decode_stream(BytesIO(string), limit=len(string), return_iterator=True) self.assert_strict_equal(next(gen), ('a', item1)) self.assert_strict_equal(next(gen), ('b', item2)) self.assert_strict_equal(next(gen), ('c', item2)) self.assert_raises(StopIteration, lambda: next(gen)) def test_stream_decoding_string_fails(self): self.assert_raises(TypeError, urls.url_decode_stream, 'testing') def test_url_encoding(self): self.assert_strict_equal(urls.url_encode({'foo': 'bar 45'}), 'foo=bar+45') d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'} self.assert_strict_equal(urls.url_encode(d, sort=True), 'bar=23&blah=H%C3%A4nsel&foo=1') self.assert_strict_equal(urls.url_encode(d, sort=True, separator=u';'), 'bar=23;blah=H%C3%A4nsel;foo=1') def test_sorted_url_encode(self): self.assert_strict_equal(urls.url_encode({u"a": 42, u"b": 
23, 1: 1, 2: 2}, sort=True, key=lambda i: text_type(i[0])), '1=1&2=2&a=42&b=23') self.assert_strict_equal(urls.url_encode({u'A': 1, u'a': 2, u'B': 3, 'b': 4}, sort=True, key=lambda x: x[0].lower() + x[0]), 'A=1&a=2&B=3&b=4') def test_streamed_url_encoding(self): out = NativeStringIO() urls.url_encode_stream({'foo': 'bar 45'}, out) self.assert_strict_equal(out.getvalue(), 'foo=bar+45') d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'} out = NativeStringIO() urls.url_encode_stream(d, out, sort=True) self.assert_strict_equal(out.getvalue(), 'bar=23&blah=H%C3%A4nsel&foo=1') out = NativeStringIO() urls.url_encode_stream(d, out, sort=True, separator=u';') self.assert_strict_equal(out.getvalue(), 'bar=23;blah=H%C3%A4nsel;foo=1') gen = urls.url_encode_stream(d, sort=True) self.assert_strict_equal(next(gen), 'bar=23') self.assert_strict_equal(next(gen), 'blah=H%C3%A4nsel') self.assert_strict_equal(next(gen), 'foo=1') self.assert_raises(StopIteration, lambda: next(gen)) def test_url_fixing(self): x = urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') self.assert_line_equal(x, 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)') x = urls.url_fix("http://just.a.test/$-_.+!*'(),") self.assert_equal(x, "http://just.a.test/$-_.+!*'(),") def test_url_fixing_qs(self): x = urls.url_fix(b'http://example.com/?foo=%2f%2f') self.assert_line_equal(x, 'http://example.com/?foo=%2f%2f') x = urls.url_fix('http://acronyms.thefreedictionary.com/Algebraic+Methods+of+Solving+the+Schr%C3%B6dinger+Equation') self.assert_equal(x, 'http://acronyms.thefreedictionary.com/Algebraic+Methods+of+Solving+the+Schr%C3%B6dinger+Equation') def test_iri_support(self): self.assert_strict_equal(urls.uri_to_iri('http://xn--n3h.net/'), u'http://\u2603.net/') self.assert_strict_equal( urls.uri_to_iri(b'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th'), u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th') self.assert_strict_equal(urls.iri_to_uri(u'http://☃.net/'), 'http://xn--n3h.net/') self.assert_strict_equal( urls.iri_to_uri(u'http://üser:pässword@☃.net/påth'), 'http://%C3%BCser:p%C3%[email protected]/p%C3%A5th') self.assert_strict_equal(urls.uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F'), u'http://test.com/%3Fmeh?foo=%26%2F') # this should work as well, might break on 2.4 because of a broken # idna codec self.assert_strict_equal(urls.uri_to_iri(b'/foo'), u'/foo') self.assert_strict_equal(urls.iri_to_uri(u'/foo'), '/foo') self.assert_strict_equal(urls.iri_to_uri(u'http://föö.com:8080/bam/baz'), 'http://xn--f-1gaa.com:8080/bam/baz') def test_iri_safe_quoting(self): uri = b'http://xn--f-1gaa.com/%2F%25?q=%C3%B6&x=%3D%25#%25' iri = u'http://föö.com/%2F%25?q=ö&x=%3D%25#%25' self.assert_strict_equal(urls.uri_to_iri(uri), iri) self.assert_strict_equal(urls.iri_to_uri(urls.uri_to_iri(uri)), uri) def test_ordered_multidict_encoding(self): d = OrderedMultiDict() d.add('foo', 1) d.add('foo', 2) d.add('foo', 3) d.add('bar', 0) d.add('foo', 4) self.assert_equal(urls.url_encode(d), 'foo=1&foo=2&foo=3&bar=0&foo=4') def test_href(self): x = urls.Href('http://www.example.com/') self.assert_strict_equal(x(u'foo'), 'http://www.example.com/foo') self.assert_strict_equal(x.foo(u'bar'), 'http://www.example.com/foo/bar') self.assert_strict_equal(x.foo(u'bar', x=42), 'http://www.example.com/foo/bar?x=42') self.assert_strict_equal(x.foo(u'bar', class_=42), 'http://www.example.com/foo/bar?class=42') self.assert_strict_equal(x.foo(u'bar', {u'class': 42}), 'http://www.example.com/foo/bar?class=42') self.assert_raises(AttributeError, lambda: 
x.__blah__) x = urls.Href('blah') self.assert_strict_equal(x.foo(u'bar'), 'blah/foo/bar') self.assert_raises(TypeError, x.foo, {u"foo": 23}, x=42) x = urls.Href('') self.assert_strict_equal(x('foo'), 'foo') def test_href_url_join(self): x = urls.Href(u'test') self.assert_line_equal(x(u'foo:bar'), u'test/foo:bar') self.assert_line_equal(x(u'http://example.com/'), u'test/http://example.com/') self.assert_line_equal(x.a(), u'test/a') def test_href_past_root(self): base_href = urls.Href('http://www.blagga.com/1/2/3') self.assert_strict_equal(base_href('../foo'), 'http://www.blagga.com/1/2/foo') self.assert_strict_equal(base_href('../../foo'), 'http://www.blagga.com/1/foo') self.assert_strict_equal(base_href('../../../foo'), 'http://www.blagga.com/foo') self.assert_strict_equal(base_href('../../../../foo'), 'http://www.blagga.com/foo') self.assert_strict_equal(base_href('../../../../../foo'), 'http://www.blagga.com/foo') self.assert_strict_equal(base_href('../../../../../../foo'), 'http://www.blagga.com/foo') def test_url_unquote_plus_unicode(self): # was broken in 0.6 self.assert_strict_equal(urls.url_unquote_plus(u'\x6d'), u'\x6d') self.assert_is(type(urls.url_unquote_plus(u'\x6d')), text_type) def test_quoting_of_local_urls(self): rv = urls.iri_to_uri(u'/foo\x8f') self.assert_strict_equal(rv, '/foo%C2%8F') self.assert_is(type(rv), str) def test_url_attributes(self): rv = urls.url_parse('http://foo%3a:bar%3a@[::1]:80/123?x=y#frag') self.assert_strict_equal(rv.scheme, 'http') self.assert_strict_equal(rv.auth, 'foo%3a:bar%3a') self.assert_strict_equal(rv.username, u'foo:') self.assert_strict_equal(rv.password, u'bar:') self.assert_strict_equal(rv.raw_username, 'foo%3a') self.assert_strict_equal(rv.raw_password, 'bar%3a') self.assert_strict_equal(rv.host, '::1') self.assert_equal(rv.port, 80) self.assert_strict_equal(rv.path, '/123') self.assert_strict_equal(rv.query, 'x=y') self.assert_strict_equal(rv.fragment, 'frag') rv = urls.url_parse(u'http://\N{SNOWMAN}.com/') self.assert_strict_equal(rv.host, u'\N{SNOWMAN}.com') self.assert_strict_equal(rv.ascii_host, 'xn--n3h.com') def test_url_attributes_bytes(self): rv = urls.url_parse(b'http://foo%3a:bar%3a@[::1]:80/123?x=y#frag') self.assert_strict_equal(rv.scheme, b'http') self.assert_strict_equal(rv.auth, b'foo%3a:bar%3a') self.assert_strict_equal(rv.username, u'foo:') self.assert_strict_equal(rv.password, u'bar:') self.assert_strict_equal(rv.raw_username, b'foo%3a') self.assert_strict_equal(rv.raw_password, b'bar%3a') self.assert_strict_equal(rv.host, b'::1') self.assert_equal(rv.port, 80) self.assert_strict_equal(rv.path, b'/123') self.assert_strict_equal(rv.query, b'x=y') self.assert_strict_equal(rv.fragment, b'frag') def test_url_joining(self): self.assert_strict_equal(urls.url_join('/foo', '/bar'), '/bar') self.assert_strict_equal(urls.url_join('http://example.com/foo', '/bar'), 'http://example.com/bar') self.assert_strict_equal(urls.url_join('file:///tmp/', 'test.html'), 'file:///tmp/test.html') self.assert_strict_equal(urls.url_join('file:///tmp/x', 'test.html'), 'file:///tmp/test.html') self.assert_strict_equal(urls.url_join('file:///tmp/x', '../../../x.html'), 'file:///x.html') def test_partial_unencoded_decode(self): ref = u'foo=정상처리'.encode('euc-kr') x = urls.url_decode(ref, charset='euc-kr') self.assert_strict_equal(x['foo'], u'정상처리') def test_iri_to_uri_idempotence_ascii_only(self): uri = u'http://www.idempoten.ce' uri = urls.iri_to_uri(uri) self.assert_equal(urls.iri_to_uri(uri), uri) def test_iri_to_uri_idempotence_non_ascii(self): 
uri = u'http://\N{SNOWMAN}/\N{SNOWMAN}' uri = urls.iri_to_uri(uri) self.assert_equal(urls.iri_to_uri(uri), uri) def test_uri_to_iri_idempotence_ascii_only(self): uri = 'http://www.idempoten.ce' uri = urls.uri_to_iri(uri) self.assert_equal(urls.uri_to_iri(uri), uri) def test_uri_to_iri_idempotence_non_ascii(self): uri = 'http://xn--n3h/%E2%98%83' uri = urls.uri_to_iri(uri) self.assert_equal(urls.uri_to_iri(uri), uri) def test_iri_to_uri_to_iri(self): iri = u'http://föö.com/' uri = urls.iri_to_uri(iri) self.assert_equal(urls.uri_to_iri(uri), iri) def test_uri_to_iri_to_uri(self): uri = 'http://xn--f-rgao.com/%C3%9E' iri = urls.uri_to_iri(uri) self.assert_equal(urls.iri_to_uri(iri), uri) def test_uri_iri_normalization(self): uri = 'http://xn--f-rgao.com/%E2%98%90/fred?utf8=%E2%9C%93' iri = u'http://föñ.com/\N{BALLOT BOX}/fred?utf8=\u2713' tests = [ u'http://föñ.com/\N{BALLOT BOX}/fred?utf8=\u2713', u'http://xn--f-rgao.com/\u2610/fred?utf8=\N{CHECK MARK}', b'http://xn--f-rgao.com/%E2%98%90/fred?utf8=%E2%9C%93', u'http://xn--f-rgao.com/%E2%98%90/fred?utf8=%E2%9C%93', u'http://föñ.com/\u2610/fred?utf8=%E2%9C%93', b'http://xn--f-rgao.com/\xe2\x98\x90/fred?utf8=\xe2\x9c\x93', ] for test in tests: self.assert_equal(urls.uri_to_iri(test), iri) self.assert_equal(urls.iri_to_uri(test), uri) self.assert_equal(urls.uri_to_iri(urls.iri_to_uri(test)), iri) self.assert_equal(urls.iri_to_uri(urls.uri_to_iri(test)), uri) self.assert_equal(urls.uri_to_iri(urls.uri_to_iri(test)), iri) self.assert_equal(urls.iri_to_uri(urls.iri_to_uri(test)), uri) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(URLsTestCase)) return suite
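For orientation, the public werkzeug.urls API these cases exercise can be driven directly; the snippet below only uses calls and values that appear in the tests above:

from werkzeug import urls

# Percent-encoding round trip (see test_quoting).
assert urls.url_quote(u'\xf6\xe4\xfc') == '%C3%B6%C3%A4%C3%BC'
assert urls.url_unquote(urls.url_quote(u'#%="\xf6')) == u'#%="\xf6'

# IRI <-> URI conversion (see test_iri_support).
assert urls.iri_to_uri(u'http://\u2603.net/') == 'http://xn--n3h.net/'

# Parsed URLs expose scheme/host/port/path/query/fragment attributes
# (see test_url_attributes).
rv = urls.url_parse('http://example.com:8080/path?x=y#frag')
assert (rv.scheme, rv.host, rv.port) == ('http', 'example.com', 8080)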
apache-2.0
166,456,976,874,650,980
46.243506
128
0.588207
false
bohdan-shramko/learning-python
source/sublime-packages/Packages/mdpopups/st3/mdpopups/mdx/superfences.py
2
21280
""" Superfences. pymdownx.superfences Neseted Fenced Code Blocks This is a modification of the original Fenced Code Extension. Algorithm has been rewritten to allow for fenced blocks in blockquotes, lists, etc. And also , allow for special UML fences like 'flow' for flowcharts and `sequence` for sequence diagrams. Modified: 2014 - 2017 Isaac Muse <[email protected]> --- Fenced Code Extension for Python Markdown ========================================= This extension adds Fenced Code Blocks to Python-Markdown. See <https://pythonhosted.org/Markdown/extensions/fenced_code_blocks.html> for documentation. Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/). All changes Copyright 2008-2014 The Python Markdown Project License: [BSD](http://www.opensource.org/licenses/bsd-license.php) """ from __future__ import absolute_import from __future__ import unicode_literals from markdown.extensions import Extension from markdown.preprocessors import Preprocessor from markdown.blockprocessors import CodeBlockProcessor from markdown import util as md_util from . import highlight as hl from .util import PymdownxDeprecationWarning import warnings import re NESTED_FENCE_START = r'''(?x) (?:^(?P<ws>[\> ]*)(?P<fence>~{3,}|`{3,}))[ ]* # Fence opening (\{? # Language opening \.?(?P<lang>[\w#.+-]*))?[ ]* # Language (?: (hl_lines=(?P<quot>"|')(?P<hl_lines>\d+(?:[ ]+\d+)*)(?P=quot))?[ ]*| # highlight lines (linenums=(?P<quot2>"|') # Line numbers (?P<linestart>[\d]+) # Line number start (?:[ ]+(?P<linestep>[\d]+))? # Line step (?:[ ]+(?P<linespecial>[\d]+))? # Line special (?P=quot2))?[ ]* ){,2} }?[ ]*$ # Language closing ''' NESTED_FENCE_END = r'^[\> ]*%s[ ]*$' WS = r'^([\> ]{0,%d})(.*)' RE_FENCE = re.compile( r'''(?xsm) (?P<fence>^(?:~{3,}|`{3,}))[ ]* # Opening (\{?\.?(?P<lang>[\w#.+-]*))?[ ]* # Optional {, and lang (?: (hl_lines=(?P<quot>"|')(?P<hl_lines>\d+(?:[ ]+\d+)*)(?P=quot))?[ ]*| # Optional highlight lines option (linenums=(?P<quot2>"|') # Line numbers (?P<linestart>[\d]+) # Line number start (?:[ ]+(?P<linestep>[\d]+))? # Line step (?:[ ]+(?P<linespecial>[\d]+))? # Line special (?P=quot2))?[ ]* ){,2} }?[ ]*\n # Optional closing } (?P<code>.*?)(?<=\n) # Code (?P=fence)[ ]*$ # Closing ''' ) def _escape(txt): """Basic html escaping.""" txt = txt.replace('&', '&amp;') txt = txt.replace('<', '&lt;') txt = txt.replace('>', '&gt;') txt = txt.replace('"', '&quot;') return txt class CodeStash(object): """ Stash code for later retrieval. Store original fenced code here in case we were too greedy and need to restore in an indented code block. 
""" def __init__(self): """Initialize.""" self.stash = {} def __len__(self): # pragma: no cover """Length of stash.""" return len(self.stash) def get(self, key, default=None): """Get the code from the key.""" code = self.stash.get(key, default) return code def remove(self, key): """Remove the stashed code.""" del self.stash[key] def store(self, key, code, indent_level): """Store the code in the stash.""" self.stash[key] = (code, indent_level) def clear_stash(self): """Clear the stash.""" self.stash = {} def fence_code_format(source, language, css_class): """Format source as code blocks.""" return '<pre class="%s"><code>%s</code></pre>' % (css_class, _escape(source)) def fence_div_format(source, language, css_class): """Format source as div.""" return '<div class="%s">%s</div>' % (css_class, _escape(source)) class SuperFencesCodeExtension(Extension): """Superfences code block extension.""" def __init__(self, *args, **kwargs): """Initialize.""" self.superfences = [] self.config = { 'disable_indented_code_blocks': [False, "Disable indented code blocks - Default: False"], 'uml_flow': [True, "Enable flowcharts - Default: True"], 'uml_sequence': [True, "Enable sequence diagrams - Default: True"], 'custom_fences': [ [ {'name': 'flow', 'class': 'uml-flowchart'}, {'name': 'sequence', 'class': 'uml-sequence-diagram'} ], 'Specify custom fences. Default: See documentation.' ], 'highlight_code': [True, "Highlight code - Default: True"], 'use_codehilite_settings': [ None, "Deprecatd and does nothing. " "- Default: None" ], 'css_class': [ '', "Set class name for wrapper element. The default of CodeHilite or Highlight will be used" "if nothing is set. - " "Default: ''" ] } super(SuperFencesCodeExtension, self).__init__(*args, **kwargs) def extend_super_fences(self, name, formatter): """Extend superfences with the given name, language, and formatter.""" self.superfences.append( { "name": name, "test": lambda l, language=name: language == l, "formatter": formatter } ) def extendMarkdown(self, md, md_globals): """Add FencedBlockPreprocessor to the Markdown instance.""" # Not super yet, so let's make it super md.registerExtension(self) config = self.getConfigs() # Default fenced blocks self.superfences.insert( 0, { "name": "superfences", "test": lambda language: True, "formatter": None } ) if config.get('use_codehilite_settings'): # pragma: no coverage warnings.warn( "'use_codehilite_settings' is deprecated and does nothing.\n" "\nCodeHilite settings will only be used if CodeHilite is configured\n" " and 'pymdownx.highlight' is not configured.\n" "Please discontinue use of this setting as it will be removed in the future.", PymdownxDeprecationWarning ) # UML blocks custom_fences = config.get('custom_fences', []) for custom in custom_fences: name = custom.get('name') class_name = custom.get('class') fence_format = custom.get('format', fence_code_format) if name is not None and class_name is not None: self.extend_super_fences( name, lambda s, l, c=class_name, f=fence_format: f(s, l, c) ) self.markdown = md self.patch_fenced_rule() for entry in self.superfences: entry["stash"] = CodeStash() def patch_fenced_rule(self): """ Patch Python Markdown with our own fenced block extension. We don't attempt to protect against a user loading the `fenced_code` extension with this. Most likely they will have issues, but they shouldn't have loaded them together in the first place :). 
""" config = self.getConfigs() fenced = SuperFencesBlockPreprocessor(self.markdown) indented_code = SuperFencesCodeBlockProcessor(self) fenced.config = config fenced.extension = self indented_code.config = config indented_code.markdown = self.markdown indented_code.extension = self self.superfences[0]["formatter"] = fenced.highlight self.markdown.parser.blockprocessors['code'] = indented_code self.markdown.preprocessors.add('fenced_code_block', fenced, ">normalize_whitespace") def reset(self): """Clear the stash.""" for entry in self.superfences: entry["stash"].clear_stash() class SuperFencesBlockPreprocessor(Preprocessor): """ Preprocessor to find fenced code blocks. Because this is done as a preprocessor, it might be too greedy. We will stash the blocks code and restore if we mistakenly processed text from an indented code block. """ fence_start = re.compile(NESTED_FENCE_START) CODE_WRAP = '<pre%s><code%s>%s</code></pre>' def __init__(self, md): """Initialize.""" super(SuperFencesBlockPreprocessor, self).__init__(md) self.markdown = md self.checked_hl_settings = False self.codehilite_conf = {} def rebuild_block(self, lines): """Deindent the fenced block lines.""" return '\n'.join([line[self.ws_len:] for line in lines]) def get_hl_settings(self): """Check for code hilite extension to get its config.""" if not self.checked_hl_settings: self.checked_hl_settings = True self.highlight_code = self.config['highlight_code'] config = hl.get_hl_settings(self.markdown) css_class = self.config['css_class'] self.css_class = css_class if css_class else config['css_class'] self.extend_pygments_lang = config.get('extend_pygments_lang', None) self.guess_lang = config['guess_lang'] self.pygments_style = config['pygments_style'] self.use_pygments = config['use_pygments'] self.noclasses = config['noclasses'] self.linenums = config['linenums'] def clear(self): """Reset the class variables.""" self.ws = None self.ws_len = 0 self.fence = None self.lang = None self.hl_lines = None self.linestart = None self.linestep = None self.linespecial = None self.quote_level = 0 self.code = [] self.empty_lines = 0 self.whitespace = None self.fence_end = None def eval(self, m, start, end): """Evaluate a normal fence.""" if m.group(0).strip() == '': # Empty line is okay self.empty_lines += 1 self.code.append(m.group(0)) elif len(m.group(1)) != self.ws_len and m.group(2) != '': # Not indented enough self.clear() elif self.fence_end.match(m.group(0)) is not None and not m.group(2).startswith(' '): # End of fence self.process_nested_block(m, start, end) else: # Content line self.empty_lines = 0 self.code.append(m.group(0)) def eval_quoted(self, m, quote_level, start, end): """Evaluate fence inside a blockquote.""" if quote_level > self.quote_level: # Quote level exceeds the starting quote level self.clear() elif quote_level <= self.quote_level: if m.group(2) == '': # Empty line is okay self.code.append(m.group(0)) self.empty_lines += 1 elif len(m.group(1)) < self.ws_len: # Not indented enough self.clear() elif self.empty_lines and quote_level < self.quote_level: # Quote levels don't match and we are signified # the end of the block with an empty line self.clear() elif self.fence_end.match(m.group(0)) is not None: # End of fence self.process_nested_block(m, start, end) else: # Content line self.empty_lines = 0 self.code.append(m.group(0)) def process_nested_block(self, m, start, end): """Process the contents of the nested block.""" self.last = m.group(0) code = None for entry in reversed(self.extension.superfences): if 
entry["test"](self.lang): code = entry["formatter"](self.rebuild_block(self.code), self.lang) break if code is not None: self._store('\n'.join(self.code) + '\n', code, start, end, entry) self.clear() def parse_hl_lines(self, hl_lines): """Parse the lines to highlight.""" return list(map(int, hl_lines.strip().split())) if hl_lines else [] def parse_line_start(self, linestart): """Parse line start.""" return int(linestart) if linestart else -1 def parse_line_step(self, linestep): """Parse line start.""" step = int(linestep) if linestep else -1 return step if step > 1 else -1 def parse_line_special(self, linespecial): """Parse line start.""" return int(linespecial) if linespecial else -1 def search_nested(self, lines): """Search for nested fenced blocks.""" count = 0 for line in lines: if self.fence is None: # Found the start of a fenced block. m = self.fence_start.match(line) if m is not None: start = count self.first = m.group(0) self.ws = m.group('ws') if m.group('ws') else '' self.ws_len = len(self.ws) self.quote_level = self.ws.count(">") self.empty_lines = 0 self.fence = m.group('fence') self.lang = m.group('lang') self.hl_lines = m.group('hl_lines') self.linestart = m.group('linestart') self.linestep = m.group('linestep') self.linespecial = m.group('linespecial') self.fence_end = re.compile(NESTED_FENCE_END % self.fence) self.whitespace = re.compile(WS % self.ws_len) else: # Evaluate lines # - Determine if it is the ending line or content line # - If is a content line, make sure it is all indentend # with the opening and closing lines (lines with just # whitespace will be stripped so those don't matter). # - When content lines are inside blockquotes, make sure # the nested block quote levels make sense according to # blockquote rules. m = self.whitespace.match(line) if m: end = count + 1 quote_level = m.group(1).count(">") if self.quote_level: # Handle blockquotes self.eval_quoted(m, quote_level, start, end) elif quote_level == 0: # Handle all other cases self.eval(m, start, end) else: # Looks like we got a blockquote line # when not in a blockquote. self.clear() else: # pragma: no cover # I am 99.9999% sure we will never hit this line. # But I am too chicken to pull it out :). self.clear() count += 1 # Now that we are done iterating the lines, # let's replace the original content with the # fenced blocks. while len(self.stack): fenced, start, end = self.stack.pop() lines = lines[:start] + [fenced] + lines[end:] return lines def highlight(self, src, language): """ Syntax highlight the code block. If config is not empty, then the codehlite extension is enabled, so we call into it to highlight the code. """ if self.highlight_code: linestep = self.parse_line_step(self.linestep) linestart = self.parse_line_start(self.linestart) linespecial = self.parse_line_special(self.linespecial) hl_lines = self.parse_hl_lines(self.hl_lines) el = hl.Highlight( guess_lang=self.guess_lang, pygments_style=self.pygments_style, use_pygments=self.use_pygments, noclasses=self.noclasses, linenums=self.linenums, extend_pygments_lang=self.extend_pygments_lang ).highlight( src, language, self.css_class, hl_lines=hl_lines, linestart=linestart, linestep=linestep, linespecial=linespecial ) else: # Format as a code block. el = self.CODE_WRAP % ('', '', _escape(src)) return el def _store(self, source, code, start, end, obj): """ Store the fenced blocks in the stack to be replaced when done iterating. Store the original text in case we need to restore if we are too greedy. 
""" # Save the fenced blocks to add once we are done iterating the lines placeholder = self.markdown.htmlStash.store(code, safe=True) self.stack.append(('%s%s' % (self.ws, placeholder), start, end)) if not self.disabled_indented: # If an indented block consumes this placeholder, # we can restore the original source obj["stash"].store( placeholder[1:-1], "%s\n%s%s" % (self.first, source, self.last), self.ws_len ) def run(self, lines): """Search for fenced blocks.""" self.get_hl_settings() self.clear() self.stack = [] self.disabled_indented = self.config.get("disable_indented_code_blocks", False) lines = self.search_nested(lines) return lines class SuperFencesCodeBlockProcessor(CodeBlockProcessor): """Process idented code blocks to see if we accidentaly processed its content as a fenced block.""" FENCED_BLOCK_RE = re.compile( r'^([\> ]*)%s(%s)%s$' % ( md_util.HTML_PLACEHOLDER[0], md_util.HTML_PLACEHOLDER[1:-1] % r'([0-9]+)', md_util.HTML_PLACEHOLDER[-1] ) ) def test(self, parent, block): """Test method that is one day to be deprecated.""" return True def reindent(self, text, pos, level): """Reindent the code to where it is supposed to be.""" indented = [] for line in text.split('\n'): index = pos - level indented.append(line[index:]) return '\n'.join(indented) def revert_greedy_fences(self, block): """Revert a prematurely converted fenced block.""" new_block = [] for line in block.split('\n'): m = self.FENCED_BLOCK_RE.match(line) if m: key = m.group(2) indent_level = len(m.group(1)) original = None for entry in self.extension.superfences: stash = entry["stash"] original, pos = stash.get(key) if original is not None: code = self.reindent(original, pos, indent_level) new_block.append(code) stash.remove(key) break if original is None: # pragma: no cover # Too much work to test this. This is just a fall back in case # we find a placeholder, and we went to revert it and it wasn't in our stash. # Most likely this would be caused by someone else. We just want to put it # back in the block if we can't revert it. Maybe we can do a more directed # unit test in the future. new_block.append(line) else: new_block.append(line) return '\n'.join(new_block) def run(self, parent, blocks): """Look for and parse code block.""" handled = False if not self.config.get("disable_indented_code_blocks", False): handled = CodeBlockProcessor.test(self, parent, blocks[0]) if handled: if self.config.get("nested", True): blocks[0] = self.revert_greedy_fences(blocks[0]) handled = CodeBlockProcessor.run(self, parent, blocks) is not False return handled def makeExtension(*args, **kwargs): """Return extension.""" return SuperFencesCodeExtension(*args, **kwargs)
mit
-5,264,952,881,420,983,000
34.824916
110
0.524389
false
dneiter/exabgp
lib/exabgp/reactor/api/command/text.py
2
15907
# encoding: utf-8 """ command.py Created by Thomas Mangin on 2015-12-15. Copyright (c) 2009-2015 Exa Networks. All rights reserved. """ from exabgp.version import version as _version class Text (object): callback = {} def __new__ (cls,name): def register (function): cls.callback[name] = function return function return register @Text('shutdown') def shutdown (self, reactor, service, command): reactor.answer(service,'shutdown in progress') return reactor.api.shutdown() @Text('reload') def reload (self, reactor, service, command): reactor.answer(service,'reload in progress') return reactor.api.reload() @Text('restart') def restart (self, reactor, service, command): reactor.answer(service,'restart in progress') return reactor.api.restart() @Text('version') def version (self, reactor, service, command): reactor.answer(service,'exabgp %s\n' % _version) return True @Text('teardown') def teardown (self, reactor, service, command): try: descriptions,command = self.parser.extract_neighbors(command) _,code = command.split(' ',1) for key in reactor.peers: for description in descriptions: if reactor.match_neighbor(description,key): reactor.peers[key].teardown(int(code)) self.logger.reactor('teardown scheduled for %s' % ' '.join(description)) return True except ValueError: return False except IndexError: return False @Text('show neighbor') def show_neighbor (self, reactor, service, command): def callback (): for key in reactor.configuration.neighbor.neighbors.keys(): neighbor = reactor.configuration.neighbor.neighbors[key] for line in str(neighbor).split('\n'): reactor.answer(service,line) yield True reactor.plan(callback(),'show_neighbor') return True @Text('show neighbors') def show_neighbors (self, reactor, service, command): def callback (): for key in reactor.configuration.neighbor.neighbors.keys(): neighbor = reactor.configuration.neighbor.neighbors[key] for line in str(neighbor).split('\n'): reactor.answer(service,line) yield True reactor.plan(callback(),'show_neighbors') return True @Text('show routes') def show_routes (self, reactor, service, command): def callback (): last = command.split()[-1] if last == 'routes': neighbors = reactor.configuration.neighbor.neighbors.keys() else: neighbors = [n for n in reactor.configuration.neighbor.neighbors.keys() if 'neighbor %s' % last in n] for key in neighbors: neighbor = reactor.configuration.neighbor.neighbors[key] for change in list(neighbor.rib.outgoing.sent_changes()): reactor.answer(service,'neighbor %s %s' % (neighbor.local_address,str(change.nlri))) yield True reactor.plan(callback(),'show_routes') return True @Text('show routes extensive') def show_routes_extensive (self, reactor, service, command): def callback (): last = command.split()[-1] if last == 'extensive': neighbors = reactor.configuration.neighbor.neighbors.keys() else: neighbors = [n for n in reactor.configuration.neighbor.neighbors.keys() if 'neighbor %s' % last in n] for key in neighbors: neighbor = reactor.configuration.neighbor.neighbors[key] for change in list(neighbor.rib.outgoing.sent_changes()): reactor.answer(service,'neighbor %s %s' % (neighbor.name(),change.extensive())) yield True reactor.plan(callback(),'show_routes_extensive') return True @Text('announce watchdog') def announce_watchdog (self, reactor, service, command): def callback (name): # XXX: move into Action for neighbor in reactor.configuration.neighbor.neighbors: reactor.configuration.neighbor.neighbors[neighbor].rib.outgoing.announce_watchdog(name) yield False reactor.route_update = True try: name = 
command.split(' ')[2] except IndexError: name = service reactor.plan(callback(name),'announce_watchdog') return True @Text('withdraw watchdog') def withdraw_watchdog (self, reactor, service, command): def callback (name): # XXX: move into Action for neighbor in reactor.configuration.neighbor.neighbors: reactor.configuration.neighbor.neighbors[neighbor].rib.outgoing.withdraw_watchdog(name) yield False reactor.route_update = True try: name = command.split(' ')[2] except IndexError: name = service reactor.plan(callback(name),'withdraw_watchdog') return True @Text('flush route') def flush_route (self, reactor, service, command): def callback (self, peers): self.logger.reactor("Flushing routes for %s" % ', '.join(peers if peers else []) if peers is not None else 'all peers') yield True reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,peers),'flush_route') return True except ValueError: return False except IndexError: return False @Text('announce route') def announce_route (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_route(command,nexthops,'announce') if not changes: self.logger.reactor("Command could not parse route in : %s" % command,'warning') yield True else: peers = [] for (peer,change) in changes: peers.append(peer) reactor.api.change_to_peers(change,[peer,]) self.logger.reactor("Route added to %s : %s" % (', '.join(peers if peers else []) if peers is not None else 'all peers',change.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'announce_route') return True except ValueError: return False except IndexError: return False @Text('withdraw route') def withdraw_route (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_route(command,nexthops,'withdraw') if not changes: self.logger.reactor("Command could not parse route in : %s" % command,'warning') yield True else: for (peer,change) in changes: if reactor.api.change_to_peers(change,[peer,]): self.logger.reactor("Route removed : %s" % change.extensive()) yield False else: self.logger.reactor("Could not find therefore remove route : %s" % change.extensive(),'warning') yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'withdraw_route') return True except ValueError: return False except IndexError: return False @Text('announce vpls') def announce_vpls (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_vpls(command,nexthops,'announce') if not changes: self.logger.reactor("Command could not parse vpls in : %s" % command,'warning') yield True else: peers = [] for (peer,change) in changes: peers.append(peer) reactor.api.change_to_peers(change,[peer,]) self.logger.reactor("vpls added to %s : %s" % (', '.join(peers if peers else 
[]) if peers is not None else 'all peers',change.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'announce_vpls') return True except ValueError: return False except IndexError: return False @Text('withdraw vpls') def withdraw_vpls (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_vpls(command,nexthops,'withdraw') if not changes: self.logger.reactor("Command could not parse vpls in : %s" % command,'warning') yield True else: for (peer,change) in changes: if reactor.api.change_to_peers(change,[peer,]): self.logger.reactor("vpls removed : %s" % change.extensive()) yield False else: self.logger.reactor("Could not find therefore remove vpls : %s" % change.extensive(),'warning') yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'withdraw_vpls') return True except ValueError: return False except IndexError: return False @Text('announce attribute') def announce_attribute (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_attribute(command,nexthops,'announce') if not changes: self.logger.reactor("Command could not parse attribute in : %s" % command,'warning') yield True else: for (peers,change) in changes: reactor.api.change_to_peers(change,peers) self.logger.reactor("Route added to %s : %s" % (', '.join(peers if peers else []) if peers is not None else 'all peers',change.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'announce_attribute') return True except ValueError: return False except IndexError: return False @Text('withdraw attribute') def withdraw_attribute (self, reactor, service, command): def callback (self, command, nexthops): changes = self.parser.api_attribute(command,nexthops,'withdraw') if not changes: self.logger.reactor("Command could not parse attribute in : %s" % command,'warning') yield True else: for (peers,change) in changes: if reactor.api.change_to_peers(change,peers): self.logger.reactor("Route removed : %s" % change.extensive()) yield False else: self.logger.reactor("Could not find therefore remove route : %s" % change.extensive(),'warning') yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,reactor.nexthops(peers)),'withdraw_attribute') return True except ValueError: return False except IndexError: return False @Text('announce flow') def announce_flow (self, reactor, service, command): def callback (self, command, peers): changes = self.parser.api_flow(command,'announce') if not changes: 
self.logger.reactor("Command could not parse flow in : %s" % command) yield True else: for change in changes: reactor.api.change_to_peers(change,peers) self.logger.reactor("Flow added to %s : %s" % (', '.join(peers if peers else []) if peers is not None else 'all peers',change.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,peers),'announce_flow') return True except ValueError: return False except IndexError: return False @Text('withdraw flow') def withdraw_flow (self, reactor, service, command): def callback (self, command, peers): changes = self.parser.api_flow(command,'withdraw') if not changes: self.logger.reactor("Command could not parse flow in : %s" % command) yield True else: for change in changes: if reactor.api.change_to_peers(change,peers): self.logger.reactor("Flow found and removed : %s" % change.extensive()) yield False else: self.logger.reactor("Could not find therefore remove flow : %s" % change.extensive(),'warning') yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,peers),'withdraw_flow') return True except ValueError: return False except IndexError: return False @Text('announce eor') def announce_eor (self, reactor, service, command): def callback (self, command, peers): family = self.parser.api_eor(command) if not family: self.logger.reactor("Command could not parse eor : %s" % command) yield True else: reactor.api.eor_to_peers(family,peers) self.logger.reactor("Sent to %s : %s" % (', '.join(peers if peers else []) if peers is not None else 'all peers',family.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,peers),'announce_eor') return True except ValueError: return False except IndexError: return False @Text('announce route-refresh') def announce_refresh (self, reactor, service, command): def callback (self, command, peers): refresh = self.parser.api_refresh(command) if not refresh: self.logger.reactor("Command could not parse flow in : %s" % command) yield True else: reactor.api.refresh_to_peers(refresh,peers) self.logger.reactor("Sent to %s : %s" % (', '.join(peers if peers else []) if peers is not None else 'all peers',refresh.extensive())) yield False reactor.route_update = True try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,peers),'announce_refresh') return True except ValueError: return False except IndexError: return False @Text('announce operational') def announce_operational (self, reactor, service, command): def callback (self, command, peers): operational = self.parser.api_operational(command) if not operational: self.logger.reactor("Command could not parse operational command : %s" % command) 
yield True else: reactor.api.operational_to_peers(operational,peers) self.logger.reactor("operational message sent to %s : %s" % ( ', '.join(peers if peers else []) if peers is not None else 'all peers',operational.extensive() ) ) yield False reactor.route_update = True if (command.split() + ['be','safe'])[2].lower() not in ('asm','adm','rpcq','rpcp','apcq','apcp','lpcq','lpcp'): return False try: descriptions,command = self.parser.extract_neighbors(command) peers = reactor.match_neighbors(descriptions) if not peers: self.logger.reactor('no neighbor matching the command : %s' % command,'warning') return False reactor.plan(callback(self,command,peers),'announce_operational') return True except ValueError: return False except IndexError: return False
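Because Text.__new__ returns a registering closure, Text('<name>') acts as a decorator that fills the shared callback table keyed by command name. A minimal sketch of adding a command this way (the command name and reply text are illustrative; the handler signature matches the ones above):

@Text('show uptime')
def show_uptime (self, reactor, service, command):
    # Same contract as the handlers above: answer over the API service
    # and return True on success.
    reactor.answer(service, 'uptime unknown')
    return True

# Dispatch is then a plain dict lookup on the registered name:
handler = Text.callback['show uptime']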
bsd-3-clause
-695,674,210,824,854,500
30.561508
144
0.717797
false
redhat-openstack/django
django/contrib/contenttypes/views.py
115
3383
from __future__ import unicode_literals

from django import http
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site, get_current_site
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _


def shortcut(request, content_type_id, object_id):
    """
    Redirect to an object's page based on a content-type ID and an object ID.
    """
    # Look up the object, making sure it's got a get_absolute_url() function.
    try:
        content_type = ContentType.objects.get(pk=content_type_id)
        if not content_type.model_class():
            raise http.Http404(_("Content type %(ct_id)s object has no associated model") %
                               {'ct_id': content_type_id})
        obj = content_type.get_object_for_this_type(pk=object_id)
    except (ObjectDoesNotExist, ValueError):
        raise http.Http404(_("Content type %(ct_id)s object %(obj_id)s doesn't exist") %
                           {'ct_id': content_type_id, 'obj_id': object_id})

    try:
        get_absolute_url = obj.get_absolute_url
    except AttributeError:
        raise http.Http404(_("%(ct_name)s objects don't have a get_absolute_url() method") %
                           {'ct_name': content_type.name})
    absurl = get_absolute_url()

    # Try to figure out the object's domain, so we can do a cross-site redirect
    # if necessary.

    # If the object actually defines a domain, we're done.
    if absurl.startswith('http://') or absurl.startswith('https://'):
        return http.HttpResponseRedirect(absurl)

    # Otherwise, we need to introspect the object's relationships for a
    # relation to the Site object
    object_domain = None

    if Site._meta.installed:
        opts = obj._meta

        # First, look for a many-to-many relationship to Site.
        for field in opts.many_to_many:
            if field.rel.to is Site:
                try:
                    # Caveat: In the case of multiple related Sites, this just
                    # selects the *first* one, which is arbitrary.
                    object_domain = getattr(obj, field.name).all()[0].domain
                except IndexError:
                    pass
                if object_domain is not None:
                    break

        # Next, look for a many-to-one relationship to Site.
        if object_domain is None:
            for field in obj._meta.fields:
                if field.rel and field.rel.to is Site:
                    try:
                        object_domain = getattr(obj, field.name).domain
                    except Site.DoesNotExist:
                        pass
                    if object_domain is not None:
                        break

    # Fall back to the current site (if possible).
    if object_domain is None:
        try:
            object_domain = get_current_site(request).domain
        except Site.DoesNotExist:
            pass

    # If all that malarkey found an object domain, use it. Otherwise, fall back
    # to whatever get_absolute_url() returned.
    if object_domain is not None:
        protocol = 'https' if request.is_secure() else 'http'
        return http.HttpResponseRedirect('%s://%s%s' % (protocol, object_domain, absurl))
    else:
        return http.HttpResponseRedirect(absurl)
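A hedged wiring sketch: the shortcut view is typically mounted under a short redirect prefix in the project URLconf, so /r/<content_type_id>/<object_id>/ resolves to it. The 'r/' prefix is conventional rather than required, and the list-style urlpatterns shown here is the later-Django form of the same idea:

from django.conf.urls import url
from django.contrib.contenttypes.views import shortcut

urlpatterns = [
    # /r/<content_type_id>/<object_id>/ redirects to the object's page.
    url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', shortcut),
]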
bsd-3-clause
-7,892,283,069,216,955,000
40.256098
92
0.593556
false
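A hedged sketch of wiring this shortcut view into a URLconf; the 'r/' prefix and the pattern below are conventional choices, not something the view itself mandates:

from django.conf.urls import url
from django.contrib.contenttypes.views import shortcut

urlpatterns = [
    # /r/<content_type_id>/<object_id>/ redirects to the object's
    # get_absolute_url(), possibly on another site's domain
    url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', shortcut),
]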
bdang2012/taiga-back
taiga/projects/attachments/api.py
5
3684
# Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os.path as path
import mimetypes
mimetypes.init()

from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType

from taiga.base import filters
from taiga.base import exceptions as exc
from taiga.base.api import ModelCrudViewSet
from taiga.base.api.utils import get_object_or_404
from taiga.projects.notifications.mixins import WatchedResourceMixin
from taiga.projects.history.mixins import HistoryResourceMixin

from . import permissions
from . import serializers
from . import models


class BaseAttachmentViewSet(HistoryResourceMixin, WatchedResourceMixin, ModelCrudViewSet):
    model = models.Attachment
    serializer_class = serializers.AttachmentSerializer
    filter_fields = ["project", "object_id"]

    content_type = None

    def update(self, *args, **kwargs):
        partial = kwargs.get("partial", False)
        if not partial:
            raise exc.NotSupported(_("Non partial updates not supported"))
        return super().update(*args, **kwargs)

    def get_content_type(self):
        app_name, model = self.content_type.split(".", 1)
        return get_object_or_404(ContentType, app_label=app_name, model=model)

    def pre_save(self, obj):
        if not obj.id:
            obj.content_type = self.get_content_type()
            obj.owner = self.request.user
            obj.size = obj.attached_file.size
            obj.name = path.basename(obj.attached_file.name).lower()

        if obj.project_id != obj.content_object.project_id:
            raise exc.WrongArguments(_("Project ID does not match between object and project"))

        super().pre_save(obj)

    def post_delete(self, obj):
        # NOTE: When an attachment is destroyed, the content_object changes
        # after the deletion, not before
        self.persist_history_snapshot(obj, delete=True)
        super().pre_delete(obj)

    def get_object_for_snapshot(self, obj):
        return obj.content_object


class UserStoryAttachmentViewSet(BaseAttachmentViewSet):
    permission_classes = (permissions.UserStoryAttachmentPermission,)
    filter_backends = (filters.CanViewUserStoryAttachmentFilterBackend,)
    content_type = "userstories.userstory"


class IssueAttachmentViewSet(BaseAttachmentViewSet):
    permission_classes = (permissions.IssueAttachmentPermission,)
    filter_backends = (filters.CanViewIssueAttachmentFilterBackend,)
    content_type = "issues.issue"


class TaskAttachmentViewSet(BaseAttachmentViewSet):
    permission_classes = (permissions.TaskAttachmentPermission,)
    filter_backends = (filters.CanViewTaskAttachmentFilterBackend,)
    content_type = "tasks.task"


class WikiAttachmentViewSet(BaseAttachmentViewSet):
    permission_classes = (permissions.WikiAttachmentPermission,)
    filter_backends = (filters.CanViewWikiAttachmentFilterBackend,)
    content_type = "wiki.wikipage"
agpl-3.0
8,743,416,350,056,382,000
36.958763
92
0.737099
false
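get_content_type() above resolves a dotted 'app_label.model' string against Django's ContentType table. The same lookup as a standalone sketch, assuming a configured Django project with the taiga apps installed:

from django.contrib.contenttypes.models import ContentType

def resolve_content_type(dotted):
    # "userstories.userstory" -> app_label="userstories", model="userstory"
    app_label, model = dotted.split(".", 1)
    return ContentType.objects.get(app_label=app_label, model=model)

ct = resolve_content_type("userstories.userstory")  # -> ContentType instance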
bhatfield/titanium_mobile
support/common/markdown/inlinepatterns.py
107
12185
""" INLINE PATTERNS ============================================================================= Inline patterns such as *emphasis* are handled by means of auxiliary objects, one per pattern. Pattern objects must be instances of classes that extend markdown.Pattern. Each pattern object uses a single regular expression and needs support the following methods: pattern.getCompiledRegExp() # returns a regular expression pattern.handleMatch(m) # takes a match object and returns # an ElementTree element or just plain text All of python markdown's built-in patterns subclass from Pattern, but you can add additional patterns that don't. Also note that all the regular expressions used by inline must capture the whole block. For this reason, they all start with '^(.*)' and end with '(.*)!'. In case with built-in expression Pattern takes care of adding the "^(.*)" and "(.*)!". Finally, the order in which regular expressions are applied is very important - e.g. if we first replace http://.../ links with <a> tags and _then_ try to replace inline html, we would end up with a mess. So, we apply the expressions in the following order: * escape and backticks have to go before everything else, so that we can preempt any markdown patterns by escaping them. * then we handle auto-links (must be done before inline html) * then we handle inline HTML. At this point we will simply replace all inline HTML strings with a placeholder and add the actual HTML to a hash. * then inline images (must be done before links) * then bracketed links, first regular then reference-style * finally we apply strong and emphasis """ import markdown import re from urlparse import urlparse, urlunparse import sys if sys.version >= "3.0": from html import entities as htmlentitydefs else: import htmlentitydefs """ The actual regular expressions for patterns ----------------------------------------------------------------------------- """ NOBRACKET = r'[^\]\[]*' BRK = ( r'\[(' + (NOBRACKET + r'(\[')*6 + (NOBRACKET+ r'\])*')*6 + NOBRACKET + r')\]' ) NOIMG = r'(?<!\!)' BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' # `e=f()` or ``e=f("`")`` ESCAPE_RE = r'\\(.)' # \< EMPHASIS_RE = r'(\*)([^\*]+)\2' # *emphasis* STRONG_RE = r'(\*{2}|_{2})(.+?)\2' # **strong** STRONG_EM_RE = r'(\*{3}|_{3})(.+?)\2' # ***strong*** if markdown.SMART_EMPHASIS: EMPHASIS_2_RE = r'(?<!\w)(_)(\S.+?)\2(?!\w)' # _emphasis_ else: EMPHASIS_2_RE = r'(_)(.+?)\2' # _emphasis_ LINK_RE = NOIMG + BRK + \ r'''\(\s*(<.*?>|((?:(?:\(.*?\))|[^\(\)]))*?)\s*((['"])(.*?)\12)?\)''' # [text](url) or [text](<url>) IMAGE_LINK_RE = r'\!' + BRK + r'\s*\((<.*?>|([^\)]*))\)' # ![alttxt](http://x.com/) or ![alttxt](<http://x.com/>) REFERENCE_RE = NOIMG + BRK+ r'\s*\[([^\]]*)\]' # [Google][3] IMAGE_REFERENCE_RE = r'\!' 
+ BRK + '\s*\[([^\]]*)\]' # ![alt text][2] NOT_STRONG_RE = r'((^| )(\*|_)( |$))' # stand-alone * or _ AUTOLINK_RE = r'<((?:f|ht)tps?://[^>]*)>' # <http://www.123.com> AUTOMAIL_RE = r'<([^> \!]*@[^> ]*)>' # <[email protected]> HTML_RE = r'(\<([a-zA-Z/][^\>]*?|\!--.*?--)\>)' # <...> ENTITY_RE = r'(&[\#a-zA-Z0-9]*;)' # &amp; LINE_BREAK_RE = r' \n' # two spaces at end of line LINE_BREAK_2_RE = r' $' # two spaces at end of text def dequote(string): """Remove quotes from around a string.""" if ( ( string.startswith('"') and string.endswith('"')) or (string.startswith("'") and string.endswith("'")) ): return string[1:-1] else: return string ATTR_RE = re.compile("\{@([^\}]*)=([^\}]*)}") # {@id=123} def handleAttributes(text, parent): """Set values of an element based on attribute definitions ({@id=123}).""" def attributeCallback(match): parent.set(match.group(1), match.group(2).replace('\n', ' ')) return ATTR_RE.sub(attributeCallback, text) """ The pattern classes ----------------------------------------------------------------------------- """ class Pattern: """Base class that inline patterns subclass. """ def __init__ (self, pattern, markdown_instance=None): """ Create an instant of an inline pattern. Keyword arguments: * pattern: A regular expression that matches a pattern """ self.pattern = pattern self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern, re.DOTALL) # Api for Markdown to pass safe_mode into instance self.safe_mode = False if markdown_instance: self.markdown = markdown_instance def getCompiledRegExp (self): """ Return a compiled regular expression. """ return self.compiled_re def handleMatch(self, m): """Return a ElementTree element from the given match. Subclasses should override this method. Keyword arguments: * m: A re match object containing a match of the pattern. """ pass def type(self): """ Return class name, to define pattern type """ return self.__class__.__name__ BasePattern = Pattern # for backward compatibility class SimpleTextPattern (Pattern): """ Return a simple text of group(2) of a Pattern. """ def handleMatch(self, m): text = m.group(2) if text == markdown.INLINE_PLACEHOLDER_PREFIX: return None return text class SimpleTagPattern (Pattern): """ Return element of type `tag` with a text attribute of group(3) of a Pattern. """ def __init__ (self, pattern, tag): Pattern.__init__(self, pattern) self.tag = tag def handleMatch(self, m): el = markdown.etree.Element(self.tag) el.text = m.group(3) return el class SubstituteTagPattern (SimpleTagPattern): """ Return a eLement of type `tag` with no children. """ def handleMatch (self, m): return markdown.etree.Element(self.tag) class BacktickPattern (Pattern): """ Return a `<code>` element containing the matching text. """ def __init__ (self, pattern): Pattern.__init__(self, pattern) self.tag = "code" def handleMatch(self, m): el = markdown.etree.Element(self.tag) el.text = markdown.AtomicString(m.group(3).strip()) return el class DoubleTagPattern (SimpleTagPattern): """Return a ElementTree element nested in tag2 nested in tag1. Useful for strong emphasis etc. """ def handleMatch(self, m): tag1, tag2 = self.tag.split(",") el1 = markdown.etree.Element(tag1) el2 = markdown.etree.SubElement(el1, tag2) el2.text = m.group(3) return el1 class HtmlPattern (Pattern): """ Store raw inline html and return a placeholder. """ def handleMatch (self, m): rawhtml = m.group(2) inline = True place_holder = self.markdown.htmlStash.store(rawhtml) return place_holder class LinkPattern (Pattern): """ Return a link element from the given match. 
""" def handleMatch(self, m): el = markdown.etree.Element("a") el.text = m.group(2) title = m.group(11) href = m.group(9) if href: if href[0] == "<": href = href[1:-1] el.set("href", self.sanitize_url(href.strip())) else: el.set("href", "") if title: title = dequote(title) #.replace('"', "&quot;") el.set("title", title) return el def sanitize_url(self, url): """ Sanitize a url against xss attacks in "safe_mode". Rather than specifically blacklisting `javascript:alert("XSS")` and all its aliases (see <http://ha.ckers.org/xss.html>), we whitelist known safe url formats. Most urls contain a network location, however some are known not to (i.e.: mailto links). Script urls do not contain a location. Additionally, for `javascript:...`, the scheme would be "javascript" but some aliases will appear to `urlparse()` to have no scheme. On top of that relative links (i.e.: "foo/bar.html") have no scheme. Therefore we must check "path", "parameters", "query" and "fragment" for any literal colons. We don't check "scheme" for colons because it *should* never have any and "netloc" must allow the form: `username:password@host:port`. """ locless_schemes = ['', 'mailto', 'news'] scheme, netloc, path, params, query, fragment = url = urlparse(url) safe_url = False if netloc != '' or scheme in locless_schemes: safe_url = True for part in url[2:]: if ":" in part: safe_url = False if self.markdown.safeMode and not safe_url: return '' else: return urlunparse(url) class ImagePattern(LinkPattern): """ Return a img element from the given match. """ def handleMatch(self, m): el = markdown.etree.Element("img") src_parts = m.group(9).split() if src_parts: src = src_parts[0] if src[0] == "<" and src[-1] == ">": src = src[1:-1] el.set('src', self.sanitize_url(src)) else: el.set('src', "") if len(src_parts) > 1: el.set('title', dequote(" ".join(src_parts[1:]))) if markdown.ENABLE_ATTRIBUTES: truealt = handleAttributes(m.group(2), el) else: truealt = m.group(2) el.set('alt', truealt) return el class ReferencePattern(LinkPattern): """ Match to a stored reference and return link element. """ def handleMatch(self, m): if m.group(9): id = m.group(9).lower() else: # if we got something like "[Google][]" # we'll use "google" as the id id = m.group(2).lower() if not id in self.markdown.references: # ignore undefined refs return None href, title = self.markdown.references[id] text = m.group(2) return self.makeTag(href, title, text) def makeTag(self, href, title, text): el = markdown.etree.Element('a') el.set('href', self.sanitize_url(href)) if title: el.set('title', title) el.text = text return el class ImageReferencePattern (ReferencePattern): """ Match to a stored reference and return img element. """ def makeTag(self, href, title, text): el = markdown.etree.Element("img") el.set("src", self.sanitize_url(href)) if title: el.set("title", title) el.set("alt", text) return el class AutolinkPattern (Pattern): """ Return a link Element given an autolink (`<http://example/com>`). """ def handleMatch(self, m): el = markdown.etree.Element("a") el.set('href', m.group(2)) el.text = markdown.AtomicString(m.group(2)) return el class AutomailPattern (Pattern): """ Return a mailto link Element given an automail link (`<[email protected]>`). 
""" def handleMatch(self, m): el = markdown.etree.Element('a') email = m.group(2) if email.startswith("mailto:"): email = email[len("mailto:"):] def codepoint2name(code): """Return entity definition by code, or the code if not defined.""" entity = htmlentitydefs.codepoint2name.get(code) if entity: return "%s%s;" % (markdown.AMP_SUBSTITUTE, entity) else: return "%s#%d;" % (markdown.AMP_SUBSTITUTE, code) letters = [codepoint2name(ord(letter)) for letter in email] el.text = markdown.AtomicString(''.join(letters)) mailto = "mailto:" + email mailto = "".join([markdown.AMP_SUBSTITUTE + '#%d;' % ord(letter) for letter in mailto]) el.set('href', mailto) return el
apache-2.0
3,404,557,503,276,708,400
31.843666
81
0.560854
false
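Pattern.__init__ above wraps every expression in '^(.*?)%s(.*?)$', which shifts all group numbers by one: group(1) captures the text before the match, so a pattern's own first group becomes group(2). A short demonstration with the module's EMPHASIS_RE, which is why SimpleTagPattern reads its text from m.group(3):

import re

EMPHASIS_RE = r'(\*)([^\*]+)\2'  # *emphasis*
compiled = re.compile("^(.*?)%s(.*?)$" % EMPHASIS_RE, re.DOTALL)

m = compiled.match("some *emphasised* words")
print(m.group(1))  # 'some '      -> leading text captured by the wrapper
print(m.group(3))  # 'emphasised' -> the pattern's second group, shifted by one
print(m.group(4))  # ' words'     -> trailing text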
lochiiconnectivity/boto
boto/sdb/db/blob.py
57
2398
# Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.


class Blob(object):
    """Blob object"""

    def __init__(self, value=None, file=None, id=None):
        self._file = file
        self.id = id
        self.value = value

    @property
    def file(self):
        from StringIO import StringIO
        if self._file:
            f = self._file
        else:
            f = StringIO(self.value)
        return f

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        if hasattr(self.file, "get_contents_as_string"):
            value = self.file.get_contents_as_string()
        else:
            value = self.file.getvalue()
        if isinstance(value, unicode):
            return value
        else:
            return value.decode('utf-8')

    def read(self):
        if hasattr(self.file, "get_contents_as_string"):
            return self.file.get_contents_as_string()
        else:
            return self.file.read()

    def readline(self):
        return self.file.readline()

    def next(self):
        return self.file.next()

    def __iter__(self):
        return iter(self.file)

    @property
    def size(self):
        if self._file:
            return self._file.size
        elif self.value:
            return len(self.value)
        else:
            return 0
mit
-7,452,152,382,092,280,000
30.973333
74
0.638449
false
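A small usage sketch for Blob, assuming plain string content rather than an S3-backed key object (a key object would be detected through its get_contents_as_string() method):

from boto.sdb.db.blob import Blob

b = Blob(value='hello world')
print(b.size)    # 11 -- falls back to len(self.value) when no file is set
print(b.read())  # 'hello world' -- served from a fresh StringIO wrapper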
richardcs/ansible
lib/ansible/modules/network/cloudengine/ce_switchport.py
7
27424
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ce_switchport version_added: "2.4" short_description: Manages Layer 2 switchport interfaces on HUAWEI CloudEngine switches. description: - Manages Layer 2 switchport interfaces on HUAWEI CloudEngine switches. author: QijunPan (@QijunPan) notes: - When C(state=absent), VLANs can be added/removed from trunk links and the existing access VLAN can be 'unconfigured' to just having VLAN 1 on that interface. - When working with trunks VLANs the keywords add/remove are always sent in the C(port trunk allow-pass vlan) command. Use verbose mode to see commands sent. - When C(state=unconfigured), the interface will result with having a default Layer 2 interface, i.e. vlan 1 in access mode. options: interface: description: - Full name of the interface, i.e. 40GE1/0/22. required: true mode: description: - The link type of an interface. choices: ['access','trunk'] access_vlan: description: - If C(mode=access), used as the access VLAN ID, in the range from 1 to 4094. native_vlan: description: - If C(mode=trunk), used as the trunk native VLAN ID, in the range from 1 to 4094. trunk_vlans: description: - If C(mode=trunk), used as the VLAN range to ADD or REMOVE from the trunk, such as 2-10 or 2,5,10-15, etc. state: description: - Manage the state of the resource. 
default: present choices: ['present', 'absent', 'unconfigured'] ''' EXAMPLES = ''' - name: switchport module test hosts: cloudengine connection: local gather_facts: no vars: cli: host: "{{ inventory_hostname }}" port: "{{ ansible_ssh_port }}" username: "{{ username }}" password: "{{ password }}" transport: cli tasks: - name: Ensure 10GE1/0/22 is in its default switchport state ce_switchport: interface: 10GE1/0/22 state: unconfigured provider: '{{ cli }}' - name: Ensure 10GE1/0/22 is configured for access vlan 20 ce_switchport: interface: 10GE1/0/22 mode: access access_vlan: 20 provider: '{{ cli }}' - name: Ensure 10GE1/0/22 only has vlans 5-10 as trunk vlans ce_switchport: interface: 10GE1/0/22 mode: trunk native_vlan: 10 trunk_vlans: 5-10 provider: '{{ cli }}' - name: Ensure 10GE1/0/22 is a trunk port and ensure 2-50 are being tagged (doesn't mean others aren't also being tagged) ce_switchport: interface: 10GE1/0/22 mode: trunk native_vlan: 10 trunk_vlans: 2-50 provider: '{{ cli }}' - name: Ensure these VLANs are not being tagged on the trunk ce_switchport: interface: 10GE1/0/22 mode: trunk trunk_vlans: 51-4000 state: absent provider: '{{ cli }}' ''' RETURN = ''' proposed: description: k/v pairs of parameters passed into module returned: always type: dict sample: {"access_vlan": "20", "interface": "10GE1/0/22", "mode": "access"} existing: description: k/v pairs of existing switchport returned: always type: dict sample: {"access_vlan": "10", "interface": "10GE1/0/22", "mode": "access", "switchport": "enable"} end_state: description: k/v pairs of switchport after module execution returned: always type: dict sample: {"access_vlan": "20", "interface": "10GE1/0/22", "mode": "access", "switchport": "enable"} updates: description: command string sent to the device returned: always type: list sample: ["10GE1/0/22", "port default vlan 20"] changed: description: check to see if a change was made on the device returned: always type: boolean sample: true ''' import re from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec CE_NC_GET_INTF = """ <filter type="subtree"> <ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <interfaces> <interface> <ifName>%s</ifName> <isL2SwitchPort></isL2SwitchPort> </interface> </interfaces> </ifm> </filter> """ CE_NC_GET_PORT_ATTR = """ <filter type="subtree"> <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <ethernetIfs> <ethernetIf> <ifName>%s</ifName> <l2Enable></l2Enable> <l2Attribute> <linkType></linkType> <pvid></pvid> <trunkVlans></trunkVlans> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> </filter> """ CE_NC_SET_ACCESS_PORT = """ <config> <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <ethernetIfs> <ethernetIf operation="merge"> <ifName>%s</ifName> <l2Attribute> <linkType>access</linkType> <pvid>%s</pvid> <trunkVlans></trunkVlans> <untagVlans></untagVlans> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> </config> """ CE_NC_SET_TRUNK_PORT_MODE = """ <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <ethernetIfs> <ethernetIf operation="merge"> <ifName>%s</ifName> <l2Attribute> <linkType>trunk</linkType> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> """ CE_NC_SET_TRUNK_PORT_PVID = """ <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" 
format-version="1.0"> <ethernetIfs> <ethernetIf operation="merge"> <ifName>%s</ifName> <l2Attribute> <linkType>trunk</linkType> <pvid>%s</pvid> <untagVlans></untagVlans> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> """ CE_NC_SET_TRUNK_PORT_VLANS = """ <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <ethernetIfs> <ethernetIf operation="merge"> <ifName>%s</ifName> <l2Attribute> <linkType>trunk</linkType> <trunkVlans>%s:%s</trunkVlans> <untagVlans></untagVlans> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> """ CE_NC_SET_DEFAULT_PORT = """ <config> <ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0"> <ethernetIfs> <ethernetIf operation="merge"> <ifName>%s</ifName> <l2Attribute> <linkType>access</linkType> <pvid>1</pvid> <trunkVlans></trunkVlans> <untagVlans></untagVlans> </l2Attribute> </ethernetIf> </ethernetIfs> </ethernet> </config> """ SWITCH_PORT_TYPE = ('ge', '10ge', '25ge', '4x10ge', '40ge', '100ge', 'eth-trunk') def get_interface_type(interface): """Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF...""" if interface is None: return None iftype = None if interface.upper().startswith('GE'): iftype = 'ge' elif interface.upper().startswith('10GE'): iftype = '10ge' elif interface.upper().startswith('25GE'): iftype = '25ge' elif interface.upper().startswith('4X10GE'): iftype = '4x10ge' elif interface.upper().startswith('40GE'): iftype = '40ge' elif interface.upper().startswith('100GE'): iftype = '100ge' elif interface.upper().startswith('VLANIF'): iftype = 'vlanif' elif interface.upper().startswith('LOOPBACK'): iftype = 'loopback' elif interface.upper().startswith('METH'): iftype = 'meth' elif interface.upper().startswith('ETH-TRUNK'): iftype = 'eth-trunk' elif interface.upper().startswith('VBDIF'): iftype = 'vbdif' elif interface.upper().startswith('NVE'): iftype = 'nve' elif interface.upper().startswith('TUNNEL'): iftype = 'tunnel' elif interface.upper().startswith('ETHERNET'): iftype = 'ethernet' elif interface.upper().startswith('FCOE-PORT'): iftype = 'fcoe-port' elif interface.upper().startswith('FABRIC-PORT'): iftype = 'fabric-port' elif interface.upper().startswith('STACK-PORT'): iftype = 'stack-port' elif interface.upper().startswith('NULL'): iftype = 'null' else: return None return iftype.lower() def is_portswitch_enalbed(iftype): """"[undo] portswitch""" return bool(iftype in SWITCH_PORT_TYPE) def vlan_bitmap_undo(bitmap): """convert vlan bitmap to undo bitmap""" vlan_bit = ['F'] * 1024 if not bitmap or len(bitmap) == 0: return ''.join(vlan_bit) bit_len = len(bitmap) for num in range(bit_len): undo = (~int(bitmap[num], 16)) & 0xF vlan_bit[num] = hex(undo)[2] return ''.join(vlan_bit) def is_vlan_bitmap_empty(bitmap): """check vlan bitmap empty""" if not bitmap or len(bitmap) == 0: return True bit_len = len(bitmap) for num in range(bit_len): if bitmap[num] != '0': return False return True class SwitchPort(object): """ Manages Layer 2 switchport interfaces. 
""" def __init__(self, argument_spec): self.spec = argument_spec self.module = None self.init_module() # interface and vlan info self.interface = self.module.params['interface'] self.mode = self.module.params['mode'] self.state = self.module.params['state'] self.access_vlan = self.module.params['access_vlan'] self.native_vlan = self.module.params['native_vlan'] self.trunk_vlans = self.module.params['trunk_vlans'] # host info self.host = self.module.params['host'] self.username = self.module.params['username'] self.port = self.module.params['port'] # state self.changed = False self.updates_cmd = list() self.results = dict() self.proposed = dict() self.existing = dict() self.end_state = dict() self.intf_info = dict() # interface vlan info self.intf_type = None # loopback tunnel ... def init_module(self): """ init module """ required_if = [('state', 'absent', ['mode']), ('state', 'present', ['mode'])] self.module = AnsibleModule( argument_spec=self.spec, required_if=required_if, supports_check_mode=True) def check_response(self, xml_str, xml_name): """Check if response message is already succeed.""" if "<ok/>" not in xml_str: self.module.fail_json(msg='Error: %s failed.' % xml_name) def get_interface_dict(self, ifname): """ get one interface attributes dict.""" intf_info = dict() conf_str = CE_NC_GET_PORT_ATTR % ifname rcv_xml = get_nc_config(self.module, conf_str) if "<data/>" in rcv_xml: return intf_info intf = re.findall( r'.*<ifName>(.*)</ifName>.*\s*<l2Enable>(.*)</l2Enable>.*', rcv_xml) if intf: intf_info = dict(ifName=intf[0][0], l2Enable=intf[0][1], linkType="", pvid="", trunkVlans="") if intf_info["l2Enable"] == "enable": attr = re.findall( r'.*<linkType>(.*)</linkType>.*.*\s*<pvid>(.*)' r'</pvid>.*\s*<trunkVlans>(.*)</trunkVlans>.*', rcv_xml) if attr: intf_info["linkType"] = attr[0][0] intf_info["pvid"] = attr[0][1] intf_info["trunkVlans"] = attr[0][2] return intf_info def is_l2switchport(self): """Check layer2 switch port""" return bool(self.intf_info["l2Enable"] == "enable") def merge_access_vlan(self, ifname, access_vlan): """Merge access interface vlan""" change = False conf_str = "" self.updates_cmd.append("interface %s" % ifname) if self.state == "present": if self.intf_info["linkType"] == "access": if access_vlan and self.intf_info["pvid"] != access_vlan: self.updates_cmd.append( "port default vlan %s" % access_vlan) conf_str = CE_NC_SET_ACCESS_PORT % (ifname, access_vlan) change = True else: # not access self.updates_cmd.append("port link-type access") if access_vlan: self.updates_cmd.append( "port default vlan %s" % access_vlan) conf_str = CE_NC_SET_ACCESS_PORT % (ifname, access_vlan) else: conf_str = CE_NC_SET_ACCESS_PORT % (ifname, "1") change = True elif self.state == "absent": if self.intf_info["linkType"] == "access": if access_vlan and self.intf_info["pvid"] == access_vlan and access_vlan != "1": self.updates_cmd.append( "undo port default vlan %s" % access_vlan) conf_str = CE_NC_SET_ACCESS_PORT % (ifname, "1") change = True else: # not access self.updates_cmd.append("port link-type access") conf_str = CE_NC_SET_ACCESS_PORT % (ifname, "1") change = True if not change: self.updates_cmd.pop() # remove interface return rcv_xml = set_nc_config(self.module, conf_str) self.check_response(rcv_xml, "MERGE_ACCESS_PORT") self.changed = True def merge_trunk_vlan(self, ifname, native_vlan, trunk_vlans): """Merge trunk interface vlan""" change = False xmlstr = "" self.updates_cmd.append("interface %s" % ifname) if trunk_vlans: vlan_list = self.vlan_range_to_list(trunk_vlans) 
vlan_map = self.vlan_list_to_bitmap(vlan_list) if self.state == "present": if self.intf_info["linkType"] == "trunk": if native_vlan and self.intf_info["pvid"] != native_vlan: self.updates_cmd.append( "port trunk pvid vlan %s" % native_vlan) xmlstr += CE_NC_SET_TRUNK_PORT_PVID % (ifname, native_vlan) change = True if trunk_vlans: add_vlans = self.vlan_bitmap_add( self.intf_info["trunkVlans"], vlan_map) if not is_vlan_bitmap_empty(add_vlans): self.updates_cmd.append( "port trunk allow-pass %s" % trunk_vlans.replace(',', ' ').replace('-', ' to ')) xmlstr += CE_NC_SET_TRUNK_PORT_VLANS % ( ifname, add_vlans, add_vlans) change = True else: # not trunk self.updates_cmd.append("port link-type trunk") change = True if native_vlan: self.updates_cmd.append( "port trunk pvid vlan %s" % native_vlan) xmlstr += CE_NC_SET_TRUNK_PORT_PVID % (ifname, native_vlan) if trunk_vlans: self.updates_cmd.append( "port trunk allow-pass %s" % trunk_vlans.replace(',', ' ').replace('-', ' to ')) xmlstr += CE_NC_SET_TRUNK_PORT_VLANS % ( ifname, vlan_map, vlan_map) if not native_vlan and not trunk_vlans: xmlstr += CE_NC_SET_TRUNK_PORT_MODE % ifname self.updates_cmd.append( "undo port trunk allow-pass vlan 1") elif self.state == "absent": if self.intf_info["linkType"] == "trunk": if native_vlan and self.intf_info["pvid"] == native_vlan and native_vlan != '1': self.updates_cmd.append( "undo port trunk pvid vlan %s" % native_vlan) xmlstr += CE_NC_SET_TRUNK_PORT_PVID % (ifname, 1) change = True if trunk_vlans: del_vlans = self.vlan_bitmap_del( self.intf_info["trunkVlans"], vlan_map) if not is_vlan_bitmap_empty(del_vlans): self.updates_cmd.append( "undo port trunk allow-pass %s" % trunk_vlans.replace(',', ' ').replace('-', ' to ')) undo_map = vlan_bitmap_undo(del_vlans) xmlstr += CE_NC_SET_TRUNK_PORT_VLANS % ( ifname, undo_map, del_vlans) change = True else: # not trunk self.updates_cmd.append("port link-type trunk") self.updates_cmd.append("undo port trunk allow-pass vlan 1") xmlstr += CE_NC_SET_TRUNK_PORT_MODE % ifname change = True if not change: self.updates_cmd.pop() return conf_str = "<config>" + xmlstr + "</config>" rcv_xml = set_nc_config(self.module, conf_str) self.check_response(rcv_xml, "MERGE_TRUNK_PORT") self.changed = True def default_switchport(self, ifname): """Set interface default or unconfigured""" change = False if self.intf_info["linkType"] != "access": self.updates_cmd.append("interface %s" % ifname) self.updates_cmd.append("port link-type access") self.updates_cmd.append("port default vlan 1") change = True else: if self.intf_info["pvid"] != "1": self.updates_cmd.append("interface %s" % ifname) self.updates_cmd.append("port default vlan 1") change = True if not change: return conf_str = CE_NC_SET_DEFAULT_PORT % ifname rcv_xml = set_nc_config(self.module, conf_str) self.check_response(rcv_xml, "DEFAULT_INTF_VLAN") self.changed = True def vlan_series(self, vlanid_s): """ convert vlan range to vlan list """ vlan_list = [] peerlistlen = len(vlanid_s) if peerlistlen != 2: self.module.fail_json(msg='Error: Format of vlanid is invalid.') for num in range(peerlistlen): if not vlanid_s[num].isdigit(): self.module.fail_json( msg='Error: Format of vlanid is invalid.') if int(vlanid_s[0]) > int(vlanid_s[1]): self.module.fail_json(msg='Error: Format of vlanid is invalid.') elif int(vlanid_s[0]) == int(vlanid_s[1]): vlan_list.append(str(vlanid_s[0])) return vlan_list for num in range(int(vlanid_s[0]), int(vlanid_s[1])): vlan_list.append(str(num)) vlan_list.append(vlanid_s[1]) return vlan_list def vlan_region(self, 
vlanid_list): """ convert vlan range to vlan list """ vlan_list = [] peerlistlen = len(vlanid_list) for num in range(peerlistlen): if vlanid_list[num].isdigit(): vlan_list.append(vlanid_list[num]) else: vlan_s = self.vlan_series(vlanid_list[num].split('-')) vlan_list.extend(vlan_s) return vlan_list def vlan_range_to_list(self, vlan_range): """ convert vlan range to vlan list """ vlan_list = self.vlan_region(vlan_range.split(',')) return vlan_list def vlan_list_to_bitmap(self, vlanlist): """ convert vlan list to vlan bitmap """ vlan_bit = ['0'] * 1024 bit_int = [0] * 1024 vlan_list_len = len(vlanlist) for num in range(vlan_list_len): tagged_vlans = int(vlanlist[num]) if tagged_vlans <= 0 or tagged_vlans > 4094: self.module.fail_json( msg='Error: Vlan id is not in the range from 1 to 4094.') j = tagged_vlans / 4 bit_int[j] |= 0x8 >> (tagged_vlans % 4) vlan_bit[j] = hex(bit_int[j])[2] vlan_xml = ''.join(vlan_bit) return vlan_xml def vlan_bitmap_add(self, oldmap, newmap): """vlan add bitmap""" vlan_bit = ['0'] * 1024 if len(newmap) != 1024: self.module.fail_json(msg='Error: New vlan bitmap is invalid.') if len(oldmap) != 1024 and len(oldmap) != 0: self.module.fail_json(msg='Error: old vlan bitmap is invalid.') if len(oldmap) == 0: return newmap for num in range(1024): new_tmp = int(newmap[num], 16) old_tmp = int(oldmap[num], 16) add = (~(new_tmp & old_tmp)) & new_tmp vlan_bit[num] = hex(add)[2] vlan_xml = ''.join(vlan_bit) return vlan_xml def vlan_bitmap_del(self, oldmap, delmap): """vlan del bitmap""" vlan_bit = ['0'] * 1024 if not oldmap or len(oldmap) == 0: return ''.join(vlan_bit) if len(oldmap) != 1024 or len(delmap) != 1024: self.module.fail_json(msg='Error: vlan bitmap is invalid.') for num in range(1024): tmp = int(delmap[num], 16) & int(oldmap[num], 16) vlan_bit[num] = hex(tmp)[2] vlan_xml = ''.join(vlan_bit) return vlan_xml def check_params(self): """Check all input params""" # interface type check if self.interface: self.intf_type = get_interface_type(self.interface) if not self.intf_type: self.module.fail_json( msg='Error: Interface name of %s is error.' 
% self.interface) if not self.intf_type or not is_portswitch_enalbed(self.intf_type): self.module.fail_json(msg='Error: Interface %s is error.') # check access_vlan if self.access_vlan: if not self.access_vlan.isdigit(): self.module.fail_json(msg='Error: Access vlan id is invalid.') if int(self.access_vlan) <= 0 or int(self.access_vlan) > 4094: self.module.fail_json( msg='Error: Access vlan id is not in the range from 1 to 4094.') # check native_vlan if self.native_vlan: if not self.native_vlan.isdigit(): self.module.fail_json(msg='Error: Native vlan id is invalid.') if int(self.native_vlan) <= 0 or int(self.native_vlan) > 4094: self.module.fail_json( msg='Error: Native vlan id is not in the range from 1 to 4094.') # get interface info self.intf_info = self.get_interface_dict(self.interface) if not self.intf_info: self.module.fail_json(msg='Error: Interface does not exist.') if not self.is_l2switchport(): self.module.fail_json( msg='Error: Interface is not layer2 swtich port.') def get_proposed(self): """get proposed info""" self.proposed['state'] = self.state self.proposed['interface'] = self.interface self.proposed['mode'] = self.mode self.proposed['access_vlan'] = self.access_vlan self.proposed['native_vlan'] = self.native_vlan self.proposed['trunk_vlans'] = self.trunk_vlans def get_existing(self): """get existing info""" if self.intf_info: self.existing["interface"] = self.intf_info["ifName"] self.existing["mode"] = self.intf_info["linkType"] self.existing["switchport"] = self.intf_info["l2Enable"] self.existing['access_vlan'] = self.intf_info["pvid"] self.existing['native_vlan'] = self.intf_info["pvid"] self.existing['trunk_vlans'] = self.intf_info["trunkVlans"] def get_end_state(self): """get end state info""" if self.intf_info: end_info = self.get_interface_dict(self.interface) if end_info: self.end_state["interface"] = end_info["ifName"] self.end_state["mode"] = end_info["linkType"] self.end_state["switchport"] = end_info["l2Enable"] self.end_state['access_vlan'] = end_info["pvid"] self.end_state['native_vlan'] = end_info["pvid"] self.end_state['trunk_vlans'] = end_info["trunkVlans"] def work(self): """worker""" self.check_params() if not self.intf_info: self.module.fail_json(msg='Error: interface does not exist.') self.get_existing() self.get_proposed() # present or absent if self.state == "present" or self.state == "absent": if self.mode == "access": self.merge_access_vlan(self.interface, self.access_vlan) elif self.mode == "trunk": self.merge_trunk_vlan( self.interface, self.native_vlan, self.trunk_vlans) # unconfigured else: self.default_switchport(self.interface) self.get_end_state() self.results['changed'] = self.changed self.results['proposed'] = self.proposed self.results['existing'] = self.existing self.results['end_state'] = self.end_state if self.changed: self.results['updates'] = self.updates_cmd else: self.results['updates'] = list() self.module.exit_json(**self.results) def main(): """Module main""" argument_spec = dict( interface=dict(required=True, type='str'), mode=dict(choices=['access', 'trunk'], required=False), access_vlan=dict(type='str', required=False), native_vlan=dict(type='str', required=False), trunk_vlans=dict(type='str', required=False), state=dict(choices=['absent', 'present', 'unconfigured'], default='present') ) argument_spec.update(ce_argument_spec) switchport = SwitchPort(argument_spec) switchport.work() if __name__ == '__main__': main()
gpl-3.0
-5,700,954,182,553,911,000
33.067081
123
0.553821
false
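vlan_list_to_bitmap() above packs VLAN IDs into 1024 hex digits, four VLANs per digit with the highest bit first. A standalone sketch of the encoding, using // so it also runs under Python 3 (the module's '/' relies on Python 2 integer division):

def vlan_list_to_bitmap(vlanlist):
    bit_int = [0] * 1024
    for vlan in vlanlist:
        vlan = int(vlan)
        j = vlan // 4                    # which hex digit holds this VLAN
        bit_int[j] |= 0x8 >> (vlan % 4)  # which bit inside that digit
    return ''.join('%x' % b for b in bit_int)

print(vlan_list_to_bitmap([1, 2, 10])[:4])  # '6020': VLANs 1,2 -> digit 0, VLAN 10 -> digit 2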
bradwoo8621/Swift-Study
Instagram/Pods/AVOSCloudCrashReporting/Breakpad/src/tools/gyp/test/lib/TestCommon.py
307
21397
""" TestCommon.py: a testing framework for commands and scripts with commonly useful error handling The TestCommon module provides a simple, high-level interface for writing tests of executable commands and scripts, especially commands and scripts that interact with the file system. All methods throw exceptions and exit on failure, with useful error messages. This makes a number of explicit checks unnecessary, making the test scripts themselves simpler to write and easier to read. The TestCommon class is a subclass of the TestCmd class. In essence, TestCommon is a wrapper that handles common TestCmd error conditions in useful ways. You can use TestCommon directly, or subclass it for your program and add additional (or override) methods to tailor it to your program's specific needs. Alternatively, the TestCommon class serves as a useful example of how to define your own TestCmd subclass. As a subclass of TestCmd, TestCommon provides access to all of the variables and methods from the TestCmd module. Consequently, you can use any variable or method documented in the TestCmd module without having to explicitly import TestCmd. A TestCommon environment object is created via the usual invocation: import TestCommon test = TestCommon.TestCommon() You can use all of the TestCmd keyword arguments when instantiating a TestCommon object; see the TestCmd documentation for details. Here is an overview of the methods and keyword arguments that are provided by the TestCommon class: test.must_be_writable('file1', ['file2', ...]) test.must_contain('file', 'required text\n') test.must_contain_all_lines(output, lines, ['title', find]) test.must_contain_any_line(output, lines, ['title', find]) test.must_exist('file1', ['file2', ...]) test.must_match('file', "expected contents\n") test.must_not_be_writable('file1', ['file2', ...]) test.must_not_contain('file', 'banned text\n') test.must_not_contain_any_line(output, lines, ['title', find]) test.must_not_exist('file1', ['file2', ...]) test.run(options = "options to be prepended to arguments", stdout = "expected standard output from the program", stderr = "expected error output from the program", status = expected_status, match = match_function) The TestCommon module also provides the following variables TestCommon.python_executable TestCommon.exe_suffix TestCommon.obj_suffix TestCommon.shobj_prefix TestCommon.shobj_suffix TestCommon.lib_prefix TestCommon.lib_suffix TestCommon.dll_prefix TestCommon.dll_suffix """ # Copyright 2000-2010 Steven Knight # This module is free software, and you may redistribute it and/or modify # it under the same terms as Python itself, so long as this copyright message # and disclaimer are retained in their original form. # # IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, # SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF # THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. # # THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, # AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
__author__ = "Steven Knight <knight at baldmt dot com>" __revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight" __version__ = "0.37" import copy import os import os.path import stat import string import sys import types import UserList from TestCmd import * from TestCmd import __all__ __all__.extend([ 'TestCommon', 'exe_suffix', 'obj_suffix', 'shobj_prefix', 'shobj_suffix', 'lib_prefix', 'lib_suffix', 'dll_prefix', 'dll_suffix', ]) # Variables that describe the prefixes and suffixes on this system. if sys.platform == 'win32': exe_suffix = '.exe' obj_suffix = '.obj' shobj_suffix = '.obj' shobj_prefix = '' lib_prefix = '' lib_suffix = '.lib' dll_prefix = '' dll_suffix = '.dll' elif sys.platform == 'cygwin': exe_suffix = '.exe' obj_suffix = '.o' shobj_suffix = '.os' shobj_prefix = '' lib_prefix = 'lib' lib_suffix = '.a' dll_prefix = '' dll_suffix = '.dll' elif string.find(sys.platform, 'irix') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.o' shobj_prefix = '' lib_prefix = 'lib' lib_suffix = '.a' dll_prefix = 'lib' dll_suffix = '.so' elif string.find(sys.platform, 'darwin') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.os' shobj_prefix = '' lib_prefix = 'lib' lib_suffix = '.a' dll_prefix = 'lib' dll_suffix = '.dylib' elif string.find(sys.platform, 'sunos') != -1: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.os' shobj_prefix = 'so_' lib_prefix = 'lib' lib_suffix = '.a' dll_prefix = 'lib' dll_suffix = '.dylib' else: exe_suffix = '' obj_suffix = '.o' shobj_suffix = '.os' shobj_prefix = '' lib_prefix = 'lib' lib_suffix = '.a' dll_prefix = 'lib' dll_suffix = '.so' def is_List(e): return type(e) is types.ListType \ or isinstance(e, UserList.UserList) def is_writable(f): mode = os.stat(f)[stat.ST_MODE] return mode & stat.S_IWUSR def separate_files(flist): existing = [] missing = [] for f in flist: if os.path.exists(f): existing.append(f) else: missing.append(f) return existing, missing def _failed(self, status = 0): if self.status is None or status is None: return None try: return _status(self) not in status except TypeError: # status wasn't an iterable return _status(self) != status def _status(self): return self.status class TestCommon(TestCmd): # Additional methods from the Perl Test::Cmd::Common module # that we may wish to add in the future: # # $test->subdir('subdir', ...); # # $test->copy('src_file', 'dst_file'); def __init__(self, **kw): """Initialize a new TestCommon instance. This involves just calling the base class initialization, and then changing directory to the workdir. """ apply(TestCmd.__init__, [self], kw) os.chdir(self.workdir) def must_be_writable(self, *files): """Ensures that the specified file(s) exist and are writable. An individual file can be specified as a list of directory names, in which case the pathname will be constructed by concatenating them. Exits FAILED if any of the files does not exist or is not writable. """ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) existing, missing = separate_files(files) unwritable = filter(lambda x, iw=is_writable: not iw(x), existing) if missing: print "Missing files: `%s'" % string.join(missing, "', `") if unwritable: print "Unwritable files: `%s'" % string.join(unwritable, "', `") self.fail_test(missing + unwritable) def must_contain(self, file, required, mode = 'rb'): """Ensures that the specified file contains the required text. 
""" file_contents = self.read(file, mode) contains = (string.find(file_contents, required) != -1) if not contains: print "File `%s' does not contain required string." % file print self.banner('Required string ') print required print self.banner('%s contents ' % file) print file_contents self.fail_test(not contains) def must_contain_all_lines(self, output, lines, title=None, find=None): """Ensures that the specified output string (first argument) contains all of the specified lines (second argument). An optional third argument can be used to describe the type of output being searched, and only shows up in failure output. An optional fourth argument can be used to supply a different function, of the form "find(line, output), to use when searching for lines in the output. """ if find is None: find = lambda o, l: string.find(o, l) != -1 missing = [] for line in lines: if not find(output, line): missing.append(line) if missing: if title is None: title = 'output' sys.stdout.write("Missing expected lines from %s:\n" % title) for line in missing: sys.stdout.write(' ' + repr(line) + '\n') sys.stdout.write(self.banner(title + ' ')) sys.stdout.write(output) self.fail_test() def must_contain_any_line(self, output, lines, title=None, find=None): """Ensures that the specified output string (first argument) contains at least one of the specified lines (second argument). An optional third argument can be used to describe the type of output being searched, and only shows up in failure output. An optional fourth argument can be used to supply a different function, of the form "find(line, output), to use when searching for lines in the output. """ if find is None: find = lambda o, l: string.find(o, l) != -1 for line in lines: if find(output, line): return if title is None: title = 'output' sys.stdout.write("Missing any expected line from %s:\n" % title) for line in lines: sys.stdout.write(' ' + repr(line) + '\n') sys.stdout.write(self.banner(title + ' ')) sys.stdout.write(output) self.fail_test() def must_contain_lines(self, lines, output, title=None): # Deprecated; retain for backwards compatibility. return self.must_contain_all_lines(output, lines, title) def must_exist(self, *files): """Ensures that the specified file(s) must exist. An individual file be specified as a list of directory names, in which case the pathname will be constructed by concatenating them. Exits FAILED if any of the files does not exist. """ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) missing = filter(lambda x: not os.path.exists(x), files) if missing: print "Missing files: `%s'" % string.join(missing, "', `") self.fail_test(missing) def must_match(self, file, expect, mode = 'rb'): """Matches the contents of the specified file (first argument) against the expected contents (second argument). The expected contents are a list of lines or a string which will be split on newlines. """ file_contents = self.read(file, mode) try: self.fail_test(not self.match(file_contents, expect)) except KeyboardInterrupt: raise except: print "Unexpected contents of `%s'" % file self.diff(expect, file_contents, 'contents ') raise def must_not_contain(self, file, banned, mode = 'rb'): """Ensures that the specified file doesn't contain the banned text. """ file_contents = self.read(file, mode) contains = (string.find(file_contents, banned) != -1) if contains: print "File `%s' contains banned string." 
% file print self.banner('Banned string ') print banned print self.banner('%s contents ' % file) print file_contents self.fail_test(contains) def must_not_contain_any_line(self, output, lines, title=None, find=None): """Ensures that the specified output string (first argument) does not contain any of the specified lines (second argument). An optional third argument can be used to describe the type of output being searched, and only shows up in failure output. An optional fourth argument can be used to supply a different function, of the form "find(line, output), to use when searching for lines in the output. """ if find is None: find = lambda o, l: string.find(o, l) != -1 unexpected = [] for line in lines: if find(output, line): unexpected.append(line) if unexpected: if title is None: title = 'output' sys.stdout.write("Unexpected lines in %s:\n" % title) for line in unexpected: sys.stdout.write(' ' + repr(line) + '\n') sys.stdout.write(self.banner(title + ' ')) sys.stdout.write(output) self.fail_test() def must_not_contain_lines(self, lines, output, title=None): return self.must_not_contain_any_line(output, lines, title) def must_not_exist(self, *files): """Ensures that the specified file(s) must not exist. An individual file be specified as a list of directory names, in which case the pathname will be constructed by concatenating them. Exits FAILED if any of the files exists. """ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) existing = filter(os.path.exists, files) if existing: print "Unexpected files exist: `%s'" % string.join(existing, "', `") self.fail_test(existing) def must_not_be_writable(self, *files): """Ensures that the specified file(s) exist and are not writable. An individual file can be specified as a list of directory names, in which case the pathname will be constructed by concatenating them. Exits FAILED if any of the files does not exist or is writable. """ files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files) existing, missing = separate_files(files) writable = filter(is_writable, existing) if missing: print "Missing files: `%s'" % string.join(missing, "', `") if writable: print "Writable files: `%s'" % string.join(writable, "', `") self.fail_test(missing + writable) def _complete(self, actual_stdout, expected_stdout, actual_stderr, expected_stderr, status, match): """ Post-processes running a subcommand, checking for failure status and displaying output appropriately. """ if _failed(self, status): expect = '' if status != 0: expect = " (expected %s)" % str(status) print "%s returned %s%s" % (self.program, str(_status(self)), expect) print self.banner('STDOUT ') print actual_stdout print self.banner('STDERR ') print actual_stderr self.fail_test() if not expected_stdout is None and not match(actual_stdout, expected_stdout): self.diff(expected_stdout, actual_stdout, 'STDOUT ') if actual_stderr: print self.banner('STDERR ') print actual_stderr self.fail_test() if not expected_stderr is None and not match(actual_stderr, expected_stderr): print self.banner('STDOUT ') print actual_stdout self.diff(expected_stderr, actual_stderr, 'STDERR ') self.fail_test() def start(self, program = None, interpreter = None, arguments = None, universal_newlines = None, **kw): """ Starts a program or script for the test environment. This handles the "options" keyword argument and exceptions. 
""" options = kw.pop('options', None) if options: if arguments is None: arguments = options else: arguments = options + " " + arguments try: return apply(TestCmd.start, (self, program, interpreter, arguments, universal_newlines), kw) except KeyboardInterrupt: raise except Exception, e: print self.banner('STDOUT ') try: print self.stdout() except IndexError: pass print self.banner('STDERR ') try: print self.stderr() except IndexError: pass cmd_args = self.command_args(program, interpreter, arguments) sys.stderr.write('Exception trying to execute: %s\n' % cmd_args) raise e def finish(self, popen, stdout = None, stderr = '', status = 0, **kw): """ Finishes and waits for the process being run under control of the specified popen argument. Additional arguments are similar to those of the run() method: stdout The expected standard output from the command. A value of None means don't test standard output. stderr The expected error output from the command. A value of None means don't test error output. status The expected exit status from the command. A value of None means don't test exit status. """ apply(TestCmd.finish, (self, popen,), kw) match = kw.get('match', self.match) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) def run(self, options = None, arguments = None, stdout = None, stderr = '', status = 0, **kw): """Runs the program under test, checking that the test succeeded. The arguments are the same as the base TestCmd.run() method, with the addition of: options Extra options that get appended to the beginning of the arguments. stdout The expected standard output from the command. A value of None means don't test standard output. stderr The expected error output from the command. A value of None means don't test error output. status The expected exit status from the command. A value of None means don't test exit status. By default, this expects a successful exit (status = 0), does not test standard output (stdout = None), and expects that error output is empty (stderr = ""). """ if options: if arguments is None: arguments = options else: arguments = options + " " + arguments kw['arguments'] = arguments match = kw.pop('match', self.match) apply(TestCmd.run, [self], kw) self._complete(self.stdout(), stdout, self.stderr(), stderr, status, match) def skip_test(self, message="Skipping test.\n"): """Skips a test. Proper test-skipping behavior is dependent on the external TESTCOMMON_PASS_SKIPS environment variable. If set, we treat the skip as a PASS (exit 0), and otherwise treat it as NO RESULT. In either case, we print the specified message as an indication that the substance of the test was skipped. (This was originally added to support development under Aegis. Technically, skipping a test is a NO RESULT, but Aegis would treat that as a test failure and prevent the change from going to the next step. Since we ddn't want to force anyone using Aegis to have to install absolutely every tool used by the tests, we would actually report to Aegis that a skipped test has PASSED so that the workflow isn't held up.) """ if message: sys.stdout.write(message) sys.stdout.flush() pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS') if pass_skips in [None, 0, '0']: # skip=1 means skip this function when showing where this # result came from. They only care about the line where the # script called test.skip_test(), not the line number where # we call test.no_result(). 
self.no_result(skip=1) else: # We're under the development directory for this change, # so this is an Aegis invocation; pass the test (exit 0). self.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
-6,213,192,701,117,661,000
36.538596
85
0.597373
false
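A minimal usage sketch following the module docstring; 'my_program' and 'output.txt' are placeholders for the command and artifact under test:

import TestCommon

test = TestCommon.TestCommon(program='my_program', workdir='')
test.run(arguments='--generate output.txt', stdout=None, stderr='', status=0)
test.must_exist('output.txt')
test.pass_test()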
ogenstad/ansible
lib/ansible/modules/cloud/amazon/s3_sync.py
44
19602
#!/usr/bin/python # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: s3_sync short_description: Efficiently upload multiple files to S3 description: - The S3 module is great, but it is very slow for a large volume of files- even a dozen will be noticeable. In addition to speed, it handles globbing, inclusions/exclusions, mime types, expiration mapping, recursion, cache control and smart directory mapping. version_added: "2.3" options: mode: description: - sync direction. required: true default: 'push' choices: [ push ] file_change_strategy: description: - Difference determination method to allow changes-only syncing. Unlike rsync, files are not patched- they are fully skipped or fully uploaded. - date_size will upload if file sizes don't match or if local file modified date is newer than s3's version - checksum will compare etag values based on s3's implementation of chunked md5s. - force will always upload all files. required: false default: 'date_size' choices: [ force, checksum, date_size ] bucket: description: - Bucket name. required: true key_prefix: description: - In addition to file path, prepend s3 path with this prefix. Module will add slash at end of prefix if necessary. required: false file_root: description: - File/directory path for synchronization. This is a local path. - This root path is scrubbed from the key name, so subdirectories will remain as keys. required: true permission: description: - Canned ACL to apply to synced files. - Changing this ACL only changes newly synced files, it does not trigger a full reupload. required: false choices: [ '', private, public-read, public-read-write, authenticated-read, aws-exec-read, bucket-owner-read, bucket-owner-full-control ] mime_map: description: - > Dict entry from extension to MIME type. This will override any default/sniffed MIME type. For example C({".txt": "application/text", ".yml": "application/text"}) required: false include: description: - Shell pattern-style file matching. - Used before exclude to determine eligible files (for instance, only "*.gif") - For multiple patterns, comma-separate them. required: false default: "*" exclude: description: - Shell pattern-style file matching. - Used after include to remove files (for instance, skip "*.txt") - For multiple patterns, comma-separate them. required: false default: ".*" cache_control: description: - This is a string. - Cache-Control header set on uploaded objects. - Directives are separated by commmas. required: false version_added: "2.4" delete: description: - Remove remote files that exist in bucket but are not present in the file root. 
required: false default: no version_added: "2.4" requirements: - boto3 >= 1.4.4 - botocore - python-dateutil author: tedder extends_documentation_fragment: - aws - ec2 ''' EXAMPLES = ''' - name: basic upload s3_sync: bucket: tedder file_root: roles/s3/files/ - name: all the options s3_sync: bucket: tedder file_root: roles/s3/files mime_map: .yml: application/text .json: application/text key_prefix: config_files/web file_change_strategy: force permission: public-read cache_control: "public, max-age=31536000" include: "*" exclude: "*.txt,.*" ''' RETURN = ''' filelist_initial: description: file listing (dicts) from initial globbing returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "modified_epoch": 1477416706 }] filelist_local_etag: description: file listing (dicts) including calculated local etag returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "mime_type": "application/json", "modified_epoch": 1477416706, "s3_path": "s3sync/policy.json" }] filelist_s3: description: file listing (dicts) including information about previously-uploaded versions returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "mime_type": "application/json", "modified_epoch": 1477416706, "s3_path": "s3sync/policy.json" }] filelist_typed: description: file listing (dicts) with calculated or overridden mime types returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "mime_type": "application/json", "modified_epoch": 1477416706 }] filelist_actionable: description: file listing (dicts) of files that will be uploaded after the strategy decision returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "mime_type": "application/json", "modified_epoch": 1477931256, "s3_path": "s3sync/policy.json", "whysize": "151 / 151", "whytime": "1477931256 / 1477929260" }] uploaded: description: file listing (dicts) of files that were actually uploaded returned: always type: list sample: [{ "bytes": 151, "chopped_path": "policy.json", "fullpath": "roles/cf/files/policy.json", "s3_path": "s3sync/policy.json", "whysize": "151 / 151", "whytime": "1477931637 / 1477931489" }] ''' import datetime import fnmatch import hashlib import mimetypes import os import stat as osstat # os.stat constants import traceback # import module snippets from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ec2 import camel_dict_to_snake_dict, ec2_argument_spec, boto3_conn, get_aws_connection_info, HAS_BOTO3, boto_exception from ansible.module_utils._text import to_text try: from dateutil import tz HAS_DATEUTIL = True except ImportError: HAS_DATEUTIL = False try: import botocore except ImportError: # Handled by imported HAS_BOTO3 pass # the following function, calculate_multipart_etag, is from tlastowka # on github and is used under its (compatible) GPL license. So this # license applies to the following function.
# source: https://github.com/tlastowka/calculate_multipart_etag/blob/master/calculate_multipart_etag.py # # calculate_multipart_etag Copyright (C) 2015 # Tony Lastowka <tlastowka at gmail dot com> # https://github.com/tlastowka # # # calculate_multipart_etag is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # calculate_multipart_etag is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with calculate_multipart_etag. If not, see <http://www.gnu.org/licenses/>. DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024 def calculate_multipart_etag(source_path, chunk_size=DEFAULT_CHUNK_SIZE): """ calculates a multipart upload etag for amazon s3 Arguments: source_path -- The file to calculate the etag for chunk_size -- The chunk size to calculate for. """ md5s = [] with open(source_path, 'rb') as fp: while True: data = fp.read(chunk_size) if not data: break md5s.append(hashlib.md5(data)) if len(md5s) == 1: new_etag = '"{0}"'.format(md5s[0].hexdigest()) else: # > 1 digests = b"".join(m.digest() for m in md5s) new_md5 = hashlib.md5(digests) new_etag = '"{0}-{1}"'.format(new_md5.hexdigest(), len(md5s)) return new_etag def gather_files(fileroot, include=None, exclude=None): ret = [] for (dirpath, dirnames, filenames) in os.walk(fileroot): for fn in filenames: fullpath = os.path.join(dirpath, fn) # include/exclude if include: found = False for x in include.split(','): if fnmatch.fnmatch(fn, x): found = True if not found: # not on the include list, so we don't want it. continue if exclude: found = False for x in exclude.split(','): if fnmatch.fnmatch(fn, x): found = True if found: # skip it, even if previously included. continue chopped_path = os.path.relpath(fullpath, start=fileroot) fstat = os.stat(fullpath) f_size = fstat[osstat.ST_SIZE] f_modified_epoch = fstat[osstat.ST_MTIME] ret.append({ 'fullpath': fullpath, 'chopped_path': chopped_path, 'modified_epoch': f_modified_epoch, 'bytes': f_size, }) # dirpath = path *to* the directory # dirnames = subdirs *in* our directory # filenames return ret def calculate_s3_path(filelist, key_prefix=''): ret = [] for fileentry in filelist: # don't modify the input dict retentry = fileentry.copy() retentry['s3_path'] = os.path.join(key_prefix, fileentry['chopped_path']) ret.append(retentry) return ret def calculate_local_etag(filelist, key_prefix=''): '''Really, "calculate md5", but since AWS uses their own format, we'll just call it a "local etag". TODO optimization: only calculate if remote key exists.''' ret = [] for fileentry in filelist: # don't modify the input dict retentry = fileentry.copy() retentry['local_etag'] = calculate_multipart_etag(fileentry['fullpath']) ret.append(retentry) return ret def determine_mimetypes(filelist, override_map): ret = [] for fileentry in filelist: retentry = fileentry.copy() localfile = fileentry['fullpath'] # reminder: file extension is '.txt', not 'txt'. _, file_extension = os.path.splitext(localfile) if override_map and override_map.get(file_extension): # override? use it. 
retentry['mime_type'] = override_map[file_extension] else: # else sniff it retentry['mime_type'], retentry['encoding'] = mimetypes.guess_type(localfile, strict=False) # might be None or '' from one of the above. Not a great type but better than nothing. if not retentry['mime_type']: retentry['mime_type'] = 'application/octet-stream' ret.append(retentry) return ret def head_s3(s3, bucket, s3keys): retkeys = [] for entry in s3keys: retentry = entry.copy() # don't modify the input dict try: retentry['s3_head'] = s3.head_object(Bucket=bucket, Key=entry['s3_path']) except botocore.exceptions.ClientError as err: if (hasattr(err, 'response') and 'ResponseMetadata' in err.response and 'HTTPStatusCode' in err.response['ResponseMetadata'] and str(err.response['ResponseMetadata']['HTTPStatusCode']) == '404'): pass else: raise Exception(err) # error_msg = boto_exception(err) # return {'error': error_msg} retkeys.append(retentry) return retkeys def filter_list(s3, bucket, s3filelist, strategy): keeplist = list(s3filelist) for e in keeplist: e['_strategy'] = strategy # init/fetch info from S3 if we're going to use it for comparisons if not strategy == 'force': keeplist = head_s3(s3, bucket, s3filelist) # now actually run the strategies if strategy == 'checksum': for entry in keeplist: if entry.get('s3_head'): # since we have a remote s3 object, compare the values. if entry['s3_head']['ETag'] == entry['local_etag']: # files match, so remove the entry entry['skip_flag'] = True else: # file etags don't match, keep the entry. pass else: # we don't have an etag, so we'll keep it. pass elif strategy == 'date_size': for entry in keeplist: if entry.get('s3_head'): # fstat = entry['stat'] local_modified_epoch = entry['modified_epoch'] local_size = entry['bytes'] # py2's datetime doesn't have a timestamp() field, so we have to revert to something more awkward. # remote_modified_epoch = entry['s3_head']['LastModified'].timestamp() remote_modified_datetime = entry['s3_head']['LastModified'] delta = (remote_modified_datetime - datetime.datetime(1970, 1, 1, tzinfo=tz.tzutc())) remote_modified_epoch = delta.seconds + (delta.days * 86400) remote_size = entry['s3_head']['ContentLength'] entry['whytime'] = '{0} / {1}'.format(local_modified_epoch, remote_modified_epoch) entry['whysize'] = '{0} / {1}'.format(local_size, remote_size) if local_modified_epoch <= remote_modified_epoch and local_size == remote_size: entry['skip_flag'] = True else: entry['why'] = "no s3_head" # else: probably 'force'. Basically we don't skip with any other strategies. else: pass # prune 'please skip' entries, if any.
return [x for x in keeplist if not x.get('skip_flag')] def upload_files(s3, bucket, filelist, params): ret = [] for entry in filelist: args = { 'ContentType': entry['mime_type'] } if params.get('permission'): args['ACL'] = params['permission'] if params.get('cache_control'): args['CacheControl'] = params['cache_control'] # if this fails exception is caught in main() s3.upload_file(entry['fullpath'], bucket, entry['s3_path'], ExtraArgs=args, Callback=None, Config=None) ret.append(entry) return ret def remove_files(s3, sourcelist, params): bucket = params.get('bucket') key_prefix = params.get('key_prefix') paginator = s3.get_paginator('list_objects_v2') current_keys = set(x['Key'] for x in paginator.paginate(Bucket=bucket, Prefix=key_prefix).build_full_result().get('Contents', [])) keep_keys = set(to_text(source_file['s3_path']) for source_file in sourcelist) delete_keys = list(current_keys - keep_keys) # can delete 1000 objects at a time groups_of_keys = [delete_keys[i:i + 1000] for i in range(0, len(delete_keys), 1000)] for keys in groups_of_keys: s3.delete_objects(Bucket=bucket, Delete={'Objects': [{'Key': key} for key in keys]}) return delete_keys def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( mode=dict(choices=['push'], default='push'), file_change_strategy=dict(choices=['force', 'date_size', 'checksum'], default='date_size'), bucket=dict(required=True), key_prefix=dict(required=False, default=''), file_root=dict(required=True, type='path'), permission=dict(required=False, choices=['private', 'public-read', 'public-read-write', 'authenticated-read', 'aws-exec-read', 'bucket-owner-read', 'bucket-owner-full-control']), retries=dict(required=False), mime_map=dict(required=False, type='dict'), exclude=dict(required=False, default=".*"), include=dict(required=False, default="*"), cache_control=dict(required=False, default=''), delete=dict(required=False, type='bool', default=False), # future options: encoding, metadata, storage_class, retries ) ) module = AnsibleModule( argument_spec=argument_spec, ) if not HAS_DATEUTIL: module.fail_json(msg='dateutil required for this module') if not HAS_BOTO3: module.fail_json(msg='boto3 required for this module') result = {} mode = module.params['mode'] region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True) if not region: module.fail_json(msg="Region must be specified") s3 = boto3_conn(module, conn_type='client', resource='s3', region=region, endpoint=ec2_url, **aws_connect_kwargs) if mode == 'push': try: result['filelist_initial'] = gather_files(module.params['file_root'], exclude=module.params['exclude'], include=module.params['include']) result['filelist_typed'] = determine_mimetypes(result['filelist_initial'], module.params.get('mime_map')) result['filelist_s3'] = calculate_s3_path(result['filelist_typed'], module.params['key_prefix']) result['filelist_local_etag'] = calculate_local_etag(result['filelist_s3']) result['filelist_actionable'] = filter_list(s3, module.params['bucket'], result['filelist_local_etag'], module.params['file_change_strategy']) result['uploads'] = upload_files(s3, module.params['bucket'], result['filelist_actionable'], module.params) if module.params['delete']: result['removed'] = remove_files(s3, result['filelist_local_etag'], module.params) # mark changed if we actually upload something. 
if result.get('uploads') or result.get('removed'): result['changed'] = True # result.update(filelist=actionable_filelist) except botocore.exceptions.ClientError as err: error_msg = boto_exception(err) module.fail_json(msg=error_msg, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response)) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
-3,167,043,720,475,284,500
35.033088
155
0.615294
false
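The date_size strategy in s3_sync above converts S3's timezone-aware LastModified into an epoch by hand, because Python 2's datetime has no timestamp() method. A minimal self-contained sketch of that conversion; the datetime value is illustrative only (chosen so the result matches the 1477416706 sample epoch from the module's RETURN docs):

import datetime
from dateutil import tz

# S3 returns LastModified as a timezone-aware datetime; an illustrative value:
remote_modified_datetime = datetime.datetime(2016, 10, 25, 17, 31, 46, tzinfo=tz.tzutc())

# Subtracting the UTC epoch yields a timedelta. timedelta.seconds only holds
# the sub-day remainder, so the whole days must be folded back in explicitly.
delta = remote_modified_datetime - datetime.datetime(1970, 1, 1, tzinfo=tz.tzutc())
remote_modified_epoch = delta.seconds + (delta.days * 86400)

print(remote_modified_epoch)  # 1477416706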
yangleo/cloud-github
openstack_dashboard/dashboards/admin/hypervisors/compute/tabs.py
57
1309
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import tabs from openstack_dashboard.api import nova from openstack_dashboard.dashboards.admin.hypervisors.compute import tables class ComputeHostTab(tabs.TableTab): table_classes = (tables.ComputeHostTable,) name = _("Compute Host") slug = "compute_host" template_name = "horizon/common/_detail_table.html" def get_compute_host_data(self): try: return nova.service_list(self.tab_group.request, binary='nova-compute') except Exception: msg = _('Unable to get nova services list.') exceptions.handle(self.tab_group.request, msg) return []
apache-2.0
4,367,786,961,389,526,500
36.4
75
0.703591
false
dashpay/electrum-dash
plugins/greenaddress_instant/qt.py
12
3985
#!/usr/bin/env python # # Electrum - lightweight Bitcoin client # Copyright (C) 2014 Thomas Voegtlin # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files # (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, # publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import base64 import urllib import sys import requests from PyQt4.QtGui import QApplication, QPushButton from electrum.plugins import BasePlugin, hook from electrum.i18n import _ class Plugin(BasePlugin): button_label = _("Verify GA instant") @hook def transaction_dialog(self, d): d.verify_button = QPushButton(self.button_label) d.verify_button.clicked.connect(lambda: self.do_verify(d)) d.buttons.insert(0, d.verify_button) self.transaction_dialog_update(d) def get_my_addr(self, d): """Returns the address for given tx which can be used to request instant confirmation verification from GreenAddress""" for addr, _ in d.tx.get_outputs(): if d.wallet.is_mine(addr): return addr return None @hook def transaction_dialog_update(self, d): if d.tx.is_complete() and self.get_my_addr(d): d.verify_button.show() else: d.verify_button.hide() def do_verify(self, d): tx = d.tx wallet = d.wallet window = d.parent # 1. get the password and sign the verification request password = None if wallet.use_encryption: msg = _('GreenAddress requires your signature \n' 'to verify that transaction is instant.\n' 'Please enter your password to sign a\n' 'verification request.') password = window.password_dialog(msg, parent=d) if not password: return try: d.verify_button.setText(_('Verifying...')) QApplication.processEvents() # update the button label addr = self.get_my_addr(d) message = "Please verify if %s is GreenAddress instant confirmed" % tx.hash() sig = wallet.sign_message(addr, message, password) sig = base64.b64encode(sig) # 2. send the request response = requests.request("GET", ("https://greenaddress.it/verify/?signature=%s&txhash=%s" % (urllib.quote(sig), tx.hash())), headers = {'User-Agent': 'Electrum'}) response = response.json() # 3. display the result if response.get('verified'): d.show_message(_('%s is covered by GreenAddress instant confirmation') % (tx.hash()), title=_('Verification successful!')) else: d.show_critical(_('%s is not covered by GreenAddress instant confirmation') % (tx.hash()), title=_('Verification failed!')) except BaseException as e: import traceback traceback.print_exc(file=sys.stdout) d.show_error(str(e)) finally: d.verify_button.setText(self.button_label)
mit
-5,453,817,667,949,914,000
38.068627
139
0.640151
false
endorphinl/horizon-fork
openstack_dashboard/enabled/_1000_project.py
21
1201
# Copyright 2015, Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # The slug of the dashboard to be added to HORIZON['dashboards']. Required. DASHBOARD = 'project' # If set to True, this dashboard will be set as the default dashboard. DEFAULT = True # A dictionary of exception classes to be added to HORIZON['exceptions']. ADD_EXCEPTIONS = {} # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['openstack_dashboard.dashboards.project'] ADD_ANGULAR_MODULES = [ 'horizon.dashboard.project', ] AUTO_DISCOVER_STATIC_FILES = True ADD_JS_FILES = [] ADD_JS_SPEC_FILES = [] ADD_SCSS_FILES = [ 'dashboard/project/project.scss' ]
apache-2.0
-4,334,131,623,791,000,000
32.361111
75
0.745212
false
mattesno1/CouchPotatoServer
libs/rtorrent/common.py
88
4050
# Copyright (c) 2013 Chris Lucas, <[email protected]> # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. import urlparse import os from rtorrent.compat import is_py3 def bool_to_int(value): """Translates python booleans to RPC-safe integers""" if value is True: return("1") elif value is False: return("0") else: return(value) def cmd_exists(cmds_list, cmd): """Check if given command is in list of available commands @param cmds_list: see L{RTorrent._rpc_methods} @type cmds_list: list @param cmd: name of command to be checked @type cmd: str @return: bool """ return(cmd in cmds_list) def find_torrent(info_hash, torrent_list): """Find torrent file in given list of Torrent classes @param info_hash: info hash of torrent @type info_hash: str @param torrent_list: list of L{Torrent} instances (see L{RTorrent.get_torrents}) @type torrent_list: list @return: L{Torrent} instance, or None if not found """ for t in torrent_list: if t.info_hash == info_hash: return t return None def is_valid_port(port): """Check if given port is valid""" return(0 <= int(port) <= 65535) def convert_version_tuple_to_str(t): return(".".join([str(n) for n in t])) def safe_repr(fmt, *args, **kwargs): """ Formatter that handles unicode arguments """ if not is_py3(): # unicode fmt can take str args, str fmt cannot take unicode args fmt = fmt.decode("utf-8") out = fmt.format(*args, **kwargs) return out.encode("utf-8") else: return fmt.format(*args, **kwargs) def split_path(path): fragments = path.split('/') if len(fragments) == 1: return fragments if not fragments[-1]: return fragments[:-1] return fragments def join_path(base, path): # Return if we have a new absolute path if os.path.isabs(path): return path # non-absolute base encountered if base and not os.path.isabs(base): raise NotImplementedError() return '/'.join(split_path(base) + split_path(path)) def join_uri(base, uri, construct=True): p_uri = urlparse.urlparse(uri) # Return if there is nothing to join if not p_uri.path: return base scheme, netloc, path, params, query, fragment = urlparse.urlparse(base) # Switch to 'uri' parts _, _, _, params, query, fragment = p_uri path = join_path(path, p_uri.path) result = urlparse.ParseResult(scheme, netloc, path, params, query, fragment) if not construct: return result # Construct from parts return urlparse.urlunparse(result) def update_uri(uri, construct=True, **kwargs): if isinstance(uri, urlparse.ParseResult): uri = dict(uri._asdict()) if type(uri) is not dict: raise ValueError("Unknown URI type") uri.update(kwargs) result =
urlparse.ParseResult(**uri) if not construct: return result return urlparse.urlunparse(result)
gpl-3.0
9,194,769,490,991,832,000
25.821192
84
0.670864
false
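join_uri and update_uri above compose and tweak URLs through urlparse.ParseResult. A brief illustrative use, written in Python 2 to match the module's urlparse import; the import path, host, and paths are assumptions for the sketch:

from rtorrent.common import join_uri, update_uri  # import path is an assumption

base = 'https://example.com:5000/rtorrent'  # placeholder URL

# A relative path is appended to the base path:
print join_uri(base, 'RPC2')      # https://example.com:5000/rtorrent/RPC2
# An absolute path replaces the base path outright:
print join_uri(base, '/xmlrpc')   # https://example.com:5000/xmlrpc

# With construct=False a ParseResult is returned, which update_uri accepts
# and lets you rewrite individual components by keyword:
result = join_uri(base, 'RPC2', construct=False)
print update_uri(result, scheme='http')  # http://example.com:5000/rtorrent/RPC2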
zuck/scribee
scribee.py
1
2854
#!/usr/bin/python # -*- coding: utf-8 -*- """This file is part of the Scribee project. """ __author__ = 'Emanuele Bertoldi <[email protected]>' __copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi' __version__ = '0.0.1' import sys import os import fileinput import settings class Scribox(object): def __init__(self): self._inspectors = getattr(settings, "INSPECTORS", {}) self._output_dir = getattr(settings, "OUTPUT_DIR", 'output') self._verbose = getattr(settings, "VERBOSE", False) self._cache_sources = [] def generate(self, sources=[], renderers=getattr(settings, "RENDERERS", {})): from entity import Entity, DocBlock # Clear buffers. Entity.entities = [] DocBlock.blocks = [] self._cache_sources = [] # Start a new generation. print 'SCRIBOX ----- ver %s' % __version__ print '=======================' print 'Searching for entities...' for source in sources: self.parse_file(source) sys.stdout.flush() print 'Found a total of %d entity/ies.' % len(Entity.entities) for format, renderer in renderers.items(): print 'Generating contents in "%s" format...' % format, renderer.render(Entity.entities, self._output_dir) print "Done." print 'Generated %d format/s.' % len(renderers) def parse_file(self, filename=''): filename = filename.replace('\\', '/').replace('//', '/') if filename not in self._cache_sources: self._cache_sources.append(filename) # File not found. if not os.path.exists(filename): return # File. elif os.path.isfile(filename): root, ext = os.path.splitext(filename) # Inspector not found for this extension. if not self._inspectors.has_key(ext): if self._verbose: print "Skipped %s." % filename return inspector = self._inspectors[ext] f = fileinput.input(filename) print "Inspecting %s..." % filename, sys.stdout.flush() new_entities_count = inspector.parse(f) print "Found %d entity/ies." % new_entities_count # Directory. elif os.path.isdir(filename): os.path.walk(filename, self.parse_dir, []) def parse_dir(self, arg, dirname, fnames): for filename in fnames: pathname = '/'.join([dirname, filename]) self.parse_file(pathname) if __name__ == "__main__": s = Scribox() s.generate(sys.argv[1:])
mit
6,355,156,208,067,301,000
32.97619
81
0.522074
false
danieldresser/cortex
python/IECoreHoudini/FnParameterisedHolder.py
12
8982
########################################################################## # # Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios), # its affiliates and/or its licensors. # # Copyright (c) 2010-2013, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of Image Engine Design nor the names of any # other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import warnings import hou import toolutils import IECore import IECoreHoudini class FnParameterisedHolder(): _nodeType = None # create our function set and stash which node we're looking at def __init__(self, node=None): self.__node = node # check this node is still valid def nodeValid(self): if not self.__node: raise Exception("FnParameterisedHolder does not have a node to operate on.") try: p = self.__node.path() return True except hou.ObjectWasDeleted: return False # return the node we're currently wrapping def node(self): return self.__node if self.nodeValid() else None @staticmethod # nodeType: type of node to create (str) # name: desired node name (str) # className: class path to op stub (str) # version: op version, or None for latest (int) # envVarName: environment variable to use as a search path for ops (str) # parent: parent node, or None to create a new /obj geo.
Ignored if contextArgs is used in UI mode (hou.Node) # contextArgs: args related to the creation context, as would come from UI menu interactions (dict) # If empty or not in UI mode, will create a top level OBJ to house the new holder def _doCreate( nodeType, name, className, version=None, envVarName=None, parent=None, contextArgs={} ) : if hou.isUIAvailable() and contextArgs.get( "toolname", "" ) : holder = toolutils.genericTool( contextArgs, nodeType, nodename = name ) else : parent = parent if parent else hou.node( "/obj" ).createNode( "geo", node_name=name, run_init_scripts=False ) holder = parent.createNode( nodeType, node_name=name ) IECoreHoudini.FnParameterisedHolder( holder ).setParameterised( className, version, envVarName ) if contextArgs.get( "shiftclick", False ) : converter = holder.parent().createNode( "ieCortexConverter", node_name = holder.name()+"Converter" ) outputNode = hou.node( contextArgs.get( "outputnodename", "" ) ) toolutils.connectInputsAndOutputs( converter, False, holder, outputNode, 0, 0 ) x, y = holder.position() converter.setPosition( [x,y-1] ) return holder # do we have a valid parameterised instance? def hasParameterised( self ) : return IECoreHoudini._IECoreHoudini._FnParameterisedHolder( self.node() ).hasParameterised() if self.nodeValid() else False # this sets a parameterised object on our node and then updates the parameters def setParameterised( self, classNameOrParameterised, classVersion=None, envVarName=None, updateGui=True ) : if not self.nodeValid() : return if isinstance( classNameOrParameterised, str ) : if classVersion is None or classVersion < 0 : classVersions = IECore.ClassLoader.defaultLoader( envVarName ).versions( classNameOrParameterised ) classVersion = classVersions[-1] if classVersions else 0 IECoreHoudini._IECoreHoudini._FnParameterisedHolder( self.node() ).setParameterised( classNameOrParameterised, classVersion, envVarName ) else : IECoreHoudini._IECoreHoudini._FnParameterisedHolder( self.node() ).setParameterised( classNameOrParameterised ) parameterised = self.getParameterised() if updateGui and parameterised : self.updateParameters( parameterised ) # this returns the parameterised object our node is working with def getParameterised( self ) : return IECoreHoudini._IECoreHoudini._FnParameterisedHolder( self.node() ).getParameterised() if self.hasParameterised() else None def setParameterisedValues( self, time = None ) : time = hou.time() if time is None else time IECoreHoudini._IECoreHoudini._FnParameterisedHolder( self.node() ).setParameterisedValues( time ) # get our list of class names based on matchString def classNames( self ) : if not self.nodeValid() : return [] matchString = self.__node.parm( "__classMatchString" ).eval() searchPathEnvVar = self.__node.parm( "__classSearchPathEnvVar" ).eval() return IECore.ClassLoader.defaultLoader( searchPathEnvVar ).classNames( matchString ) # takes a snapshot of the parameter values & expressions on our node so # that if we change the procedural/op we can restore the parameters afterwards. 
def cacheParameters(self): cached_parameters = {} for p in self.__node.parmTuplesInFolder(['Parameters']): if p.isSpare(): data = {} data['value'] = p.eval() expressions = [] for i in range(len(p)): try: expr = p[i].expression() lang = p[i].expressionLanguage() expressions.append( ( expr, lang ) ) except: expressions.append( ( None, None ) ) data['expressions'] = expressions cached_parameters[p.name()] = data return cached_parameters # restores parameter values/expressions from those cached by cacheParameters def restoreCachedParameters(self, cached): for p in self.__node.parmTuplesInFolder(['Parameters']): if p.name() in cached: cached_data = cached[p.name()] p.set( cached_data['value'] ) for i in range(len(p)): if cached_data['expressions'][i][0]: expr = cached_data['expressions'][i][0] lang = cached_data['expressions'][i][1] p[i].setExpression( expr, lang ) # return the spare parameters under the "Parameters" tab def spareParameters( self, tuples=True ) : result = [] for p in self.__node.spareParms() : if "Parameters" in p.containingFolders() : result.append( p.tuple() if tuples else p ) return result # this method removes all spare parameters from the "Parameters" folder def removeParameters( self ) : if not self.nodeValid() : return spareParms = self.spareParameters() while spareParms : self.__node.removeSpareParmTuple( spareParms[0] ) # this is needed to account for parms removed by a containing folder spareParms = self.spareParameters() # add/remove parameters on our node so we correctly reflect our Procedural def updateParameters( self, parameterised ) : if not self.nodeValid(): return # cache parameters & then remove them cached_parameters = self.cacheParameters() self.removeParameters() if not parameterised: return # get a list of our parm templates by calling createParm on our top-level CompoundParameter # and add them as spare parameters parms = IECoreHoudini.ParmTemplates.createParm( parameterised.parameters(), top_level=True ) parm_names = [] for p in parms: parm_names.append( p['name'] ) parm = self.__node.addSpareParmTuple( p['tuple'], in_folder=p['folder'], create_missing_folders=True ) parm.set( p['initialValue'] ) # restore our cached parameters self.restoreCachedParameters( cached_parameters ) # update the node's parameter evaluation expression # this creates cook dependencies on the parameters expr = "" for p in parm_names: expr += "if parmTuple('%s'):\n\t%s = evalParmTuple('%s')\n" % ( p, p, p ) expr += "return 1" if len(parm_names)==0: expr = "1" eval_parm = self.__node.parm( "__evaluateParameters" ) eval_parm.lock(False) eval_parm.setExpression( expr, language=hou.exprLanguage.Python, replace_expression=True ) eval_parm.lock(True)
bsd-3-clause
931,267,226,262,248,200
38.052174
140
0.709308
false
mateor/pants
tests/python/pants_test/backend/jvm/tasks/test_check_published_deps.py
10
3985
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from textwrap import dedent from pants.backend.jvm.artifact import Artifact from pants.backend.jvm.repository import Repository from pants.backend.jvm.scala_artifact import ScalaArtifact from pants.backend.jvm.targets.jar_dependency import JarDependency from pants.backend.jvm.targets.jar_library import JarLibrary from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.targets.scala_jar_dependency import ScalaJarDependency from pants.backend.jvm.tasks.check_published_deps import CheckPublishedDeps from pants.build_graph.build_file_aliases import BuildFileAliases from pants.build_graph.target import Target from pants_test.tasks.task_test_base import ConsoleTaskTestBase class CheckPublishedDepsTest(ConsoleTaskTestBase): @property def alias_groups(self): return BuildFileAliases( targets={ 'target': Target, 'jar_library': JarLibrary, 'java_library': JavaLibrary, }, objects={ 'artifact': Artifact, 'jar': JarDependency, 'scala_artifact': ScalaArtifact, 'scala_jar': ScalaJarDependency, 'repo': Repository(name='repo', url='http://www.www.com', push_db_basedir=os.path.join(self.build_root, 'repo')), } ) @classmethod def task_type(cls): return CheckPublishedDeps def assert_console_output(self, *args, **kwargs): # Ensure that JarPublish's repos option is set, as CheckPublishedDeps consults it. self.set_options_for_scope('publish.jar', repos={}) return super(CheckPublishedDepsTest, self).assert_console_output(*args, **kwargs) def setUp(self): super(CheckPublishedDepsTest, self).setUp() self.create_file('repo/org.name/lib1/publish.properties', dedent(""" revision.major.org.name%lib1=2 revision.minor.org.name%lib1=0 revision.patch.org.name%lib1=0 revision.sha.org.name%lib1=12345 """)) self.create_file('repo/org.name/lib2/publish.properties', dedent(""" revision.major.org.name%lib2=2 revision.minor.org.name%lib2=0 revision.patch.org.name%lib2=0 revision.sha.org.name%lib2=12345 """)) self.add_to_build_file('provider/BUILD', dedent(""" java_library(name='lib1', provides=artifact( org='org.name', name='lib1', repo=repo), sources=[]) java_library(name='lib2', provides=artifact( org='org.name', name='lib2', repo=repo), sources=[]) """)) self.add_to_build_file('outdated/BUILD', dedent(""" jar_library(name='outdated', jars=[jar(org='org.name', name='lib1', rev='1.0.0')] ) """)) self.add_to_build_file('uptodate/BUILD', dedent(""" jar_library(name='uptodate', jars=[jar(org='org.name', name='lib2', rev='2.0.0')] ) """)) self.add_to_build_file('both/BUILD', dedent(""" target(name='both', dependencies=[ 'outdated', 'uptodate', ] ) """)) def test_all_up_to_date(self): self.assert_console_output( targets=[self.target('uptodate')] ) def test_print_up_to_date_and_outdated(self): self.assert_console_output( 'outdated org.name#lib1 1.0.0 latest 2.0.0', 'up-to-date org.name#lib2 2.0.0', targets=[self.target('both')], options={'print_uptodate': True} ) def test_outdated(self): self.assert_console_output( 'outdated org.name#lib1 1.0.0 latest 2.0.0', targets=[self.target('outdated')] )
apache-2.0
7,411,157,363,367,676,000
32.208333
93
0.631368
false
reachedu14/traininginstitute
coursebuilder/tests/suite.py
4
10911
# Copyright 2013 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Course Builder test suite. This script runs all functional and unit tests in the Course Builder project. Here is how to use the script: - download WebTest Python package from a URL below and put the files in a folder of your choice, for example: tmp/webtest: http://pypi.python.org/packages/source/W/WebTest/WebTest-1.4.2.zip - update your Python path: PYTHONPATH=$PYTHONPATH:/tmp/webtest - invoke this test suite from the command line: # Automatically find and run all Python tests in tests/*. python tests/suite.py # Run only tests matching shell glob *_functional_test.py in tests/*. python tests/suite.py --pattern *_functional_test.py # Run test method baz in unittest.TestCase Bar found in tests/foo.py. python tests/suite.py --test_class_name tests.foo.Bar.baz - review the output to make sure there are no errors or warnings Good luck! """ __author__ = 'Sean Lip' import argparse import os import shutil import signal import subprocess import sys import time import unittest import task_queue import webtest import appengine_config from google.appengine.api.search import simple_search_stub from google.appengine.datastore import datastore_stub_util from google.appengine.ext import testbed _PARSER = argparse.ArgumentParser() _PARSER.add_argument( '--pattern', default='*.py', help='shell pattern for discovering files containing tests', type=str) _PARSER.add_argument( '--test_class_name', help='optional dotted module name of the test(s) to run', type=str) _PARSER.add_argument( '--integration_server_start_cmd', help='script to start an external CB server', type=str) # Base filesystem location for test data. TEST_DATA_BASE = '/tmp/experimental/coursebuilder/test-data/' def empty_environ(): os.environ['AUTH_DOMAIN'] = 'example.com' os.environ['SERVER_NAME'] = 'localhost' os.environ['HTTP_HOST'] = 'localhost' os.environ['SERVER_PORT'] = '8080' os.environ['USER_EMAIL'] = '' os.environ['USER_ID'] = '' os.environ['DEFAULT_VERSION_HOSTNAME'] = ( os.environ['HTTP_HOST'] + ':' + os.environ['SERVER_PORT']) def iterate_tests(test_suite_or_case): """Iterate through all of the test cases in 'test_suite_or_case'.""" try: suite = iter(test_suite_or_case) except TypeError: yield test_suite_or_case else: for test in suite: for subtest in iterate_tests(test): yield subtest class TestBase(unittest.TestCase): """Base class for all Course Builder tests.""" REQUIRES_INTEGRATION_SERVER = 1 INTEGRATION_SERVER_BASE_URL = 'http://localhost:8081' def setUp(self): super(TestBase, self).setUp() # Map of object -> {symbol_string: original_value} self._originals = {} def tearDown(self): self._unswap_all() super(TestBase, self).tearDown() def swap(self, source, symbol, new): # pylint: disable=invalid-name """Swaps out source.symbol for a new value.
Allows swapping of members and methods: myobject.foo = 'original_foo' self.swap(myobject, 'foo', 'bar') self.assertEqual('bar', myobject.foo) myobject.baz() # -> 'original_baz' self.swap(myobject, 'baz', lambda: 'quux') self.assertEqual('quux', myobject.baz()) Swaps are automatically undone in tearDown(). Args: source: object. The source object to swap from. symbol: string. The name of the symbol to swap. new: object. The new value to swap in. """ if source not in self._originals: self._originals[source] = {} if not self._originals[source].get(symbol, None): self._originals[source][symbol] = getattr(source, symbol) setattr(source, symbol, new) # Allow protected method names. pylint: disable=g-bad-name def _unswap_all(self): for source, symbol_to_value in self._originals.iteritems(): for symbol, value in symbol_to_value.iteritems(): setattr(source, symbol, value) def shortDescription(self): """Additional information logged during unittest invocation.""" # Suppress default logging of docstrings. Instead log name/status only. return None class FunctionalTestBase(TestBase): """Base class for functional tests.""" def setUp(self): super(FunctionalTestBase, self).setUp() # e.g. TEST_DATA_BASE/tests/functional/tests/MyTestCase. self.test_tempdir = os.path.join( TEST_DATA_BASE, self.__class__.__module__.replace('.', os.sep), self.__class__.__name__) self.reset_filesystem() def tearDown(self): self.reset_filesystem(remove_only=True) super(FunctionalTestBase, self).tearDown() def reset_filesystem(self, remove_only=False): if os.path.exists(self.test_tempdir): shutil.rmtree(self.test_tempdir) if not remove_only: os.makedirs(self.test_tempdir) class AppEngineTestBase(FunctionalTestBase): """Base class for tests that require App Engine services.""" def getApp(self): # pylint: disable=g-bad-name """Returns the main application to be tested.""" raise Exception('Not implemented.') def setUp(self): # pylint: disable=g-bad-name super(AppEngineTestBase, self).setUp() empty_environ() # setup an app to be tested self.testapp = webtest.TestApp(self.getApp()) self.testbed = testbed.Testbed() self.testbed.activate() # configure datastore policy to emulate instantaneously and globally # consistent HRD; we also patch dev_appserver in main.py to run under # the same policy policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy( probability=1) # declare any relevant App Engine service stubs here self.testbed.init_user_stub() self.testbed.init_memcache_stub() self.testbed.init_datastore_v3_stub(consistency_policy=policy) self.testbed.init_taskqueue_stub() self.taskq = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME) self.testbed.init_urlfetch_stub() self.testbed.init_files_stub() self.testbed.init_blobstore_stub() # TODO(emichael): Fix this when an official stub is created self.testbed._register_stub( 'search', simple_search_stub.SearchServiceStub()) self.task_dispatcher = task_queue.TaskQueueHandlerDispatcher( self.testapp, self.taskq) def tearDown(self): # pylint: disable=g-bad-name self.testbed.deactivate() super(AppEngineTestBase, self).tearDown() def execute_all_deferred_tasks(self, queue_name='default'): """Executes all pending deferred tasks.""" # Outer loop here because some tasks (esp. map/reduce) will enqueue # more tasks as part of their operation. while True: tasks = self.taskq.GetTasks(queue_name) if not tasks: break for task in tasks: self.task_dispatcher.dispatch_task(task) def create_test_suite(parsed_args): """Loads all requested test suites.
By default, loads all unittest.TestCases found under the project root's tests/ directory. Args: parsed_args: argparse.Namespace. Processed command-line arguments. Returns: unittest.TestSuite. The test suite populated with all tests to run. """ loader = unittest.TestLoader() if parsed_args.test_class_name: return loader.loadTestsFromName(parsed_args.test_class_name) else: return loader.discover( os.path.dirname(__file__), pattern=parsed_args.pattern) def start_integration_server(integration_server_start_cmd): print 'Starting external server: %s' % integration_server_start_cmd server = subprocess.Popen(integration_server_start_cmd) time.sleep(3) # Wait for server to start up return server def stop_integration_server(server): server.kill() # dev_appserver.py itself. # The new dev appserver starts a _python_runtime.py process that isn't # captured by start_integration_server and so doesn't get killed. Until it's # done, our tests will never complete so we kill it manually. pid = int(subprocess.Popen( ['pgrep', '-f', '_python_runtime.py'], stdout=subprocess.PIPE ).communicate()[0][:-1]) os.kill(pid, signal.SIGKILL) def fix_sys_path(): """Fix the sys.path to include GAE extra paths.""" import dev_appserver # pylint: disable=C6204 # dev_appserver.fix_sys_path() prepends GAE paths to sys.path and hides # our classes like 'tests' behind other modules that have 'tests'. # Here, unlike dev_appserver, we append the path instead of prepending it, # so that our classes come first. sys.path += dev_appserver.EXTRA_PATHS[:] def main(): """Starts in-process server and runs all test cases in this module.""" fix_sys_path() parsed_args = _PARSER.parse_args() test_suite = create_test_suite(parsed_args) all_tags = set() for test in iterate_tests(test_suite): if hasattr(test, 'TAGS'): all_tags.update(test.TAGS) server = None if TestBase.REQUIRES_INTEGRATION_SERVER in all_tags: server = start_integration_server( parsed_args.integration_server_start_cmd) result = unittest.TextTestRunner(verbosity=2).run(test_suite) if server: stop_integration_server(server) if result.errors or result.failures: raise Exception( 'Test suite failed: %s errors, %s failures of ' ' %s tests run.' % ( len(result.errors), len(result.failures), result.testsRun)) import tests.functional.actions as actions # pylint: disable=g-import-not-at-top count = len(actions.UNIQUE_URLS_FOUND.keys()) result.stream.writeln('INFO: Unique URLs found: %s' % count) result.stream.writeln('INFO: All %s tests PASSED!' % result.testsRun) if __name__ == '__main__': appengine_config.gcb_force_default_encoding('ascii') main()
apache-2.0
-4,305,486,130,554,586,000
34.083601
85
0.659335
false
HiSPARC/station-software
user/python/Lib/site-packages/win32/Demos/service/serviceEvents.py
40
4127
# A Demo of a service that takes advantage of the additional notifications # available in later Windows versions. # Note that all output is written as event log entries - so you must install # and start the service, then look at the event log for messages as events # are generated. # Events are generated for USB device insertion and removal, power state # changes and hardware profile events - so try putting your computer to # sleep and waking it, inserting a memory stick, etc then check the event log import win32serviceutil, win32service import win32event import servicemanager # Most event notification support lives around win32gui import win32gui, win32gui_struct, win32con GUID_DEVINTERFACE_USB_DEVICE = "{A5DCBF10-6530-11D2-901F-00C04FB951ED}" class EventDemoService(win32serviceutil.ServiceFramework): _svc_name_ = "PyServiceEventDemo" _svc_display_name_ = "Python Service Event Demo" _svc_description_ = "Demonstrates a Python service which takes advantage of the extra notifications" def __init__(self, args): win32serviceutil.ServiceFramework.__init__(self, args) self.hWaitStop = win32event.CreateEvent(None, 0, 0, None) # register for a device notification - we pass our service handle # instead of a window handle. filter = win32gui_struct.PackDEV_BROADCAST_DEVICEINTERFACE( GUID_DEVINTERFACE_USB_DEVICE) self.hdn = win32gui.RegisterDeviceNotification(self.ssh, filter, win32con.DEVICE_NOTIFY_SERVICE_HANDLE) # Override the base class so we can accept additional events. def GetAcceptedControls(self): # say we accept them all. rc = win32serviceutil.ServiceFramework.GetAcceptedControls(self) rc |= win32service.SERVICE_ACCEPT_PARAMCHANGE \ | win32service.SERVICE_ACCEPT_NETBINDCHANGE \ | win32service.SERVICE_CONTROL_DEVICEEVENT \ | win32service.SERVICE_ACCEPT_HARDWAREPROFILECHANGE \ | win32service.SERVICE_ACCEPT_POWEREVENT \ | win32service.SERVICE_ACCEPT_SESSIONCHANGE return rc # All extra events are sent via SvcOtherEx (SvcOther remains as a # function taking only the first args for backwards compat) def SvcOtherEx(self, control, event_type, data): # This is only showing a few of the extra events - see the MSDN # docs for "HandlerEx callback" for more info. if control == win32service.SERVICE_CONTROL_DEVICEEVENT: info = win32gui_struct.UnpackDEV_BROADCAST(data) msg = "A device event occurred: %x - %s" % (event_type, info) elif control == win32service.SERVICE_CONTROL_HARDWAREPROFILECHANGE: msg = "A hardware profile changed: type=%s, data=%s" % (event_type, data) elif control == win32service.SERVICE_CONTROL_POWEREVENT: msg = "A power event: setting %s" % data elif control == win32service.SERVICE_CONTROL_SESSIONCHANGE: # data is a single elt tuple, but this could potentially grow # in the future if the win32 struct does msg = "Session event: type=%s, data=%s" % (event_type, data) else: msg = "Other event: code=%d, type=%s, data=%s" \ % (control, event_type, data) servicemanager.LogMsg( servicemanager.EVENTLOG_INFORMATION_TYPE, 0xF000, # generic message (msg, '') ) def SvcStop(self): self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) win32event.SetEvent(self.hWaitStop) def SvcDoRun(self): # do nothing at all - just wait to be stopped win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE) # Write a stop message. servicemanager.LogMsg( servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STOPPED, (self._svc_name_, '') ) if __name__=='__main__': win32serviceutil.HandleCommandLine(EventDemoService)
gpl-3.0
-7,575,453,376,484,556,000
45.897727
104
0.662951
false
jpshort/odoo
addons/pad/py_etherpad/__init__.py
505
7804
"""Module to talk to EtherpadLite API.""" import json import urllib import urllib2 class EtherpadLiteClient: """Client to talk to EtherpadLite API.""" API_VERSION = 1 # TODO probably 1.1 sometime soon CODE_OK = 0 CODE_INVALID_PARAMETERS = 1 CODE_INTERNAL_ERROR = 2 CODE_INVALID_FUNCTION = 3 CODE_INVALID_API_KEY = 4 TIMEOUT = 20 apiKey = "" baseUrl = "http://localhost:9001/api" def __init__(self, apiKey=None, baseUrl=None): if apiKey: self.apiKey = apiKey if baseUrl: self.baseUrl = baseUrl def call(self, function, arguments=None): """Create a dictionary of all parameters""" url = '%s/%d/%s' % (self.baseUrl, self.API_VERSION, function) params = arguments or {} params.update({'apikey': self.apiKey}) data = urllib.urlencode(params, True) try: opener = urllib2.build_opener() request = urllib2.Request(url=url, data=data) response = opener.open(request, timeout=self.TIMEOUT) result = response.read() response.close() except urllib2.HTTPError: raise result = json.loads(result) if result is None: raise ValueError("JSON response could not be decoded") return self.handleResult(result) def handleResult(self, result): """Handle API call result""" if 'code' not in result: raise Exception("API response has no code") if 'message' not in result: raise Exception("API response has no message") if 'data' not in result: result['data'] = None if result['code'] == self.CODE_OK: return result['data'] elif result['code'] == self.CODE_INVALID_PARAMETERS or result['code'] == self.CODE_INVALID_API_KEY: raise ValueError(result['message']) elif result['code'] == self.CODE_INTERNAL_ERROR: raise Exception(result['message']) elif result['code'] == self.CODE_INVALID_FUNCTION: raise Exception(result['message']) else: raise Exception("An unexpected error occurred whilst handling the response") # GROUPS # Pads can belong to a group. There will always be public pads that do not belong to a group (or we give this group the id 0) def createGroup(self): """creates a new group""" return self.call("createGroup") def createGroupIfNotExistsFor(self, groupMapper): """this functions helps you to map your application group ids to etherpad lite group ids""" return self.call("createGroupIfNotExistsFor", { "groupMapper": groupMapper }) def deleteGroup(self, groupID): """deletes a group""" return self.call("deleteGroup", { "groupID": groupID }) def listPads(self, groupID): """returns all pads of this group""" return self.call("listPads", { "groupID": groupID }) def createGroupPad(self, groupID, padName, text=''): """creates a new pad in this group""" params = { "groupID": groupID, "padName": padName, } if text: params['text'] = text return self.call("createGroupPad", params) # AUTHORS # Theses authors are bind to the attributes the users choose (color and name). def createAuthor(self, name=''): """creates a new author""" params = {} if name: params['name'] = name return self.call("createAuthor", params) def createAuthorIfNotExistsFor(self, authorMapper, name=''): """this functions helps you to map your application author ids to etherpad lite author ids""" params = { 'authorMapper': authorMapper } if name: params['name'] = name return self.call("createAuthorIfNotExistsFor", params) # SESSIONS # Sessions can be created between a group and a author. This allows # an author to access more than one group. The sessionID will be set as # a cookie to the client and is valid until a certain date. 
def createSession(self, groupID, authorID, validUntil): """creates a new session""" return self.call("createSession", { "groupID": groupID, "authorID": authorID, "validUntil": validUntil }) def deleteSession(self, sessionID): """deletes a session""" return self.call("deleteSession", { "sessionID": sessionID }) def getSessionInfo(self, sessionID): """returns information about a session""" return self.call("getSessionInfo", { "sessionID": sessionID }) def listSessionsOfGroup(self, groupID): """returns all sessions of a group""" return self.call("listSessionsOfGroup", { "groupID": groupID }) def listSessionsOfAuthor(self, authorID): """returns all sessions of an author""" return self.call("listSessionsOfAuthor", { "authorID": authorID }) # PAD CONTENT # Pad content can be updated and retrieved through the API def getText(self, padID, rev=None): """returns the text of a pad""" params = {"padID": padID} if rev is not None: params['rev'] = rev return self.call("getText", params) # introduced with pull request merge def getHtml(self, padID, rev=None): """returns the html of a pad""" params = {"padID": padID} if rev is not None: params['rev'] = rev return self.call("getHTML", params) def setText(self, padID, text): """sets the text of a pad""" return self.call("setText", { "padID": padID, "text": text }) def setHtml(self, padID, html): """sets the text of a pad from html""" return self.call("setHTML", { "padID": padID, "html": html }) # PAD # Group pads are normal pads, but with the name schema # GROUPID$PADNAME. A security manager controls access to them and it's # forbidden for normal pads to include a $ in the name. def createPad(self, padID, text=''): """creates a new pad""" params = { "padID": padID, } if text: params['text'] = text return self.call("createPad", params) def getRevisionsCount(self, padID): """returns the number of revisions of this pad""" return self.call("getRevisionsCount", { "padID": padID }) def deletePad(self, padID): """deletes a pad""" return self.call("deletePad", { "padID": padID }) def getReadOnlyID(self, padID): """returns the read only link of a pad""" return self.call("getReadOnlyID", { "padID": padID }) def setPublicStatus(self, padID, publicStatus): """sets a boolean for the public status of a pad""" return self.call("setPublicStatus", { "padID": padID, "publicStatus": publicStatus }) def getPublicStatus(self, padID): """returns true or false""" return self.call("getPublicStatus", { "padID": padID }) def setPassword(self, padID, password): """returns ok or an error message""" return self.call("setPassword", { "padID": padID, "password": password }) def isPasswordProtected(self, padID): """returns true or false""" return self.call("isPasswordProtected", { "padID": padID })
agpl-3.0
-3,283,697,430,644,249,600
29.968254
129
0.574193
false
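A short usage sketch of the EtherpadLiteClient above. The import path, API key, and pad name are placeholders, and a local Etherpad Lite instance on port 9001 is assumed; the shape of getText's response follows the Etherpad Lite API, which returns the pad body under the 'text' key:

from py_etherpad import EtherpadLiteClient  # import path is an assumption

# Placeholder key; the real value lives in Etherpad Lite's APIKEY.txt.
client = EtherpadLiteClient(apiKey='EXAMPLEKEY',
                            baseUrl='http://localhost:9001/api')

client.createPad('demo-pad', text='hello')       # new pad with initial text
client.setText('demo-pad', 'updated contents')   # overwrite the pad body
print client.getText('demo-pad')['text']         # -> u'updated contents'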
martynovp/edx-platform
lms/djangoapps/shoppingcart/migrations/0023_auto__add_field_coupon_expiration_date.py
110
18437
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Coupon.expiration_date' db.add_column('shoppingcart_coupon', 'expiration_date', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Coupon.expiration_date' db.delete_column('shoppingcart_coupon', 'expiration_date') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'shoppingcart.certificateitem': { 'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}), 'mode': 
('django.db.models.fields.SlugField', [], {'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.coupon': { 'Meta': {'object_name': 'Coupon'}, 'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 6, 0, 0)'}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, 'shoppingcart.couponredemption': { 'Meta': {'object_name': 'CouponRedemption'}, 'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.courseregcodeitem': { 'Meta': {'object_name': 'CourseRegCodeItem', '_ormbases': ['shoppingcart.OrderItem']}, 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}), 'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.courseregcodeitemannotation': { 'Meta': {'object_name': 'CourseRegCodeItemAnnotation'}, 'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.courseregistrationcode': { 'Meta': {'object_name': 'CourseRegistrationCode'}, 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 6, 0, 0)'}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_order'", 'null': 'True', 'to': "orm['shoppingcart.Order']"}) }, 'shoppingcart.donation': { 'Meta': {'object_name': 'Donation', '_ormbases': ['shoppingcart.OrderItem']}, 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'donation_type': 
('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '32'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.donationconfiguration': { 'Meta': {'object_name': 'DonationConfiguration'}, 'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.invoice': { 'Meta': {'object_name': 'Invoice'}, 'address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'address_line_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'internal_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'total_amount': ('django.db.models.fields.FloatField', [], {}), 'zip': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'}) }, 'shoppingcart.order': { 'Meta': {'object_name': 'Order'}, 'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}), 'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}), 'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}), 'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}), 'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}), 'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'company_contact_email': ('django.db.models.fields.CharField', 
[], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}), 'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order_type': ('django.db.models.fields.CharField', [], {'default': "'personal'", 'max_length': '32'}), 'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.orderitem': { 'Meta': {'object_name': 'OrderItem'}, 'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}), 'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}), 'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. 
Item'", 'max_length': '1024'}), 'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}), 'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}), 'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}), 'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}), 'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'shoppingcart.paidcourseregistration': { 'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']", 'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}), 'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}), 'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'}) }, 'shoppingcart.paidcourseregistrationannotation': { 'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'}, 'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'shoppingcart.registrationcoderedemption': { 'Meta': {'object_name': 'RegistrationCodeRedemption'}, 'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']", 'null': 'True'}), 'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 6, 0, 0)', 'null': 'True'}), 'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"}) }, 'student.courseenrollment': { 'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'}, 'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['auth.User']"}) } } complete_apps = ['shoppingcart']
agpl-3.0
-8,133,023,486,222,825,000
83.191781
182
0.55931
false
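For orientation, the migration above only adds one nullable column to shoppingcart_coupon. A minimal sketch of the model-side declaration it corresponds to follows; the field arguments are copied from the frozen ORM dump above, but the Coupon class itself (and the subset of fields shown) is an illustrative reconstruction, not the actual edx model file.

# Sketch only: Coupon as implied by the frozen ORM above (subset of fields).
# The migration adds the final field; null=True/blank=True keeps existing
# rows valid, matching keep_default=False in forwards().
from django.db import models

class Coupon(models.Model):
    code = models.CharField(max_length=32, db_index=True)
    description = models.CharField(max_length=255, null=True, blank=True)
    percentage_discount = models.IntegerField(default=0)
    is_active = models.BooleanField(default=True)
    expiration_date = models.DateTimeField(null=True, blank=True)  # new column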
synicalsyntax/zulip
zerver/webhooks/statuspage/view.py
4
2145
# Webhooks for external integrations.
from typing import Any, Dict

from django.http import HttpRequest, HttpResponse

from zerver.decorator import REQ, api_key_only_webhook_view, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile

INCIDENT_TEMPLATE = """
**{name}**:
* State: **{state}**
* Description: {content}
""".strip()

COMPONENT_TEMPLATE = "**{name}** has changed status from **{old_status}** to **{new_status}**."

TOPIC_TEMPLATE = '{name}: {description}'

def get_incident_events_body(payload: Dict[str, Any]) -> str:
    return INCIDENT_TEMPLATE.format(
        name = payload["incident"]["name"],
        state = payload["incident"]["status"],
        content = payload["incident"]["incident_updates"][0]["body"],
    )

def get_components_update_body(payload: Dict[str, Any]) -> str:
    return COMPONENT_TEMPLATE.format(
        name = payload["component"]["name"],
        old_status = payload["component_update"]["old_status"],
        new_status = payload["component_update"]["new_status"],
    )

def get_incident_topic(payload: Dict[str, Any]) -> str:
    return TOPIC_TEMPLATE.format(
        name = payload["incident"]["name"],
        description = payload["page"]["status_description"],
    )

def get_component_topic(payload: Dict[str, Any]) -> str:
    return TOPIC_TEMPLATE.format(
        name = payload["component"]["name"],
        description = payload["page"]["status_description"],
    )

@api_key_only_webhook_view('Statuspage')
@has_request_variables
def api_statuspage_webhook(request: HttpRequest, user_profile: UserProfile,
                           payload: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:

    status = payload["page"]["status_indicator"]

    if status == "none":
        topic = get_incident_topic(payload)
        body = get_incident_events_body(payload)
    else:
        topic = get_component_topic(payload)
        body = get_components_update_body(payload)

    check_send_webhook_message(request, user_profile, topic, body)
    return json_success()
apache-2.0
2,309,238,066,771,343,000
33.596774
95
0.6662
false
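To see the dispatch in api_statuspage_webhook concretely, here is a hypothetical trimmed payload exercising the incident branch; real Statuspage payloads carry many more keys, and only the fields the handler actually reads are shown.

payload = {
    "page": {"status_indicator": "none",
             "status_description": "Partial System Outage"},
    "incident": {"name": "Database latency",
                 "status": "investigating",
                 "incident_updates": [{"body": "We are investigating elevated latency."}]},
}
# status_indicator == "none" selects the incident formatters above:
print(get_incident_topic(payload))        # Database latency: Partial System Outage
print(get_incident_events_body(payload))  # **Database latency**: + State/Description lines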
ntts-clo/mld-ryu
ryu/tests/mininet/l3/ip_ttl/test_ip_ttl.py
63
2976
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import struct

from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller import dpset
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ether
from ryu.lib.mac import haddr_to_str

LOG = logging.getLogger(__name__)


class RunTestMininet(app_manager.RyuApp):

    _CONTEXTS = {'dpset': dpset.DPSet}
    OFP_VERSIONS = [ofproto_v1_2.OFP_VERSION]

    def __init__(self, *args, **kwargs):
        super(RunTestMininet, self).__init__(*args, **kwargs)

    def _add_flow(self, dp, match, actions):
        inst = [dp.ofproto_parser.OFPInstructionActions(
            dp.ofproto.OFPIT_APPLY_ACTIONS, actions)]
        mod = dp.ofproto_parser.OFPFlowMod(
            dp, cookie=0, cookie_mask=0, table_id=0,
            command=dp.ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,
            priority=0xff, buffer_id=0xffffffff,
            out_port=dp.ofproto.OFPP_ANY, out_group=dp.ofproto.OFPG_ANY,
            flags=0, match=match, instructions=inst)
        dp.send_msg(mod)

    def _define_flow(self, dp):
        in_port = 1
        out_port = 2
        eth_IP = ether.ETH_TYPE_IP

        # ICMP -> DecNwTtl
        LOG.debug("--- add_flow DecNwTtl")
        match = dp.ofproto_parser.OFPMatch()
        match.set_in_port(in_port)
        match.set_dl_type(eth_IP)
        actions = [dp.ofproto_parser.OFPActionDecNwTtl(),
                   dp.ofproto_parser.OFPActionOutput(out_port, 0)]
        self._add_flow(dp, match, actions)

    @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)
    def handler_datapath(self, ev):
        if ev.enter:
            self._define_flow(ev.dp)

    @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
    def packet_in_handler(self, ev):
        msg = ev.msg
        dst, src, eth_type = struct.unpack_from('!6s6sH', buffer(msg.data), 0)
        in_port = msg.match.fields[0].value

        LOG.info("----------------------------------------")
        LOG.info("* PacketIn")
        LOG.info("in_port=%d, eth_type: %s", in_port, hex(eth_type))
        LOG.info("packet reason=%d buffer_id=%d", msg.reason, msg.buffer_id)
        LOG.info("packet in datapath_id=%s src=%s dst=%s",
                 msg.datapath.id, haddr_to_str(src), haddr_to_str(dst))
apache-2.0
-8,629,637,945,728,682,000
34.428571
78
0.646841
false
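The struct.unpack_from call in packet_in_handler is easy to sanity-check in isolation. This standalone snippet parses a hand-built Ethernet header the same way (the frame bytes are made up for illustration):

import struct

# dst(6 bytes) + src(6 bytes) + ethertype(2 bytes), network byte order
frame = b'\x00\x11\x22\x33\x44\x55' b'\x66\x77\x88\x99\xaa\xbb' b'\x08\x00'
dst, src, eth_type = struct.unpack_from('!6s6sH', frame, 0)
assert eth_type == 0x0800  # ether.ETH_TYPE_IP, the type the installed flow matches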
kybriainfotech/iSocioCRM
addons/website_forum/models/res_users.py
281
5198
# -*- coding: utf-8 -*-

from datetime import datetime
from urllib import urlencode
import hashlib

from openerp import SUPERUSER_ID
from openerp.osv import osv, fields


class Users(osv.Model):
    _inherit = 'res.users'

    def __init__(self, pool, cr):
        init_res = super(Users, self).__init__(pool, cr)
        self.SELF_WRITEABLE_FIELDS = list(set(
            self.SELF_WRITEABLE_FIELDS +
            ['country_id', 'city', 'website', 'website_description', 'website_published']))
        return init_res

    def _get_user_badge_level(self, cr, uid, ids, name, args, context=None):
        """Return total badge per level of users"""
        result = dict.fromkeys(ids, False)
        badge_user_obj = self.pool['gamification.badge.user']
        for id in ids:
            result[id] = {
                'gold_badge': badge_user_obj.search(cr, uid, [('badge_id.level', '=', 'gold'), ('user_id', '=', id)], context=context, count=True),
                'silver_badge': badge_user_obj.search(cr, uid, [('badge_id.level', '=', 'silver'), ('user_id', '=', id)], context=context, count=True),
                'bronze_badge': badge_user_obj.search(cr, uid, [('badge_id.level', '=', 'bronze'), ('user_id', '=', id)], context=context, count=True),
            }
        return result

    _columns = {
        'create_date': fields.datetime('Create Date', select=True, readonly=True),
        'karma': fields.integer('Karma'),
        'badge_ids': fields.one2many('gamification.badge.user', 'user_id', 'Badges'),
        'gold_badge': fields.function(_get_user_badge_level, string="Number of gold badges", type='integer', multi='badge_level'),
        'silver_badge': fields.function(_get_user_badge_level, string="Number of silver badges", type='integer', multi='badge_level'),
        'bronze_badge': fields.function(_get_user_badge_level, string="Number of bronze badges", type='integer', multi='badge_level'),
    }

    _defaults = {
        'karma': 0,
    }

    def _generate_forum_token(self, cr, uid, user_id, email):
        """Return a token for email validation. This token is valid for the day
        and is a hash based on a (secret) uuid generated by the forum module,
        the user_id, the email and currently the day (to be updated if necessary).
        """
        forum_uuid = self.pool.get('ir.config_parameter').get_param(cr, SUPERUSER_ID, 'website_forum.uuid')
        return hashlib.sha256('%s-%s-%s-%s' % (
            datetime.now().replace(hour=0, minute=0, second=0, microsecond=0),
            forum_uuid,
            user_id,
            email)).hexdigest()

    def send_forum_validation_email(self, cr, uid, user_id, forum_id=None, context=None):
        user = self.pool['res.users'].browse(cr, uid, user_id, context=context)
        token = self._generate_forum_token(cr, uid, user_id, user.email)
        activation_template_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'website_forum.validation_email')
        if activation_template_id:
            params = {
                'token': token,
                'id': user_id,
                'email': user.email}
            if forum_id:
                params['forum_id'] = forum_id
            base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
            token_url = base_url + '/forum/validate_email?%s' % urlencode(params)
            tpl_ctx = dict(context, token_url=token_url)
            self.pool['email.template'].send_mail(cr, SUPERUSER_ID, activation_template_id, user_id, force_send=True, context=tpl_ctx)
        return True

    def process_forum_validation_token(self, cr, uid, token, user_id, email, forum_id=None, context=None):
        validation_token = self.pool['res.users']._generate_forum_token(cr, uid, user_id, email)
        user = self.pool['res.users'].browse(cr, SUPERUSER_ID, user_id, context=context)
        if token == validation_token and user.karma == 0:
            karma = 3
            if not forum_id:
                forum_ids = self.pool['forum.forum'].search(cr, uid, [], limit=1, context=context)
                if forum_ids:
                    forum_id = forum_ids[0]
            if forum_id:
                forum = self.pool['forum.forum'].browse(cr, uid, forum_id, context=context)
                # karma gained: karma to ask a question and have 2 downvotes
                karma = forum.karma_ask + (-2 * forum.karma_gen_question_downvote)
            return user.write({'karma': karma})
        return False

    def add_karma(self, cr, uid, ids, karma, context=None):
        for user in self.browse(cr, uid, ids, context=context):
            self.write(cr, uid, [user.id], {'karma': user.karma + karma}, context=context)
        return True

    def get_serialised_gamification_summary(self, cr, uid, excluded_categories=None, context=None):
        if isinstance(excluded_categories, list):
            if 'forum' not in excluded_categories:
                excluded_categories.append('forum')
        else:
            excluded_categories = ['forum']
        return super(Users, self).get_serialised_gamification_summary(cr, uid, excluded_categories=excluded_categories, context=context)
agpl-3.0
3,386,890,195,396,297,700
49.466019
151
0.601
false
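The token scheme documented in _generate_forum_token is self-contained enough to sketch standalone (Python 2, matching the urllib import above). forum_token and forum_uuid below are made-up stand-ins; the real code reads the uuid from the 'website_forum.uuid' config parameter.

import hashlib
from datetime import datetime

def forum_token(forum_uuid, user_id, email):
    # Midnight of the current day is part of the hash, so the token
    # silently expires when the date rolls over.
    today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    return hashlib.sha256('%s-%s-%s-%s' % (today, forum_uuid, user_id, email)).hexdigest()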
tekapo/fabric
tests/test_network.py
3
24848
from __future__ import with_statement from datetime import datetime import copy import getpass import sys from nose.tools import with_setup, ok_, raises from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify, with_patched_object, patched_context, with_fakes) from fabric.context_managers import settings, hide, show from fabric.network import (HostConnectionCache, join_host_strings, normalize, denormalize, key_filenames, ssh) from fabric.io import output_loop import fabric.network # So I can call patch_object correctly. Sigh. from fabric.state import env, output, _get_system_username from fabric.operations import run, sudo, prompt from fabric.exceptions import NetworkError from fabric.tasks import execute from fabric.api import parallel from fabric import utils # for patching from utils import * from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER, CLIENT_PRIVKEY_PASSPHRASE) # # Subroutines, e.g. host string normalization # class TestNetwork(FabricTest): def test_host_string_normalization(self): username = _get_system_username() for description, input, output_ in ( ("Sanity check: equal strings remain equal", 'localhost', 'localhost'), ("Empty username is same as get_system_username", 'localhost', username + '@localhost'), ("Empty port is same as port 22", 'localhost', 'localhost:22'), ("Both username and port tested at once, for kicks", 'localhost', username + '@localhost:22'), ): eq_.description = "Host-string normalization: %s" % description yield eq_, normalize(input), normalize(output_) del eq_.description def test_normalization_for_ipv6(self): """ normalize() will accept IPv6 notation and can separate host and port """ username = _get_system_username() for description, input, output_ in ( ("Full IPv6 address", '2001:DB8:0:0:0:0:0:1', (username, '2001:DB8:0:0:0:0:0:1', '22')), ("IPv6 address in short form", '2001:DB8::1', (username, '2001:DB8::1', '22')), ("IPv6 localhost", '::1', (username, '::1', '22')), ("Square brackets are required to separate non-standard port from IPv6 address", '[2001:DB8::1]:1222', (username, '2001:DB8::1', '1222')), ("Username and IPv6 address", 'user@2001:DB8::1', ('user', '2001:DB8::1', '22')), ("Username and IPv6 address with non-standard port", 'user@[2001:DB8::1]:1222', ('user', '2001:DB8::1', '1222')), ): eq_.description = "Host-string IPv6 normalization: %s" % description yield eq_, normalize(input), output_ del eq_.description def test_normalization_without_port(self): """ normalize() and join_host_strings() omit port if omit_port given """ eq_( join_host_strings(*normalize('user@localhost', omit_port=True)), 'user@localhost' ) def test_ipv6_host_strings_join(self): """ join_host_strings() should use square brackets only for IPv6 and if port is given """ eq_( join_host_strings('user', '2001:DB8::1'), 'user@2001:DB8::1' ) eq_( join_host_strings('user', '2001:DB8::1', '1222'), 'user@[2001:DB8::1]:1222' ) eq_( join_host_strings('user', '192.168.0.0', '1222'), '[email protected]:1222' ) def test_nonword_character_in_username(self): """ normalize() will accept non-word characters in the username part """ eq_( normalize('[email protected]')[0], 'user-with-hyphens' ) def test_at_symbol_in_username(self): """ normalize() should allow '@' in usernames (i.e. 
last '@' is split char) """ parts = normalize('[email protected]@www.example.com') eq_(parts[0], '[email protected]') eq_(parts[1], 'www.example.com') def test_normalization_of_empty_input(self): empties = ('', '', '') for description, input in ( ("empty string", ''), ("None", None) ): template = "normalize() returns empty strings for %s input" eq_.description = template % description yield eq_, normalize(input), empties del eq_.description def test_host_string_denormalization(self): username = _get_system_username() for description, string1, string2 in ( ("Sanity check: equal strings remain equal", 'localhost', 'localhost'), ("Empty username is same as get_system_username", 'localhost:22', username + '@localhost:22'), ("Empty port is same as port 22", 'user@localhost', 'user@localhost:22'), ("Both username and port", 'localhost', username + '@localhost:22'), ("IPv6 address", '2001:DB8::1', username + '@[2001:DB8::1]:22'), ): eq_.description = "Host-string denormalization: %s" % description yield eq_, denormalize(string1), denormalize(string2) del eq_.description # # Connection caching # @staticmethod @with_fakes def check_connection_calls(host_strings, num_calls): # Clear Fudge call stack # Patch connect() with Fake obj set to expect num_calls calls patched_connect = patch_object('fabric.network', 'connect', Fake('connect', expect_call=True).times_called(num_calls) ) try: # Make new cache object cache = HostConnectionCache() # Connect to all connection strings for host_string in host_strings: # Obtain connection from cache, potentially calling connect() cache[host_string] finally: # Restore connect() patched_connect.restore() def test_connection_caching(self): for description, host_strings, num_calls in ( ("Two different host names, two connections", ('localhost', 'other-system'), 2), ("Same host twice, one connection", ('localhost', 'localhost'), 1), ("Same host twice, different ports, two connections", ('localhost:22', 'localhost:222'), 2), ("Same host twice, different users, two connections", ('user1@localhost', 'user2@localhost'), 2), ): TestNetwork.check_connection_calls.description = description yield TestNetwork.check_connection_calls, host_strings, num_calls def test_connection_cache_deletion(self): """ HostConnectionCache should delete correctly w/ non-full keys """ hcc = HostConnectionCache() fake = Fake('connect', callable=True) with patched_context('fabric.network', 'connect', fake): for host_string in ('hostname', 'user@hostname', 'user@hostname:222'): # Prime hcc[host_string] # Test ok_(host_string in hcc) # Delete del hcc[host_string] # Test ok_(host_string not in hcc) # # Connection loop flow # @server() def test_saved_authentication_returns_client_object(self): cache = HostConnectionCache() assert isinstance(cache[env.host_string], ssh.SSHClient) @server() @with_fakes def test_prompts_for_password_without_good_authentication(self): env.password = None with password_response(PASSWORDS[env.user], times_called=1): cache = HostConnectionCache() cache[env.host_string] @aborts def test_aborts_on_prompt_with_abort_on_prompt(self): """ abort_on_prompt=True should abort when prompt() is used """ env.abort_on_prompts = True prompt("This will abort") @server() @aborts def test_aborts_on_password_prompt_with_abort_on_prompt(self): """ abort_on_prompt=True should abort when password prompts occur """ env.password = None env.abort_on_prompts = True with password_response(PASSWORDS[env.user], times_called=1): cache = HostConnectionCache() cache[env.host_string] @mock_streams('stdout') 
@server() def test_does_not_abort_with_password_and_host_with_abort_on_prompt(self): """ abort_on_prompt=True should not abort if no prompts are needed """ env.abort_on_prompts = True env.password = PASSWORDS[env.user] # env.host_string is automatically filled in when using server() run("ls /simple") @mock_streams('stdout') @server() def test_trailing_newline_line_drop(self): """ Trailing newlines shouldn't cause last line to be dropped. """ # Multiline output with trailing newline cmd = "ls /" output_string = RESPONSES[cmd] # TODO: fix below lines, duplicates inner workings of tested code prefix = "[%s] out: " % env.host_string expected = prefix + ('\n' + prefix).join(output_string.split('\n')) # Create, tie off thread with settings(show('everything'), hide('running')): result = run(cmd) # Test equivalence of expected, received output eq_(expected, sys.stdout.getvalue()) # Also test that the captured value matches, too. eq_(output_string, result) @server() def test_sudo_prompt_kills_capturing(self): """ Sudo prompts shouldn't screw up output capturing """ cmd = "ls /simple" with hide('everything'): eq_(sudo(cmd), RESPONSES[cmd]) @server() def test_password_memory_on_user_switch(self): """ Switching users mid-session should not screw up password memory """ def _to_user(user): return join_host_strings(user, env.host, env.port) user1 = 'root' user2 = USER with settings(hide('everything'), password=None): # Connect as user1 (thus populating both the fallback and # user-specific caches) with settings( password_response(PASSWORDS[user1]), host_string=_to_user(user1) ): run("ls /simple") # Connect as user2: * First cxn attempt will use fallback cache, # which contains user1's password, and thus fail * Second cxn # attempt will prompt user, and succeed due to mocked p4p * but # will NOT overwrite fallback cache with settings( password_response(PASSWORDS[user2]), host_string=_to_user(user2) ): # Just to trigger connection run("ls /simple") # * Sudo call should use cached user2 password, NOT fallback cache, # and thus succeed. (I.e. p_f_p should NOT be called here.) 
with settings( password_response('whatever', times_called=0), host_string=_to_user(user2) ): sudo("ls /simple") @mock_streams('stderr') @server() def test_password_prompt_displays_host_string(self): """ Password prompt lines should include the user/host in question """ env.password = None env.no_agent = env.no_keys = True output.everything = False with password_response(PASSWORDS[env.user], silent=False): run("ls /simple") regex = r'^\[%s\] Login password for \'%s\': ' % (env.host_string, env.user) assert_contains(regex, sys.stderr.getvalue()) @mock_streams('stderr') @server(pubkeys=True) def test_passphrase_prompt_displays_host_string(self): """ Passphrase prompt lines should include the user/host in question """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY output.everything = False with password_response(CLIENT_PRIVKEY_PASSPHRASE, silent=False): run("ls /simple") regex = r'^\[%s\] Login password for \'%s\': ' % (env.host_string, env.user) assert_contains(regex, sys.stderr.getvalue()) def test_sudo_prompt_display_passthrough(self): """ Sudo prompt should display (via passthrough) when stdout/stderr shown """ TestNetwork._prompt_display(True) def test_sudo_prompt_display_directly(self): """ Sudo prompt should display (manually) when stdout/stderr hidden """ TestNetwork._prompt_display(False) @staticmethod @mock_streams('both') @server(pubkeys=True, responses={'oneliner': 'result'}) def _prompt_display(display_output): env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY output.output = display_output with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[env.user]), silent=False ): sudo('oneliner') if display_output: expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password for '%(user)s': [%(prefix)s] out: sudo password: [%(prefix)s] out: Sorry, try again. [%(prefix)s] out: sudo password: [%(prefix)s] out: result """ % {'prefix': env.host_string, 'user': env.user} else: # Note lack of first sudo prompt (as it's autoresponded to) and of # course the actual result output. expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password for '%(user)s': [%(prefix)s] out: Sorry, try again. [%(prefix)s] out: sudo password: """ % { 'prefix': env.host_string, 'user': env.user } eq_(expected[1:], sys.stdall.getvalue()) @mock_streams('both') @server( pubkeys=True, responses={'oneliner': 'result', 'twoliner': 'result1\nresult2'} ) def test_consecutive_sudos_should_not_have_blank_line(self): """ Consecutive sudo() calls should not incur a blank line in-between """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[USER]), silent=False ): sudo('oneliner') sudo('twoliner') expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password for '%(user)s': [%(prefix)s] out: sudo password: [%(prefix)s] out: Sorry, try again. 
[%(prefix)s] out: sudo password: [%(prefix)s] out: result [%(prefix)s] sudo: twoliner [%(prefix)s] out: sudo password: [%(prefix)s] out: result1 [%(prefix)s] out: result2 """ % {'prefix': env.host_string, 'user': env.user} eq_(sys.stdall.getvalue(), expected[1:]) @mock_streams('both') @server(pubkeys=True, responses={'silent': '', 'normal': 'foo'}) def test_silent_commands_should_not_have_blank_line(self): """ Silent commands should not generate an extra trailing blank line After the move to interactive I/O, it was noticed that while run/sudo commands which had non-empty stdout worked normally (consecutive such commands were totally adjacent), those with no stdout (i.e. silent commands like ``test`` or ``mkdir``) resulted in spurious blank lines after the "run:" line. This looks quite ugly in real world scripts. """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response(CLIENT_PRIVKEY_PASSPHRASE, silent=False): run('normal') run('silent') run('normal') with hide('everything'): run('normal') run('silent') expected = """ [%(prefix)s] run: normal [%(prefix)s] Login password for '%(user)s': [%(prefix)s] out: foo [%(prefix)s] run: silent [%(prefix)s] run: normal [%(prefix)s] out: foo """ % {'prefix': env.host_string, 'user': env.user} eq_(expected[1:], sys.stdall.getvalue()) @mock_streams('both') @server( pubkeys=True, responses={'oneliner': 'result', 'twoliner': 'result1\nresult2'} ) def test_io_should_print_prefix_if_ouput_prefix_is_true(self): """ run/sudo should print [host_string] if env.output_prefix == True """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[USER]), silent=False ): run('oneliner') run('twoliner') expected = """ [%(prefix)s] run: oneliner [%(prefix)s] Login password for '%(user)s': [%(prefix)s] out: result [%(prefix)s] run: twoliner [%(prefix)s] out: result1 [%(prefix)s] out: result2 """ % {'prefix': env.host_string, 'user': env.user} eq_(expected[1:], sys.stdall.getvalue()) @mock_streams('both') @server( pubkeys=True, responses={'oneliner': 'result', 'twoliner': 'result1\nresult2'} ) def test_io_should_not_print_prefix_if_ouput_prefix_is_false(self): """ run/sudo shouldn't print [host_string] if env.output_prefix == False """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[USER]), silent=False ): with settings(output_prefix=False): run('oneliner') run('twoliner') expected = """ [%(prefix)s] run: oneliner [%(prefix)s] Login password for '%(user)s': result [%(prefix)s] run: twoliner result1 result2 """ % {'prefix': env.host_string, 'user': env.user} eq_(expected[1:], sys.stdall.getvalue()) @server() def test_env_host_set_when_host_prompt_used(self): """ Ensure env.host is set during host prompting """ copied_host_string = str(env.host_string) fake = Fake('raw_input', callable=True).returns(copied_host_string) env.host_string = None env.host = None with settings(hide('everything'), patched_input(fake)): run("ls /") # Ensure it did set host_string back to old value eq_(env.host_string, copied_host_string) # Ensure env.host is correct eq_(env.host, normalize(copied_host_string)[1]) def subtask(): run("This should never execute") class TestConnections(FabricTest): @aborts def test_should_abort_when_cannot_connect(self): """ By default, connecting to a nonexistent server should abort. 
""" with hide('everything'): execute(subtask, hosts=['nope.nonexistent.com']) def test_should_warn_when_skip_bad_hosts_is_True(self): """ env.skip_bad_hosts = True => execute() skips current host """ with settings(hide('everything'), skip_bad_hosts=True): execute(subtask, hosts=['nope.nonexistent.com']) @parallel def parallel_subtask(): run("This should never execute") class TestParallelConnections(FabricTest): @aborts def test_should_abort_when_cannot_connect(self): """ By default, connecting to a nonexistent server should abort. """ with hide('everything'): execute(parallel_subtask, hosts=['nope.nonexistent.com']) def test_should_warn_when_skip_bad_hosts_is_True(self): """ env.skip_bad_hosts = True => execute() skips current host """ with settings(hide('everything'), skip_bad_hosts=True): execute(parallel_subtask, hosts=['nope.nonexistent.com']) class TestSSHConfig(FabricTest): def env_setup(self): super(TestSSHConfig, self).env_setup() env.use_ssh_config = True env.ssh_config_path = support("ssh_config") # Undo the changes FabricTest makes to env for server support env.user = env.local_user env.port = env.default_port def test_global_user_with_default_env(self): """ Global User should override default env.user """ eq_(normalize("localhost")[0], "satan") def test_global_user_with_nondefault_env(self): """ Global User should NOT override nondefault env.user """ with settings(user="foo"): eq_(normalize("localhost")[0], "foo") def test_specific_user_with_default_env(self): """ Host-specific User should override default env.user """ eq_(normalize("myhost")[0], "neighbor") def test_user_vs_host_string_value(self): """ SSH-config derived user should NOT override host-string user value """ eq_(normalize("myuser@localhost")[0], "myuser") eq_(normalize("myuser@myhost")[0], "myuser") def test_global_port_with_default_env(self): """ Global Port should override default env.port """ eq_(normalize("localhost")[2], "666") def test_global_port_with_nondefault_env(self): """ Global Port should NOT override nondefault env.port """ with settings(port="777"): eq_(normalize("localhost")[2], "777") def test_specific_port_with_default_env(self): """ Host-specific Port should override default env.port """ eq_(normalize("myhost")[2], "664") def test_port_vs_host_string_value(self): """ SSH-config derived port should NOT override host-string port value """ eq_(normalize("localhost:123")[2], "123") eq_(normalize("myhost:123")[2], "123") def test_hostname_alias(self): """ Hostname setting overrides host string's host value """ eq_(normalize("localhost")[1], "localhost") eq_(normalize("myalias")[1], "otherhost") @with_patched_object(utils, 'warn', Fake('warn', callable=True, expect_call=True)) def test_warns_with_bad_config_file_path(self): # use_ssh_config is already set in our env_setup() with settings(hide('everything'), ssh_config_path="nope_bad_lol"): normalize('foo') @server() def test_real_connection(self): """ Test-server connection using ssh_config values """ with settings( hide('everything'), ssh_config_path=support("testserver_ssh_config"), host_string='testserver', ): ok_(run("ls /simple").succeeded) class TestKeyFilenames(FabricTest): def test_empty_everything(self): """ No env.key_filename and no ssh_config = empty list """ with settings(use_ssh_config=False): with settings(key_filename=""): eq_(key_filenames(), []) with settings(key_filename=[]): eq_(key_filenames(), []) def test_just_env(self): """ Valid env.key_filename and no ssh_config = just env """ with settings(use_ssh_config=False): with 
settings(key_filename="mykey"): eq_(key_filenames(), ["mykey"]) with settings(key_filename=["foo", "bar"]): eq_(key_filenames(), ["foo", "bar"]) def test_just_ssh_config(self): """ No env.key_filename + valid ssh_config = ssh value """ with settings(use_ssh_config=True, ssh_config_path=support("ssh_config")): for val in ["", []]: with settings(key_filename=val): eq_(key_filenames(), ["foobar.pub"]) def test_both(self): """ Both env.key_filename + valid ssh_config = both show up w/ env var first """ with settings(use_ssh_config=True, ssh_config_path=support("ssh_config")): with settings(key_filename="bizbaz.pub"): eq_(key_filenames(), ["bizbaz.pub", "foobar.pub"]) with settings(key_filename=["bizbaz.pub", "whatever.pub"]): expected = ["bizbaz.pub", "whatever.pub", "foobar.pub"] eq_(key_filenames(), expected)
bsd-2-clause
-8,869,130,587,347,242,000
34.547926
92
0.576988
false
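The behavior the normalization tests pin down can be condensed into a short sketch; every expected value below is taken directly from the test tables above.

from fabric.network import normalize, join_host_strings

normalize('user@[2001:DB8::1]:1222')              # ('user', '2001:DB8::1', '1222')
normalize('[email protected]@www.example.com')[0]   # '[email protected]' (last '@' splits)
join_host_strings('user', '2001:DB8::1', '1222')  # 'user@[2001:DB8::1]:1222'
join_host_strings('user', '192.168.0.0', '1222')  # '[email protected]:1222'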
Philippe12/external_chromium_org
ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py
48
13105
#!/usr/bin/python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """This script lays out the PNaCl translator files for a normal Chrome installer, for one platform. Once run num-of-arches times, the result can then be packed into a multi-CRX zip file. This script depends on and pulls in the translator nexes and libraries from the toolchain directory (so that must be downloaded first) and it depends on the pnacl_irt_shim. """ import json import logging import optparse import os import platform import re import shutil import sys J = os.path.join ###################################################################### # Target arch and build arch junk to convert between all the # silly conventions between SCons, Chrome and PNaCl. # The version of the arch used by NaCl manifest files. # This is based on the machine "building" this extension. # We also used this to identify the arch-specific different versions of # this extension. def CanonicalArch(arch): if arch in ('x86_64', 'x86-64', 'x64', 'amd64'): return 'x86-64' # TODO(jvoung): be more specific about the arm architecture version? if arch in ('arm', 'armv7'): return 'arm' if re.match('^i.86$', arch) or arch in ('x86_32', 'x86-32', 'ia32', 'x86'): return 'x86-32' return None def GetBuildArch(): arch = platform.machine() return CanonicalArch(arch) BUILD_ARCH = GetBuildArch() ARCHES = ['x86-32', 'x86-64', 'arm'] def IsValidArch(arch): return arch in ARCHES # The version of the arch used by configure and pnacl's build.sh. def StandardArch(arch): return {'x86-32': 'i686', 'x86-64': 'x86_64', 'arm' : 'armv7'}[arch] ###################################################################### def GetNaClRoot(): """ Find the native_client path, relative to this script. This script is in ppapi/... and native_client is a sibling of ppapi. """ script_file = os.path.abspath(__file__) def SearchForNaCl(cur_dir): if cur_dir.endswith('ppapi'): parent = os.path.dirname(cur_dir) sibling = os.path.join(parent, 'native_client') if not os.path.isdir(sibling): raise Exception('Could not find native_client relative to %s' % script_file) return sibling # Detect when we've the root (linux is /, but windows is not...) next_dir = os.path.dirname(cur_dir) if cur_dir == next_dir: raise Exception('Could not find native_client relative to %s' % script_file) return SearchForNaCl(next_dir) return SearchForNaCl(script_file) NACL_ROOT = GetNaClRoot() ###################################################################### # Normalize the platform name to be the way SCons finds chrome binaries. # This is based on the platform "building" the extension. def GetBuildPlatform(): if sys.platform == 'darwin': platform = 'mac' elif sys.platform.startswith('linux'): platform = 'linux' elif sys.platform in ('cygwin', 'win32'): platform = 'windows' else: raise Exception('Unknown platform: %s' % sys.platform) return platform BUILD_PLATFORM = GetBuildPlatform() def DetermineInstallerArches(target_arch): arch = CanonicalArch(target_arch) if not IsValidArch(arch): raise Exception('Unknown target_arch %s' % target_arch) # On windows, we need x86-32 and x86-64 (assuming non-windows RT). 
if BUILD_PLATFORM == 'windows': if arch.startswith('x86'): return ['x86-32', 'x86-64'] else: raise Exception('Unknown target_arch on windows w/ target_arch == %s' % target_arch) else: return [arch] ###################################################################### class PnaclPackaging(object): package_base = os.path.dirname(__file__) # File paths that are set from the command line. pnacl_template = None tool_revisions = None # Agreed-upon name for pnacl-specific info. pnacl_json = 'pnacl.json' @staticmethod def SetPnaclInfoTemplatePath(path): PnaclPackaging.pnacl_template = path @staticmethod def SetToolsRevisionPath(path): PnaclPackaging.tool_revisions = path @staticmethod def PnaclToolsRevision(): with open(PnaclPackaging.tool_revisions, 'r') as f: for line in f.read().splitlines(): if line.startswith('PNACL_VERSION'): _, version = line.split('=') # CWS happens to use version quads, so make it a quad too. # However, each component of the quad is limited to 64K max. # Try to handle a bit more. max_version = 2 ** 16 version = int(version) version_more = version / max_version version = version % max_version return '0.1.%d.%d' % (version_more, version) raise Exception('Cannot find PNACL_VERSION in TOOL_REVISIONS file: %s' % PnaclPackaging.tool_revisions) @staticmethod def GeneratePnaclInfo(target_dir, abi_version, arch): # A note on versions: pnacl_version is the version of translator built # by the NaCl repo, while abi_version is bumped when the NaCl sandbox # actually changes. pnacl_version = PnaclPackaging.PnaclToolsRevision() with open(PnaclPackaging.pnacl_template, 'r') as pnacl_template_fd: pnacl_template = json.load(pnacl_template_fd) out_name = J(target_dir, UseWhitelistedChars(PnaclPackaging.pnacl_json, None)) with open(out_name, 'w') as output_fd: pnacl_template['pnacl-arch'] = arch pnacl_template['pnacl-version'] = pnacl_version json.dump(pnacl_template, output_fd, sort_keys=True, indent=4) ###################################################################### class PnaclDirs(object): toolchain_dir = J(NACL_ROOT, 'toolchain') output_dir = J(toolchain_dir, 'pnacl-package') @staticmethod def TranslatorRoot(): return J(PnaclDirs.toolchain_dir, 'pnacl_translator') @staticmethod def LibDir(target_arch): return J(PnaclDirs.TranslatorRoot(), 'lib-%s' % target_arch) @staticmethod def SandboxedCompilerDir(target_arch): return J(PnaclDirs.toolchain_dir, 'pnacl_translator', StandardArch(target_arch), 'bin') @staticmethod def SetOutputDir(d): PnaclDirs.output_dir = d @staticmethod def OutputDir(): return PnaclDirs.output_dir @staticmethod def OutputAllDir(version_quad): return J(PnaclDirs.OutputDir(), version_quad) @staticmethod def OutputArchBase(arch): return '%s' % arch @staticmethod def OutputArchDir(arch): # Nest this in another directory so that the layout will be the same # as the "all"/universal version. 
parent_dir = J(PnaclDirs.OutputDir(), PnaclDirs.OutputArchBase(arch)) return (parent_dir, J(parent_dir, PnaclDirs.OutputArchBase(arch))) ###################################################################### def StepBanner(short_desc, long_desc): logging.info("**** %s\t%s", short_desc, long_desc) def Clean(): out_dir = PnaclDirs.OutputDir() StepBanner('CLEAN', 'Cleaning out old packaging: %s' % out_dir) if os.path.isdir(out_dir): shutil.rmtree(out_dir) else: logging.info('Clean skipped -- no previous output directory!') ###################################################################### def UseWhitelistedChars(orig_basename, arch): """ Make the filename match the pattern expected by nacl_file_host. Currently, this assumes there is prefix "pnacl_public_" and that the allowed chars are in the set [a-zA-Z0-9_]. """ if arch: target_basename = 'pnacl_public_%s_%s' % (arch, orig_basename) else: target_basename = 'pnacl_public_%s' % orig_basename result = re.sub(r'[^a-zA-Z0-9_]', '_', target_basename) logging.info('UseWhitelistedChars using: %s' % result) return result def CopyFlattenDirsAndPrefix(src_dir, arch, dest_dir): """ Copy files from src_dir to dest_dir. When copying, also rename the files such that they match the white-listing pattern in chrome/browser/nacl_host/nacl_file_host.cc. """ for (root, dirs, files) in os.walk(src_dir, followlinks=True): for f in files: # Assume a flat directory. assert (f == os.path.basename(f)) full_name = J(root, f) target_name = UseWhitelistedChars(f, arch) shutil.copy(full_name, J(dest_dir, target_name)) def BuildArchForInstaller(version_quad, arch, lib_overrides): """ Build an architecture specific version for the chrome installer. """ target_dir = PnaclDirs.OutputDir() StepBanner('BUILD INSTALLER', 'Packaging for arch %s in %s' % (arch, target_dir)) # Copy llc.nexe and ld.nexe, but with some renaming and directory flattening. CopyFlattenDirsAndPrefix(PnaclDirs.SandboxedCompilerDir(arch), arch, target_dir) # Copy native libraries, also with renaming and directory flattening. CopyFlattenDirsAndPrefix(PnaclDirs.LibDir(arch), arch, target_dir) # Also copy files from the list of overrides. # This needs the arch tagged onto the name too, like the other files. if arch in lib_overrides: for override in lib_overrides[arch]: override_base = os.path.basename(override) target_name = UseWhitelistedChars(override_base, arch) shutil.copy(override, J(target_dir, target_name)) def BuildInstallerStyle(version_quad, lib_overrides, arches): """ Package the pnacl component for use within the chrome installer infrastructure. These files need to be named in a special way so that white-listing of files is easy. """ StepBanner("BUILD_ALL", "Packaging installer for version: %s" % version_quad) for arch in arches: BuildArchForInstaller(version_quad, arch, lib_overrides) # Generate pnacl info manifest. # Hack around the fact that there may be more than one arch, on Windows. if len(arches) == 1: arches = arches[0] PnaclPackaging.GeneratePnaclInfo(PnaclDirs.OutputDir(), version_quad, arches) ###################################################################### def Main(): usage = 'usage: %prog [options] version_arg' parser = optparse.OptionParser(usage) # We may want to accept a target directory to dump it in the usual # output directory (e.g., scons-out). 
parser.add_option('-c', '--clean', dest='clean', action='store_true', default=False, help='Clean out destination directory first.') parser.add_option('-d', '--dest', dest='dest', help='The destination root for laying out the extension') parser.add_option('-L', '--lib_override', dest='lib_overrides', action='append', default=[], help='Specify path to a fresher native library ' + 'that overrides the tarball library with ' + '(arch:libfile) tuple.') parser.add_option('-t', '--target_arch', dest='target_arch', default=None, help='Only generate the chrome installer version for arch') parser.add_option('--info_template_path', dest='info_template_path', default=None, help='Path of the info template file') parser.add_option('--tool_revisions_path', dest='tool_revisions_path', default=None, help='Location of NaCl TOOL_REVISIONS file.') parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', help='Print verbose debug messages.') (options, args) = parser.parse_args() if options.verbose: logging.getLogger().setLevel(logging.DEBUG) else: logging.getLogger().setLevel(logging.ERROR) logging.info('pnacl_component_crx_gen w/ options %s and args %s\n' % (options, args)) # Set destination directory before doing any cleaning, etc. if options.dest: PnaclDirs.SetOutputDir(options.dest) if options.clean: Clean() if options.info_template_path: PnaclPackaging.SetPnaclInfoTemplatePath(options.info_template_path) if options.tool_revisions_path: PnaclPackaging.SetToolsRevisionPath(options.tool_revisions_path) lib_overrides = {} for o in options.lib_overrides: arch, override_lib = o.split(',') arch = CanonicalArch(arch) if not IsValidArch(arch): raise Exception('Unknown arch for -L: %s (from %s)' % (arch, o)) if not os.path.isfile(override_lib): raise Exception('Override native lib not a file for -L: %s (from %s)' % (override_lib, o)) override_list = lib_overrides.get(arch, []) override_list.append(override_lib) lib_overrides[arch] = override_list if len(args) != 1: parser.print_help() parser.error('Incorrect number of arguments') abi_version = int(args[0]) arches = DetermineInstallerArches(options.target_arch) BuildInstallerStyle(abi_version, lib_overrides, arches) return 0 if __name__ == '__main__': sys.exit(Main())
bsd-3-clause
-7,056,966,648,829,842,000
33.396325
79
0.638153
false
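The arch-name juggling between SCons, Chrome, and PNaCl conventions that CanonicalArch and StandardArch implement condenses to a quick table check:

for raw in ('x64', 'amd64', 'i686', 'ia32', 'armv7'):
    print('%s -> %s -> %s' % (raw, CanonicalArch(raw), StandardArch(CanonicalArch(raw))))
# x64   -> x86-64 -> x86_64
# amd64 -> x86-64 -> x86_64
# i686  -> x86-32 -> i686
# ia32  -> x86-32 -> i686
# armv7 -> arm    -> armv7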
maciejkula/glove-python
setup.py
1
4688
import glob
import os
import platform
import subprocess
import sys

from setuptools import Command, Extension, setup, find_packages
from setuptools.command.test import test as TestCommand


def define_extensions(cythonize=False):

    compile_args = ['-fopenmp', '-ffast-math']

    # There are problems with illegal ASM instructions
    # when using the Anaconda distribution (at least on OSX).
    # This could be because Anaconda uses its own assembler?
    # To work around this we do not add -march=native if we
    # know we're dealing with Anaconda
    if 'anaconda' not in sys.version.lower():
        compile_args.append('-march=native')

    if cythonize:
        glove_cython = "glove/glove_cython.pyx"
        glove_metrics = "glove/metrics/accuracy_cython.pyx"
        glove_corpus = "glove/corpus_cython.pyx"
    else:
        glove_cython = "glove/glove_cython.c"
        glove_metrics = "glove/metrics/accuracy_cython.c"
        glove_corpus = "glove/corpus_cython.cpp"

    return [Extension("glove.glove_cython", [glove_cython],
                      extra_link_args=["-fopenmp"],
                      extra_compile_args=compile_args),
            Extension("glove.metrics.accuracy_cython", [glove_metrics],
                      extra_link_args=["-fopenmp"],
                      extra_compile_args=compile_args),
            Extension("glove.corpus_cython", [glove_corpus],
                      language='C++',
                      libraries=["stdc++"],
                      extra_link_args=compile_args,
                      extra_compile_args=compile_args)]


def set_gcc():
    """
    Try to find and use GCC on OSX for OpenMP support.
    """

    # For macports and homebrew
    patterns = ['/opt/local/bin/gcc-mp-[0-9].[0-9]',
                '/opt/local/bin/gcc-mp-[0-9]',
                '/usr/local/bin/gcc-[0-9].[0-9]',
                '/usr/local/bin/gcc-[0-9]']

    if 'darwin' in platform.platform().lower():

        gcc_binaries = []
        for pattern in patterns:
            gcc_binaries += glob.glob(pattern)
        gcc_binaries.sort()

        if gcc_binaries:
            _, gcc = os.path.split(gcc_binaries[-1])
            os.environ["CC"] = gcc
        else:
            raise Exception('No GCC available. Install gcc from Homebrew '
                            'using brew install gcc.')


class Cythonize(Command):
    """
    Compile the extension .pyx files.
    """

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        import Cython
        from Cython.Build import cythonize

        cythonize(define_extensions(cythonize=True))


class Clean(Command):
    """
    Clean build files.
    """

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):

        pth = os.path.dirname(os.path.abspath(__file__))

        subprocess.call(['rm', '-rf', os.path.join(pth, 'build')])
        subprocess.call(['rm', '-rf', os.path.join(pth, '*.egg-info')])
        subprocess.call(['find', pth, '-name', '*.pyc', '-type', 'f', '-delete'])
        subprocess.call(['rm', os.path.join(pth, 'glove', 'corpus_cython.so')])
        subprocess.call(['rm', os.path.join(pth, 'glove', 'glove_cython.so')])


class PyTest(TestCommand):

    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = ['tests/']

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)


setup(
    name='glove_python',
    version='0.1.0',
    description=('Python implementation of Global Vectors '
                 'for Word Representation (GloVe)'),
    long_description='',
    packages=find_packages(),
    install_requires=['numpy', 'scipy'],
    tests_require=['pytest'],
    cmdclass={'test': PyTest, 'cythonize': Cythonize, 'clean': Clean},
    author='Maciej Kula',
    url='https://github.com/maciejkula/glove-python',
    download_url='https://github.com/maciejkula/glove-python/tarball/0.1.0',
    license='Apache 2.0',
    classifiers=['Development Status :: 3 - Alpha',
                 'License :: OSI Approved :: Apache Software License',
                 'Topic :: Scientific/Engineering :: Artificial Intelligence'],
    ext_modules=define_extensions()
)
apache-2.0
4,129,608,185,902,129,000
29.051282
81
0.584898
false
xhat/micropython
tests/basics/for_range.py
19
1116
# test for+range, mostly to check optimisation of this pair

# apply args using *
for x in range(*(1, 3)):
    print(x)
for x in range(1, *(6, 2)):
    print(x)

# apply args using **
try:
    for x in range(**{'end':1}):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, **{'end':1}):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, 1, **{'step':1}):
        print(x)
except TypeError:
    print('TypeError')

# keyword args
try:
    for x in range(end=1):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, end=1):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, 1, step=1):
        print(x)
except TypeError:
    print('TypeError')

# argument is a comprehension
try:
    for x in range(0 for i in []):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, (0 for i in [])):
        print(x)
except TypeError:
    print('TypeError')
try:
    for x in range(0, 1, (0 for i in [])):
        print(x)
except TypeError:
    print('TypeError')
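# Expected output under CPython, for reference (a hedged sketch; it relies
# only on range() rejecting keyword arguments and generator arguments):
# the two passing loops print 1, 2 and then 1, 3, 5 (range(1, *(6, 2)) is
# range(1, 6, 2)), followed by 'TypeError' once for each of the nine
# failing cases above.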
mit
-4,837,676,830,301,300,000
18.241379
59
0.578853
false
bennyrowland/pyflo-suspect
tests/test_processing.py
1
5347
from unittest.mock import Mock

import numpy
import pytest

import pyflo_suspect.processing as p
import pyflo.ports

import suspect


@pytest.fixture
def simple_data():
    source_array = numpy.ones((4, 128), 'complex')
    source_array[1, :] *= 2
    source_array[2, :] *= 4
    source_array[3, :] *= 8
    data = suspect.MRSData(source_array, 5e-4, 123)
    return data


@pytest.fixture
def complex_data():
    source_array = numpy.ones((8, 4, 128), 'complex')
    data = suspect.MRSData(source_array, 5e-4, 123)
    return data


def test_channel_weights_no_axis(simple_data):
    component = p.SVDChannelWeights({})
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    #target_port = Mock()
    #print(component.outports)
    #component.outports["out"].connect(target_port)
    data_port.send_data(simple_data)
    #print(target_port.call_args)


def test_channel_weights_data(simple_data):
    component = p.SVDChannelWeights({})
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    target_port = pyflo.ports.Inport({"name": "result"})
    mock = Mock()
    target_port.on('data', mock)
    component.outports["weights"].connect(target_port)
    data_port.send_data(simple_data)
    result = mock.call_args[0][0]
    assert result.shape == (4,)
    numpy.testing.assert_almost_equal(result[0] / result[1],
                                      simple_data[0, 0] / simple_data[1, 0])


def test_average_data_only(simple_data):
    component = p.WeightedAverage({})
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    target_port = pyflo.ports.Inport({"name": "result"})
    mock = Mock()
    target_port.on('data', mock)
    component.outports["out"].connect(target_port)
    data_port.send_data(simple_data)
    result = mock.call_args[0][0]
    assert result.shape == (128,)
    assert result[0] == 3.75
    assert result.dt == 5e-4
    assert result.f0 == 123


def test_average_data_weights(simple_data):
    component = p.WeightedAverage({})
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    weights_port = pyflo.ports.Outport({"name": "weights"})
    weights_port.connect(component.inports["weights"])
    target_port = pyflo.ports.Inport({"name": "result"})
    mock = Mock()
    target_port.on("data", mock)
    component.outports["out"].connect(target_port)
    data_port.send_data(simple_data)
    # component should wait for weights to be sent
    mock.assert_not_called()
    weights_port.send_data(numpy.array([0, 0, 0, 1]))
    result = mock.call_args[0][0]
    assert result.shape == (128,)
    assert result[0] == 8
    assert result.dt == 5e-4
    assert result.f0 == 123


def test_average_data_axis(complex_data):
    component = p.WeightedAverage({})
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    axis_port = pyflo.ports.Outport({"name": "axis"})
    axis_port.connect(component.inports["axis"])
    target_port = pyflo.ports.Inport({"name": "result"})
    mock = Mock()
    target_port.on("data", mock)
    component.outports["out"].connect(target_port)
    data_port.send_data(complex_data)
    # component should wait for the axis to be sent
    mock.assert_not_called()
    axis_port.send_data(1)
    result = mock.call_args[0][0]
    assert result.shape == (8, 128)
    assert result[0, 0] == 1
    assert result.dt == 5e-4
    assert result.f0 == 123


def test_residual_water_alignment():
    component = p.WaterPeakAlignment(None)
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    test_spectrum = numpy.zeros(128, 'complex')
    test_spectrum[16] = 1
    test_fid = numpy.fft.ifft(test_spectrum)
    test_data = suspect.MRSData(test_fid, 1.0 / 128, 123)
    target_port = pyflo.ports.Inport({"name": "target"})
    component.outports["shift"].connect(target_port)
    mock = Mock()
    target_port.on('data', mock)
    data_port.send_data(test_data)
    mock.assert_called_once_with(16)


def test_frequency_shift():
    component = p.FrequencyShift(None)
    data_port = pyflo.ports.Outport({"name": "data"})
    data_port.connect(component.inports["in"])
    shift_port = pyflo.ports.Outport({"name": "shift"})
    shift_port.connect(component.inports["shift"])
    test_spectrum = numpy.zeros(128, 'complex')
    test_spectrum[0] = 1
    target_fid = numpy.fft.ifft(test_spectrum)
    target_data = suspect.MRSData(target_fid, 1.0 / 128, 123)
    shifted_spectrum = numpy.roll(test_spectrum, 16)
    shifted_fid = numpy.fft.ifft(shifted_spectrum)
    shifted_data = suspect.MRSData(shifted_fid, 1.0 / 128, 123)
    target_port = pyflo.ports.Inport({"name": "result"})
    component.outports["out"].connect(target_port)
    mock = Mock()
    target_port.on('data', mock)
    data_port.send_data(shifted_data)
    mock.assert_not_called()
    shift_port.send_data(-16.0)
    numpy.testing.assert_almost_equal(target_data, mock.call_args[0][0])

    # try sending the data the other way
    mock = Mock()
    target_port.on('data', mock)
    shift_port.send_data(-16.0)
    mock.assert_not_called()
    data_port.send_data(shifted_data)
    numpy.testing.assert_almost_equal(target_data, mock.call_args[0][0])
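# The tests above repeat one wiring pattern: an Outport feeds a component's
# inport, and a Mock subscribed to an Inport captures whatever the component
# emits. A small helper capturing that pattern (a hedged sketch; it assumes
# only the pyflo.ports API already exercised in the tests above):


def make_probe(component, outport_name):
    """Attach a Mock to one of the component's outports and return the Mock.

    The returned Mock records every 'data' event emitted on that outport, so
    tests can inspect probe.call_args after driving the component's inports.
    """
    probe_port = pyflo.ports.Inport({"name": "probe"})
    probe = Mock()
    probe_port.on('data', probe)
    component.outports[outport_name].connect(probe_port)
    return probe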
mit
2,592,149,976,706,942,000
29.907514
99
0.657939
false
40223234/40223234
static/Brython3.1.1-20150328-091302/Lib/getopt.py
845
7488
"""Parser for command line options. This module helps scripts to parse the command line arguments in sys.argv. It supports the same conventions as the Unix getopt() function (including the special meanings of arguments of the form `-' and `--'). Long options similar to those supported by GNU software may be used as well via an optional third argument. This module provides two functions and an exception: getopt() -- Parse command line options gnu_getopt() -- Like getopt(), but allow option and non-option arguments to be intermixed. GetoptError -- exception (class) raised with 'opt' attribute, which is the option involved with the exception. """ # Long option support added by Lars Wirzenius <[email protected]>. # # Gerrit Holl <[email protected]> moved the string-based exceptions # to class-based exceptions. # # Peter Åstrand <[email protected]> added gnu_getopt(). # # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism # - allow the caller to specify ordering # - RETURN_IN_ORDER option # - GNU extension with '-' as first character of option string # - optional arguments, specified by double colons # - a option string with a W followed by semicolon should # treat "-W foo" as "--foo" __all__ = ["GetoptError","error","getopt","gnu_getopt"] import os try: from gettext import gettext as _ except ImportError: # Bootstrapping Python: gettext's dependencies not built yet def _(s): return s class GetoptError(Exception): opt = '' msg = '' def __init__(self, msg, opt=''): self.msg = msg self.opt = opt Exception.__init__(self, msg, opt) def __str__(self): return self.msg error = GetoptError # backward compatibility def getopt(args, shortopts, longopts = []): """getopt(args, options[, long_options]) -> opts, args Parses command line options and parameter list. args is the argument list to be parsed, without the leading reference to the running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a colon (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign ('='). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments left after the option list was stripped (this is a trailing slice of the first argument). Each option-and-value pair returned has the option as its first element, prefixed with a hyphen (e.g., '-x'), and the option argument as its second element, or an empty string if the option has no argument. The options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. """ opts = [] if type(longopts) == type(""): longopts = [longopts] else: longopts = list(longopts) while args and args[0].startswith('-') and args[0] != '-': if args[0] == '--': args = args[1:] break if args[0].startswith('--'): opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) else: opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) return opts, args def gnu_getopt(args, shortopts, longopts = []): """getopt(args, options[, long_options]) -> opts, args This function works like getopt(), except that GNU style scanning mode is used by default. 
This means that option and non-option arguments may be intermixed. The getopt() function stops processing options as soon as a non-option argument is encountered. If the first character of the option string is `+', or if the environment variable POSIXLY_CORRECT is set, then option processing stops as soon as a non-option argument is encountered. """ opts = [] prog_args = [] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) # Allow options after non-option arguments? if shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get("POSIXLY_CORRECT"): all_options_first = True else: all_options_first = False while args: if args[0] == '--': prog_args += args[1:] break if args[0][:2] == '--': opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) elif args[0][:1] == '-' and args[0] != '-': opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) else: if all_options_first: prog_args += args break else: prog_args.append(args[0]) args = args[1:] return opts, prog_args def do_longs(opts, opt, longopts, args): try: i = opt.index('=') except ValueError: optarg = None else: opt, optarg = opt[:i], opt[i+1:] has_arg, opt = long_has_args(opt, longopts) if has_arg: if optarg is None: if not args: raise GetoptError(_('option --%s requires argument') % opt, opt) optarg, args = args[0], args[1:] elif optarg is not None: raise GetoptError(_('option --%s must not have an argument') % opt, opt) opts.append(('--' + opt, optarg or '')) return opts, args # Return: # has_arg? # full option name def long_has_args(opt, longopts): possibilities = [o for o in longopts if o.startswith(opt)] if not possibilities: raise GetoptError(_('option --%s not recognized') % opt, opt) # Is there an exact match? if opt in possibilities: return False, opt elif opt + '=' in possibilities: return True, opt # No exact match, so better be unique. if len(possibilities) > 1: # XXX since possibilities contains all valid continuations, might be # nice to work them into the error msg raise GetoptError(_('option --%s not a unique prefix') % opt, opt) assert len(possibilities) == 1 unique_match = possibilities[0] has_arg = unique_match.endswith('=') if has_arg: unique_match = unique_match[:-1] return has_arg, unique_match def do_shorts(opts, optstring, shortopts, args): while optstring != '': opt, optstring = optstring[0], optstring[1:] if short_has_arg(opt, shortopts): if optstring == '': if not args: raise GetoptError(_('option -%s requires argument') % opt, opt) optstring, args = args[0], args[1:] optarg, optstring = optstring, '' else: optarg = '' opts.append(('-' + opt, optarg)) return opts, args def short_has_arg(opt, shortopts): for i in range(len(shortopts)): if opt == shortopts[i] != ':': return shortopts.startswith(':', i+1) raise GetoptError(_('option -%s not recognized') % opt, opt) if __name__ == '__main__': import sys print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))
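# Worked example (illustrative; mirrors the self-test above):
#
#   >>> getopt(['-a', '1', '--beta', 'spam'], 'a:b', ['alpha=', 'beta'])
#   ([('-a', '1'), ('--beta', '')], ['spam'])
#
# '-a' consumes '1' because of the trailing ':' after 'a' in the short-option
# string, while '--beta' takes no argument because 'beta' carries no trailing
# '=' in the long-option list; 'spam' is left over as a program argument.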
gpl-3.0
-2,293,394,818,014,773,200
33.823256
80
0.620008
false
bgris/ODL_bgris
lib/python3.5/site-packages/docutils/__init__.py
1
7559
# $Id: __init__.py 7984 2016-12-09 09:48:27Z grubert $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.

"""
This is the Docutils (Python Documentation Utilities) package.

Package Structure
=================

Modules:

- __init__.py: Contains component base classes, exception classes, and
  Docutils version information.

- core.py: Contains the ``Publisher`` class and ``publish_*()`` convenience
  functions.

- frontend.py: Runtime settings (command-line interface, configuration files)
  processing, for Docutils front-ends.

- io.py: Provides a uniform API for low-level input and output.

- nodes.py: Docutils document tree (doctree) node class library.

- statemachine.py: A finite state machine specialized for
  regular-expression-based text filters.

Subpackages:

- languages: Language-specific mappings of terms.

- parsers: Syntax-specific input parser modules or packages.

- readers: Context-specific input handlers which understand the data
  source and manage a parser.

- transforms: Modules used by readers and writers to modify DPS
  doctrees.

- utils: Contains the ``Reporter`` system warning class and miscellaneous
  utilities used by readers, writers, and transforms.

  utils/urischemes.py: Contains a complete mapping of known URI addressing
  scheme names to descriptions.

- utils/math: Contains functions for conversion of mathematical notation
  between different formats (LaTeX, MathML, text, ...).

- writers: Format-specific output translators.
"""

__docformat__ = 'reStructuredText'

__version__ = '0.13.1'
"""``major.minor.micro`` version number.  The micro number is bumped for API
changes, for new functionality, and for interim project releases.  The minor
number is bumped whenever there is a significant project release.  The major
number will be bumped when the project is feature-complete, and perhaps if
there is a major change in the design."""

__version_details__ = 'release'
"""Extra version details (e.g. 'snapshot 2005-05-29, r3410', 'repository',
'release'), modified automatically & manually."""

import sys


class ApplicationError(Exception):
    # Workaround:
    # In Python < 2.6, unicode(<exception instance>) calls `str` on the
    # arg and therefore, e.g., unicode(StandardError(u'\u234')) fails
    # with UnicodeDecodeError.
    if sys.version_info < (2,6):
        def __unicode__(self):
            return ', '.join(self.args)


class DataError(ApplicationError): pass


class SettingsSpec:

    """
    Runtime setting specification base class.

    SettingsSpec subclass objects used by `docutils.frontend.OptionParser`.
    """

    settings_spec = ()
    """Runtime settings specification.  Override in subclasses.

    Defines runtime settings and associated command-line options, as used by
    `docutils.frontend.OptionParser`.  This is a tuple of:

    - Option group title (string or `None` which implies no group, just a
      list of single options).

    - Description (string or `None`).

    - A sequence of option tuples.  Each consists of:

      - Help text (string)

      - List of option strings (e.g. ``['-Q', '--quux']``).

      - Dictionary of keyword arguments sent to the OptionParser/OptionGroup
        ``add_option`` method.

        Runtime setting names are derived implicitly from long option names
        ('--a-setting' becomes ``settings.a_setting``) or explicitly from the
        'dest' keyword argument.

        Most settings will also have a 'validator' keyword & function.  The
        validator function validates setting values (from configuration files
        and command-line option arguments) and converts them to appropriate
        types.  For example, the ``docutils.frontend.validate_boolean``
        function, **required by all boolean settings**, converts true values
        ('1', 'on', 'yes', and 'true') to 1 and false values ('0', 'off',
        'no', 'false', and '') to 0.  Validators need only be set once per
        setting.  See the `docutils.frontend.validate_*` functions.

        See the optparse docs for more details.

    - More triples of group title, description, options, as many times as
      needed.  Thus, `settings_spec` tuples can be simply concatenated.
    """

    settings_defaults = None
    """A dictionary of defaults for settings not in `settings_spec` (internal
    settings, intended to be inaccessible by command-line and config file).
    Override in subclasses."""

    settings_default_overrides = None
    """A dictionary of auxiliary defaults, to override defaults for settings
    defined in other components.  Override in subclasses."""

    relative_path_settings = ()
    """Settings containing filesystem paths.  Override in subclasses.

    Settings listed here are to be interpreted relative to the current
    working directory."""

    config_section = None
    """The name of the config file section specific to this component
    (lowercase, no brackets).  Override in subclasses."""

    config_section_dependencies = None
    """A list of names of config file sections that are to be applied before
    `config_section`, in order (from general to specific).  In other words,
    the settings in `config_section` are to be overlaid on top of the
    settings from these sections.  The "general" section is assumed
    implicitly.  Override in subclasses."""


class TransformSpec:

    """
    Runtime transform specification base class.

    TransformSpec subclass objects used by `docutils.transforms.Transformer`.
    """

    def get_transforms(self):
        """Transforms required by this class.  Override in subclasses."""
        if self.default_transforms != ():
            import warnings
            warnings.warn('default_transforms attribute deprecated.\n'
                          'Use get_transforms() method instead.',
                          DeprecationWarning)
            return list(self.default_transforms)
        return []

    # Deprecated; for compatibility.
    default_transforms = ()

    unknown_reference_resolvers = ()
    """List of functions to try to resolve unknown references.  Unknown
    references have a 'refname' attribute which doesn't correspond to any
    target in the document.  Called when the transforms in
    `docutils.transforms.references` are unable to find a correct target.
    The list should contain functions which will try to resolve unknown
    references, with the following signature::

        def reference_resolver(node):
            '''Returns boolean: true if resolved, false if not.'''

    If the function is able to resolve the reference, it should also remove
    the 'refname' attribute and mark the node as resolved::

        del node['refname']
        node.resolved = 1

    Each function must have a "priority" attribute which will affect the
    order the unknown_reference_resolvers are run::

        reference_resolver.priority = 100

    Override in subclasses."""


class Component(SettingsSpec, TransformSpec):

    """Base class for Docutils components."""

    component_type = None
    """Name of the component type ('reader', 'parser', 'writer').  Override
    in subclasses."""

    supported = ()
    """Names for this component.  Override in subclasses."""

    def supports(self, format):
        """
        Is `format` supported by this component?

        To be used by transforms to ask the dependent component if it
        supports a certain input context or output format.
        """
        return format in self.supported
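# A minimal illustration (a hedged sketch, not part of docutils itself) of
# how a concrete component fills in the hooks defined above: ``supported``
# drives ``supports()``, and ``get_transforms()`` is the modern replacement
# for the deprecated ``default_transforms`` attribute.


class _ExampleWriter(Component):

    component_type = 'writer'
    supported = ('html', 'xhtml')

    def get_transforms(self):
        # No transforms beyond those contributed by the base classes.
        return Component.get_transforms(self)

# _ExampleWriter().supports('html')   -> True
# _ExampleWriter().supports('latex')  -> False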
gpl-3.0
2,540,936,649,337,729,000
33.834101
78
0.694669
false
Jet-Streaming/googletest
googlemock/scripts/generator/cpp/ast.py
384
62773
#!/usr/bin/env python # # Copyright 2007 Neal Norwitz # Portions Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generate an Abstract Syntax Tree (AST) for C++.""" __author__ = '[email protected] (Neal Norwitz)' # TODO: # * Tokens should never be exported, need to convert to Nodes # (return types, parameters, etc.) # * Handle static class data for templatized classes # * Handle casts (both C++ and C-style) # * Handle conditions and loops (if/else, switch, for, while/do) # # TODO much, much later: # * Handle #define # * exceptions try: # Python 3.x import builtins except ImportError: # Python 2.x import __builtin__ as builtins import sys import traceback from cpp import keywords from cpp import tokenize from cpp import utils if not hasattr(builtins, 'reversed'): # Support Python 2.3 and earlier. def reversed(seq): for i in range(len(seq)-1, -1, -1): yield seq[i] if not hasattr(builtins, 'next'): # Support Python 2.5 and earlier. def next(obj): return obj.next() VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3) FUNCTION_NONE = 0x00 FUNCTION_CONST = 0x01 FUNCTION_VIRTUAL = 0x02 FUNCTION_PURE_VIRTUAL = 0x04 FUNCTION_CTOR = 0x08 FUNCTION_DTOR = 0x10 FUNCTION_ATTRIBUTE = 0x20 FUNCTION_UNKNOWN_ANNOTATION = 0x40 FUNCTION_THROW = 0x80 FUNCTION_OVERRIDE = 0x100 """ These are currently unused. Should really handle these properly at some point. TYPE_MODIFIER_INLINE = 0x010000 TYPE_MODIFIER_EXTERN = 0x020000 TYPE_MODIFIER_STATIC = 0x040000 TYPE_MODIFIER_CONST = 0x080000 TYPE_MODIFIER_REGISTER = 0x100000 TYPE_MODIFIER_VOLATILE = 0x200000 TYPE_MODIFIER_MUTABLE = 0x400000 TYPE_MODIFIER_MAP = { 'inline': TYPE_MODIFIER_INLINE, 'extern': TYPE_MODIFIER_EXTERN, 'static': TYPE_MODIFIER_STATIC, 'const': TYPE_MODIFIER_CONST, 'register': TYPE_MODIFIER_REGISTER, 'volatile': TYPE_MODIFIER_VOLATILE, 'mutable': TYPE_MODIFIER_MUTABLE, } """ _INTERNAL_TOKEN = 'internal' _NAMESPACE_POP = 'ns-pop' # TODO(nnorwitz): use this as a singleton for templated_types, etc # where we don't want to create a new empty dict each time. It is also const. class _NullDict(object): __contains__ = lambda self: False keys = values = items = iterkeys = itervalues = iteritems = lambda self: () # TODO(nnorwitz): move AST nodes into a separate module. 
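# For instance, a const pure-virtual method accumulates the modifiers
# FUNCTION_CONST | FUNCTION_VIRTUAL | FUNCTION_PURE_VIRTUAL == 0x07; the
# bits defined above are OR-ed together as the parser sees each keyword.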
class Node(object): """Base AST node.""" def __init__(self, start, end): self.start = start self.end = end def IsDeclaration(self): """Returns bool if this node is a declaration.""" return False def IsDefinition(self): """Returns bool if this node is a definition.""" return False def IsExportable(self): """Returns bool if this node exportable from a header file.""" return False def Requires(self, node): """Does this AST node require the definition of the node passed in?""" return False def XXX__str__(self): return self._StringHelper(self.__class__.__name__, '') def _StringHelper(self, name, suffix): if not utils.DEBUG: return '%s(%s)' % (name, suffix) return '%s(%d, %d, %s)' % (name, self.start, self.end, suffix) def __repr__(self): return str(self) class Define(Node): def __init__(self, start, end, name, definition): Node.__init__(self, start, end) self.name = name self.definition = definition def __str__(self): value = '%s %s' % (self.name, self.definition) return self._StringHelper(self.__class__.__name__, value) class Include(Node): def __init__(self, start, end, filename, system): Node.__init__(self, start, end) self.filename = filename self.system = system def __str__(self): fmt = '"%s"' if self.system: fmt = '<%s>' return self._StringHelper(self.__class__.__name__, fmt % self.filename) class Goto(Node): def __init__(self, start, end, label): Node.__init__(self, start, end) self.label = label def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.label)) class Expr(Node): def __init__(self, start, end, expr): Node.__init__(self, start, end) self.expr = expr def Requires(self, node): # TODO(nnorwitz): impl. return False def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.expr)) class Return(Expr): pass class Delete(Expr): pass class Friend(Expr): def __init__(self, start, end, expr, namespace): Expr.__init__(self, start, end, expr) self.namespace = namespace[:] class Using(Node): def __init__(self, start, end, names): Node.__init__(self, start, end) self.names = names def __str__(self): return self._StringHelper(self.__class__.__name__, str(self.names)) class Parameter(Node): def __init__(self, start, end, name, parameter_type, default): Node.__init__(self, start, end) self.name = name self.type = parameter_type self.default = default def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. return self.type.name == node.name def __str__(self): name = str(self.type) suffix = '%s %s' % (name, self.name) if self.default: suffix += ' = ' + ''.join([d.name for d in self.default]) return self._StringHelper(self.__class__.__name__, suffix) class _GenericDeclaration(Node): def __init__(self, start, end, name, namespace): Node.__init__(self, start, end) self.name = name self.namespace = namespace[:] def FullName(self): prefix = '' if self.namespace and self.namespace[-1]: prefix = '::'.join(self.namespace) + '::' return prefix + self.name def _TypeStringHelper(self, suffix): if self.namespace: names = [n or '<anonymous>' for n in self.namespace] suffix += ' in ' + '::'.join(names) return self._StringHelper(self.__class__.__name__, suffix) # TODO(nnorwitz): merge with Parameter in some way? class VariableDeclaration(_GenericDeclaration): def __init__(self, start, end, name, var_type, initial_value, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.type = var_type self.initial_value = initial_value def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. 
return self.type.name == node.name def ToString(self): """Return a string that tries to reconstitute the variable decl.""" suffix = '%s %s' % (self.type, self.name) if self.initial_value: suffix += ' = ' + self.initial_value return suffix def __str__(self): return self._StringHelper(self.__class__.__name__, self.ToString()) class Typedef(_GenericDeclaration): def __init__(self, start, end, name, alias, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.alias = alias def IsDefinition(self): return True def IsExportable(self): return True def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. name = node.name for token in self.alias: if token is not None and name == token.name: return True return False def __str__(self): suffix = '%s, %s' % (self.name, self.alias) return self._TypeStringHelper(suffix) class _NestedType(_GenericDeclaration): def __init__(self, start, end, name, fields, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.fields = fields def IsDefinition(self): return True def IsExportable(self): return True def __str__(self): suffix = '%s, {%s}' % (self.name, self.fields) return self._TypeStringHelper(suffix) class Union(_NestedType): pass class Enum(_NestedType): pass class Class(_GenericDeclaration): def __init__(self, start, end, name, bases, templated_types, body, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) self.bases = bases self.body = body self.templated_types = templated_types def IsDeclaration(self): return self.bases is None and self.body is None def IsDefinition(self): return not self.IsDeclaration() def IsExportable(self): return not self.IsDeclaration() def Requires(self, node): # TODO(nnorwitz): handle namespaces, etc. if self.bases: for token_list in self.bases: # TODO(nnorwitz): bases are tokens, do name comparision. for token in token_list: if token.name == node.name: return True # TODO(nnorwitz): search in body too. return False def __str__(self): name = self.name if self.templated_types: name += '<%s>' % self.templated_types suffix = '%s, %s, %s' % (name, self.bases, self.body) return self._TypeStringHelper(suffix) class Struct(Class): pass class Function(_GenericDeclaration): def __init__(self, start, end, name, return_type, parameters, modifiers, templated_types, body, namespace): _GenericDeclaration.__init__(self, start, end, name, namespace) converter = TypeConverter(namespace) self.return_type = converter.CreateReturnType(return_type) self.parameters = converter.ToParameters(parameters) self.modifiers = modifiers self.body = body self.templated_types = templated_types def IsDeclaration(self): return self.body is None def IsDefinition(self): return self.body is not None def IsExportable(self): if self.return_type and 'static' in self.return_type.modifiers: return False return None not in self.namespace def Requires(self, node): if self.parameters: # TODO(nnorwitz): parameters are tokens, do name comparision. for p in self.parameters: if p.name == node.name: return True # TODO(nnorwitz): search in body too. return False def __str__(self): # TODO(nnorwitz): add templated_types. 
suffix = ('%s %s(%s), 0x%02x, %s' % (self.return_type, self.name, self.parameters, self.modifiers, self.body)) return self._TypeStringHelper(suffix) class Method(Function): def __init__(self, start, end, name, in_class, return_type, parameters, modifiers, templated_types, body, namespace): Function.__init__(self, start, end, name, return_type, parameters, modifiers, templated_types, body, namespace) # TODO(nnorwitz): in_class could also be a namespace which can # mess up finding functions properly. self.in_class = in_class class Type(_GenericDeclaration): """Type used for any variable (eg class, primitive, struct, etc).""" def __init__(self, start, end, name, templated_types, modifiers, reference, pointer, array): """ Args: name: str name of main type templated_types: [Class (Type?)] template type info between <> modifiers: [str] type modifiers (keywords) eg, const, mutable, etc. reference, pointer, array: bools """ _GenericDeclaration.__init__(self, start, end, name, []) self.templated_types = templated_types if not name and modifiers: self.name = modifiers.pop() self.modifiers = modifiers self.reference = reference self.pointer = pointer self.array = array def __str__(self): prefix = '' if self.modifiers: prefix = ' '.join(self.modifiers) + ' ' name = str(self.name) if self.templated_types: name += '<%s>' % self.templated_types suffix = prefix + name if self.reference: suffix += '&' if self.pointer: suffix += '*' if self.array: suffix += '[]' return self._TypeStringHelper(suffix) # By definition, Is* are always False. A Type can only exist in # some sort of variable declaration, parameter, or return value. def IsDeclaration(self): return False def IsDefinition(self): return False def IsExportable(self): return False class TypeConverter(object): def __init__(self, namespace_stack): self.namespace_stack = namespace_stack def _GetTemplateEnd(self, tokens, start): count = 1 end = start while 1: token = tokens[end] end += 1 if token.name == '<': count += 1 elif token.name == '>': count -= 1 if count == 0: break return tokens[start:end-1], end def ToType(self, tokens): """Convert [Token,...] to [Class(...), ] useful for base classes. For example, code like class Foo : public Bar<x, y> { ... }; the "Bar<x, y>" portion gets converted to an AST. Returns: [Class(...), ...] """ result = [] name_tokens = [] reference = pointer = array = False def AddType(templated_types): # Partition tokens into name and modifier tokens. names = [] modifiers = [] for t in name_tokens: if keywords.IsKeyword(t.name): modifiers.append(t.name) else: names.append(t.name) name = ''.join(names) if name_tokens: result.append(Type(name_tokens[0].start, name_tokens[-1].end, name, templated_types, modifiers, reference, pointer, array)) del name_tokens[:] i = 0 end = len(tokens) while i < end: token = tokens[i] if token.name == '<': new_tokens, new_end = self._GetTemplateEnd(tokens, i+1) AddType(self.ToType(new_tokens)) # If there is a comma after the template, we need to consume # that here otherwise it becomes part of the name. i = new_end reference = pointer = array = False elif token.name == ',': AddType([]) reference = pointer = array = False elif token.name == '*': pointer = True elif token.name == '&': reference = True elif token.name == '[': pointer = True elif token.name == ']': pass else: name_tokens.append(token) i += 1 if name_tokens: # No '<' in the tokens, just a simple name and no template. 
AddType([]) return result def DeclarationToParts(self, parts, needs_name_removed): name = None default = [] if needs_name_removed: # Handle default (initial) values properly. for i, t in enumerate(parts): if t.name == '=': default = parts[i+1:] name = parts[i-1].name if name == ']' and parts[i-2].name == '[': name = parts[i-3].name i -= 1 parts = parts[:i-1] break else: if parts[-1].token_type == tokenize.NAME: name = parts.pop().name else: # TODO(nnorwitz): this is a hack that happens for code like # Register(Foo<T>); where it thinks this is a function call # but it's actually a declaration. name = '???' modifiers = [] type_name = [] other_tokens = [] templated_types = [] i = 0 end = len(parts) while i < end: p = parts[i] if keywords.IsKeyword(p.name): modifiers.append(p.name) elif p.name == '<': templated_tokens, new_end = self._GetTemplateEnd(parts, i+1) templated_types = self.ToType(templated_tokens) i = new_end - 1 # Don't add a spurious :: to data members being initialized. next_index = i + 1 if next_index < end and parts[next_index].name == '::': i += 1 elif p.name in ('[', ']', '='): # These are handled elsewhere. other_tokens.append(p) elif p.name not in ('*', '&', '>'): # Ensure that names have a space between them. if (type_name and type_name[-1].token_type == tokenize.NAME and p.token_type == tokenize.NAME): type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0)) type_name.append(p) else: other_tokens.append(p) i += 1 type_name = ''.join([t.name for t in type_name]) return name, type_name, templated_types, modifiers, default, other_tokens def ToParameters(self, tokens): if not tokens: return [] result = [] name = type_name = '' type_modifiers = [] pointer = reference = array = False first_token = None default = [] def AddParameter(end): if default: del default[0] # Remove flag. parts = self.DeclarationToParts(type_modifiers, True) (name, type_name, templated_types, modifiers, unused_default, unused_other_tokens) = parts parameter_type = Type(first_token.start, first_token.end, type_name, templated_types, modifiers, reference, pointer, array) p = Parameter(first_token.start, end, name, parameter_type, default) result.append(p) template_count = 0 for s in tokens: if not first_token: first_token = s if s.name == '<': template_count += 1 elif s.name == '>': template_count -= 1 if template_count > 0: type_modifiers.append(s) continue if s.name == ',': AddParameter(s.start) name = type_name = '' type_modifiers = [] pointer = reference = array = False first_token = None default = [] elif s.name == '*': pointer = True elif s.name == '&': reference = True elif s.name == '[': array = True elif s.name == ']': pass # Just don't add to type_modifiers. elif s.name == '=': # Got a default value. Add any value (None) as a flag. default.append(None) elif default: default.append(s) else: type_modifiers.append(s) AddParameter(tokens[-1].end) return result def CreateReturnType(self, return_type_seq): if not return_type_seq: return None start = return_type_seq[0].start end = return_type_seq[-1].end _, name, templated_types, modifiers, default, other_tokens = \ self.DeclarationToParts(return_type_seq, False) names = [n.name for n in other_tokens] reference = '&' in names pointer = '*' in names array = '[' in names return Type(start, end, name, templated_types, modifiers, reference, pointer, array) def GetTemplateIndices(self, names): # names is a list of strings. 
start = names.index('<') end = len(names) - 1 while end > 0: if names[end] == '>': break end -= 1 return start, end+1 class AstBuilder(object): def __init__(self, token_stream, filename, in_class='', visibility=None, namespace_stack=[]): self.tokens = token_stream self.filename = filename # TODO(nnorwitz): use a better data structure (deque) for the queue. # Switching directions of the "queue" improved perf by about 25%. # Using a deque should be even better since we access from both sides. self.token_queue = [] self.namespace_stack = namespace_stack[:] self.in_class = in_class if in_class is None: self.in_class_name_only = None else: self.in_class_name_only = in_class.split('::')[-1] self.visibility = visibility self.in_function = False self.current_token = None # Keep the state whether we are currently handling a typedef or not. self._handling_typedef = False self.converter = TypeConverter(self.namespace_stack) def HandleError(self, msg, token): printable_queue = list(reversed(self.token_queue[-20:])) sys.stderr.write('Got %s in %s @ %s %s\n' % (msg, self.filename, token, printable_queue)) def Generate(self): while 1: token = self._GetNextToken() if not token: break # Get the next token. self.current_token = token # Dispatch on the next token type. if token.token_type == _INTERNAL_TOKEN: if token.name == _NAMESPACE_POP: self.namespace_stack.pop() continue try: result = self._GenerateOne(token) if result is not None: yield result except: self.HandleError('exception', token) raise def _CreateVariable(self, pos_token, name, type_name, type_modifiers, ref_pointer_name_seq, templated_types, value=None): reference = '&' in ref_pointer_name_seq pointer = '*' in ref_pointer_name_seq array = '[' in ref_pointer_name_seq var_type = Type(pos_token.start, pos_token.end, type_name, templated_types, type_modifiers, reference, pointer, array) return VariableDeclaration(pos_token.start, pos_token.end, name, var_type, value, self.namespace_stack) def _GenerateOne(self, token): if token.token_type == tokenize.NAME: if (keywords.IsKeyword(token.name) and not keywords.IsBuiltinType(token.name)): method = getattr(self, 'handle_' + token.name) return method() elif token.name == self.in_class_name_only: # The token name is the same as the class, must be a ctor if # there is a paren. Otherwise, it's the return type. # Peek ahead to get the next token to figure out which. next = self._GetNextToken() self._AddBackToken(next) if next.token_type == tokenize.SYNTAX and next.name == '(': return self._GetMethod([token], FUNCTION_CTOR, None, True) # Fall through--handle like any other method. # Handle data or function declaration/definition. syntax = tokenize.SYNTAX temp_tokens, last_token = \ self._GetVarTokensUpTo(syntax, '(', ';', '{', '[') temp_tokens.insert(0, token) if last_token.name == '(': # If there is an assignment before the paren, # this is an expression, not a method. expr = bool([e for e in temp_tokens if e.name == '=']) if expr: new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';') temp_tokens.append(last_token) temp_tokens.extend(new_temp) last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0) if last_token.name == '[': # Handle array, this isn't a method, unless it's an operator. # TODO(nnorwitz): keep the size somewhere. 
# unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']') temp_tokens.append(last_token) if temp_tokens[-2].name == 'operator': temp_tokens.append(self._GetNextToken()) else: temp_tokens2, last_token = \ self._GetVarTokensUpTo(tokenize.SYNTAX, ';') temp_tokens.extend(temp_tokens2) if last_token.name == ';': # Handle data, this isn't a method. parts = self.converter.DeclarationToParts(temp_tokens, True) (name, type_name, templated_types, modifiers, default, unused_other_tokens) = parts t0 = temp_tokens[0] names = [t.name for t in temp_tokens] if templated_types: start, end = self.converter.GetTemplateIndices(names) names = names[:start] + names[end:] default = ''.join([t.name for t in default]) return self._CreateVariable(t0, name, type_name, modifiers, names, templated_types, default) if last_token.name == '{': self._AddBackTokens(temp_tokens[1:]) self._AddBackToken(last_token) method_name = temp_tokens[0].name method = getattr(self, 'handle_' + method_name, None) if not method: # Must be declaring a variable. # TODO(nnorwitz): handle the declaration. return None return method() return self._GetMethod(temp_tokens, 0, None, False) elif token.token_type == tokenize.SYNTAX: if token.name == '~' and self.in_class: # Must be a dtor (probably not in method body). token = self._GetNextToken() # self.in_class can contain A::Name, but the dtor will only # be Name. Make sure to compare against the right value. if (token.token_type == tokenize.NAME and token.name == self.in_class_name_only): return self._GetMethod([token], FUNCTION_DTOR, None, True) # TODO(nnorwitz): handle a lot more syntax. elif token.token_type == tokenize.PREPROCESSOR: # TODO(nnorwitz): handle more preprocessor directives. # token starts with a #, so remove it and strip whitespace. name = token.name[1:].lstrip() if name.startswith('include'): # Remove "include". name = name[7:].strip() assert name # Handle #include \<newline> "header-on-second-line.h". if name.startswith('\\'): name = name[1:].strip() assert name[0] in '<"', token assert name[-1] in '>"', token system = name[0] == '<' filename = name[1:-1] return Include(token.start, token.end, filename, system) if name.startswith('define'): # Remove "define". name = name[6:].strip() assert name value = '' for i, c in enumerate(name): if c.isspace(): value = name[i:].lstrip() name = name[:i] break return Define(token.start, token.end, name, value) if name.startswith('if') and name[2:3].isspace(): condition = name[3:].strip() if condition.startswith('0') or condition.startswith('(0)'): self._SkipIf0Blocks() return None def _GetTokensUpTo(self, expected_token_type, expected_token): return self._GetVarTokensUpTo(expected_token_type, expected_token)[0] def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens): last_token = self._GetNextToken() tokens = [] while (last_token.token_type != expected_token_type or last_token.name not in expected_tokens): tokens.append(last_token) last_token = self._GetNextToken() return tokens, last_token # TODO(nnorwitz): remove _IgnoreUpTo() it shouldn't be necesary. 
def _IgnoreUpTo(self, token_type, token): unused_tokens = self._GetTokensUpTo(token_type, token) def _SkipIf0Blocks(self): count = 1 while 1: token = self._GetNextToken() if token.token_type != tokenize.PREPROCESSOR: continue name = token.name[1:].lstrip() if name.startswith('endif'): count -= 1 if count == 0: break elif name.startswith('if'): count += 1 def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None): if GetNextToken is None: GetNextToken = self._GetNextToken # Assumes the current token is open_paren and we will consume # and return up to the close_paren. count = 1 token = GetNextToken() while 1: if token.token_type == tokenize.SYNTAX: if token.name == open_paren: count += 1 elif token.name == close_paren: count -= 1 if count == 0: break yield token token = GetNextToken() yield token def _GetParameters(self): return self._GetMatchingChar('(', ')') def GetScope(self): return self._GetMatchingChar('{', '}') def _GetNextToken(self): if self.token_queue: return self.token_queue.pop() return next(self.tokens) def _AddBackToken(self, token): if token.whence == tokenize.WHENCE_STREAM: token.whence = tokenize.WHENCE_QUEUE self.token_queue.insert(0, token) else: assert token.whence == tokenize.WHENCE_QUEUE, token self.token_queue.append(token) def _AddBackTokens(self, tokens): if tokens: if tokens[-1].whence == tokenize.WHENCE_STREAM: for token in tokens: token.whence = tokenize.WHENCE_QUEUE self.token_queue[:0] = reversed(tokens) else: assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens self.token_queue.extend(reversed(tokens)) def GetName(self, seq=None): """Returns ([tokens], next_token_info).""" GetNextToken = self._GetNextToken if seq is not None: it = iter(seq) GetNextToken = lambda: next(it) next_token = GetNextToken() tokens = [] last_token_was_name = False while (next_token.token_type == tokenize.NAME or (next_token.token_type == tokenize.SYNTAX and next_token.name in ('::', '<'))): # Two NAMEs in a row means the identifier should terminate. # It's probably some sort of variable declaration. if last_token_was_name and next_token.token_type == tokenize.NAME: break last_token_was_name = next_token.token_type == tokenize.NAME tokens.append(next_token) # Handle templated names. if next_token.name == '<': tokens.extend(self._GetMatchingChar('<', '>', GetNextToken)) last_token_was_name = True next_token = GetNextToken() return tokens, next_token def GetMethod(self, modifiers, templated_types): return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(') assert len(return_type_and_name) >= 1 return self._GetMethod(return_type_and_name, modifiers, templated_types, False) def _GetMethod(self, return_type_and_name, modifiers, templated_types, get_paren): template_portion = None if get_paren: token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token if token.name == '<': # Handle templatized dtors. template_portion = [token] template_portion.extend(self._GetMatchingChar('<', '>')) token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == '(', token name = return_type_and_name.pop() # Handle templatized ctors. 
if name.name == '>': index = 1 while return_type_and_name[index].name != '<': index += 1 template_portion = return_type_and_name[index:] + [name] del return_type_and_name[index:] name = return_type_and_name.pop() elif name.name == ']': rt = return_type_and_name assert rt[-1].name == '[', return_type_and_name assert rt[-2].name == 'operator', return_type_and_name name_seq = return_type_and_name[-2:] del return_type_and_name[-2:] name = tokenize.Token(tokenize.NAME, 'operator[]', name_seq[0].start, name.end) # Get the open paren so _GetParameters() below works. unused_open_paren = self._GetNextToken() # TODO(nnorwitz): store template_portion. return_type = return_type_and_name indices = name if return_type: indices = return_type[0] # Force ctor for templatized ctors. if name.name == self.in_class and not modifiers: modifiers |= FUNCTION_CTOR parameters = list(self._GetParameters()) del parameters[-1] # Remove trailing ')'. # Handling operator() is especially weird. if name.name == 'operator' and not parameters: token = self._GetNextToken() assert token.name == '(', token parameters = list(self._GetParameters()) del parameters[-1] # Remove trailing ')'. token = self._GetNextToken() while token.token_type == tokenize.NAME: modifier_token = token token = self._GetNextToken() if modifier_token.name == 'const': modifiers |= FUNCTION_CONST elif modifier_token.name == '__attribute__': # TODO(nnorwitz): handle more __attribute__ details. modifiers |= FUNCTION_ATTRIBUTE assert token.name == '(', token # Consume everything between the (parens). unused_tokens = list(self._GetMatchingChar('(', ')')) token = self._GetNextToken() elif modifier_token.name == 'throw': modifiers |= FUNCTION_THROW assert token.name == '(', token # Consume everything between the (parens). unused_tokens = list(self._GetMatchingChar('(', ')')) token = self._GetNextToken() elif modifier_token.name == 'override': modifiers |= FUNCTION_OVERRIDE elif modifier_token.name == modifier_token.name.upper(): # HACK(nnorwitz): assume that all upper-case names # are some macro we aren't expanding. modifiers |= FUNCTION_UNKNOWN_ANNOTATION else: self.HandleError('unexpected token', modifier_token) assert token.token_type == tokenize.SYNTAX, token # Handle ctor initializers. if token.name == ':': # TODO(nnorwitz): anything else to handle for initializer list? while token.name != ';' and token.name != '{': token = self._GetNextToken() # Handle pointer to functions that are really data but look # like method declarations. if token.name == '(': if parameters[0].name == '*': # name contains the return type. name = parameters.pop() # parameters contains the name of the data. modifiers = [p.name for p in parameters] # Already at the ( to open the parameter list. function_parameters = list(self._GetMatchingChar('(', ')')) del function_parameters[-1] # Remove trailing ')'. # TODO(nnorwitz): store the function_parameters. token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == ';', token return self._CreateVariable(indices, name.name, indices.name, modifiers, '', None) # At this point, we got something like: # return_type (type::*name_)(params); # This is a data member called name_ that is a function pointer. # With this code: void (sq_type::*field_)(string&); # We get: name=void return_type=[] parameters=sq_type ... field_ # TODO(nnorwitz): is return_type always empty? # TODO(nnorwitz): this isn't even close to being correct. # Just put in something so we don't crash and can move on. 
real_name = parameters[-1] modifiers = [p.name for p in self._GetParameters()] del modifiers[-1] # Remove trailing ')'. return self._CreateVariable(indices, real_name.name, indices.name, modifiers, '', None) if token.name == '{': body = list(self.GetScope()) del body[-1] # Remove trailing '}'. else: body = None if token.name == '=': token = self._GetNextToken() if token.name == 'default' or token.name == 'delete': # Ignore explicitly defaulted and deleted special members # in C++11. token = self._GetNextToken() else: # Handle pure-virtual declarations. assert token.token_type == tokenize.CONSTANT, token assert token.name == '0', token modifiers |= FUNCTION_PURE_VIRTUAL token = self._GetNextToken() if token.name == '[': # TODO(nnorwitz): store tokens and improve parsing. # template <typename T, size_t N> char (&ASH(T (&seq)[N]))[N]; tokens = list(self._GetMatchingChar('[', ']')) token = self._GetNextToken() assert token.name == ';', (token, return_type_and_name, parameters) # Looks like we got a method, not a function. if len(return_type) > 2 and return_type[-1].name == '::': return_type, in_class = \ self._GetReturnTypeAndClassName(return_type) return Method(indices.start, indices.end, name.name, in_class, return_type, parameters, modifiers, templated_types, body, self.namespace_stack) return Function(indices.start, indices.end, name.name, return_type, parameters, modifiers, templated_types, body, self.namespace_stack) def _GetReturnTypeAndClassName(self, token_seq): # Splitting the return type from the class name in a method # can be tricky. For example, Return::Type::Is::Hard::To::Find(). # Where is the return type and where is the class name? # The heuristic used is to pull the last name as the class name. # This includes all the templated type info. # TODO(nnorwitz): if there is only One name like in the # example above, punt and assume the last bit is the class name. # Ignore a :: prefix, if exists so we can find the first real name. i = 0 if token_seq[0].name == '::': i = 1 # Ignore a :: suffix, if exists. end = len(token_seq) - 1 if token_seq[end-1].name == '::': end -= 1 # Make a copy of the sequence so we can append a sentinel # value. This is required for GetName will has to have some # terminating condition beyond the last name. seq_copy = token_seq[i:end] seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0)) names = [] while i < end: # Iterate through the sequence parsing out each name. new_name, next = self.GetName(seq_copy[i:]) assert new_name, 'Got empty new_name, next=%s' % next # We got a pointer or ref. Add it to the name. if next and next.token_type == tokenize.SYNTAX: new_name.append(next) names.append(new_name) i += len(new_name) # Now that we have the names, it's time to undo what we did. # Remove the sentinel value. names[-1].pop() # Flatten the token sequence for the return type. return_type = [e for seq in names[:-1] for e in seq] # The class name is the last name. class_name = names[-1] return return_type, class_name def handle_bool(self): pass def handle_char(self): pass def handle_int(self): pass def handle_long(self): pass def handle_short(self): pass def handle_double(self): pass def handle_float(self): pass def handle_void(self): pass def handle_wchar_t(self): pass def handle_unsigned(self): pass def handle_signed(self): pass def _GetNestedType(self, ctor): name = None name_tokens, token = self.GetName() if name_tokens: name = ''.join([t.name for t in name_tokens]) # Handle forward declarations. 
if token.token_type == tokenize.SYNTAX and token.name == ';': return ctor(token.start, token.end, name, None, self.namespace_stack) if token.token_type == tokenize.NAME and self._handling_typedef: self._AddBackToken(token) return ctor(token.start, token.end, name, None, self.namespace_stack) # Must be the type declaration. fields = list(self._GetMatchingChar('{', '}')) del fields[-1] # Remove trailing '}'. if token.token_type == tokenize.SYNTAX and token.name == '{': next = self._GetNextToken() new_type = ctor(token.start, token.end, name, fields, self.namespace_stack) # A name means this is an anonymous type and the name # is the variable declaration. if next.token_type != tokenize.NAME: return new_type name = new_type token = next # Must be variable declaration using the type prefixed with keyword. assert token.token_type == tokenize.NAME, token return self._CreateVariable(token, token.name, name, [], '', None) def handle_struct(self): # Special case the handling typedef/aliasing of structs here. # It would be a pain to handle in the class code. name_tokens, var_token = self.GetName() if name_tokens: next_token = self._GetNextToken() is_syntax = (var_token.token_type == tokenize.SYNTAX and var_token.name[0] in '*&') is_variable = (var_token.token_type == tokenize.NAME and next_token.name == ';') variable = var_token if is_syntax and not is_variable: variable = next_token temp = self._GetNextToken() if temp.token_type == tokenize.SYNTAX and temp.name == '(': # Handle methods declared to return a struct. t0 = name_tokens[0] struct = tokenize.Token(tokenize.NAME, 'struct', t0.start-7, t0.start-2) type_and_name = [struct] type_and_name.extend(name_tokens) type_and_name.extend((var_token, next_token)) return self._GetMethod(type_and_name, 0, None, False) assert temp.name == ';', (temp, name_tokens, var_token) if is_syntax or (is_variable and not self._handling_typedef): modifiers = ['struct'] type_name = ''.join([t.name for t in name_tokens]) position = name_tokens[0] return self._CreateVariable(position, variable.name, type_name, modifiers, var_token.name, None) name_tokens.extend((var_token, next_token)) self._AddBackTokens(name_tokens) else: self._AddBackToken(var_token) return self._GetClass(Struct, VISIBILITY_PUBLIC, None) def handle_union(self): return self._GetNestedType(Union) def handle_enum(self): return self._GetNestedType(Enum) def handle_auto(self): # TODO(nnorwitz): warn about using auto? Probably not since it # will be reclaimed and useful for C++0x. pass def handle_register(self): pass def handle_const(self): pass def handle_inline(self): pass def handle_extern(self): pass def handle_static(self): pass def handle_virtual(self): # What follows must be a method. token = token2 = self._GetNextToken() if token.name == 'inline': # HACK(nnorwitz): handle inline dtors by ignoring 'inline'. 
token2 = self._GetNextToken() if token2.token_type == tokenize.SYNTAX and token2.name == '~': return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None) assert token.token_type == tokenize.NAME or token.name == '::', token return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(') # ) return_type_and_name.insert(0, token) if token2 is not token: return_type_and_name.insert(1, token2) return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL, None, False) def handle_volatile(self): pass def handle_mutable(self): pass def handle_public(self): assert self.in_class self.visibility = VISIBILITY_PUBLIC def handle_protected(self): assert self.in_class self.visibility = VISIBILITY_PROTECTED def handle_private(self): assert self.in_class self.visibility = VISIBILITY_PRIVATE def handle_friend(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens t0 = tokens[0] return Friend(t0.start, t0.end, tokens, self.namespace_stack) def handle_static_cast(self): pass def handle_const_cast(self): pass def handle_dynamic_cast(self): pass def handle_reinterpret_cast(self): pass def handle_new(self): pass def handle_delete(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens return Delete(tokens[0].start, tokens[0].end, tokens) def handle_typedef(self): token = self._GetNextToken() if (token.token_type == tokenize.NAME and keywords.IsKeyword(token.name)): # Token must be struct/enum/union/class. method = getattr(self, 'handle_' + token.name) self._handling_typedef = True tokens = [method()] self._handling_typedef = False else: tokens = [token] # Get the remainder of the typedef up to the semi-colon. tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';')) # TODO(nnorwitz): clean all this up. assert tokens name = tokens.pop() indices = name if tokens: indices = tokens[0] if not indices: indices = token if name.name == ')': # HACK(nnorwitz): Handle pointers to functions "properly". if (len(tokens) >= 4 and tokens[1].name == '(' and tokens[2].name == '*'): tokens.append(name) name = tokens[3] elif name.name == ']': # HACK(nnorwitz): Handle arrays properly. if len(tokens) >= 2: tokens.append(name) name = tokens[1] new_type = tokens if tokens and isinstance(tokens[0], tokenize.Token): new_type = self.converter.ToType(tokens)[0] return Typedef(indices.start, indices.end, name.name, new_type, self.namespace_stack) def handle_typeid(self): pass # Not needed yet. def handle_typename(self): pass # Not needed yet. def _GetTemplatedTypes(self): result = {} tokens = list(self._GetMatchingChar('<', '>')) len_tokens = len(tokens) - 1 # Ignore trailing '>'. i = 0 while i < len_tokens: key = tokens[i].name i += 1 if keywords.IsKeyword(key) or key == ',': continue type_name = default = None if i < len_tokens: i += 1 if tokens[i-1].name == '=': assert i < len_tokens, '%s %s' % (i, tokens) default, unused_next_token = self.GetName(tokens[i:]) i += len(default) else: if tokens[i-1].name != ',': # We got something like: Type variable. # Re-adjust the key (variable) and type_name (Type). key = tokens[i-1].name type_name = tokens[i-2] result[key] = (type_name, default) return result def handle_template(self): token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX, token assert token.name == '<', token templated_types = self._GetTemplatedTypes() # TODO(nnorwitz): for now, just ignore the template params. 
token = self._GetNextToken() if token.token_type == tokenize.NAME: if token.name == 'class': return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types) elif token.name == 'struct': return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types) elif token.name == 'friend': return self.handle_friend() self._AddBackToken(token) tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';') tokens.append(last) self._AddBackTokens(tokens) if last.name == '(': return self.GetMethod(FUNCTION_NONE, templated_types) # Must be a variable definition. return None def handle_true(self): pass # Nothing to do. def handle_false(self): pass # Nothing to do. def handle_asm(self): pass # Not needed yet. def handle_class(self): return self._GetClass(Class, VISIBILITY_PRIVATE, None) def _GetBases(self): # Get base classes. bases = [] while 1: token = self._GetNextToken() assert token.token_type == tokenize.NAME, token # TODO(nnorwitz): store kind of inheritance...maybe. if token.name not in ('public', 'protected', 'private'): # If inheritance type is not specified, it is private. # Just put the token back so we can form a name. # TODO(nnorwitz): it would be good to warn about this. self._AddBackToken(token) else: # Check for virtual inheritance. token = self._GetNextToken() if token.name != 'virtual': self._AddBackToken(token) else: # TODO(nnorwitz): store that we got virtual for this base. pass base, next_token = self.GetName() bases_ast = self.converter.ToType(base) assert len(bases_ast) == 1, bases_ast bases.append(bases_ast[0]) assert next_token.token_type == tokenize.SYNTAX, next_token if next_token.name == '{': token = next_token break # Support multiple inheritance. assert next_token.name == ',', next_token return bases, token def _GetClass(self, class_type, visibility, templated_types): class_name = None class_token = self._GetNextToken() if class_token.token_type != tokenize.NAME: assert class_token.token_type == tokenize.SYNTAX, class_token token = class_token else: # Skip any macro (e.g. storage class specifiers) after the # 'class' keyword. next_token = self._GetNextToken() if next_token.token_type == tokenize.NAME: self._AddBackToken(next_token) else: self._AddBackTokens([class_token, next_token]) name_tokens, token = self.GetName() class_name = ''.join([t.name for t in name_tokens]) bases = None if token.token_type == tokenize.SYNTAX: if token.name == ';': # Forward declaration. return class_type(class_token.start, class_token.end, class_name, None, templated_types, None, self.namespace_stack) if token.name in '*&': # Inline forward declaration. Could be method or data. name_token = self._GetNextToken() next_token = self._GetNextToken() if next_token.name == ';': # Handle data modifiers = ['class'] return self._CreateVariable(class_token, name_token.name, class_name, modifiers, token.name, None) else: # Assume this is a method. 
tokens = (class_token, token, name_token, next_token) self._AddBackTokens(tokens) return self.GetMethod(FUNCTION_NONE, None) if token.name == ':': bases, token = self._GetBases() body = None if token.token_type == tokenize.SYNTAX and token.name == '{': assert token.token_type == tokenize.SYNTAX, token assert token.name == '{', token ast = AstBuilder(self.GetScope(), self.filename, class_name, visibility, self.namespace_stack) body = list(ast.Generate()) if not self._handling_typedef: token = self._GetNextToken() if token.token_type != tokenize.NAME: assert token.token_type == tokenize.SYNTAX, token assert token.name == ';', token else: new_class = class_type(class_token.start, class_token.end, class_name, bases, None, body, self.namespace_stack) modifiers = [] return self._CreateVariable(class_token, token.name, new_class, modifiers, token.name, None) else: if not self._handling_typedef: self.HandleError('non-typedef token', token) self._AddBackToken(token) return class_type(class_token.start, class_token.end, class_name, bases, templated_types, body, self.namespace_stack) def handle_namespace(self): token = self._GetNextToken() # Support anonymous namespaces. name = None if token.token_type == tokenize.NAME: name = token.name token = self._GetNextToken() self.namespace_stack.append(name) assert token.token_type == tokenize.SYNTAX, token # Create an internal token that denotes when the namespace is complete. internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP, None, None) internal_token.whence = token.whence if token.name == '=': # TODO(nnorwitz): handle aliasing namespaces. name, next_token = self.GetName() assert next_token.name == ';', next_token self._AddBackToken(internal_token) else: assert token.name == '{', token tokens = list(self.GetScope()) # Replace the trailing } with the internal namespace pop token. tokens[-1] = internal_token # Handle namespace with nothing in it. self._AddBackTokens(tokens) return None def handle_using(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert tokens return Using(tokens[0].start, tokens[0].end, tokens) def handle_explicit(self): assert self.in_class # Nothing much to do. # TODO(nnorwitz): maybe verify the method name == class name. # This must be a ctor. return self.GetMethod(FUNCTION_CTOR, None) def handle_this(self): pass # Nothing to do. def handle_operator(self): # Pull off the next token(s?) and make that part of the method name. pass def handle_sizeof(self): pass def handle_case(self): pass def handle_switch(self): pass def handle_default(self): token = self._GetNextToken() assert token.token_type == tokenize.SYNTAX assert token.name == ':' def handle_if(self): pass def handle_else(self): pass def handle_return(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') if not tokens: return Return(self.current_token.start, self.current_token.end, None) return Return(tokens[0].start, tokens[0].end, tokens) def handle_goto(self): tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';') assert len(tokens) == 1, str(tokens) return Goto(tokens[0].start, tokens[0].end, tokens[0].name) def handle_try(self): pass # Not needed yet. def handle_catch(self): pass # Not needed yet. def handle_throw(self): pass # Not needed yet. 
    def handle_while(self):
        pass

    def handle_do(self):
        pass

    def handle_for(self):
        pass

    def handle_break(self):
        self._IgnoreUpTo(tokenize.SYNTAX, ';')

    def handle_continue(self):
        self._IgnoreUpTo(tokenize.SYNTAX, ';')


def BuilderFromSource(source, filename):
    """Utility method that returns an AstBuilder from source code.

    Args:
      source: 'C++ source code'
      filename: 'file1'

    Returns:
      AstBuilder
    """
    return AstBuilder(tokenize.GetTokens(source), filename)


def PrintIdentifiers(filename, should_print):
    """Prints all identifiers for a C++ source file.

    Args:
      filename: 'file1'
      should_print: predicate with signature: bool Function(token)
    """
    source = utils.ReadFile(filename, False)
    if source is None:
        sys.stderr.write('Unable to find: %s\n' % filename)
        return

    #print('Processing %s' % actual_filename)
    builder = BuilderFromSource(source, filename)
    try:
        for node in builder.Generate():
            if should_print(node):
                print(node.name)
    except KeyboardInterrupt:
        return
    except:
        # A parse error in one file should not stop identifier printing.
        pass


def PrintAllIdentifiers(filenames, should_print):
    """Prints all identifiers for each C++ source file in filenames.

    Args:
      filenames: ['file1', 'file2', ...]
      should_print: predicate with signature: bool Function(token)
    """
    for path in filenames:
        PrintIdentifiers(path, should_print)


def main(argv):
    for filename in argv[1:]:
        source = utils.ReadFile(filename)
        if source is None:
            continue

        print('Processing %s' % filename)
        builder = BuilderFromSource(source, filename)
        try:
            entire_ast = filter(None, builder.Generate())
        except KeyboardInterrupt:
            return
        except:
            # Already printed a warning, print the traceback and continue.
            traceback.print_exc()
        else:
            if utils.DEBUG:
                for ast in entire_ast:
                    print(ast)


if __name__ == '__main__':
    main(sys.argv)
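# Editor's note: a minimal usage sketch, not part of the upstream module. It
# assumes this file is importable alongside its sibling `tokenize` and `utils`
# helpers; the C++ snippet and the function name are made up for illustration.
def _demo_print_top_level_names():
    """Sketch: parse a small C++ snippet and print its top-level names."""
    source = 'class Foo { public: int Bar(); };\n'
    builder = BuilderFromSource(source, '<demo>')
    for node in builder.Generate():
        print(node.name)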
bsd-3-clause
848,491,998,429,159,300
35.222158
82
0.541507
false
norbertspiess/google-python-exercises
basic/solution/list1.py
209
3656
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0

# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/

# Basic list exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in list2.py.

# A. match_ends
# Given a list of strings, return the count of the number of
# strings where the string length is 2 or more and the first
# and last chars of the string are the same.
# Note: python does not have a ++ operator, but += works.
def match_ends(words):
  # +++your code here+++
  # LAB(begin solution)
  count = 0
  for word in words:
    if len(word) >= 2 and word[0] == word[-1]:
      count = count + 1
  return count
  # LAB(replace solution)
  # return
  # LAB(end solution)


# B. front_x
# Given a list of strings, return a list with the strings
# in sorted order, except group all the strings that begin with 'x' first.
# e.g. ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields
# ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
# Hint: this can be done by making 2 lists and sorting each of them
# before combining them.
def front_x(words):
  # +++your code here+++
  # LAB(begin solution)
  # Put each word into the x_list or the other_list.
  x_list = []
  other_list = []
  for w in words:
    if w.startswith('x'):
      x_list.append(w)
    else:
      other_list.append(w)
  return sorted(x_list) + sorted(other_list)
  # LAB(replace solution)
  # return
  # LAB(end solution)


# LAB(begin solution)
# Extract the last element from a tuple -- used for custom sorting below.
def last(a):
  return a[-1]
# LAB(end solution)

# C. sort_last
# Given a list of non-empty tuples, return a list sorted in increasing
# order by the last element in each tuple.
# e.g. [(1, 7), (1, 3), (3, 4, 5), (2, 2)] yields
# [(2, 2), (1, 3), (3, 4, 5), (1, 7)]
# Hint: use a custom key= function to extract the last element from each tuple.
def sort_last(tuples):
  # +++your code here+++
  # LAB(begin solution)
  return sorted(tuples, key=last)
  # LAB(replace solution)
  # return
  # LAB(end solution)


# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
  if got == expected:
    prefix = ' OK '
  else:
    prefix = '  X '
  print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))


# Calls the above functions with interesting inputs.
def main():
  print 'match_ends'
  test(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
  test(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
  test(match_ends(['aaa', 'be', 'abc', 'hello']), 1)

  print
  print 'front_x'
  test(front_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']),
       ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
  test(front_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']),
       ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
  test(front_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']),
       ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])

  print
  print 'sort_last'
  test(sort_last([(1, 3), (3, 2), (2, 1)]),
       [(2, 1), (3, 2), (1, 3)])
  test(sort_last([(2, 3), (1, 2), (3, 1)]),
       [(3, 1), (1, 2), (2, 3)])
  test(sort_last([(1, 7), (1, 3), (3, 4, 5), (2, 2)]),
       [(2, 2), (1, 3), (3, 4, 5), (1, 7)])


if __name__ == '__main__':
  main()
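# Editor's note: a hedged alternative sketch, not part of the exercise
# solutions above. It shows the same sort_last() behavior with an inline
# lambda key instead of the module-level helper last().
def sort_last_inline(tuples):
  # Equivalent to sorted(tuples, key=last), without the named helper.
  return sorted(tuples, key=lambda t: t[-1])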
apache-2.0
-3,504,756,563,686,536,000
29.722689
79
0.612691
false
ZhangXinNan/tensorflow
tensorflow/contrib/distributions/python/kernel_tests/mvn_full_covariance_test.py
14
7349
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for MultivariateNormalFullCovariance.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from scipy import stats from tensorflow.contrib import distributions from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops from tensorflow.python.platform import test ds = distributions rng = np.random.RandomState(42) class MultivariateNormalFullCovarianceTest(test.TestCase): def _random_pd_matrix(self, *shape): mat = rng.rand(*shape) chol = ds.matrix_diag_transform(mat, transform=nn_ops.softplus) chol = array_ops.matrix_band_part(chol, -1, 0) return math_ops.matmul(chol, chol, adjoint_b=True).eval() def testRaisesIfInitializedWithNonSymmetricMatrix(self): with self.test_session(): mu = [1., 2.] sigma = [[1., 0.], [1., 1.]] # Nonsingular, but not symmetric mvn = ds.MultivariateNormalFullCovariance(mu, sigma, validate_args=True) with self.assertRaisesOpError("not symmetric"): mvn.covariance().eval() def testNamePropertyIsSetByInitArg(self): with self.test_session(): mu = [1., 2.] sigma = [[1., 0.], [0., 1.]] mvn = ds.MultivariateNormalFullCovariance(mu, sigma, name="Billy") self.assertEqual(mvn.name, "Billy/") def testDoesNotRaiseIfInitializedWithSymmetricMatrix(self): with self.test_session(): mu = rng.rand(10) sigma = self._random_pd_matrix(10, 10) mvn = ds.MultivariateNormalFullCovariance(mu, sigma, validate_args=True) # Should not raise mvn.covariance().eval() def testLogPDFScalarBatch(self): with self.test_session(): mu = rng.rand(2) sigma = self._random_pd_matrix(2, 2) mvn = ds.MultivariateNormalFullCovariance(mu, sigma, validate_args=True) x = rng.rand(2) log_pdf = mvn.log_prob(x) pdf = mvn.prob(x) scipy_mvn = stats.multivariate_normal(mean=mu, cov=sigma) expected_log_pdf = scipy_mvn.logpdf(x) expected_pdf = scipy_mvn.pdf(x) self.assertEqual((), log_pdf.get_shape()) self.assertEqual((), pdf.get_shape()) self.assertAllClose(expected_log_pdf, log_pdf.eval()) self.assertAllClose(expected_pdf, pdf.eval()) def testLogPDFScalarBatchCovarianceNotProvided(self): with self.test_session(): mu = rng.rand(2) mvn = ds.MultivariateNormalFullCovariance( mu, covariance_matrix=None, validate_args=True) x = rng.rand(2) log_pdf = mvn.log_prob(x) pdf = mvn.prob(x) # Initialize a scipy_mvn with the default covariance. 
scipy_mvn = stats.multivariate_normal(mean=mu, cov=np.eye(2)) expected_log_pdf = scipy_mvn.logpdf(x) expected_pdf = scipy_mvn.pdf(x) self.assertEqual((), log_pdf.get_shape()) self.assertEqual((), pdf.get_shape()) self.assertAllClose(expected_log_pdf, log_pdf.eval()) self.assertAllClose(expected_pdf, pdf.eval()) def testShapes(self): with self.test_session(): mu = rng.rand(3, 5, 2) covariance = self._random_pd_matrix(3, 5, 2, 2) mvn = ds.MultivariateNormalFullCovariance( mu, covariance, validate_args=True) # Shapes known at graph construction time. self.assertEqual((2,), tuple(mvn.event_shape.as_list())) self.assertEqual((3, 5), tuple(mvn.batch_shape.as_list())) # Shapes known at runtime. self.assertEqual((2,), tuple(mvn.event_shape_tensor().eval())) self.assertEqual((3, 5), tuple(mvn.batch_shape_tensor().eval())) def _random_mu_and_sigma(self, batch_shape, event_shape): # This ensures sigma is positive def. mat_shape = batch_shape + event_shape + event_shape mat = rng.randn(*mat_shape) perm = np.arange(mat.ndim) perm[-2:] = [perm[-1], perm[-2]] sigma = np.matmul(mat, np.transpose(mat, perm)) mu_shape = batch_shape + event_shape mu = rng.randn(*mu_shape) return mu, sigma def testKLBatch(self): batch_shape = [2] event_shape = [3] with self.test_session(): mu_a, sigma_a = self._random_mu_and_sigma(batch_shape, event_shape) mu_b, sigma_b = self._random_mu_and_sigma(batch_shape, event_shape) mvn_a = ds.MultivariateNormalFullCovariance( loc=mu_a, covariance_matrix=sigma_a, validate_args=True) mvn_b = ds.MultivariateNormalFullCovariance( loc=mu_b, covariance_matrix=sigma_b, validate_args=True) kl = ds.kl_divergence(mvn_a, mvn_b) self.assertEqual(batch_shape, kl.get_shape()) kl_v = kl.eval() expected_kl_0 = _compute_non_batch_kl(mu_a[0, :], sigma_a[0, :, :], mu_b[0, :], sigma_b[0, :]) expected_kl_1 = _compute_non_batch_kl(mu_a[1, :], sigma_a[1, :, :], mu_b[1, :], sigma_b[1, :]) self.assertAllClose(expected_kl_0, kl_v[0]) self.assertAllClose(expected_kl_1, kl_v[1]) def testKLBatchBroadcast(self): batch_shape = [2] event_shape = [3] with self.test_session(): mu_a, sigma_a = self._random_mu_and_sigma(batch_shape, event_shape) # No batch shape. mu_b, sigma_b = self._random_mu_and_sigma([], event_shape) mvn_a = ds.MultivariateNormalFullCovariance( loc=mu_a, covariance_matrix=sigma_a, validate_args=True) mvn_b = ds.MultivariateNormalFullCovariance( loc=mu_b, covariance_matrix=sigma_b, validate_args=True) kl = ds.kl_divergence(mvn_a, mvn_b) self.assertEqual(batch_shape, kl.get_shape()) kl_v = kl.eval() expected_kl_0 = _compute_non_batch_kl(mu_a[0, :], sigma_a[0, :, :], mu_b, sigma_b) expected_kl_1 = _compute_non_batch_kl(mu_a[1, :], sigma_a[1, :, :], mu_b, sigma_b) self.assertAllClose(expected_kl_0, kl_v[0]) self.assertAllClose(expected_kl_1, kl_v[1]) def _compute_non_batch_kl(mu_a, sigma_a, mu_b, sigma_b): """Non-batch KL for N(mu_a, sigma_a), N(mu_b, sigma_b).""" # Check using numpy operations # This mostly repeats the tensorflow code _kl_mvn_mvn(), but in numpy. # So it is important to also check that KL(mvn, mvn) = 0. sigma_b_inv = np.linalg.inv(sigma_b) t = np.trace(sigma_b_inv.dot(sigma_a)) q = (mu_b - mu_a).dot(sigma_b_inv).dot(mu_b - mu_a) k = mu_a.shape[0] l = np.log(np.linalg.det(sigma_b) / np.linalg.det(sigma_a)) return 0.5 * (t + q - k + l) if __name__ == "__main__": test.main()
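# Editor's note: a minimal sanity-check sketch, not part of the original test
# file. It exercises the numpy helper _compute_non_batch_kl() above on two
# identical distributions, where the divergence must be zero; the mean and
# covariance values are made up for illustration.
def _kl_of_identical_mvns_is_zero():
  mu = np.array([0.5, -1.0, 2.0])
  # Symmetric and diagonally dominant, hence positive definite.
  sigma = np.array([[2.0, 0.3, 0.0],
                    [0.3, 1.0, 0.1],
                    [0.0, 0.1, 1.5]])
  # trace term = k, quadratic term = 0, log-det term = 0 => KL = 0.
  assert abs(_compute_non_batch_kl(mu, sigma, mu, sigma)) < 1e-10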
apache-2.0
-6,034,931,142,505,606,000
35.20197
80
0.626616
false
CodeWingX/yowsup
yowsup/layers/axolotl/protocolentities/receipt_outgoing_retry.py
35
2457
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.layers.axolotl.protocolentities.iq_keys_get_result import ResultGetKeysIqProtocolEntity

class RetryOutgoingReceiptProtocolEntity(OutgoingReceiptProtocolEntity):
    '''
    <receipt type="retry" to="[email protected]" id="1415389947-12" t="1432833777">
        <retry count="1" t="1432833266" id="1415389947-12" v="1">
        </retry>
        <registration>
            HEX:xxxxxxxxx
        </registration>
    </receipt>
    '''
    def __init__(self, _id, to, t, v = "1", count = "1", regData = ""):
        super(RetryOutgoingReceiptProtocolEntity, self).__init__(_id, to)
        self.setRetryData(t, v, count, regData)

    def setRetryData(self, t, v, count, regData):
        self.t = int(t)
        self.v = int(v)
        self.count = int(count)
        self.regData = regData

    def setRegData(self, regData):
        '''
        In axolotl layer:
            regData = self.store.getLocalRegistrationId()
        '''
        self.regData = ResultGetKeysIqProtocolEntity._intToBytes(regData)

    def toProtocolTreeNode(self):
        node = super(RetryOutgoingReceiptProtocolEntity, self).toProtocolTreeNode()
        node.setAttribute("type", "retry")
        retry = ProtocolTreeNode("retry", {"count": str(self.count), "t": str(self.t), "id": self.getId(), "v": str(self.v)})
        node.addChild(retry)
        registration = ProtocolTreeNode("registration", data = self.regData)
        node.addChild(registration)
        return node

    def __str__(self):
        out = super(RetryOutgoingReceiptProtocolEntity, self).__str__()
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = OutgoingReceiptProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = RetryOutgoingReceiptProtocolEntity
        retryNode = node.getChild("retry")
        entity.setRetryData(retryNode["t"], retryNode["v"], retryNode["count"], node.getChild("registration").data)
        return entity

    @staticmethod
    def fromMesageNode(MessageNodeToBeRetried):
        return RetryOutgoingReceiptProtocolEntity(
            MessageNodeToBeRetried.getAttributeValue("id"),
            MessageNodeToBeRetried.getAttributeValue("from"),
            MessageNodeToBeRetried.getAttributeValue("t"),
            MessageNodeToBeRetried.getChild("enc").getAttributeValue("v")
        )
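# Editor's note: a hedged construction sketch, not part of the upstream file.
# The jid, message id, timestamp, and registration id below are made-up
# illustration values patterned on the class docstring above.
def _build_demo_retry_receipt():
    entity = RetryOutgoingReceiptProtocolEntity("1415389947-12", "[email protected]", 1432833777)
    entity.setRegData(1234)  # normally self.store.getLocalRegistrationId()
    return entity.toProtocolTreeNode()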
gpl-3.0
-4,562,038,323,375,983,000
38.645161
119
0.672365
false
jspan/Open-Knesset
accounts/migrations/0002_add_valid_email_group.py
11
4805
# encoding: utf-8 import datetime from south.db import db from south.v2 import DataMigration from django.db import models from django.contrib.auth.models import User,Group,Permission class Migration(DataMigration): def forwards(self, orm): (g,created) = Group.objects.get_or_create(name='Valid Email') if created: g.save() p = Permission.objects.get(name='Can add comment') g.permissions.add(p) g.permissions.add(Permission.objects.get(name='Can add annotation')) for u in User.objects.all(): if p in u.user_permissions.all(): u.groups.add(g) u.user_permissions.remove(p) print "user %s: permission->group" % u.username def backwards(self, orm): p = Permission.objects.get(name='Can add comment') g = Group.objects.get(name='Valid Email') for u in User.objects.all(): if g in u.groups.all(): print "user %s: group->permission" % u.username u.user_permissions.add(p) u.groups.remove(g) g.delete() models = { 'accounts.emailvalidation': { 'Meta': {'object_name': 'EmailValidation'}, 'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'date_requested': ('django.db.models.fields.DateField', [], {}), 'email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 
'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['accounts']
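# Editor's note: a hedged sketch, not part of the migration itself. After
# forwards() runs, the per-user 'Can add comment' permission is replaced by
# membership in the 'Valid Email' group, so code elsewhere might check it
# like this; the helper name is made up for illustration.
def _user_has_valid_email(user):
    return user.groups.filter(name='Valid Email').exists()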
bsd-3-clause
-8,475,411,838,066,501,000
55.529412
163
0.549428
false
vyvojer/ploev
tests/test_calc.py
1
3140
import unittest from ploev import calc from ploev.calc import Calc, GameCalc from ploev.ppt import OddsOracle class CalcModuleTest(unittest.TestCase): def test_close_parenthesis(self): self.assertEqual(calc.close_parenthesis('77,KK'), '(77,KK)') self.assertEqual(calc.close_parenthesis('(77,KK)'), '(77,KK)') self.assertEqual(calc.close_parenthesis('(77,KK):(ss)'), '((77,KK):(ss))') def test_create_cumulative_ranges(self): ranges = [ '(77,KK)', '(74,K4,K7,44,77,KK)', '*', ] expected = [ '(77,KK)', '(74,K4,K7,44,77,KK)!(77,KK)', '*!(74,K4,K7,44,77,KK)!(77,KK)', ] cumulative_ranges = calc.create_cumulative_ranges(ranges) self.assertEqual(cumulative_ranges, expected) class CalcTest(unittest.TestCase): @classmethod def setUpClass(cls): odds_oracle = OddsOracle() odds_oracle.trials = 100000 odds_oracle.seconds = 1 cls.odds_oracle = odds_oracle cls.calc = Calc(odds_oracle) def test_range_distribution(self): main_range = '75%' board = '7c Kh 4s' sub_ranges = [ '77,KK', '74,K4,K7,44,77,KK', '*' ] hero = '8c4h6s4c' rd = self.calc.range_distribution(main_range, sub_ranges, board, players=[hero]) self.assertAlmostEqual(rd[0].fraction, 0.041, delta= 0.01) self.assertAlmostEqual(rd[1].fraction, 0.0733, delta=0.01) self.assertAlmostEqual(rd[2].fraction, 0.885, delta=0.01) self.assertAlmostEqual(rd[0].equity, 0.23, delta= 0.02) self.assertAlmostEqual(rd[1].equity, 0.79, delta=0.02) self.assertAlmostEqual(rd[2].equity, 0.88, delta=0.02) def test_range_distribution_no_cumulative(self): main_range = '75%' board = '7c Kh 4s' sub_ranges = [ '77,KK', '74,K4,K7,44,77,KK', '*' ] players = ['8c4h6s4c'] rd = self.calc.range_distribution(main_range, sub_ranges, board, players, cumulative=False) self.assertAlmostEqual(rd[0].fraction, 0.041, delta= 0.01) self.assertAlmostEqual(rd[1].fraction, 0.115, delta=0.01) self.assertAlmostEqual(rd[2].fraction, 1, delta=0.01) self.assertAlmostEqual(rd[0].equity, 0.23, delta= 0.02) self.assertAlmostEqual(rd[1].equity, 0.59, delta=0.02) self.assertAlmostEqual(rd[2].equity, 0.84, delta=0.02) def test_equity(self): players = [ '3s4s5d6d', '10%', 'AA',] equities = self.calc.equity(players) self.assertAlmostEqual(equities[0], 0.34, delta=0.01) self.assertAlmostEqual(equities[1], 0.30, delta=0.01) self.assertAlmostEqual(equities[2], 0.36, delta=0.01) equity = self.calc.equity(players, hero_only=True) self.assertAlmostEqual(equity, 0.34, delta=0.01) players = ['As2sTc7h', '60%!$3b10i'] board = 'Ks3s9d' equity = self.calc.equity(players, board=board, hero_only=True) self.assertAlmostEqual(equity, 0.52, delta=0.01)
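# Editor's note: a hedged sketch, not part of the test suite. It illustrates
# the cumulative-range behavior asserted in test_create_cumulative_ranges
# above, and needs no running OddsOracle instance.
def _demo_cumulative_ranges():
    # Expected result (per the test above):
    # ['(77,KK)', '(74,K4,K7,44,77,KK)!(77,KK)', '*!(74,K4,K7,44,77,KK)!(77,KK)']
    return calc.create_cumulative_ranges(['(77,KK)', '(74,K4,K7,44,77,KK)', '*'])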
gpl-3.0
-4,430,519,952,128,349,000
33.888889
99
0.588535
false
SoftwareDefinedBuildings/smap
python/smap/drivers/labjack/labjackpython/u6.py
6
83591
""" Name: u6.py Desc: Defines the U6 class, which makes working with a U6 much easier. All of the low-level functions for the U6 are implemented as functions of the U6 class. There are also a handful additional functions which improve upon the interface provided by the low-level functions. To learn about the low-level functions, please see Section 5.2 of the U6 User's Guide: http://labjack.com/support/u6/users-guide/5.2 """ from LabJackPython import * import struct, ConfigParser def openAllU6(): """ A helpful function which will open all the connected U6s. Returns a dictionary where the keys are the serialNumber, and the value is the device object. """ returnDict = dict() for i in range(deviceCount(6)): d = U6(firstFound = False, devNumber = i+1) returnDict[str(d.serialNumber)] = d return returnDict def dumpPacket(buffer): """ Name: dumpPacket(buffer) Args: byte array Desc: Returns hex value of all bytes in the buffer """ return repr([ hex(x) for x in buffer ]) def getBit(n, bit): """ Name: getBit(n, bit) Args: n, the original integer you want the bit of bit, the index of the bit you want Desc: Returns the bit at position "bit" of integer "n" >>> n = 5 >>> bit = 2 >>> getBit(n, bit) 1 >>> bit = 0 >>> getBit(n, bit) 1 """ return int(bool((int(n) & (1 << bit)) >> bit)) def toBitList(inbyte): """ Name: toBitList(inbyte) Args: a byte Desc: Converts a byte into list for access to individual bits >>> inbyte = 5 >>> toBitList(inbyte) [1, 0, 1, 0, 0, 0, 0, 0] """ return [ getBit(inbyte, b) for b in range(8) ] def dictAsString(d): """Helper function that returns a string representation of a dictionary""" s = "{" for key, val in sorted(d.items()): s += "%s: %s, " % (key, val) s = s.rstrip(", ") # Nuke the trailing comma s += "}" return s class CalibrationInfo(object): """ A class to hold the calibration info for a U6 """ def __init__(self): # A flag to tell difference between nominal and actual values. 
self.nominal = True # Positive Channel calibration self.ain10vSlope = 3.1580578 * (10 ** -4) self.ain10vOffset = -10.5869565220 self.ain1vSlope = 3.1580578 * (10 ** -5) self.ain1vOffset = -1.05869565220 self.ain100mvSlope = 3.1580578 * (10 ** -6) self.ain100mvOffset = -0.105869565220 self.ain10mvSlope = 3.1580578 * (10 ** -7) self.ain10mvOffset = -0.0105869565220 self.ainSlope = [self.ain10vSlope, self.ain1vSlope, self.ain100mvSlope, self.ain10mvSlope] self.ainOffset = [ self.ain10vOffset, self.ain1vOffset, self.ain100mvOffset, self.ain10mvOffset ] # Negative Channel calibration self.ain10vNegSlope = -3.15805800 * (10 ** -4) self.ain10vCenter = 33523.0 self.ain1vNegSlope = -3.15805800 * (10 ** -5) self.ain1vCenter = 33523.0 self.ain100mvNegSlope = -3.15805800 * (10 ** -6) self.ain100mvCenter = 33523.0 self.ain10mvNegSlope = -3.15805800 * (10 ** -7) self.ain10mvCenter = 33523.0 self.ainNegSlope = [ self.ain10vNegSlope, self.ain1vNegSlope, self.ain100mvNegSlope, self.ain10mvNegSlope ] self.ainCenter = [ self.ain10vCenter, self.ain1vCenter, self.ain100mvCenter, self.ain10mvCenter ] # Miscellaneous self.dac0Slope = 13200.0 self.dac0Offset = 0 self.dac1Slope = 13200.0 self.dac1Offset = 0 self.currentOutput0 = 0.0000100000 self.currentOutput1 = 0.0002000000 self.temperatureSlope = -92.379 self.temperatureOffset = 465.129 # Hi-Res ADC stuff # Positive Channel calibration self.proAin10vSlope = 3.1580578 * (10 ** -4) self.proAin10vOffset = -10.5869565220 self.proAin1vSlope = 3.1580578 * (10 ** -5) self.proAin1vOffset = -1.05869565220 self.proAin100mvSlope = 3.1580578 * (10 ** -6) self.proAin100mvOffset = -0.105869565220 self.proAin10mvSlope = 3.1580578 * (10 ** -7) self.proAin10mvOffset = -0.0105869565220 # Negative Channel calibration self.proAin10vNegSlope = -3.15805800 * (10 ** -4) self.proAin10vCenter = 33523.0 self.proAin1vNegSlope = -3.15805800 * (10 ** -5) self.proAin1vCenter = 33523.0 self.proAin100mvNegSlope = -3.15805800 * (10 ** -6) self.proAin100mvCenter = 33523.0 self.proAin10mvNegSlope = -3.15805800 * (10 ** -7) self.proAin10mvCenter = 33523.0 def __str__(self): return str(self.__dict__) class U6(Device): """ U6 Class for all U6 specific low-level commands. Example: >>> import u6 >>> d = u6.U6() >>> print d.configU6() {'SerialNumber': 320032102, ... , 'FirmwareVersion': '1.26'} """ def __init__(self, debug = False, autoOpen = True, **kargs): """ Name: U6.__init__(self, debug = False, autoOpen = True, **kargs) Args: debug, Do you want debug information? autoOpen, If true, then the constructor will call open for you **kargs, The arguments to be passed to open. Desc: Your basic constructor. """ Device.__init__(self, None, devType = 6) self.firmwareVersion = 0 self.bootloaderVersion = 0 self.hardwareVersion = 0 self.productId = 0 self.fioDirection = [None] * 8 self.fioState = [None] * 8 self.eioDirection = [None] * 8 self.eioState = [None] * 8 self.cioDirection = [None] * 8 self.cioState = [None] * 8 self.dac1Enable = 0 self.dac0 = 0 self.dac1 = 0 self.calInfo = CalibrationInfo() self.productName = "U6" self.debug = debug if autoOpen: self.open(**kargs) def open(self, localId = None, firstFound = True, serial = None, devNumber = None, handleOnly = False, LJSocket = None): """ Name: U6.open(localId = None, firstFound = True, devNumber = None, handleOnly = False, LJSocket = None) Args: firstFound, If True, use the first found U6 serial, open a U6 with the given serial number localId, open a U6 with the given local id. 
devNumber, open a U6 with the given devNumber handleOnly, if True, LabJackPython will only open a handle LJSocket, set to "<ip>:<port>" to connect to LJSocket Desc: Opens a U6 for reading and writing. >>> myU6 = u6.U6(autoOpen = False) >>> myU6.open() """ Device.open(self, 6, firstFound = firstFound, serial = serial, localId = localId, devNumber = devNumber, handleOnly = handleOnly, LJSocket = LJSocket ) def configU6(self, LocalID = None): """ Name: U6.configU6(LocalID = None) Args: LocalID, if set, will write the new value to U6 Desc: Writes the Local ID, and reads some hardware information. >>> myU6 = u6.U6() >>> myU6.configU6() {'BootloaderVersion': '6.15', 'FirmwareVersion': '0.88', 'HardwareVersion': '2.0', 'LocalID': 1, 'ProductID': 6, 'SerialNumber': 360005087, 'VersionInfo': 4} """ command = [ 0 ] * 26 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x0A command[3] = 0x08 #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) if LocalID != None: command[6] = (1 << 3) command[8] = LocalID #command[7] = Reserved #command[9-25] = Reserved try: result = self._writeRead(command, 38, [0xF8, 0x10, 0x08]) except LabJackException, e: if e.errorCode is 4: print "NOTE: ConfigU6 returned an error of 4. This probably means you are using U6 with a *really old* firmware. Please upgrade your U6's firmware as soon as possible." result = self._writeRead(command, 38, [0xF8, 0x10, 0x08], checkBytes = False) else: raise e self.firmwareVersion = "%s.%02d" % (result[10], result[9]) self.bootloaderVersion = "%s.%02d" % (result[12], result[11]) self.hardwareVersion = "%s.%02d" % (result[14], result[13]) self.serialNumber = struct.unpack("<I", struct.pack(">BBBB", *result[15:19]))[0] self.productId = struct.unpack("<H", struct.pack(">BB", *result[19:21]))[0] self.localId = result[21] self.versionInfo = result[37] self.deviceName = 'U6' if self.versionInfo == 12: self.deviceName = 'U6-Pro' return { 'FirmwareVersion' : self.firmwareVersion, 'BootloaderVersion' : self.bootloaderVersion, 'HardwareVersion' : self.hardwareVersion, 'SerialNumber' : self.serialNumber, 'ProductID' : self.productId, 'LocalID' : self.localId, 'VersionInfo' : self.versionInfo, 'DeviceName' : self.deviceName } def configIO(self, NumberTimersEnabled = None, EnableCounter1 = None, EnableCounter0 = None, TimerCounterPinOffset = None, EnableUART = None): """ Name: U6.configIO(NumberTimersEnabled = None, EnableCounter1 = None, EnableCounter0 = None, TimerCounterPinOffset = None) Args: NumberTimersEnabled, Number of timers to enable EnableCounter1, Set to True to enable counter 1, F to disable EnableCounter0, Set to True to enable counter 0, F to disable TimerCounterPinOffset, where should the timers/counters start if all args are None, command just reads. Desc: Writes and reads the current IO configuration. 
>>> myU6 = u6.U6() >>> myU6.configIO() {'Counter0Enabled': False, 'Counter1Enabled': False, 'NumberTimersEnabled': 0, 'TimerCounterPinOffset': 0} """ command = [ 0 ] * 16 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x05 command[3] = 0x0B #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) if NumberTimersEnabled != None: command[6] = 1 command[7] = NumberTimersEnabled if EnableCounter0 != None: command[6] = 1 if EnableCounter0: command[8] = 1 if EnableCounter1 != None: command[6] = 1 if EnableCounter1: command[8] |= (1 << 1) if TimerCounterPinOffset != None: command[6] = 1 command[9] = TimerCounterPinOffset if EnableUART is not None: command[6] |= 1 command[6] |= (1 << 5) result = self._writeRead(command, 16, [0xf8, 0x05, 0x0B]) return { 'NumberTimersEnabled' : result[8], 'Counter0Enabled' : bool(result[9] & 1), 'Counter1Enabled' : bool( (result[9] >> 1) & 1), 'TimerCounterPinOffset' : result[10] } def configTimerClock(self, TimerClockBase = None, TimerClockDivisor = None): """ Name: U6.configTimerClock(TimerClockBase = None, TimerClockDivisor = None) Args: TimerClockBase, which timer base to use TimerClockDivisor, set the divisor if all args are None, command just reads. Also, if you cannot set the divisor without setting the base. Desc: Writes and read the timer clock configuration. >>> myU6 = u6.U6() >>> myU6.configTimerClock() {'TimerClockDivisor': 256, 'TimerClockBase': 2} """ command = [ 0 ] * 10 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x02 command[3] = 0x0A #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) #command[6] = Reserved #command[7] = Reserved if TimerClockBase != None: command[8] = (1 << 7) command[8] |= TimerClockBase & 7 if TimerClockDivisor != None: command[9] = TimerClockDivisor result = self._writeRead(command, 10, [0xF8, 0x2, 0x0A]) divisor = result[9] if divisor == 0: divisor = 256 return { 'TimerClockBase' : (result[8] & 7), 'TimerClockDivisor' : divisor } def _buildBuffer(self, sendBuffer, readLen, commandlist): for cmd in commandlist: if isinstance(cmd, FeedbackCommand): sendBuffer += cmd.cmdBytes readLen += cmd.readLen elif isinstance(cmd, list): sendBuffer, readLen = self._buildBuffer(sendBuffer, readLen, cmd) return (sendBuffer, readLen) def _buildFeedbackResults(self, rcvBuffer, commandlist, results, i): for cmd in commandlist: if isinstance(cmd, FeedbackCommand): results.append(cmd.handle(rcvBuffer[i:i+cmd.readLen])) i += cmd.readLen elif isinstance(cmd, list): self._buildFeedbackResults(rcvBuffer, cmd, results, i) return results def getFeedback(self, *commandlist): """ Name: getFeedback(commandlist) Args: the FeedbackCommands to run Desc: Forms the commandlist into a packet, sends it to the U6, and reads the response. >>> myU6 = U6() >>> ledCommand = u6.LED(False) >>> internalTempCommand = u6.AIN(30, 31, True) >>> myU6.getFeedback(ledCommand, internalTempCommand) [None, 23200] OR if you like the list version better: >>> myU6 = U6() >>> ledCommand = u6.LED(False) >>> internalTempCommand = u6.AIN(30, 31, True) >>> commandList = [ ledCommand, internalTempCommand ] >>> myU6.getFeedback(commandList) [None, 23200] """ sendBuffer = [0] * 7 sendBuffer[1] = 0xF8 readLen = 9 sendBuffer, readLen = self._buildBuffer(sendBuffer, readLen, commandlist) if len(sendBuffer) % 2: sendBuffer += [0] sendBuffer[2] = len(sendBuffer) / 2 - 3 if readLen % 2: readLen += 1 if len(sendBuffer) > MAX_USB_PACKET_LENGTH: raise LabJackException("ERROR: The feedback command you are attempting to send is bigger than 64 bytes ( %s bytes ). 
Break your commands up into separate calls to getFeedback()." % len(sendBuffer)) if readLen > MAX_USB_PACKET_LENGTH: raise LabJackException("ERROR: The feedback command you are attempting to send would yield a response that is greater than 64 bytes ( %s bytes ). Break your commands up into separate calls to getFeedback()." % readLen) rcvBuffer = self._writeRead(sendBuffer, readLen, [], checkBytes = False, stream = False, checksum = True) # Check the response for errors try: self._checkCommandBytes(rcvBuffer, [0xF8]) if rcvBuffer[3] != 0x00: raise LabJackException("Got incorrect command bytes") except LowlevelErrorException, e: if isinstance(commandlist[0], list): culprit = commandlist[0][ (rcvBuffer[7] -1) ] else: culprit = commandlist[ (rcvBuffer[7] -1) ] raise LowlevelErrorException("\nThis Command\n %s\nreturned an error:\n %s" % ( culprit, lowlevelErrorToString(rcvBuffer[6]) ) ) results = [] i = 9 return self._buildFeedbackResults(rcvBuffer, commandlist, results, i) def readMem(self, BlockNum, ReadCal=False): """ Name: U6.readMem(BlockNum, ReadCal=False) Args: BlockNum, which block to read ReadCal, set to True to read the calibration data Desc: Reads 1 block (32 bytes) from the non-volatile user or calibration memory. Please read section 5.2.6 of the user's guide before you do something you may regret. >>> myU6 = U6() >>> myU6.readMem(0) [ < userdata stored in block 0 > ] NOTE: Do not call this function while streaming. """ command = [ 0 ] * 8 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x01 command[3] = 0x2A if ReadCal: command[3] = 0x2D #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) command[6] = 0x00 command[7] = BlockNum result = self._writeRead(command, 40, [ 0xF8, 0x11, command[3] ]) return result[8:] def readCal(self, BlockNum): return self.readMem(BlockNum, ReadCal = True) def writeMem(self, BlockNum, Data, WriteCal=False): """ Name: U6.writeMem(BlockNum, Data, WriteCal=False) Args: BlockNum, which block to write Data, a list of bytes to write WriteCal, set to True to write calibration. Desc: Writes 1 block (32 bytes) from the non-volatile user or calibration memory. Please read section 5.2.7 of the user's guide before you do something you may regret. >>> myU6 = U6() >>> myU6.writeMem(0, [ < userdata to be stored in block 0 > ]) NOTE: Do not call this function while streaming. """ if not isinstance(Data, list): raise LabJackException("Data must be a list of bytes") command = [ 0 ] * 40 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x11 command[3] = 0x28 if WriteCal: command[3] = 0x2B #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) command[6] = 0x00 command[7] = BlockNum command[8:] = Data self._writeRead(command, 8, [0xF8, 0x11, command[3]]) def writeCal(self, BlockNum, Data): return self.writeMem(BlockNum, Data, WriteCal = True) def eraseMem(self, EraseCal=False): """ Name: U6.eraseMem(EraseCal=False) Args: EraseCal, set to True to erase the calibration memory. Desc: The U6 uses flash memory that must be erased before writing. Please read section 5.2.8 of the user's guide before you do something you may regret. >>> myU6 = U6() >>> myU6.eraseMem() NOTE: Do not call this function while streaming. 
""" if eraseCal: command = [ 0 ] * 8 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x01 command[3] = 0x2C #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) command[6] = 0x4C command[7] = 0x6C else: command = [ 0 ] * 6 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x00 command[3] = 0x29 #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) self._writeRead(command, 8, [0xF8, 0x01, command[3]]) def eraseCal(self): return self.eraseMem(EraseCal=True) def streamConfig(self, NumChannels = 1, ResolutionIndex = 0, SamplesPerPacket = 25, SettlingFactor = 0, InternalStreamClockFrequency = 0, DivideClockBy256 = False, ScanInterval = 1, ChannelNumbers = [0], ChannelOptions = [0], SampleFrequency = None): """ Name: U6.streamConfig( NumChannels = 1, ResolutionIndex = 0, SamplesPerPacket = 25, SettlingFactor = 0, InternalStreamClockFrequency = 0, DivideClockBy256 = False, ScanInterval = 1, ChannelNumbers = [0], ChannelOptions = [0], SampleFrequency = None ) Args: NumChannels, the number of channels to stream ResolutionIndex, the resolution of the samples SettlingFactor, the settling factor to be used ChannelNumbers, a list of channel numbers to stream ChannelOptions, a list of channel options bytes Set Either: SampleFrequency, the frequency in Hz to sample -- OR -- SamplesPerPacket, how many samples make one packet InternalStreamClockFrequency, 0 = 4 MHz, 1 = 48 MHz DivideClockBy256, True = divide the clock by 256 ScanInterval, clock/ScanInterval = frequency. Desc: Configures streaming on the U6. On a decent machine, you can expect to stream a range of 0.238 Hz to 15 Hz. Without the conversion, you can get up to 55 Hz. """ if NumChannels != len(ChannelNumbers) or NumChannels != len(ChannelOptions): raise LabJackException("NumChannels must match length of ChannelNumbers and ChannelOptions") if len(ChannelNumbers) != len(ChannelOptions): raise LabJackException("len(ChannelNumbers) doesn't match len(ChannelOptions)") if SampleFrequency != None: if SampleFrequency < 1000: if SampleFrequency < 25: SamplesPerPacket = SampleFrequency DivideClockBy256 = True ScanInterval = 15625/SampleFrequency else: DivideClockBy256 = False ScanInterval = 4000000/SampleFrequency # Force Scan Interval into correct range ScanInterval = min( ScanInterval, 65535 ) ScanInterval = int( ScanInterval ) ScanInterval = max( ScanInterval, 1 ) # Same with Samples per packet SamplesPerPacket = max( SamplesPerPacket, 1) SamplesPerPacket = int( SamplesPerPacket ) SamplesPerPacket = min ( SamplesPerPacket, 25) command = [ 0 ] * (14 + NumChannels*2) #command[0] = Checksum8 command[1] = 0xF8 command[2] = NumChannels+4 command[3] = 0x11 #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) command[6] = NumChannels command[7] = ResolutionIndex command[8] = SamplesPerPacket #command[9] = Reserved command[10] = SettlingFactor command[11] = (InternalStreamClockFrequency & 1) << 3 if DivideClockBy256: command[11] |= 1 << 1 t = struct.pack("<H", ScanInterval) command[12] = ord(t[0]) command[13] = ord(t[1]) for i in range(NumChannels): command[14+(i*2)] = ChannelNumbers[i] command[15+(i*2)] = ChannelOptions[i] self._writeRead(command, 8, [0xF8, 0x01, 0x11]) # Set up the variables for future use. 
        self.streamSamplesPerPacket = SamplesPerPacket
        self.streamChannelNumbers = ChannelNumbers
        self.streamChannelOptions = ChannelOptions
        self.streamConfiged = True

        if InternalStreamClockFrequency == 1:
            freq = float(48000000)
        else:
            freq = float(4000000)

        if DivideClockBy256:
            freq /= 256

        freq = freq/ScanInterval

        self.packetsPerRequest = max(1, int(freq/SamplesPerPacket))
        self.packetsPerRequest = min(self.packetsPerRequest, 48)

    def processStreamData(self, result, numBytes = None):
        """
        Name: U6.processStreamData(result, numBytes = None)
        Args: result, the string returned from streamData()
              numBytes, the number of bytes per packet
        Desc: Breaks stream data into individual channels and applies
              calibrations.

        >>> reading = d.streamData(convert = False)
        >>> print processStreamData(reading['result'])
        defaultDict(list, {'AIN0' : [3.123, 3.231, 3.232, ...]})
        """
        if numBytes is None:
            numBytes = 14 + (self.streamSamplesPerPacket * 2)

        returnDict = collections.defaultdict(list)

        j = self.streamPacketOffset
        for packet in self.breakupPackets(result, numBytes):
            for sample in self.samplesFromPacket(packet):
                if j >= len(self.streamChannelNumbers):
                    j = 0

                if self.streamChannelNumbers[j] in (193, 194):
                    value = struct.unpack('<BB', sample )
                elif self.streamChannelNumbers[j] >= 200:
                    value = struct.unpack('<H', sample )[0]
                else:
                    if (self.streamChannelOptions[j] >> 7) == 1:
                        # do signed
                        value = struct.unpack('<H', sample )[0]
                    else:
                        # do unsigned
                        value = struct.unpack('<H', sample )[0]

                    gainIndex = (self.streamChannelOptions[j] >> 4) & 0x3
                    value = self.binaryToCalibratedAnalogVoltage(gainIndex, value, is16Bits=True)

                returnDict["AIN%s" % self.streamChannelNumbers[j]].append(value)

                j += 1

        self.streamPacketOffset = j

        return returnDict

    def watchdog(self, Write = False, ResetOnTimeout = False, SetDIOStateOnTimeout = False, TimeoutPeriod = 60, DIOState = 0, DIONumber = 0):
        """
        Name: U6.watchdog(Write = False, ResetOnTimeout = False,
                          SetDIOStateOnTimeout = False, TimeoutPeriod = 60,
                          DIOState = 0, DIONumber = 0)
        Args: Write, Set to True to write new values to the watchdog.
              ResetOnTimeout, True means reset the device on timeout
              SetDIOStateOnTimeout, True means set the state of a DIO on
                                    timeout
              TimeoutPeriod, Time, in seconds, to wait before timing out.
              DIOState, 1 = High, 0 = Low
              DIONumber, which DIO to set.
        Desc: Controls a firmware based watchdog timer.
""" command = [ 0 ] * 16 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x05 command[3] = 0x09 #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) if Write: command[6] = 1 if ResetOnTimeout: command[7] = (1 << 5) if SetDIOStateOnTimeout: command[7] |= (1 << 4) t = struct.pack("<H", TimeoutPeriod) command[8] = ord(t[0]) command[9] = ord(t[1]) command[10] = ((DIOState & 1 ) << 7) command[10] |= (DIONumber & 0xf) result = self._writeRead(command, 16, [ 0xF8, 0x05, 0x09]) watchdogStatus = {} if result[7] == 0: watchdogStatus['WatchDogEnabled'] = False watchdogStatus['ResetOnTimeout'] = False watchdogStatus['SetDIOStateOnTimeout'] = False else: watchdogStatus['WatchDogEnabled'] = True if (( result[7] >> 5 ) & 1): watchdogStatus['ResetOnTimeout'] = True else: watchdogStatus['ResetOnTimeout'] = False if (( result[7] >> 4 ) & 1): watchdogStatus['SetDIOStateOnTimeout'] = True else: watchdogStatus['SetDIOStateOnTimeout'] = False watchdogStatus['TimeoutPeriod'] = struct.unpack('<H', struct.pack("BB", *result[8:10])) if (( result[10] >> 7 ) & 1): watchdogStatus['DIOState'] = 1 else: watchdogStatus['DIOState'] = 0 watchdogStatus['DIONumber'] = ( result[10] & 15 ) return watchdogStatus SPIModes = { 'A' : 0, 'B' : 1, 'C' : 2, 'D' : 3 } def spi(self, SPIBytes, AutoCS=True, DisableDirConfig = False, SPIMode = 'A', SPIClockFactor = 0, CSPINNum = 0, CLKPinNum = 1, MISOPinNum = 2, MOSIPinNum = 3): """ Name: U6.spi(SPIBytes, AutoCS=True, DisableDirConfig = False, SPIMode = 'A', SPIClockFactor = 0, CSPINNum = 0, CLKPinNum = 1, MISOPinNum = 2, MOSIPinNum = 3) Args: SPIBytes, A list of bytes to send. AutoCS, If True, the CS line is automatically driven low during the SPI communication and brought back high when done. DisableDirConfig, If True, function does not set the direction of the line. SPIMode, 'A', 'B', 'C', or 'D'. SPIClockFactor, Sets the frequency of the SPI clock. CSPINNum, which pin is CS CLKPinNum, which pin is CLK MISOPinNum, which pin is MISO MOSIPinNum, which pin is MOSI Desc: Sends and receives serial data using SPI synchronous communication. See Section 5.2.17 of the user's guide. """ if not isinstance(SPIBytes, list): raise LabJackException("SPIBytes MUST be a list of bytes") numSPIBytes = len(SPIBytes) oddPacket = False if numSPIBytes%2 != 0: SPIBytes.append(0) numSPIBytes = numSPIBytes + 1 oddPacket = True command = [ 0 ] * (13 + numSPIBytes) #command[0] = Checksum8 command[1] = 0xF8 command[2] = 4 + (numSPIBytes/2) command[3] = 0x3A #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) if AutoCS: command[6] |= (1 << 7) if DisableDirConfig: command[6] |= (1 << 6) command[6] |= ( self.SPIModes[SPIMode] & 3 ) command[7] = SPIClockFactor #command[8] = Reserved command[9] = CSPINNum command[10] = CLKPinNum command[11] = MISOPinNum command[12] = MOSIPinNum command[13] = numSPIBytes if oddPacket: command[13] = numSPIBytes - 1 command[14:] = SPIBytes result = self._writeRead(command, 8+numSPIBytes, [ 0xF8, 1+(numSPIBytes/2), 0x3A ]) return { 'NumSPIBytesTransferred' : result[7], 'SPIBytes' : result[8:] } def asynchConfig(self, Update = True, UARTEnable = True, DesiredBaud = None, BaudFactor = 63036): """ Name: U6.asynchConfig(Update = True, UARTEnable = True, DesiredBaud = None, BaudFactor = 63036) Args: Update, If True, new values are written. UARTEnable, If True, UART will be enabled. DesiredBaud, If set, will apply the formualt to calculate BaudFactor. BaudFactor, = 2^16 - 48000000/(2 * Desired Baud). Ignored if DesiredBaud is set. 
        Desc: Configures the U6 UART for asynchronous communication. See
              section 5.2.18 of the User's Guide.
        """
        if UARTEnable:
            self.configIO(EnableUART = True)

        command = [ 0 ] * 10

        #command[0] = Checksum8
        command[1] = 0xF8
        command[2] = 0x02
        command[3] = 0x14
        #command[4] = Checksum16 (LSB)
        #command[5] = Checksum16 (MSB)
        #command[6] = 0x00
        if Update:
            command[7] = (1 << 7)
        if UARTEnable:
            command[7] |= (1 << 6)

        if DesiredBaud != None:
            BaudFactor = (2**16) - 48000000/(2 * DesiredBaud)

        t = struct.pack("<H", BaudFactor)
        command[8] = ord(t[0])
        command[9] = ord(t[1])

        results = self._writeRead(command, 10, [0xF8, 0x02, 0x14])

        if command[8] != results[8] and command[9] != results[9]:
            raise LabJackException("BaudFactor didn't stick.")

    def asynchTX(self, AsynchBytes):
        """
        Name: U6.asynchTX(AsynchBytes)
        Args: AsynchBytes, List of bytes to send
        Desc: Sends bytes to the U6 UART which will be sent asynchronously on
              the transmit line. Section 5.2.19 of the User's Guide.
        """
        numBytes = len(AsynchBytes)

        oddPacket = False
        if numBytes%2 != 0:
            oddPacket = True
            AsynchBytes.append(0)
            numBytes = numBytes + 1

        command = [ 0 ] * (8+numBytes)
        #command[0] = Checksum8
        command[1] = 0xF8
        command[2] = 1 + (numBytes/2)
        command[3] = 0x15
        #command[4] = Checksum16 (LSB)
        #command[5] = Checksum16 (MSB)
        #command[6] = 0x00
        command[7] = numBytes
        if oddPacket:
            command[7] = numBytes-1
        command[8:] = AsynchBytes

        result = self._writeRead(command, 10, [ 0xF8, 0x02, 0x15])

        return { 'NumAsynchBytesSent' : result[7], 'NumAsynchBytesInRXBuffer' : result[8] }

    def asynchRX(self, Flush = False):
        """
        Name: U6.asynchRX(Flush = False)
        Args: Flush, If True, empties the entire 256-byte RX buffer.
        Desc: Reads the oldest 32 bytes from the U6 UART RX buffer.
              Section 5.2.20 of the User's Guide.
        """
        command = [ 0, 0xF8, 0x01, 0x16, 0, 0, 0, int(Flush)]

        result = self._writeRead(command, 40, [ 0xF8, 0x11, 0x16 ])

        return { 'NumAsynchBytesInRXBuffer' : result[7], 'AsynchBytes' : result[8:] }

    def i2c(self, Address, I2CBytes, EnableClockStretching = False, NoStopWhenRestarting = False, ResetAtStart = False, SpeedAdjust = 0, SDAPinNum = 0, SCLPinNum = 1, NumI2CBytesToReceive = 0, AddressByte = None):
        """
        Name: U6.i2c(Address, I2CBytes, EnableClockStretching = False,
                     NoStopWhenRestarting = False, ResetAtStart = False,
                     SpeedAdjust = 0, SDAPinNum = 0, SCLPinNum = 1,
                     NumI2CBytesToReceive = 0, AddressByte = None)
        Args: Address, the address (Not shifted over)
              I2CBytes, a list of bytes to send
              EnableClockStretching, True enables clock stretching
              NoStopWhenRestarting, True means no stop sent when restarting
              ResetAtStart, if True, an I2C bus reset will be done before
                            communicating.
              SpeedAdjust, Allows the communication frequency to be reduced.
              SDAPinNum, Which pin will be data
              SCLPinNum, Which pin is clock
              NumI2CBytesToReceive, Number of I2C bytes to expect back.
              AddressByte, The address as you would put it in the lowlevel
                           packet. Overrides Address. Optional.
        Desc: Sends and receives serial data using I2C synchronous
              communication. Section 5.2.21 of the User's Guide.
""" numBytes = len(I2CBytes) oddPacket = False if numBytes%2 != 0: oddPacket = True I2CBytes.append(0) numBytes = numBytes+1 command = [ 0 ] * (14+numBytes) #command[0] = Checksum8 command[1] = 0xF8 command[2] = 4 + (numBytes/2) command[3] = 0x3B #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) if EnableClockStretching: command[6] |= (1 << 3) if NoStopWhenRestarting: command[6] |= (1 << 2) if ResetAtStart: command[6] |= (1 << 1) command[7] = SpeedAdjust command[8] = SDAPinNum command[9] = SCLPinNum if AddressByte != None: command[10] = AddressByte else: command[10] = Address << 1 #command[11] = Reserved command[12] = numBytes if oddPacket: command[12] = numBytes-1 command[13] = NumI2CBytesToReceive command[14:] = I2CBytes oddResponse = False if NumI2CBytesToReceive%2 != 0: NumI2CBytesToReceive = NumI2CBytesToReceive+1 oddResponse = True result = self._writeRead(command, (12+NumI2CBytesToReceive), [0xF8, (3+(NumI2CBytesToReceive/2)), 0x3B]) if NumI2CBytesToReceive != 0: return { 'AckArray' : result[8:12], 'I2CBytes' : result[12:] } else: return { 'AckArray' : result[8:12] } def sht1x(self, DataPinNum = 0, ClockPinNum = 1, SHTOptions = 0xc0): """ Name: U6.sht1x(DataPinNum = 0, ClockPinNum = 1, SHTOptions = 0xc0) Args: DataPinNum, Which pin is the Data line ClockPinNum, Which line is the Clock line SHTOptions (and proof people read documentation): bit 7 = Read Temperature bit 6 = Read Realtive Humidity bit 2 = Heater. 1 = on, 0 = off bit 1 = Reserved at 0 bit 0 = Resolution. 1 = 8 bit RH, 12 bit T; 0 = 12 RH, 14 bit T Desc: Reads temperature and humidity from a Sensirion SHT1X sensor. Section 5.2.22 of the User's Guide. """ command = [ 0 ] * 10 #command[0] = Checksum8 command[1] = 0xF8 command[2] = 0x02 command[3] = 0x39 #command[4] = Checksum16 (LSB) #command[5] = Checksum16 (MSB) command[6] = DataPinNum command[7] = ClockPinNum #command[8] = Reserved command[9] = SHTOptions result = self._writeRead(command, 16, [ 0xF8, 0x05, 0x39]) val = (result[11]*256) + result[10] temp = -39.60 + 0.01*val val = (result[14]*256) + result[13] humid = -4 + 0.0405*val + -.0000028*(val*val) humid = (temp - 25)*(0.01 + 0.00008*val) + humid return { 'StatusReg' : result[8], 'StatusCRC' : result[9], 'Temperature' : temp, 'TemperatureCRC' : result[12], 'Humidity' : humid, 'HumidityCRC' : result[15] } # --------------------------- Old U6 code ------------------------------- def _readCalDataBlock(self, n): """ Internal routine to read the specified calibration block (0-2) """ sendBuffer = [0] * 8 sendBuffer[1] = 0xF8 # command byte sendBuffer[2] = 0x01 # number of data words sendBuffer[3] = 0x2D # extended command number sendBuffer[6] = 0x00 sendBuffer[7] = n # Blocknum = 0 self.write(sendBuffer) buff = self.read(40) return buff[8:] def getCalibrationData(self): """ Name: getCalibrationData(self) Args: None Desc: Gets the slopes and offsets for AIN and DACs, as well as other calibration data >>> myU6 = U6() >>> myU6.getCalibrationData() >>> myU6.calInfo <ainDiffOffset: -2.46886488446,...> """ if self.debug is True: print "Calibration data retrieval" self.calInfo.nominal = False #reading block 0 from memory rcvBuffer = self._readCalDataBlock(0) # Positive Channel calibration self.calInfo.ain10vSlope = toDouble(rcvBuffer[:8]) self.calInfo.ain10vOffset = toDouble(rcvBuffer[8:16]) self.calInfo.ain1vSlope = toDouble(rcvBuffer[16:24]) self.calInfo.ain1vOffset = toDouble(rcvBuffer[24:]) #reading block 1 from memory rcvBuffer = self._readCalDataBlock(1) self.calInfo.ain100mvSlope = toDouble(rcvBuffer[:8]) 
self.calInfo.ain100mvOffset = toDouble(rcvBuffer[8:16]) self.calInfo.ain10mvSlope = toDouble(rcvBuffer[16:24]) self.calInfo.ain10mvOffset = toDouble(rcvBuffer[24:]) self.calInfo.ainSlope = [self.calInfo.ain10vSlope, self.calInfo.ain1vSlope, self.calInfo.ain100mvSlope, self.calInfo.ain10mvSlope] self.calInfo.ainOffset = [ self.calInfo.ain10vOffset, self.calInfo.ain1vOffset, self.calInfo.ain100mvOffset, self.calInfo.ain10mvOffset ] #reading block 2 from memory rcvBuffer = self._readCalDataBlock(2) # Negative Channel calibration self.calInfo.ain10vNegSlope = toDouble(rcvBuffer[:8]) self.calInfo.ain10vCenter = toDouble(rcvBuffer[8:16]) self.calInfo.ain1vNegSlope = toDouble(rcvBuffer[16:24]) self.calInfo.ain1vCenter = toDouble(rcvBuffer[24:]) #reading block 3 from memory rcvBuffer = self._readCalDataBlock(3) self.calInfo.ain100mvNegSlope = toDouble(rcvBuffer[:8]) self.calInfo.ain100mvCenter = toDouble(rcvBuffer[8:16]) self.calInfo.ain10mvNegSlope = toDouble(rcvBuffer[16:24]) self.calInfo.ain10mvCenter = toDouble(rcvBuffer[24:]) self.calInfo.ainNegSlope = [ self.calInfo.ain10vNegSlope, self.calInfo.ain1vNegSlope, self.calInfo.ain100mvNegSlope, self.calInfo.ain10mvNegSlope ] self.calInfo.ainCenter = [ self.calInfo.ain10vCenter, self.calInfo.ain1vCenter, self.calInfo.ain100mvCenter, self.calInfo.ain10mvCenter ] #reading block 4 from memory rcvBuffer = self._readCalDataBlock(4) # Miscellaneous self.calInfo.dac0Slope = toDouble(rcvBuffer[:8]) self.calInfo.dac0Offset = toDouble(rcvBuffer[8:16]) self.calInfo.dac1Slope = toDouble(rcvBuffer[16:24]) self.calInfo.dac1Offset = toDouble(rcvBuffer[24:]) #reading block 5 from memory rcvBuffer = self._readCalDataBlock(5) self.calInfo.currentOutput0 = toDouble(rcvBuffer[:8]) self.calInfo.currentOutput1 = toDouble(rcvBuffer[8:16]) self.calInfo.temperatureSlope = toDouble(rcvBuffer[16:24]) self.calInfo.temperatureOffset = toDouble(rcvBuffer[24:]) if self.productName == "U6-Pro": # Hi-Res ADC stuff #reading block 6 from memory rcvBuffer = self._readCalDataBlock(6) # Positive Channel calibration self.calInfo.proAin10vSlope = toDouble(rcvBuffer[:8]) self.calInfo.proAin10vOffset = toDouble(rcvBuffer[8:16]) self.calInfo.proAin1vSlope = toDouble(rcvBuffer[16:24]) self.calInfo.proAin1vOffset = toDouble(rcvBuffer[24:]) #reading block 7 from memory rcvBuffer = self._readCalDataBlock(7) self.calInfo.proAin100mvSlope = toDouble(rcvBuffer[:8]) self.calInfo.proAin100mvOffset = toDouble(rcvBuffer[8:16]) self.calInfo.proAin10mvSlope = toDouble(rcvBuffer[16:24]) self.calInfo.proAin10mvOffset = toDouble(rcvBuffer[24:]) self.calInfo.proAinSlope = [self.calInfo.proAin10vSlope, self.calInfo.proAin1vSlope, self.calInfo.proAin100mvSlope, self.calInfo.proAin10mvSlope] self.calInfo.proAinOffset = [ self.calInfo.proAin10vOffset, self.calInfo.proAin1vOffset, self.calInfo.proAin100mvOffset, self.calInfo.proAin10mvOffset ] #reading block 8 from memory rcvBuffer = self._readCalDataBlock(8) # Negative Channel calibration self.calInfo.proAin10vNegSlope = toDouble(rcvBuffer[:8]) self.calInfo.proAin10vCenter = toDouble(rcvBuffer[8:16]) self.calInfo.proAin1vNegSlope = toDouble(rcvBuffer[16:24]) self.calInfo.proAin1vCenter = toDouble(rcvBuffer[24:]) #reading block 9 from memory rcvBuffer = self._readCalDataBlock(9) self.calInfo.proAin100mvNegSlope = toDouble(rcvBuffer[:8]) self.calInfo.proAin100mvCenter = toDouble(rcvBuffer[8:16]) self.calInfo.proAin10mvNegSlope = toDouble(rcvBuffer[16:24]) self.calInfo.proAin10mvCenter = toDouble(rcvBuffer[24:]) self.calInfo.proAinNegSlope = [ 
self.calInfo.proAin10vNegSlope, self.calInfo.proAin1vNegSlope, self.calInfo.proAin100mvNegSlope, self.calInfo.proAin10mvNegSlope ] self.calInfo.proAinCenter = [ self.calInfo.proAin10vCenter, self.calInfo.proAin1vCenter, self.calInfo.proAin100mvCenter, self.calInfo.proAin10mvCenter ] def binaryToCalibratedAnalogVoltage(self, gainIndex, bytesVoltage, is16Bits=False): """ Name: binaryToCalibratedAnalogVoltage(gainIndex, bytesVoltage, is16Bits = False) Args: gainIndex, which gain did you use? bytesVoltage, bytes returned from the U6 is16Bits, set to True if bytesVoltage is 16 bits (not 24) Desc: Converts binary voltage to an analog value. """ if not is16Bits: bits = float(bytesVoltage)/256 else: bits = float(bytesVoltage) center = self.calInfo.ainCenter[gainIndex] negSlope = self.calInfo.ainNegSlope[gainIndex] posSlope = self.calInfo.ainSlope[gainIndex] if self.productName == "U6-Pro": center = self.calInfo.proAinCenter[gainIndex] negSlope = self.calInfo.proAinNegSlope[gainIndex] posSlope = self.calInfo.proAinSlope[gainIndex] if bits < center: return (center - bits) * negSlope else: return (bits - center) * posSlope def binaryToCalibratedAnalogTemperature(self, bytesTemperature): voltage = self.binaryToCalibratedAnalogVoltage(0, bytesTemperature) return self.calInfo.temperatureSlope * float(voltage) + self.calInfo.temperatureOffset def softReset(self): """ Name: softReset Args: none Desc: Send a soft reset. >>> myU6 = U6() >>> myU6.softReset() """ command = [ 0x00, 0x99, 0x01, 0x00 ] command = setChecksum8(command, 4) self.write(command, False, False) results = self.read(4) if results[3] != 0: raise LowlevelErrorException(results[3], "The softReset command returned an error:\n %s" % lowlevelErrorToString(results[3])) def hardReset(self): """ Name: hardReset Args: none Desc: Send a hard reset. >>> myU6 = U6() >>> myU6.hardReset() """ command = [ 0x00, 0x99, 0x02, 0x00 ] command = setChecksum8(command, 4) self.write(command, False, False) results = self.read(4) if results[3] != 0: raise LowlevelErrorException(results[3], "The hardReset command returned an error:\n %s" % lowlevelErrorToString(results[3])) self.close() def setLED(self, state): """ Name: setLED(self, state) Args: state: 1 = On, 0 = Off Desc: Sets the state of the LED. (5.2.5.4 of user's guide) >>> myU6 = U6() >>> myU6.setLED(0) ... (LED turns off) ... """ self.getFeedback(LED(state)) def getTemperature(self): """ Name: getTemperature Args: none Desc: Reads the U6's internal temperature sensor in Kelvin. See Section 2.6.4 of the U6 User's Guide. >>> myU6.getTemperature() 299.87723471224308 """ if self.calInfo.nominal: # Read the actual calibration constants if we haven't already. self.getCalibrationData() result = self.getFeedback(AIN24AR(14)) return self.binaryToCalibratedAnalogTemperature(result[0]['AIN']) def getAIN(self, positiveChannel, resolutionIndex = 0, gainIndex = 0, settlingFactor = 0, differential = False): """ Name: getAIN Args: positiveChannel, resolutionIndex = 0, gainIndex = 0, settlingFactor = 0, differential = False Desc: Reads an AIN and applies the calibration constants to it. >>> myU6.getAIN(14) 299.87723471224308 """ result = self.getFeedback(AIN24AR(positiveChannel, resolutionIndex, gainIndex, settlingFactor, differential)) return self.binaryToCalibratedAnalogVoltage(result[0]['GainIndex'], result[0]['AIN']) def readDefaultsConfig(self): """ Name: U6.readDefaultsConfig( ) Args: None Desc: Reads the power-up defaults stored in flash.
""" results = dict() defaults = self.readDefaults(0) results['FIODirection'] = defaults[4] results['FIOState'] = defaults[5] results['EIODirection'] = defaults[8] results['EIOState'] = defaults[9] results['CIODirection'] = defaults[12] results['CIOState'] = defaults[13] results['ConfigWriteMask'] = defaults[16] results['NumOfTimersEnable'] = defaults[17] results['CounterMask'] = defaults[18] results['PinOffset'] = defaults[19] defaults = self.readDefaults(1) results['ClockSource'] = defaults[0] results['Divisor'] = defaults[1] results['TMR0Mode'] = defaults[16] results['TMR0ValueL'] = defaults[17] results['TMR0ValueH'] = defaults[18] results['TMR1Mode'] = defaults[20] results['TMR1ValueL'] = defaults[21] results['TMR1ValueH'] = defaults[22] results['TMR2Mode'] = defaults[24] results['TMR2ValueL'] = defaults[25] results['TMR2ValueH'] = defaults[26] results['TMR3Mode'] = defaults[28] results['TMR3ValueL'] = defaults[29] results['TMR3ValueH'] = defaults[30] defaults = self.readDefaults(2) results['DAC0'] = struct.unpack( ">H", struct.pack("BB", *defaults[16:18]) )[0] results['DAC1'] = struct.unpack( ">H", struct.pack("BB", *defaults[20:22]) )[0] defaults = self.readDefaults(3) for i in range(14): results["AIN%sGainRes" % i] = defaults[i] results["AIN%sOptions" % i] = defaults[i+16] return results def exportConfig(self): """ Name: U6.exportConfig( ) Args: None Desc: Takes a configuration and puts it into a ConfigParser object. """ # Make a new configuration file parser = ConfigParser.SafeConfigParser() # Change optionxform so that options preserve their case. parser.optionxform = str # Local Id and name section = "Identifiers" parser.add_section(section) parser.set(section, "Local ID", str(self.localId)) parser.set(section, "Name", str(self.getName())) parser.set(section, "Device Type", str(self.devType)) # FIO Direction / State section = "FIOs" parser.add_section(section) dirs, states = self.getFeedback( PortDirRead(), PortStateRead() ) for key, value in dirs.items(): parser.set(section, "%s Directions" % key, str(value)) for key, value in states.items(): parser.set(section, "%s States" % key, str(value)) # DACs section = "DACs" parser.add_section(section) dac0 = self.readRegister(5000) dac0 = max(dac0, 0) dac0 = min(dac0, 5) parser.set(section, "DAC0", "%0.2f" % dac0) dac1 = self.readRegister(5002) dac1 = max(dac1, 0) dac1 = min(dac1, 5) parser.set(section, "DAC1", "%0.2f" % dac1) # Timer Clock Configuration section = "Timer Clock Speed Configuration" parser.add_section(section) timerclockconfig = self.configTimerClock() for key, value in timerclockconfig.items(): parser.set(section, key, str(value)) # Timers / Counters section = "Timers And Counters" parser.add_section(section) ioconfig = self.configIO() for key, value in ioconfig.items(): parser.set(section, key, str(value)) for i in range(ioconfig['NumberTimersEnabled']): mode, value = self.readRegister(7100 + (2 * i), numReg = 2, format = ">HH") parser.set(section, "Timer%s Mode" % i, str(mode)) parser.set(section, "Timer%s Value" % i, str(value)) return parser def loadConfig(self, configParserObj): """ Name: U6.loadConfig( configParserObj ) Args: configParserObj, A Config Parser object to load in Desc: Takes a configuration and updates the U6 to match it. 
""" parser = configParserObj # Set Identifiers: section = "Identifiers" if parser.has_section(section): if parser.has_option(section, "device type"): if parser.getint(section, "device type") != self.devType: raise Exception("Not a U6 Config file.") if parser.has_option(section, "local id"): self.configU6( LocalID = parser.getint(section, "local id")) if parser.has_option(section, "name"): self.setName( parser.get(section, "name") ) # Set FIOs: section = "FIOs" if parser.has_section(section): fiodirs = 0 eiodirs = 0 ciodirs = 0 fiostates = 0 eiostates = 0 ciostates = 0 if parser.has_option(section, "fios directions"): fiodirs = parser.getint(section, "fios directions") if parser.has_option(section, "eios directions"): eiodirs = parser.getint(section, "eios directions") if parser.has_option(section, "cios directions"): ciodirs = parser.getint(section, "cios directions") if parser.has_option(section, "fios states"): fiostates = parser.getint(section, "fios states") if parser.has_option(section, "eios states"): eiostates = parser.getint(section, "eios states") if parser.has_option(section, "cios states"): ciostates = parser.getint(section, "cios states") self.getFeedback( PortStateWrite([fiostates, eiostates, ciostates]), PortDirWrite([fiodirs, eiodirs, ciodirs]) ) # Set DACs: section = "DACs" if parser.has_section(section): if parser.has_option(section, "dac0"): self.writeRegister(5000, parser.getfloat(section, "dac0")) if parser.has_option(section, "dac1"): self.writeRegister(5002, parser.getfloat(section, "dac1")) # Set Timer Clock Configuration section = "Timer Clock Speed Configuration" if parser.has_section(section): if parser.has_option(section, "timerclockbase") and parser.has_option(section, "timerclockdivisor"): self.configTimerClock(TimerClockBase = parser.getint(section, "timerclockbase"), TimerClockDivisor = parser.getint(section, "timerclockdivisor")) # Set Timers / Counters section = "Timers And Counters" if parser.has_section(section): nte = None c0e = None c1e = None cpo = None if parser.has_option(section, "NumberTimersEnabled"): nte = parser.getint(section, "NumberTimersEnabled") if parser.has_option(section, "TimerCounterPinOffset"): cpo = parser.getint(section, "TimerCounterPinOffset") if parser.has_option(section, "Counter0Enabled"): c0e = parser.getboolean(section, "Counter0Enabled") if parser.has_option(section, "Counter1Enabled"): c1e = parser.getboolean(section, "Counter1Enabled") self.configIO(NumberTimersEnabled = nte, EnableCounter1 = c1e, EnableCounter0 = c0e, TimerCounterPinOffset = cpo) mode = None value = None for i in range(4): if parser.has_option(section, "timer%i mode" % i): mode = parser.getint(section, "timer%i mode" % i) if parser.has_option(section, "timer%i value" % i): value = parser.getint(section, "timer%i value" % i) self.getFeedback( TimerConfig(i, mode, value) ) class FeedbackCommand(object): ''' The base FeedbackCommand class Used to make Feedback easy. Make a list of these and call getFeedback. ''' readLen = 0 def handle(self, input): return None validChannels = range(144) class AIN(FeedbackCommand): ''' Analog Input Feedback command AIN(PositiveChannel) PositiveChannel : the positive channel to use NOTE: This function kept for compatibility. Please use the new AIN24 and AIN24AR. 
returns 16-bit unsigned int sample >>> d.getFeedback( u6.AIN( PositiveChannel ) ) [ 19238 ] ''' def __init__(self, PositiveChannel): if PositiveChannel not in validChannels: raise LabJackException("Invalid Positive Channel specified") self.positiveChannel = PositiveChannel self.cmdBytes = [ 0x01, PositiveChannel, 0 ] readLen = 2 def __repr__(self): return "<u6.AIN( PositiveChannel = %s )>" % self.positiveChannel def handle(self, input): result = (input[1] << 8) + input[0] return result class AIN24(FeedbackCommand): ''' Analog Input 24-bit Feedback command ainCommand = AIN24(PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False) See section 5.2.5.2 of the user's guide. NOTE: If you use a gain index of 15 (autorange), you should be using the AIN24AR command instead. positiveChannel : The positive channel to use resolutionIndex : 0=default, 1-8 for high-speed ADC, 9-12 for high-res ADC on U6-Pro. gainIndex : 0=x1, 1=x10, 2=x100, 3=x1000, 15=autorange settlingFactor : 0=5us, 1=10us, 2=100us, 3=1ms, 4=10ms differential : If this bit is set, a differential reading is done where the negative channel is positiveChannel+1 returns 24-bit unsigned int sample >>> d.getFeedback( u6.AIN24(PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False ) ) [ 193847 ] ''' def __init__(self, PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False): if PositiveChannel not in validChannels: raise LabJackException("Invalid Positive Channel specified") self.positiveChannel = PositiveChannel self.resolutionIndex = ResolutionIndex self.gainIndex = GainIndex self.settlingFactor = SettlingFactor self.differential = Differential byte2 = ( ResolutionIndex & 0xf ) byte2 = ( ( GainIndex & 0xf ) << 4 ) + byte2 byte3 = (int(Differential) << 7) + SettlingFactor self.cmdBytes = [ 0x02, PositiveChannel, byte2, byte3 ] def __repr__(self): return "<u6.AIN24( PositiveChannel = %s, ResolutionIndex = %s, GainIndex = %s, SettlingFactor = %s, Differential = %s )>" % (self.positiveChannel, self.resolutionIndex, self.gainIndex, self.settlingFactor, self.differential) readLen = 3 def handle(self, input): #Put it all into an integer. result = (input[2] << 16 ) + (input[1] << 8 ) + input[0] return result class AIN24AR(FeedbackCommand): ''' Autorange Analog Input 24-bit Feedback command ainARCommand = AIN24AR(0, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False) See section 5.2.5.3 of the user's guide PositiveChannel : The positive channel to use ResolutionIndex : 0=default, 1-8 for high-speed ADC, 9-13 for high-res ADC on U6-Pro. 
GainIndex : 0=x1, 1=x10, 2=x100, 3=x1000, 15=autorange SettlingFactor : 0=5us, 1=10us, 2=100us, 3=1ms, 4=10ms Differential : If this bit is set, a differential reading is done where the negative channel is positiveChannel+1 returns a dictionary: { 'AIN' : < 24-bit binary reading >, 'ResolutionIndex' : < actual resolution setting used for the reading >, 'GainIndex' : < actual gain used for the reading >, 'Status' : < reserved for future use > } >>> d.getFeedback( u6.AIN24AR( PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False ) ) { 'AIN' : 193847, 'ResolutionIndex' : 0, 'GainIndex' : 0, 'Status' : 0 } ''' def __init__(self, PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False): if PositiveChannel not in validChannels: raise LabJackException("Invalid Positive Channel specified") self.positiveChannel = PositiveChannel self.resolutionIndex = ResolutionIndex self.gainIndex = GainIndex self.settlingFactor = SettlingFactor self.differential = Differential byte2 = ( ResolutionIndex & 0xf ) byte2 = ( ( GainIndex & 0xf ) << 4 ) + byte2 byte3 = (int(Differential) << 7) + SettlingFactor self.cmdBytes = [ 0x03, PositiveChannel, byte2, byte3 ] def __repr__(self): return "<u6.AIN24AR( PositiveChannel = %s, ResolutionIndex = %s, GainIndex = %s, SettlingFactor = %s, Differential = %s )>" % (self.positiveChannel, self.resolutionIndex, self.gainIndex, self.settlingFactor, self.differential) readLen = 5 def handle(self, input): #Put it all into an integer. result = (input[2] << 16 ) + (input[1] << 8 ) + input[0] resolutionIndex = input[3] & 0xf gainIndex = ( input[3] >> 4 ) & 0xf status = input[4] return { 'AIN' : result, 'ResolutionIndex' : resolutionIndex, 'GainIndex' : gainIndex, 'Status' : status } class WaitShort(FeedbackCommand): ''' WaitShort Feedback command specify the number of 128us time increments to wait >>> d.getFeedback( u6.WaitShort( Time ) ) [ None ] ''' def __init__(self, Time): self.time = Time % 256 self.cmdBytes = [ 5, Time % 256 ] def __repr__(self): return "<u6.WaitShort( Time = %s )>" % self.time class WaitLong(FeedbackCommand): ''' WaitLong Feedback command specify the number of 32ms time increments to wait >>> d.getFeedback( u6.WaitLong( Time ) ) [ None ] ''' def __init__(self, Time): self.time = Time self.cmdBytes = [ 6, Time % 256 ] def __repr__(self): return "<u6.WaitLong( Time = %s )>" % self.time class LED(FeedbackCommand): ''' LED Toggle specify whether the LED should be on or off by truth value 1 or True = On, 0 or False = Off >>> d.getFeedback( u6.LED( State ) ) [ None ] ''' def __init__(self, State): self.state = State self.cmdBytes = [ 9, int(bool(State)) ] def __repr__(self): return "<u6.LED( State = %s )>" % self.state class BitStateRead(FeedbackCommand): ''' BitStateRead Feedback command read the state of a single bit of digital I/O. Only digital lines return valid readings. IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO return 0 or 1 >>> d.getFeedback( u6.BitStateRead( IONumber ) ) [ 1 ] ''' def __init__(self, IONumber): self.ioNumber = IONumber self.cmdBytes = [ 10, IONumber % 20 ] def __repr__(self): return "<u6.BitStateRead( IONumber = %s )>" % self.ioNumber readLen = 1 def handle(self, input): return int(bool(input[0])) class BitStateWrite(FeedbackCommand): ''' BitStateWrite Feedback command write a single bit of digital I/O. The direction of the specified line is forced to output.
IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO State: 0 or 1 >>> d.getFeedback( u6.BitStateWrite( IONumber, State ) ) [ None ] ''' def __init__(self, IONumber, State): self.ioNumber = IONumber self.state = State self.cmdBytes = [ 11, (IONumber % 20) + (int(bool(State)) << 7) ] def __repr__(self): return "<u6.BitStateWrite( IONumber = %s, State = %s )>" % (self.ioNumber, self.state) class BitDirRead(FeedbackCommand): ''' Read the digital direction of one I/O IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO returns 1 = Output, 0 = Input >>> d.getFeedback( u6.BitDirRead( IONumber ) ) [ 1 ] ''' def __init__(self, IONumber): self.ioNumber = IONumber self.cmdBytes = [ 12, IONumber % 20 ] def __repr__(self): return "<u6.BitDirRead( IONumber = %s )>" % self.ioNumber readLen = 1 def handle(self, input): return int(bool(input[0])) class BitDirWrite(FeedbackCommand): ''' BitDirWrite Feedback command Set the digital direction of one I/O IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO Direction: 1 = Output, 0 = Input >>> d.getFeedback( u6.BitDirWrite( IONumber, Direction ) ) [ None ] ''' def __init__(self, IONumber, Direction): self.ioNumber = IONumber self.direction = Direction self.cmdBytes = [ 13, (IONumber % 20) + (int(bool(Direction)) << 7) ] def __repr__(self): return "<u6.BitDirWrite( IONumber = %s, Direction = %s )>" % (self.ioNumber, self.direction) class PortStateRead(FeedbackCommand): """ PortStateRead Feedback command Reads the state of all digital I/O. >>> d.getFeedback( u6.PortStateRead() ) [ { 'FIO' : 10, 'EIO' : 0, 'CIO' : 0 } ] """ def __init__(self): self.cmdBytes = [ 26 ] def __repr__(self): return "<u6.PortStateRead()>" readLen = 3 def handle(self, input): return {'FIO' : input[0], 'EIO' : input[1], 'CIO' : input[2] } class PortStateWrite(FeedbackCommand): """ PortStateWrite Feedback command State: A list of 3 bytes representing FIO, EIO, CIO WriteMask: A list of 3 bytes, representing which to update. The Default is all ones. >>> d.getFeedback( u6.PortStateWrite( State, WriteMask = [ 0xff, 0xff, 0xff] ) ) [ None ] """ def __init__(self, State, WriteMask = [ 0xff, 0xff, 0xff]): self.state = State self.writeMask = WriteMask self.cmdBytes = [ 27 ] + WriteMask + State def __repr__(self): return "<u6.PortStateWrite( State = %s, WriteMask = %s )>" % (self.state, self.writeMask) class PortDirRead(FeedbackCommand): """ PortDirRead Feedback command Reads the direction of all digital I/O. >>> d.getFeedback( u6.PortDirRead() ) [ { 'FIO' : 10, 'EIO' : 0, 'CIO' : 0 } ] """ def __init__(self): self.cmdBytes = [ 28 ] def __repr__(self): return "<u6.PortDirRead()>" readLen = 3 def handle(self, input): return {'FIO' : input[0], 'EIO' : input[1], 'CIO' : input[2] } class PortDirWrite(FeedbackCommand): """ PortDirWrite Feedback command Direction: A list of 3 bytes representing FIO, EIO, CIO WriteMask: A list of 3 bytes, representing which to update. Default is all ones.
>>> d.getFeedback( u6.PortDirWrite( Direction, WriteMask = [ 0xff, 0xff, 0xff] ) ) [ None ] """ def __init__(self, Direction, WriteMask = [ 0xff, 0xff, 0xff]): self.direction = Direction self.writeMask = WriteMask self.cmdBytes = [ 29 ] + WriteMask + Direction def __repr__(self): return "<u6.PortDirWrite( Direction = %s, WriteMask = %s )>" % (self.direction, self.writeMask) class DAC8(FeedbackCommand): ''' 8-bit DAC Feedback command Controls a single analog output Dac: 0 or 1 Value: 0-255 >>> d.getFeedback( u6.DAC8( Dac, Value ) ) [ None ] ''' def __init__(self, Dac, Value): self.dac = Dac self.value = Value % 256 self.cmdBytes = [ 34 + (Dac % 2), Value % 256 ] def __repr__(self): return "<u6.DAC8( Dac = %s, Value = %s )>" % (self.dac, self.value) class DAC0_8(DAC8): """ 8-bit DAC Feedback command for DAC0 Controls DAC0 in 8-bit mode. Value: 0-255 >>> d.getFeedback( u6.DAC0_8( Value ) ) [ None ] """ def __init__(self, Value): DAC8.__init__(self, 0, Value) def __repr__(self): return "<u6.DAC0_8( Value = %s )>" % self.value class DAC1_8(DAC8): """ 8-bit DAC Feedback command for DAC1 Controls DAC1 in 8-bit mode. Value: 0-255 >>> d.getFeedback( u6.DAC1_8( Value ) ) [ None ] """ def __init__(self, Value): DAC8.__init__(self, 1, Value) def __repr__(self): return "<u6.DAC1_8( Value = %s )>" % self.value class DAC16(FeedbackCommand): ''' 16-bit DAC Feedback command Controls a single analog output Dac: 0 or 1 Value: 0-65535 >>> d.getFeedback( u6.DAC16( Dac, Value ) ) [ None ] ''' def __init__(self, Dac, Value): self.dac = Dac self.value = Value self.cmdBytes = [ 38 + (Dac % 2), Value % 256, Value >> 8 ] def __repr__(self): return "<u6.DAC16( Dac = %s, Value = %s )>" % (self.dac, self.value) class DAC0_16(DAC16): """ 16-bit DAC Feedback command for DAC0 Controls DAC0 in 16-bit mode. Value: 0-65535 >>> d.getFeedback( u6.DAC0_16( Value ) ) [ None ] """ def __init__(self, Value): DAC16.__init__(self, 0, Value) def __repr__(self): return "<u6.DAC0_16( Value = %s )>" % self.value class DAC1_16(DAC16): """ 16-bit DAC Feedback command for DAC1 Controls DAC1 in 16-bit mode. Value: 0-65535 >>> d.getFeedback( u6.DAC1_16( Value ) ) [ None ] """ def __init__(self, Value): DAC16.__init__(self, 1, Value) def __repr__(self): return "<u6.DAC1_16( Value = %s )>" % self.value class Timer(FeedbackCommand): """ For reading the value of the Timer. It provides the ability to update/reset a given timer, and read the timer value. ( Section 5.2.5.17 of the User's Guide) timer: Either 0 or 1 for timer0 or timer1 UpdateReset: Set True if you want to update the value Value: Only updated if the UpdateReset bit is 1. The meaning of this parameter varies with the timer mode. Mode: Set to the timer mode to handle any special processing. See classes QuadratureInputTimer and TimerStopInput1. Returns an unsigned integer of the timer value, unless Mode has been specified and there are special return values. See Section 2.9.1 for expected return values. >>> d.getFeedback( u6.Timer( timer, UpdateReset = False, Value = 0 \ ...
, Mode = None ) ) [ 12314 ] """ def __init__(self, timer, UpdateReset = False, Value=0, Mode = None): if timer != 0 and timer != 1: raise LabJackException("Timer should be either 0 or 1.") if UpdateReset and Value == None: raise LabJackException("UpdateReset set but no value.") self.timer = timer self.updateReset = UpdateReset self.value = Value self.mode = Mode self.cmdBytes = [ (42 + (2*timer)), UpdateReset, Value % 256, Value >> 8 ] readLen = 4 def __repr__(self): return "<u6.Timer( timer = %s, UpdateReset = %s, Value = %s, Mode = %s )>" % (self.timer, self.updateReset, self.value, self.mode) def handle(self, input): inStr = struct.pack('B' * len(input), *input) if self.mode == 8: return struct.unpack('<i', inStr )[0] elif self.mode == 9: maxCount, current = struct.unpack('<HH', inStr ) return current, maxCount else: return struct.unpack('<I', inStr )[0] class Timer0(Timer): """ For reading the value of the Timer0. It provides the ability to update/reset Timer0, and read the timer value. ( Section 5.2.5.17 of the User's Guide) UpdateReset: Set True if you want to update the value Value: Only updated if the UpdateReset bit is 1. The meaning of this parameter varies with the timer mode. Mode: Set to the timer mode to handle any special processing. See classes QuadratureInputTimer and TimerStopInput1. >>> d.getFeedback( u6.Timer0( UpdateReset = False, Value = 0, \ ... Mode = None ) ) [ 12314 ] """ def __init__(self, UpdateReset = False, Value = 0, Mode = None): Timer.__init__(self, 0, UpdateReset, Value, Mode) def __repr__(self): return "<u6.Timer0( UpdateReset = %s, Value = %s, Mode = %s )>" % (self.updateReset, self.value, self.mode) class Timer1(Timer): """ For reading the value of the Timer1. It provides the ability to update/reset Timer1, and read the timer value. ( Section 5.2.5.17 of the User's Guide) UpdateReset: Set True if you want to update the value Value: Only updated if the UpdateReset bit is 1. The meaning of this parameter varies with the timer mode. Mode: Set to the timer mode to handle any special processing. See classes QuadratureInputTimer and TimerStopInput1. >>> d.getFeedback( u6.Timer1( UpdateReset = False, Value = 0, \ ... Mode = None ) ) [ 12314 ] """ def __init__(self, UpdateReset = False, Value = 0, Mode = None): Timer.__init__(self, 1, UpdateReset, Value, Mode) def __repr__(self): return "<u6.Timer1( UpdateReset = %s, Value = %s, Mode = %s )>" % (self.updateReset, self.value, self.mode) class QuadratureInputTimer(Timer): """ For reading Quadrature input timers. They are special because their values are signed. ( Section 2.9.1.8 of the User's Guide) Args: UpdateReset: Set True if you want to reset the counter. Value: Set to 0, and UpdateReset to True to reset the counter. Returns a signed integer. >>> # Setup the two timers to be quadrature >>> d.getFeedback( u6.Timer0Config( 8 ), u6.Timer1Config( 8 ) ) [None, None] >>> # Read the value >>> d.getFeedback( u6.QuadratureInputTimer() ) [-21] """ def __init__(self, UpdateReset = False, Value = 0): Timer.__init__(self, 0, UpdateReset, Value, Mode = 8) def __repr__(self): return "<u6.QuadratureInputTimer( UpdateReset = %s, Value = %s )>" % (self.updateReset, self.value) class TimerStopInput1(Timer1): """ For reading a stop input timer. They are special because the value returns the current edge count and the stop value. ( Section 2.9.1.9 of the User's Guide) Args: UpdateReset: Set True if you want to update the value. Value: The stop value. Only updated if the UpdateReset bit is 1. 
Returns a tuple where the first value is current edge count, and the second value is the stop value. >>> # Setup the timer to be Stop Input >>> d.getFeedback( u6.Timer0Config( 9, Value = 30 ) ) [None] >>> # Read the timer >>> d.getFeedback( u6.TimerStopInput1() ) [(0, 30)] """ def __init__(self, UpdateReset = False, Value = 0): Timer.__init__(self, 1, UpdateReset, Value, Mode = 9) def __repr__(self): return "<u6.TimerStopInput1( UpdateReset = %s, Value = %s )>" % (self.updateReset, self.value) class TimerConfig(FeedbackCommand): """ This IOType configures a particular timer. timer = # of the timer to configure TimerMode = See Section 2.9 for more information about the available modes. Value = The meaning of this parameter varies with the timer mode. >>> d.getFeedback( u6.TimerConfig( timer, TimerMode, Value = 0 ) ) [ None ] """ def __init__(self, timer, TimerMode, Value=0): '''Creates command bytes for configuring a Timer''' #Conditions come from pages 33-34 of user's guide if timer not in range(4): raise LabJackException("Timer should be 0-3.") if TimerMode > 13 or TimerMode < 0: raise LabJackException("Invalid Timer Mode.") self.timer = timer self.timerMode = TimerMode self.value = Value self.cmdBytes = [43 + (timer * 2), TimerMode, Value % 256, Value >> 8] def __repr__(self): return "<u6.TimerConfig( timer = %s, TimerMode = %s, Value = %s )>" % (self.timer, self.timerMode, self.value) class Timer0Config(TimerConfig): """ This IOType configures Timer0. TimerMode = See Section 2.9 for more information about the available modes. Value = The meaning of this parameter varies with the timer mode. >>> d.getFeedback( u6.Timer0Config( TimerMode, Value = 0 ) ) [ None ] """ def __init__(self, TimerMode, Value = 0): TimerConfig.__init__(self, 0, TimerMode, Value) def __repr__(self): return "<u6.Timer0Config( TimerMode = %s, Value = %s )>" % (self.timerMode, self.value) class Timer1Config(TimerConfig): """ This IOType configures Timer1. TimerMode = See Section 2.9 for more information about the available modes. Value = The meaning of this parameter varies with the timer mode. >>> d.getFeedback( u6.Timer1Config( TimerMode, Value = 0 ) ) [ None ] """ def __init__(self, TimerMode, Value = 0): TimerConfig.__init__(self, 1, TimerMode, Value) def __repr__(self): return "<u6.Timer1Config( TimerMode = %s, Value = %s )>" % (self.timerMode, self.value) class Counter(FeedbackCommand): ''' Counter Feedback command Reads a hardware counter, optionally resetting it counter: 0 or 1 Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset Returns the current count from the counter if enabled. If reset, this is the value before the reset. >>> d.getFeedback( u6.Counter( counter, Reset = False ) ) [ 2183 ] ''' def __init__(self, counter, Reset): self.counter = counter self.reset = Reset self.cmdBytes = [ 54 + (counter % 2), int(bool(Reset))] def __repr__(self): return "<u6.Counter( counter = %s, Reset = %s )>" % (self.counter, self.reset) readLen = 4 def handle(self, input): inStr = ''.join([chr(x) for x in input]) return struct.unpack('<I', inStr )[0] class Counter0(Counter): ''' Counter0 Feedback command Reads hardware counter0, optionally resetting it Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset Returns the current count from the counter if enabled. If reset, this is the value before the reset.
>>> d.getFeedback( u6.Counter0( Reset = False ) ) [ 2183 ] ''' def __init__(self, Reset = False): Counter.__init__(self, 0, Reset) def __repr__(self): return "<u6.Counter0( Reset = %s )>" % self.reset class Counter1(Counter): ''' Counter1 Feedback command Reads hardware counter1, optionally resetting it Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset Returns the current count from the counter if enabled. If reset, this is the value before the reset. >>> d.getFeedback( u6.Counter1( Reset = False ) ) [ 2183 ] ''' def __init__(self, Reset = False): Counter.__init__(self, 1, Reset) def __repr__(self): return "<u6.Counter1( Reset = %s )>" % self.reset class DSP(FeedbackCommand): ''' DSP Feedback command Acquires 1000 samples from the specified AIN at 50us intervals and performs the specified analysis on the acquired data. AcquireNewData: True, acquire new data; False, operate on existing data DSPAnalysis: 1, True RMS; 2, DC Offset; 3, Peak To Peak; 4, Period (ms) PLine: Positive Channel Gain: The gain you would like to use Resolution: The resolution index to use SettlingFactor: The SettlingFactor to use Differential: True, do differential readings; False, single-ended readings See section 5.2.5.20 of the U6 User's Guide (http://labjack.com/support/u6/users-guide/5.2.5.20) >>> d.getFeedback( u6.DSP( PLine, Resolution = 0, Gain = 0, SettlingFactor = 0, Differential = False, DSPAnalysis = 1, AcquireNewData = True) ) [ 2183 ] ''' def __init__(self, PLine, Resolution = 0, Gain = 0, SettlingFactor = 0, Differential = False, DSPAnalysis = 1, AcquireNewData = True): self.pline = PLine self.resolution = Resolution self.gain = Gain self.settlingFactor = SettlingFactor self.differential = Differential self.dspAnalysis = DSPAnalysis self.acquireNewData = AcquireNewData byte1 = DSPAnalysis + ( int(AcquireNewData) << 7 ) byte4 = ( Gain << 4 ) + Resolution byte5 = ( int(Differential) << 7 ) + SettlingFactor self.cmdBytes = [ 62, byte1, PLine, 0, byte4, byte5, 0, 0 ] def __repr__(self): return "<u6.DSP( PLine = %s, Resolution = %s, Gain = %s, SettlingFactor = %s, Differential = %s, DSPAnalysis = %s, AcquireNewData = %s )>" % (self.pline, self.resolution, self.gain, self.settlingFactor, self.differential, self.dspAnalysis, self.acquireNewData) readLen = 4 def handle(self, input): inStr = ''.join([chr(x) for x in input]) return struct.unpack('<I', inStr )[0]
bsd-2-clause
-7,835,564,006,306,339,000
35.359722
305
0.569308
false
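A minimal usage sketch for the UART helpers in the U6 driver above, assuming the LabJack `u6` Python package is installed and a U6 is attached over USB; the baud rate, payload bytes, and slice length are illustrative, not part of the original file:

import u6

d = u6.U6()
d.getCalibrationData()            # load the real calibration constants first
d.asynchConfig(DesiredBaud=9600)  # assumes Update/UARTEnable default to True
d.asynchTX([0x01, 0x02, 0x03])    # queue three bytes on the transmit line
rx = d.asynchRX()                 # read whatever arrived on the receive line
print(rx['NumAsynchBytesInRXBuffer'], rx['AsynchBytes'][:4])
d.close()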
vivekpabani/projecteuler
python/058/problem_058.py
1
1812
#!/usr/bin/env python # coding=utf-8 """ Problem Definition : Starting with 1 and spiralling anticlockwise in the following way, a square spiral with side length 7 is formed. 37 36 35 34 33 32 31 38 17 16 15 14 13 30 39 18 5 4 3 12 29 40 19 6 1 2 11 28 41 20 7 8 9 10 27 42 21 22 23 24 25 26 43 44 45 46 47 48 49 It is interesting to note that the odd squares lie along the bottom right diagonal, but what is more interesting is that 8 out of the 13 numbers lying along both diagonals are prime; that is, a ratio of 8/13 ≈ 62%. If one complete new layer is wrapped around the spiral above, a square spiral with side length 9 will be formed. If this process is continued, what is the side length of the square spiral for which the ratio of primes along both diagonals first falls below 10%? """ __author__ = 'vivek' import time import math def is_prime(number): if number < 0: return 0 elif number == 2 or number == 3: return 1 elif number % 2 == 0 or number % 3 == 0 or number == 1: return 0 else: start = 5 while start <= int(math.sqrt(number)): if number % start == 0: return 0 if number % (start+2) == 0: return 0 start += 6 return 1 def main(): start_time = time.clock() num = 1 step = 2 dia = 1 prime_count = 0 while True: for i in xrange(4): num += step if is_prime(num): prime_count += 1 dia += 4 if prime_count*1.0/dia < 0.1: break step += 2 # At this point the spiral's side length is step + 1 (num is its square). print(num, dia, prime_count) print "Run time...{} secs \n".format(round(time.clock() - start_time, 4)) if __name__ == "__main__": main()
apache-2.0
-7,594,606,990,625,697,000
23.459459
261
0.577348
false
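The `num += step` walk in `main()` above visits the four corner values of each new spiral ring. A small self-check sketch against the closed form for a ring of odd side length s, whose corners are s*s - i*(s - 1) for i = 0..3 (names here are illustrative; Python 3 syntax):

def corners(s):
    # The bottom-right corner is the odd square s*s; the other three
    # corners each sit (s - 1) steps further back along the ring.
    return [s * s - i * (s - 1) for i in range(4)]

num, step = 1, 2
for s in range(3, 12, 2):  # rings with side length 3, 5, 7, 9, 11
    ring = []
    for _ in range(4):
        num += step
        ring.append(num)
    step += 2
    assert sorted(ring) == sorted(corners(s))
print('incremental walk matches the closed form')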
e-gob/plataforma-kioscos-autoatencion
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/action/ce_config.py
89
4192
# # Copyright 2015 Peter Sprygada <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import re import time import glob from ansible.plugins.action.ce import ActionModule as _ActionModule from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.parse import urlsplit from ansible.utils.vars import merge_hash PRIVATE_KEYS_RE = re.compile('__.+__') class ActionModule(_ActionModule): def run(self, tmp=None, task_vars=None): if self._task.args.get('src'): try: self._handle_template() except ValueError as exc: return dict(failed=True, msg=exc.message) result = super(ActionModule, self).run(tmp, task_vars) if self._task.args.get('backup') and result.get('__backup__'): # User requested backup and no error occurred in module. # NOTE: If there is a parameter error, _backup key may not be in results. filepath = self._write_backup(task_vars['inventory_hostname'], result['__backup__']) result['backup_path'] = filepath # strip out any keys that have two leading and two trailing # underscore characters for key in result.keys(): if PRIVATE_KEYS_RE.match(key): del result[key] return result def _get_working_path(self): cwd = self._loader.get_basedir() if self._task._role is not None: cwd = self._task._role._role_path return cwd def _write_backup(self, host, contents): backup_path = self._get_working_path() + '/backup' if not os.path.exists(backup_path): os.mkdir(backup_path) for fn in glob.glob('%s/%s*' % (backup_path, host)): os.remove(fn) tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time())) filename = '%s/%s_config.%s' % (backup_path, host, tstamp) open(filename, 'w').write(contents) return filename def _handle_template(self): src = self._task.args.get('src') working_path = self._get_working_path() if os.path.isabs(src) or urlsplit(src).scheme: source = src else: source = self._loader.path_dwim_relative(working_path, 'templates', src) if not source: source = self._loader.path_dwim_relative(working_path, src) if not os.path.exists(source): raise ValueError('path specified in src not found') try: with open(source, 'r') as f: template_data = to_text(f.read()) except IOError: return dict(failed=True, msg='unable to load src file') # Create a template search path in the following order: # [working_path, self_role_path, dependent_role_paths, dirname(source)] searchpath = [working_path] if self._task._role is not None: searchpath.append(self._task._role._role_path) if hasattr(self._task, "_block"): dep_chain = self._task._block.get_dep_chain() if dep_chain is not None: for role in dep_chain: searchpath.append(role._role_path) searchpath.append(os.path.dirname(source)) self._templar.environment.loader.searchpath = searchpath self._task.args['src'] = self._templar.template(template_data)
bsd-3-clause
-7,830,596,251,620,386,000
36.097345
85
0.617605
false
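The `_write_backup` method above implements a rotate-then-write snapshot: any previous backup for the host is deleted before a new timestamped file is written. A standalone sketch of the same pattern (directory and host name are illustrative), using a context manager instead of the bare `open().write()` in the original:

import glob
import os
import time

def write_backup(backup_root, host, contents):
    if not os.path.exists(backup_root):
        os.mkdir(backup_root)
    for fn in glob.glob('%s/%s*' % (backup_root, host)):
        os.remove(fn)  # keep at most one snapshot per host
    tstamp = time.strftime('%Y-%m-%d@%H:%M:%S', time.localtime(time.time()))
    filename = '%s/%s_config.%s' % (backup_root, host, tstamp)
    with open(filename, 'w') as f:
        f.write(contents)
    return filename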
firstjob/python-social-auth
social/tests/test_utils.py
73
5018
import sys import unittest2 as unittest from mock import Mock from social.utils import sanitize_redirect, user_is_authenticated, \ user_is_active, slugify, build_absolute_uri, \ partial_pipeline_data PY3 = sys.version_info[0] == 3 class SanitizeRedirectTest(unittest.TestCase): def test_none_redirect(self): self.assertEqual(sanitize_redirect('myapp.com', None), None) def test_empty_redirect(self): self.assertEqual(sanitize_redirect('myapp.com', ''), None) def test_dict_redirect(self): self.assertEqual(sanitize_redirect('myapp.com', {}), None) def test_invalid_redirect(self): self.assertEqual(sanitize_redirect('myapp.com', {'foo': 'bar'}), None) def test_wrong_path_redirect(self): self.assertEqual( sanitize_redirect('myapp.com', 'http://notmyapp.com/path/'), None ) def test_valid_absolute_redirect(self): self.assertEqual( sanitize_redirect('myapp.com', 'http://myapp.com/path/'), 'http://myapp.com/path/' ) def test_valid_relative_redirect(self): self.assertEqual(sanitize_redirect('myapp.com', '/path/'), '/path/') class UserIsAuthenticatedTest(unittest.TestCase): def test_user_is_none(self): self.assertEqual(user_is_authenticated(None), False) def test_user_is_not_none(self): self.assertEqual(user_is_authenticated(object()), True) def test_user_has_is_authenticated(self): class User(object): is_authenticated = True self.assertEqual(user_is_authenticated(User()), True) def test_user_has_is_authenticated_callable(self): class User(object): def is_authenticated(self): return True self.assertEqual(user_is_authenticated(User()), True) class UserIsActiveTest(unittest.TestCase): def test_user_is_none(self): self.assertEqual(user_is_active(None), False) def test_user_is_not_none(self): self.assertEqual(user_is_active(object()), True) def test_user_has_is_active(self): class User(object): is_active = True self.assertEqual(user_is_active(User()), True) def test_user_has_is_active_callable(self): class User(object): def is_active(self): return True self.assertEqual(user_is_active(User()), True) class SlugifyTest(unittest.TestCase): def test_slugify_formats(self): if PY3: self.assertEqual(slugify('FooBar'), 'foobar') self.assertEqual(slugify('Foo Bar'), 'foo-bar') self.assertEqual(slugify('Foo (Bar)'), 'foo-bar') else: self.assertEqual(slugify('FooBar'.decode('utf-8')), 'foobar') self.assertEqual(slugify('Foo Bar'.decode('utf-8')), 'foo-bar') self.assertEqual(slugify('Foo (Bar)'.decode('utf-8')), 'foo-bar') class BuildAbsoluteURITest(unittest.TestCase): def setUp(self): self.host = 'http://foobar.com' def tearDown(self): self.host = None def test_path_none(self): self.assertEqual(build_absolute_uri(self.host), self.host) def test_path_empty(self): self.assertEqual(build_absolute_uri(self.host, ''), self.host) def test_path_http(self): self.assertEqual(build_absolute_uri(self.host, 'http://barfoo.com'), 'http://barfoo.com') def test_path_https(self): self.assertEqual(build_absolute_uri(self.host, 'https://barfoo.com'), 'https://barfoo.com') def test_host_ends_with_slash_and_path_starts_with_slash(self): self.assertEqual(build_absolute_uri(self.host + '/', '/foo/bar'), 'http://foobar.com/foo/bar') def test_absolute_uri(self): self.assertEqual(build_absolute_uri(self.host, '/foo/bar'), 'http://foobar.com/foo/bar') class PartialPipelineData(unittest.TestCase): def test_kwargs_included_in_result(self): backend = self._backend() key, val = ('foo', 'bar') _, xkwargs = partial_pipeline_data(backend, None, *(), **dict([(key, val)])) self.assertTrue(key in xkwargs) self.assertEqual(xkwargs[key], val) def 
test_update_user(self): user = object() backend = self._backend(session_kwargs={'user': None}) _, xkwargs = partial_pipeline_data(backend, user) self.assertTrue('user' in xkwargs) self.assertEqual(xkwargs['user'], user) def _backend(self, session_kwargs=None): strategy = Mock() strategy.request = None strategy.session_get.return_value = object() strategy.partial_from_session.return_value = \ (0, 'mock-backend', [], session_kwargs or {}) backend = Mock() backend.name = 'mock-backend' backend.strategy = strategy return backend
bsd-3-clause
3,718,112,198,099,050,000
32.677852
78
0.606218
false
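For reference, one way the `sanitize_redirect` contract exercised by these tests could be satisfied; this is a hedged sketch, not the actual `social.utils` implementation, and it uses the Python 2 spellings (`urlparse`, `basestring`) to match the module under test:

from urlparse import urlparse  # Python 2; use urllib.parse on Python 3

def sanitize_redirect(host, redirect_to):
    # Reject None, '', and non-string values such as {} or {'foo': 'bar'}.
    if not redirect_to or not isinstance(redirect_to, basestring):
        return None
    netloc = urlparse(redirect_to).netloc
    # Absolute URLs must point back at our own host; relative paths pass.
    if netloc and netloc != host:
        return None
    return redirect_to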
ds-hwang/chromium-crosswalk
tools/perf/benchmarks/gpu_times.py
8
2505
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from core import perf_benchmark from benchmarks import silk_flags from telemetry import benchmark from telemetry.timeline import tracing_category_filter from telemetry.web_perf.metrics import gpu_timeline from telemetry.web_perf import timeline_based_measurement import page_sets TOPLEVEL_CATEGORIES = ['disabled-by-default-gpu.device', 'disabled-by-default-gpu.service'] class _GPUTimes(perf_benchmark.PerfBenchmark): def CreateTimelineBasedMeasurementOptions(self): cat_string = ','.join(TOPLEVEL_CATEGORIES) cat_filter = tracing_category_filter.TracingCategoryFilter(cat_string) options = timeline_based_measurement.Options(overhead_level=cat_filter) options.SetLegacyTimelineBasedMetrics([gpu_timeline.GPUTimelineMetric()]) return options @benchmark.Disabled('all') # http://crbug.com/453131, http://crbug.com/527543 class GPUTimesKeyMobileSites(_GPUTimes): """Measures GPU timeline metric on key mobile sites.""" page_set = page_sets.KeyMobileSitesSmoothPageSet @classmethod def Name(cls): return 'gpu_times.key_mobile_sites_smooth' @benchmark.Disabled('all') # http://crbug.com/453131, http://crbug.com/527543 class GPUTimesGpuRasterizationKeyMobileSites(_GPUTimes): """Measures GPU timeline metric on key mobile sites with GPU rasterization. """ page_set = page_sets.KeyMobileSitesSmoothPageSet def SetExtraBrowserOptions(self, options): silk_flags.CustomizeBrowserOptionsForGpuRasterization(options) @classmethod def Name(cls): return 'gpu_times.gpu_rasterization.key_mobile_sites_smooth' @benchmark.Disabled('all') # http://crbug.com/453131, http://crbug.com/517476 class GPUTimesTop25Sites(_GPUTimes): """Measures GPU timeline metric for the top 25 sites.""" page_set = page_sets.Top25SmoothPageSet @classmethod def Name(cls): return 'gpu_times.top_25_smooth' @benchmark.Disabled('all') # http://crbug.com/453131, http://crbug.com/517476 class GPUTimesGpuRasterizationTop25Sites(_GPUTimes): """Measures GPU timeline metric for the top 25 sites with GPU rasterization. """ page_set = page_sets.Top25SmoothPageSet def SetExtraBrowserOptions(self, options): silk_flags.CustomizeBrowserOptionsForGpuRasterization(options) @classmethod def Name(cls): return 'gpu_times.gpu_rasterization.top_25_smooth'
bsd-3-clause
7,396,021,824,340,821,000
31.960526
78
0.762874
false
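All four benchmarks above follow the same subclass pattern: pick a page set, optionally force GPU rasterization, and return a dotted benchmark name. A hedged sketch of one more entry in that pattern (the class and benchmark name are hypothetical; the decorators, base class, and page set are those already imported in the file):

@benchmark.Disabled('all')  # hypothetical example, disabled like its siblings
class GPUTimesExampleSites(_GPUTimes):
  """Measures GPU timeline metric on an existing smooth page set."""
  page_set = page_sets.Top25SmoothPageSet

  @classmethod
  def Name(cls):
    return 'gpu_times.example_smooth'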
bqbn/addons-server
src/olympia/files/utils.py
1
52387
import collections import contextlib import errno import hashlib import json import os import io import re import shutil import signal import stat import struct import tarfile import tempfile import zipfile import fcntl from datetime import datetime from django import forms from django.conf import settings from django.core.files.storage import ( File as DjangoFile, default_storage as storage) from django.template.defaultfilters import filesizeformat from django.utils.encoding import force_text from django.utils.jslex import JsLexer from django.utils.translation import ugettext import rdflib from xml.parsers.expat import ExpatError from defusedxml import minidom from defusedxml.common import DefusedXmlException import olympia.core.logger from olympia import amo from olympia.access import acl from olympia.addons.utils import verify_mozilla_trademark from olympia.amo.utils import decode_json, find_language, rm_local_tmp_dir from olympia.applications.models import AppVersion from olympia.lib.crypto.signing import get_signer_organizational_unit_name from olympia.lib import unicodehelper from olympia.users.utils import ( mozilla_signed_extension_submission_allowed, system_addon_submission_allowed) from olympia.versions.compare import version_int as vint log = olympia.core.logger.getLogger('z.files.utils') class ParseError(forms.ValidationError): pass VERSION_RE = re.compile(r'^[-+*.\w]{,32}$') SIGNED_RE = re.compile(r'^META\-INF/(\w+)\.(rsa|sf)$') # This is essentially what Firefox matches # (see toolkit/components/extensions/ExtensionUtils.jsm) MSG_RE = re.compile(r'__MSG_(?P<msgid>[a-zA-Z0-9@_]+?)__') # The default update URL. default = ( 'https://versioncheck.addons.mozilla.org/update/VersionCheck.php?' 'reqVersion=%REQ_VERSION%&id=%ITEM_ID%&version=%ITEM_VERSION%&' 'maxAppVersion=%ITEM_MAXAPPVERSION%&status=%ITEM_STATUS%&appID=%APP_ID%&' 'appVersion=%APP_VERSION%&appOS=%APP_OS%&appABI=%APP_ABI%&' 'locale=%APP_LOCALE%&currentAppVersion=%CURRENT_APP_VERSION%&' 'updateType=%UPDATE_TYPE%' ) def get_filepath(fileorpath): """Resolve the actual file path of `fileorpath`. This supports various input formats, a path, a django `File` object, `olympia.files.File`, a `FileUpload` or just a regular file-like object. """ if isinstance(fileorpath, str): return fileorpath elif isinstance(fileorpath, DjangoFile): return fileorpath elif hasattr(fileorpath, 'file_path'): # File return fileorpath.file_path elif hasattr(fileorpath, 'path'): # FileUpload return fileorpath.path elif hasattr(fileorpath, 'name'): # file-like object return fileorpath.name return fileorpath def id_to_path(pk): """ Generate a path from an id, to distribute folders in the file system. 
1 => 1/1/1 12 => 2/12/12 123456 => 6/56/123456 """ pk = str(pk) path = [pk[-1]] if len(pk) >= 2: path.append(pk[-2:]) else: path.append(pk) path.append(pk) return os.path.join(*path) def get_file(fileorpath): """Get a file-like object, whether given a FileUpload object or a path.""" if hasattr(fileorpath, 'path'): # FileUpload return storage.open(fileorpath.path, 'rb') if hasattr(fileorpath, 'name'): return fileorpath return storage.open(fileorpath, 'rb') def make_xpi(files): file_obj = io.BytesIO() zip_file = zipfile.ZipFile(file_obj, 'w') for path, data in files.items(): zip_file.writestr(path, data) zip_file.close() file_obj.seek(0) return file_obj class UnsupportedFileType(forms.ValidationError): pass class NoManifestFound(forms.ValidationError): pass class InvalidManifest(forms.ValidationError): pass class Extractor(object): """Extract add-on info from a manifest file.""" App = collections.namedtuple('App', 'appdata id min max') @classmethod def parse(cls, xpi_fobj, minimal=False): zip_file = SafeZip(xpi_fobj) certificate = os.path.join('META-INF', 'mozilla.rsa') certificate_info = None if zip_file.exists(certificate): certificate_info = SigningCertificateInformation( zip_file.read(certificate)) if zip_file.exists('manifest.json'): data = ManifestJSONExtractor( zip_file, certinfo=certificate_info).parse(minimal=minimal) elif zip_file.exists('install.rdf'): # Note that RDFExtractor is a misnomer, it receives the zip_file # object because it might need to read other files than just # the rdf to deal with dictionaries, complete themes etc. data = RDFExtractor( zip_file, certinfo=certificate_info).parse(minimal=minimal) else: raise NoManifestFound( 'No install.rdf or manifest.json found') return data def get_appversions(app, min_version, max_version): """Return the `AppVersion`s that correspond to the given versions.""" qs = AppVersion.objects.filter(application=app.id) min_appver = qs.get(version=min_version) max_appver = qs.get(version=max_version) return min_appver, max_appver def get_simple_version(version_string): """Extract the version number without the ><= requirements. This simply extracts the version number without the ><= requirement so it will not be accurate for version requirements that are not >=, <= or = to a version. >>> get_simple_version('>=33.0a1') '33.0a1' """ if not version_string: return '' return re.sub('[<=>]', '', version_string) class RDFExtractor(object): """Extract add-on info from an install.rdf.""" # https://developer.mozilla.org/en-US/Add-ons/Install_Manifests#type TYPES = { '2': amo.ADDON_EXTENSION, '4': amo.ADDON_EXTENSION, # Really a XUL theme but now unsupported. '8': amo.ADDON_LPAPP, '64': amo.ADDON_DICT, '128': amo.ADDON_EXTENSION, # Telemetry Experiment '256': amo.ADDON_EXTENSION, # WebExtension Experiment } # Langpacks and dictionaries, if the type is properly set, are always # considered restartless. ALWAYS_RESTARTLESS_TYPES = ('8', '64', '128', '256') # Telemetry and Web Extension Experiments types. # See: bug 1220097 and https://github.com/mozilla/addons-server/issues/3315 EXPERIMENT_TYPES = ('128', '256') manifest = u'urn:mozilla:install-manifest' is_experiment = False # Experiment extensions: bug 1220097. 
def __init__(self, zip_file, certinfo=None): self.zip_file = zip_file self.certinfo = certinfo self.rdf = rdflib.Graph().parse( data=force_text(zip_file.read('install.rdf'))) self.package_type = None self.find_root() # Will set self.package_type def parse(self, minimal=False): data = { 'guid': self.find('id'), 'type': self.find_type(), 'version': self.find('version'), 'is_webextension': False, 'name': self.find('name'), 'summary': self.find('description'), } # Populate certificate information (e.g signed by mozilla or not) # early on to be able to verify compatibility based on it if self.certinfo is not None: data.update(self.certinfo.parse()) if not minimal: data.update({ 'homepage': self.find('homepageURL'), 'is_restart_required': ( self.find('bootstrap') != 'true' and self.find('type') not in self.ALWAYS_RESTARTLESS_TYPES), 'apps': self.apps(), }) # We used to simply use the value of 'strictCompatibility' in the # rdf to set strict_compatibility, but now we enable it or not for # all legacy add-ons depending on their type. This will prevent # them from being marked as compatible with Firefox 57. # This is not true for legacy add-ons already signed by Mozilla. # For these add-ons we just re-use to whatever # `strictCompatibility` is set. if data['type'] not in amo.NO_COMPAT: if self.certinfo and self.certinfo.is_mozilla_signed_ou: data['strict_compatibility'] = ( self.find('strictCompatibility') == 'true') else: data['strict_compatibility'] = True else: data['strict_compatibility'] = False # `experiment` is detected in in `find_type`. data['is_experiment'] = self.is_experiment return data def find_type(self): # If the extension declares a type that we know about, use # that. # https://developer.mozilla.org/en-US/Add-ons/Install_Manifests#type self.package_type = self.find('type') if self.package_type and self.package_type in self.TYPES: # If it's an experiment, we need to store that for later. self.is_experiment = self.package_type in self.EXPERIMENT_TYPES return self.TYPES[self.package_type] # Look for dictionaries. is_dictionary = ( self.zip_file.exists('dictionaries/') and any(fname.endswith('.dic') for fname in self.zip_file.namelist()) ) if is_dictionary: return amo.ADDON_DICT # Consult <em:type>. return self.TYPES.get(self.package_type, amo.ADDON_EXTENSION) def uri(self, name): namespace = 'http://www.mozilla.org/2004/em-rdf' return rdflib.term.URIRef('%s#%s' % (namespace, name)) def find_root(self): # If the install-manifest root is well-defined, it'll show up when we # search for triples with it. If not, we have to find the context that # defines the manifest and use that as our root. # http://www.w3.org/TR/rdf-concepts/#section-triples manifest = rdflib.term.URIRef(self.manifest) if list(self.rdf.triples((manifest, None, None))): self.root = manifest else: self.root = next(self.rdf.subjects(None, self.manifest)) def find(self, name, ctx=None): """Like $() for install.rdf, where name is the selector.""" if ctx is None: ctx = self.root # predicate it maps to <em:{name}>. match = list(self.rdf.objects(ctx, predicate=self.uri(name))) # These come back as rdflib.Literal, which subclasses unicode. if match: return str(match[0]) def apps(self): rv = [] seen_apps = set() for ctx in self.rdf.objects(None, self.uri('targetApplication')): app = amo.APP_GUIDS.get(self.find('id', ctx)) if not app: continue if app.guid not in amo.APP_GUIDS or app.id in seen_apps: continue if app not in amo.APP_USAGE: # Ignore non-firefoxes compatibility. 
continue seen_apps.add(app.id) try: min_appver_text = self.find('minVersion', ctx) max_appver_text = self.find('maxVersion', ctx) # Rewrite '*' as '56.*' in legacy extensions, since they # are not compatible with higher versions. # We don't do that for legacy add-ons that are already # signed by Mozilla to allow them for Firefox 57 onwards. needs_max_56_star = ( app.id in (amo.FIREFOX.id, amo.ANDROID.id) and max_appver_text == '*' and not (self.certinfo and self.certinfo.is_mozilla_signed_ou) ) if needs_max_56_star: max_appver_text = '56.*' min_appver, max_appver = get_appversions( app, min_appver_text, max_appver_text) except AppVersion.DoesNotExist: continue rv.append(Extractor.App( appdata=app, id=app.id, min=min_appver, max=max_appver)) return rv class ManifestJSONExtractor(object): def __init__(self, zip_file, data='', certinfo=None): self.zip_file = zip_file self.certinfo = certinfo if not data: data = zip_file.read('manifest.json') # Remove BOM if present. data = unicodehelper.decode(data) # Run through the JSON and remove all comments, then try to read # the manifest file. # Note that Firefox and the WebExtension spec only allow for # line comments (starting with `//`), not block comments (starting with # `/*`). We strip out both in AMO because the linter will flag the # block-level comments explicitly as an error (so the developer can # change them to line-level comments). # # But block level comments are not allowed. We just flag them elsewhere # (in the linter). json_string = '' lexer = JsLexer() for name, token in lexer.lex(data): if name not in ('blockcomment', 'linecomment'): json_string += token try: self.data = json.loads(json_string) except Exception: raise InvalidManifest( ugettext('Could not parse the manifest file.')) def get(self, key, default=None): return self.data.get(key, default) @property def is_experiment(self): """Return whether or not the webextension uses experiments or theme experiments API. In legacy extensions this is a different type, but for webextensions we just look at the manifest.""" experiment_keys = ('experiment_apis', 'theme_experiment') return any(bool(self.get(key)) for key in experiment_keys) @property def gecko(self): """Return the "applications|browser_specific_settings["gecko"]" part of the manifest.""" parent_block = self.get( 'browser_specific_settings', self.get('applications', {})) return parent_block.get('gecko', {}) @property def guid(self): return self.gecko.get('id', None) @property def type(self): return ( amo.ADDON_LPAPP if 'langpack_id' in self.data else amo.ADDON_STATICTHEME if 'theme' in self.data else amo.ADDON_DICT if 'dictionaries' in self.data else amo.ADDON_EXTENSION ) @property def strict_max_version(self): return get_simple_version(self.gecko.get('strict_max_version')) @property def strict_min_version(self): return get_simple_version(self.gecko.get('strict_min_version')) def apps(self): """Get `AppVersion`s for the application.""" type_ = self.type if type_ == amo.ADDON_LPAPP: # Langpack are only compatible with Firefox desktop at the moment. # https://github.com/mozilla/addons-server/issues/8381 # They are all strictly compatible with a specific version, so # the default min version here doesn't matter much. apps = ( (amo.FIREFOX, amo.DEFAULT_WEBEXT_MIN_VERSION), ) elif type_ == amo.ADDON_STATICTHEME: # Static themes are only compatible with Firefox desktop >= 53 # and Firefox for Android >=65. 
apps = ( (amo.FIREFOX, amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX), (amo.ANDROID, amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID), ) elif type_ == amo.ADDON_DICT: # WebExt dicts are only compatible with Firefox desktop >= 61. apps = ( (amo.FIREFOX, amo.DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX), ) else: webext_min = ( amo.DEFAULT_WEBEXT_MIN_VERSION if self.get('browser_specific_settings', None) is None else amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC) # amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC should be 48.0, # which is the same as amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID, so # no specific treatment for Android. apps = ( (amo.FIREFOX, webext_min), (amo.ANDROID, amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID), ) doesnt_support_no_id = ( self.strict_min_version and (vint(self.strict_min_version) < vint(amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID)) ) if self.guid is None and doesnt_support_no_id: raise forms.ValidationError( ugettext('Add-on ID is required for Firefox 47 and below.') ) # If a minimum strict version is specified, it needs to be higher # than the version when Firefox started supporting WebExtensions. unsupported_no_matter_what = ( self.strict_min_version and vint(self.strict_min_version) < vint(amo.DEFAULT_WEBEXT_MIN_VERSION)) if unsupported_no_matter_what: msg = ugettext('Lowest supported "strict_min_version" is 42.0.') raise forms.ValidationError(msg) for app, default_min_version in apps: if self.guid is None and not self.strict_min_version: strict_min_version = max(amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID, default_min_version) else: # strict_min_version for this app shouldn't be lower than the # default min version for this app. strict_min_version = max( self.strict_min_version, default_min_version) strict_max_version = ( self.strict_max_version or amo.DEFAULT_WEBEXT_MAX_VERSION) if vint(strict_max_version) < vint(strict_min_version): strict_max_version = strict_min_version qs = AppVersion.objects.filter(application=app.id) try: min_appver = qs.get(version=strict_min_version) except AppVersion.DoesNotExist: # If the specified strict_min_version can't be found, raise an # error, we can't guess an appropriate one. msg = ugettext( u'Unknown "strict_min_version" {appver} for {app}'.format( app=app.pretty, appver=strict_min_version)) raise forms.ValidationError(msg) try: max_appver = qs.get(version=strict_max_version) except AppVersion.DoesNotExist: # If the specified strict_max_version can't be found, this is # less of a problem, ignore and replace with '*'. # https://github.com/mozilla/addons-server/issues/7160 max_appver = qs.get(version=amo.DEFAULT_WEBEXT_MAX_VERSION) yield Extractor.App( appdata=app, id=app.id, min=min_appver, max=max_appver) def target_locale(self): """Guess target_locale for a dictionary from manifest contents.""" try: dictionaries = self.get('dictionaries', {}) key = force_text(list(dictionaries.keys())[0]) return key[:255] except (IndexError, UnicodeDecodeError): # This shouldn't happen: the linter should prevent it, but # just in case, handle the error (without bothering with # translations as users should never see this). 
            raise forms.ValidationError('Invalid dictionaries object.')

    def parse(self, minimal=False):
        data = {
            'guid': self.guid,
            'type': self.type,
            'version': self.get('version', ''),
            'is_webextension': True,
            'name': self.get('name'),
            'summary': self.get('description'),
            'homepage': self.get('homepage_url'),
            'default_locale': self.get('default_locale'),
        }

        # Populate certificate information (e.g. signed by Mozilla or not)
        # early on to be able to verify compatibility based on it
        if self.certinfo is not None:
            data.update(self.certinfo.parse())

        if self.type == amo.ADDON_STATICTHEME:
            data['theme'] = self.get('theme', {})

        if not minimal:
            data.update({
                'is_restart_required': False,
                'apps': list(self.apps()),
                # Langpacks have strict compatibility enabled, the rest of
                # the webextensions don't.
                'strict_compatibility': data['type'] == amo.ADDON_LPAPP,
                'is_experiment': self.is_experiment,
            })
            if self.type == amo.ADDON_EXTENSION:
                # Only extensions have permissions and content scripts
                data.update({
                    'optional_permissions':
                        self.get('optional_permissions', []),
                    'permissions': self.get('permissions', []),
                    'content_scripts': self.get('content_scripts', []),
                })

                if self.get('devtools_page'):
                    data.update({
                        'devtools_page': self.get('devtools_page')
                    })
            elif self.type == amo.ADDON_DICT:
                data['target_locale'] = self.target_locale()
        return data


class SigningCertificateInformation(object):
    """Process the signature to determine whether the add-on is a Mozilla
    Signed extension, i.e. already signed with a special certificate.

    We want to know this so we don't write over it later, and stop
    unauthorised people from submitting them to AMO."""

    def __init__(self, certificate_data):
        pkcs7 = certificate_data
        self.cert_ou = get_signer_organizational_unit_name(pkcs7)

    @property
    def is_mozilla_signed_ou(self):
        return self.cert_ou == 'Mozilla Extensions'

    def parse(self):
        return {'is_mozilla_signed_extension': self.is_mozilla_signed_ou}


def extract_search(content):
    def _text(tag):
        try:
            return dom.getElementsByTagName(tag)[0].childNodes[0].wholeText
        except (IndexError, AttributeError):
            raise forms.ValidationError(
                ugettext('Could not parse uploaded file, missing or empty '
                         '<%s> element') % tag)

    # Only catch basic errors: we don't accept any new uploads, and
    # validation has happened on upload in the past.
    try:
        dom = minidom.parse(content)
    except DefusedXmlException:
        raise forms.ValidationError(
            ugettext('OpenSearch: XML Security error.'))
    except ExpatError:
        raise forms.ValidationError(ugettext('OpenSearch: XML Parse Error.'))

    return {
        'name': _text('ShortName'),
        'description': _text('Description')
    }


def parse_search(fileorpath, addon=None):
    try:
        f = get_file(fileorpath)
        data = extract_search(f)
    except forms.ValidationError:
        raise
    except Exception:
        log.error('OpenSearch parse error', exc_info=True)
        raise forms.ValidationError(ugettext('Could not parse uploaded file.'))

    return {'guid': None,
            'type': amo.ADDON_SEARCH,
            'name': data['name'],
            'is_restart_required': False,
            'is_webextension': False,
            'summary': data['description'],
            'version': datetime.now().strftime('%Y%m%d')}


class FSyncMixin(object):
    """Mixin that implements fsync for file extractions.

    This mixin uses the `_extract_member` interface used by `zipfile` and
    `tarfile`, so it's somewhat universal.

    We need this to make sure that on EFS / NFS all data is immediately
    written to avoid any data loss on the way.
    """

    def _fsync_dir(self, path):
        descriptor = os.open(path, os.O_DIRECTORY)
        try:
            os.fsync(descriptor)
        except OSError as exc:
            # On some filesystems, doing a fsync on a directory
            # raises an EINVAL error. Ignoring it is usually safe.
            if exc.errno != errno.EINVAL:
                raise
        os.close(descriptor)

    def _fsync_file(self, path):
        descriptor = os.open(path, os.O_RDONLY)
        os.fsync(descriptor)
        os.close(descriptor)

    def _extract_member(self, member, targetpath, *args, **kwargs):
        """Extends `ZipFile._extract_member` to call fsync().

        For every extracted file we are ensuring that its data has been
        written to disk. We are doing this to avoid any data inconsistencies
        that we have seen in the past.

        To do this correctly we are fsync()ing all directories as well;
        only that will ensure we have a durable write for that specific file.

        This is inspired by https://github.com/2ndquadrant-it/barman/
        (see backup.py -> backup_fsync_and_set_sizes and utils.py)
        """
        super(FSyncMixin, self)._extract_member(
            member, targetpath, *args, **kwargs)

        parent_dir = os.path.dirname(os.path.normpath(targetpath))
        if parent_dir:
            self._fsync_dir(parent_dir)

        self._fsync_file(targetpath)


class FSyncedZipFile(FSyncMixin, zipfile.ZipFile):
    """Subclass of ZipFile that calls `fsync` for file extractions."""
    pass


class FSyncedTarFile(FSyncMixin, tarfile.TarFile):
    """Subclass of TarFile that calls `fsync` for file extractions."""
    pass


def archive_member_validator(archive, member):
    """Validate a member of an archive (TarInfo or ZipInfo)."""
    filename = getattr(member, 'filename', getattr(member, 'name', None))
    filesize = getattr(member, 'file_size', getattr(member, 'size', None))
    _validate_archive_member_name_and_size(filename, filesize)


def _validate_archive_member_name_and_size(filename, filesize):
    if filename is None or filesize is None:
        raise forms.ValidationError(ugettext('Unsupported archive type.'))

    try:
        force_text(filename)
    except UnicodeDecodeError:
        # We can't log the filename, unfortunately, since its encoding
        # is obviously broken :-/
        log.error('Extraction error, invalid file name encoding')
        msg = ugettext('Invalid file name in archive. Please make sure '
                       'all filenames are utf-8 or latin1 encoded.')
        raise forms.ValidationError(msg)

    if '../' in filename or '..' == filename or filename.startswith('/'):
        log.error('Extraction error, invalid file name: %s' % (filename))
        # L10n: {0} is the name of the invalid file.
        msg = ugettext('Invalid file name in archive: {0}')
        raise forms.ValidationError(msg.format(filename))

    if filesize > settings.FILE_UNZIP_SIZE_LIMIT:
        log.error('Extraction error, file too big for file (%s): '
                  '%s' % (filename, filesize))
        # L10n: {0} is the name of the invalid file.
        msg = ugettext('File exceeding size limit in archive: {0}')
        raise forms.ValidationError(msg.format(filename))


class SafeZip(object):
    def __init__(self, source, mode='r', force_fsync=False):
        self.source = source
        self.info_list = None
        self.mode = mode
        self.force_fsync = force_fsync
        self.initialize_and_validate()

    def initialize_and_validate(self):
        """
        Runs some overall archive checks.
""" if self.force_fsync: zip_file = FSyncedZipFile(self.source, self.mode) else: zip_file = zipfile.ZipFile(self.source, self.mode) info_list = zip_file.infolist() total_file_size = 0 for info in info_list: total_file_size += info.file_size archive_member_validator(self.source, info) if total_file_size >= settings.MAX_ZIP_UNCOMPRESSED_SIZE: raise forms.ValidationError(ugettext( 'Uncompressed size is too large')) self.info_list = info_list self.zip_file = zip_file def is_signed(self): """Tells us if an addon is signed.""" finds = [] for info in self.info_list: match = SIGNED_RE.match(info.filename) if match: name, ext = match.groups() # If it's rsa or sf, just look for the opposite. if (name, {'rsa': 'sf', 'sf': 'rsa'}[ext]) in finds: return True finds.append((name, ext)) def extract_from_manifest(self, manifest): """ Extracts a file given a manifest such as: jar:chrome/de.jar!/locale/de/browser/ or locale/de/browser """ type, path = manifest.split(':') jar = self if type == 'jar': parts = path.split('!') for part in parts[:-1]: jar = self.__class__(io.BytesIO(jar.zip_file.read(part))) path = parts[-1] return jar.read(path[1:] if path.startswith('/') else path) def extract_info_to_dest(self, info, dest): """Extracts the given info to a directory and checks the file size.""" self.zip_file.extract(info, dest) dest = os.path.join(dest, info.filename) if not os.path.isdir(dest): # Directories consistently report their size incorrectly. size = os.stat(dest)[stat.ST_SIZE] if size != info.file_size: log.error('Extraction error, uncompressed size: %s, %s not %s' % (self.source, size, info.file_size)) raise forms.ValidationError(ugettext('Invalid archive.')) def extract_to_dest(self, dest): """Extracts the zip file to a directory.""" for info in self.info_list: self.extract_info_to_dest(info, dest) def close(self): self.zip_file.close() @property def filelist(self): return self.zip_file.filelist @property def namelist(self): return self.zip_file.namelist def exists(self, path): try: return self.zip_file.getinfo(path) except KeyError: return False def read(self, path): return self.zip_file.read(path) def extract_zip(source, remove=False, force_fsync=False, tempdir=None): """Extracts the zip file. If remove is given, removes the source file.""" if tempdir is None: tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH) try: zip_file = SafeZip(source, force_fsync=force_fsync) zip_file.extract_to_dest(tempdir) except Exception: rm_local_tmp_dir(tempdir) raise if remove: os.remove(source) return tempdir def extract_extension_to_dest(source, dest=None, force_fsync=False): """Extract `source` to `dest`. `source` can be an extension or extension source, can be a zip, tar (gzip, bzip) or a search provider (.xml file). Note that this doesn't verify the contents of `source` except for that it requires something valid to be extracted. :returns: Extraction target directory, if `dest` is `None` it'll be a temporary directory. 
    :raises FileNotFoundError: if the source file is not found on the
        filesystem
    :raises forms.ValidationError: if the zip is invalid
    """
    target, tempdir = None, None

    if dest is None:
        target = tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH)
    else:
        target = dest

    try:
        source = force_text(source)
        if source.endswith((u'.zip', u'.xpi')):
            with open(source, 'rb') as source_file:
                zip_file = SafeZip(source_file, force_fsync=force_fsync)
                zip_file.extract_to_dest(target)
        elif source.endswith((u'.tar.gz', u'.tar.bz2', u'.tgz')):
            tarfile_class = (
                tarfile.TarFile if not force_fsync else FSyncedTarFile)
            with tarfile_class.open(source) as archive:
                archive.extractall(target)
        elif source.endswith(u'.xml'):
            shutil.copy(source, target)
            if force_fsync:
                FSyncMixin()._fsync_file(target)
    except (zipfile.BadZipFile, tarfile.ReadError, IOError,
            forms.ValidationError) as e:
        if tempdir is not None:
            rm_local_tmp_dir(tempdir)
        if isinstance(e, (FileNotFoundError, forms.ValidationError)):
            # We let FileNotFoundError (which is a subclass of IOError, or
            # rather of OSError, but that's an alias) and ValidationError be
            # raised; the caller will have to deal with them.
            raise

        # For any other exception we caught, raise a generic ValidationError
        # instead.
        raise forms.ValidationError(
            ugettext('Invalid or broken archive.'))
    return target


def copy_over(source, dest):
    """
    Copies from the source to the destination, removing the destination
    if it exists and is a directory.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        shutil.rmtree(dest)
    shutil.copytree(source, dest)
    # mkdtemp sets the directory permissions to 700; for the webserver to
    # read them, we need 755.
    os.chmod(dest, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
             stat.S_IROTH | stat.S_IXOTH)
    shutil.rmtree(source)


def get_all_files(folder, strip_prefix='', prefix=None):
    """Return all files in a file/directory tree.

    :param folder: The folder of which to return the file-tree.
    :param str strip_prefix: A string to strip in case we're adding a custom
                             `prefix`. Doesn't have any implications if
                             `prefix` isn't given.
    :param prefix: A custom prefix to add to all files and folders.
    """

    all_files = []

    # Not using os.path.walk so we get just the right order.
    def iterate(path):
        path_dirs, path_files = storage.listdir(path)
        for dirname in sorted(path_dirs):
            full = os.path.join(path, force_text(dirname))
            all_files.append(full)
            iterate(full)

        for filename in sorted(path_files):
            full = os.path.join(path, force_text(filename))
            all_files.append(full)

    iterate(folder)

    if prefix is not None:
        # This is magic: strip the prefix (e.g. /tmp/) and prepend the
        # custom prefix.
        all_files = [
            os.path.join(prefix, fname[len(strip_prefix) + 1:])
            for fname in all_files]

    return all_files


def extract_xpi(xpi, path):
    """Extract all files from `xpi` to `path`.

    This can be removed in favour of our already extracted git-repositories
    once we have landed and tested them in production.
    """
    tempdir = extract_zip(xpi)
    all_files = get_all_files(tempdir)
    copy_over(tempdir, path)
    return all_files


def parse_xpi(xpi, addon=None, minimal=False, user=None):
    """Extract and parse an XPI. Returns a dict with various properties
    describing the xpi.

    Will raise ValidationError if something went wrong while parsing.

    If minimal is True, it avoids validation as much as possible (still
    raising ValidationError for hard errors like I/O or invalid json/rdf)
    and returns only the minimal set of properties needed to decide what to
    do with the add-on: guid, version and is_webextension.
""" try: xpi = get_file(xpi) xpi_info = Extractor.parse(xpi, minimal=minimal) except forms.ValidationError: raise except IOError as e: if len(e.args) < 2: err, strerror = None, e.args[0] else: err, strerror = e.args log.error('I/O error({0}): {1}'.format(err, strerror)) # Note: we don't really know what happened, so even though we return a # generic message about the manifest, don't raise InvalidManifest. We # want the validation to stop there. raise forms.ValidationError(ugettext( 'Could not parse the manifest file.')) except Exception: # As above, don't raise InvalidManifest here. log.error('XPI parse error', exc_info=True) raise forms.ValidationError(ugettext( 'Could not parse the manifest file.')) if minimal: return xpi_info return check_xpi_info(xpi_info, addon, xpi, user=user) def check_xpi_info(xpi_info, addon=None, xpi_file=None, user=None): from olympia.addons.models import Addon, DeniedGuid guid = xpi_info['guid'] is_webextension = xpi_info.get('is_webextension', False) # If we allow the guid to be omitted we assume that one was generated # or existed before and use that one. # An example are WebExtensions that don't require a guid but we generate # one once they're uploaded. Now, if you update that WebExtension we # just use the original guid. if addon and not guid and is_webextension: xpi_info['guid'] = guid = addon.guid if not guid and not is_webextension: raise forms.ValidationError(ugettext('Could not find an add-on ID.')) if guid: if user: deleted_guid_clashes = Addon.unfiltered.exclude( authors__id=user.id).filter(guid=guid) else: deleted_guid_clashes = Addon.unfiltered.filter(guid=guid) if addon and addon.guid != guid: msg = ugettext( 'The add-on ID in your manifest.json or install.rdf (%s) ' 'does not match the ID of your add-on on AMO (%s)') raise forms.ValidationError(msg % (guid, addon.guid)) if (not addon and # Non-deleted add-ons. (Addon.objects.filter(guid=guid).exists() or # DeniedGuid objects for deletions for Mozilla disabled add-ons DeniedGuid.objects.filter(guid=guid).exists() or # Deleted add-ons that don't belong to the uploader. deleted_guid_clashes.exists())): raise forms.ValidationError(ugettext('Duplicate add-on ID found.')) if len(xpi_info['version']) > 32: raise forms.ValidationError( ugettext('Version numbers should have fewer than 32 characters.')) if not VERSION_RE.match(xpi_info['version']): raise forms.ValidationError( ugettext('Version numbers should only contain letters, numbers, ' 'and these punctuation characters: +*.-_.')) if is_webextension and xpi_info.get('type') == amo.ADDON_STATICTHEME: max_size = settings.MAX_STATICTHEME_SIZE if xpi_file and os.path.getsize(xpi_file.name) > max_size: raise forms.ValidationError( ugettext(u'Maximum size for WebExtension themes is {0}.') .format(filesizeformat(max_size))) if xpi_file: # Make sure we pass in a copy of `xpi_info` since # `resolve_webext_translations` modifies data in-place translations = Addon.resolve_webext_translations( xpi_info.copy(), xpi_file) verify_mozilla_trademark(translations['name'], user) # Parse the file to get and validate package data with the addon. 
    if not acl.experiments_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            ugettext(u'You cannot submit this type of add-on'))

    if not addon and not system_addon_submission_allowed(
            user, xpi_info):
        guids = ' or '.join(
            '"' + guid + '"' for guid in amo.SYSTEM_ADDON_GUIDS)
        raise forms.ValidationError(
            ugettext('You cannot submit an add-on using an ID ending with '
                     '%s' % guids))

    if not mozilla_signed_extension_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            ugettext(u'You cannot submit a Mozilla Signed Extension'))

    if not acl.langpack_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            ugettext('You cannot submit a language pack'))

    return xpi_info


def parse_addon(pkg, addon=None, user=None, minimal=False):
    """
    Extract and parse a file path, UploadedFile or FileUpload.

    Returns a dict with various properties describing the add-on.

    Will raise ValidationError if something went wrong while parsing.

    The `addon` parameter is mandatory if the file being parsed is going to
    be attached to an existing Addon instance.

    The `user` parameter is mandatory unless the `minimal` parameter is
    True. It should point to the UserProfile responsible for the upload.

    If the `minimal` parameter is True, it avoids validation as much as
    possible (still raising ValidationError for hard errors like I/O or
    invalid json/rdf) and returns only the minimal set of properties needed
    to decide what to do with the add-on (the exact set depends on the
    add-on type, but it should always contain at least guid, type, version
    and is_webextension).
    """
    name = getattr(pkg, 'name', pkg)
    if name.endswith('.xml'):
        parsed = parse_search(pkg, addon)
    elif name.endswith(amo.VALID_ADDON_FILE_EXTENSIONS):
        parsed = parse_xpi(pkg, addon, minimal=minimal, user=user)
    else:
        valid_extensions_string = u'(%s)' % u', '.join(
            amo.VALID_ADDON_FILE_EXTENSIONS)
        raise UnsupportedFileType(
            ugettext(
                'Unsupported file type, please upload a supported '
                'file {extensions}.'.format(
                    extensions=valid_extensions_string)))

    if not minimal:
        if user is None:
            # This should never happen and means there is a bug in
            # addons-server itself.
            raise forms.ValidationError(ugettext('Unexpected error.'))

        # FIXME: do the checks depending on user here.
        if addon and addon.type != parsed['type']:
            msg = ugettext(
                'The type (%s) does not match the type of your add-on on '
                'AMO (%s)')
            raise forms.ValidationError(msg % (parsed['type'], addon.type))
    return parsed


def get_sha256(file_obj, block_size=io.DEFAULT_BUFFER_SIZE):
    """Calculate a sha256 hash for `file_obj`.

    `file_obj` must be an open file descriptor. The caller needs to take
    care of closing it properly.
    """
    hash_ = hashlib.sha256()
    for chunk in iter(lambda: file_obj.read(block_size), b''):
        hash_.update(chunk)
    return hash_.hexdigest()


def update_version_number(file_obj, new_version_number):
    """Update the manifest to have the new version number."""
    # Create a new xpi with the updated version.
    updated = u'{0}.updated_version_number'.format(file_obj.file_path)
    # Copy the original XPI, with an updated manifest.json.
    with zipfile.ZipFile(file_obj.file_path, 'r') as source:
        file_list = source.infolist()
        with zipfile.ZipFile(updated, 'w', zipfile.ZIP_DEFLATED) as dest:
            for file_ in file_list:
                content = source.read(file_.filename)
                if file_.filename == 'manifest.json':
                    content = _update_version_in_json_manifest(
                        content, new_version_number)
                dest.writestr(file_, content)
    # Move the updated file to the original file.
    shutil.move(updated, file_obj.file_path)


def write_crx_as_xpi(chunks, target):
    """Extract and strip the header from the CRX, convert it to a regular ZIP
    archive, then write it to `target`. Read more about the CRX file format:
    https://developer.chrome.com/extensions/crx
    """
    # First we open the uploaded CRX so we can see how much we need
    # to trim from the header of the file to make it a valid ZIP.
    with tempfile.NamedTemporaryFile('w+b', dir=settings.TMP_PATH) as tmp:
        for chunk in chunks:
            tmp.write(chunk)

        tmp.seek(0)

        header = tmp.read(16)
        header_info = struct.unpack('4cHxII', header)
        public_key_length = header_info[5]
        signature_length = header_info[6]

        # This is how far forward we need to seek to extract only a
        # ZIP file from this CRX.
        start_position = 16 + public_key_length + signature_length

        hash = hashlib.sha256()
        tmp.seek(start_position)

        # Now we open the Django storage and write our real XPI file.
        with storage.open(target, 'wb') as file_destination:
            bytes = tmp.read(65536)
            # Keep reading bytes and writing them to the XPI.
            while bytes:
                hash.update(bytes)
                file_destination.write(bytes)
                bytes = tmp.read(65536)

    return hash


def _update_version_in_json_manifest(content, new_version_number):
    """Change the version number in the json manifest file provided."""
    updated = json.loads(content)
    if 'version' in updated:
        updated['version'] = new_version_number
    return json.dumps(updated)


def extract_translations(file_obj):
    """Extract all translation messages from `file_obj`.

    Returns a dictionary mapping each supported locale found in the add-on
    to its decoded messages.json contents.
    """
    xpi = get_filepath(file_obj)

    messages = {}

    try:
        with zipfile.ZipFile(xpi, 'r') as source:
            file_list = source.namelist()

            # Fetch all locales the add-on supports.
            # See https://developer.chrome.com/extensions/i18n#overview-locales
            # for more details on the format.
            locales = {
                name.split('/')[1] for name in file_list
                if name.startswith('_locales/') and
                name.endswith('/messages.json')}

            for locale in locales:
                corrected_locale = find_language(locale)

                # Filter out languages we don't support.
                if not corrected_locale:
                    continue

                fname = '_locales/{0}/messages.json'.format(locale)

                try:
                    data = source.read(fname)
                    messages[corrected_locale] = decode_json(data)
                except (ValueError, KeyError):
                    # A `ValueError` is thrown by `decode_json` if the json
                    # is invalid, and a `KeyError` thrown by `source.read`
                    # usually means the file doesn't exist for some reason;
                    # we fail silently.
                    continue
    except IOError:
        pass

    return messages


def resolve_i18n_message(message, messages, locale, default_locale=None):
    """Resolve a translatable string in an add-on.

    This matches ``__MSG_extensionName__`` like names and returns the
    correct translation for `locale`.

    :param locale: The locale to fetch the translation for; if ``None``,
                   ``settings.LANGUAGE_CODE`` is used.
    :param messages: A dictionary of messages, e.g. the return value of
                     `extract_translations`.
    """
    if not message or not isinstance(message, str):
        # Don't even attempt to extract invalid data.
        # See https://github.com/mozilla/addons-server/issues/3067
        # for more details
        return message

    match = MSG_RE.match(message)

    if match is None:
        return message

    locale = find_language(locale)

    if default_locale:
        default_locale = find_language(default_locale)

    msgid = match.group('msgid')
    default = {'message': message}

    if locale in messages:
        message = messages[locale].get(msgid, default)
    elif default_locale in messages:
        message = messages[default_locale].get(msgid, default)

    if not isinstance(message, dict):
        # Fallback for invalid message format; this should be caught by
        # addons-linter in the future, but we'll have to handle it.
        # See https://github.com/mozilla/addons-server/issues/3485
        return default['message']

    return message['message']


def get_background_images(file_obj, theme_data, header_only=False):
    """Extract static theme header image from `file_obj` and return in dict."""
    xpi = get_filepath(file_obj)
    if not theme_data:
        # We might already have theme_data, but otherwise get it from the xpi.
        try:
            parsed_data = parse_xpi(xpi, minimal=True)
            theme_data = parsed_data.get('theme', {})
        except forms.ValidationError:
            # If we can't parse the existing manifest, safely return.
            return {}
    images_dict = theme_data.get('images', {})
    # Get the reference in the manifest. headerURL is the deprecated variant.
    header_url = images_dict.get(
        'theme_frame', images_dict.get('headerURL'))
    # And any additional backgrounds too.
    additional_urls = (
        images_dict.get('additional_backgrounds', []) if not header_only
        else [])
    image_urls = [header_url] + additional_urls

    images = {}
    try:
        with zipfile.ZipFile(xpi, 'r') as source:
            for url in image_urls:
                _, file_ext = os.path.splitext(str(url).lower())
                if file_ext not in amo.THEME_BACKGROUND_EXTS:
                    # Only extract image files.
                    continue
                try:
                    images[url] = source.read(url)
                except KeyError:
                    pass
    except IOError as ioerror:
        log.info(ioerror)

    return images


@contextlib.contextmanager
def run_with_timeout(seconds):
    """Implement timeouts via `signal`.

    This is being used to implement timeout handling when acquiring locks.
    """
    def timeout_handler(signum, frame):
        """
        Since Python 3.5, `fcntl` is retried automatically when interrupted.
        We need an exception to stop it. This exception will propagate on
        to the main thread, so make sure `flock` is called there.
        """
        raise TimeoutError

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)


@contextlib.contextmanager
def lock(lock_dir, lock_name, timeout=6):
    """A wrapper around fcntl to be used as a context manager.

    Additionally this helper allows the caller to wait for a lock for a
    certain amount of time.

    Example::

        with lock(settings.TMP_PATH, 'extraction-1234'):
            extract_xpi(...)

    The lock is properly released at the end of the context block.

    This locking mechanism should work perfectly fine with NFS v4 and EFS
    (which uses the NFS v4.1 protocol).

    :param timeout: Timeout for how long we expect to wait for a lock in
                    seconds. If 0, the function returns immediately;
                    otherwise it blocks the execution.
    :return: `True` if the lock was attained (we own the lock), `False` if
             there is an already existing lock.
    """
    lock_name = f'{lock_name}.lock'

    log.info(f'Acquiring lock {lock_name}.')

    lock_path = os.path.join(lock_dir, lock_name)

    with open(lock_path, 'w') as lockfd:
        lockfd.write(f'{os.getpid()}')
        fileno = lockfd.fileno()

        try:
            with run_with_timeout(timeout):
                fcntl.flock(fileno, fcntl.LOCK_EX)
        except (BlockingIOError, TimeoutError):
            # Another process already holds the lock.
            # In theory we'd always catch `TimeoutError` here, but for the
            # sake of completeness let's catch `BlockingIOError` too, to be
            # on the safe side.
            yield False
        else:
            # We successfully acquired the lock.
            yield True
        finally:
            # Always release the lock after the parent context
            # block has finished.
            log.info(f'Releasing lock {lock_name}.')
            fcntl.flock(fileno, fcntl.LOCK_UN)
            lockfd.close()

    try:
        os.unlink(lock_path)
    except FileNotFoundError:
        pass
bsd-3-clause
2,322,492,546,398,874,000
35.788624
79
0.609369
false
jtara1/RedditImageGrab
redditdownload/plugins/parse_subreddit_list.py
1
2749
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 30 15:26:13 2016

@author: jtara1

General syntax for subreddits.txt:
: (colon character) denotes folder name
subreddit url or word denotes subreddit

For more examples see
https://github.com/jtara1/RedditImageGrab/commit/8e4787ef9ac43ca694fc663be026f69a568bb622

Example of expected input and output:

subreddits.txt = "
pc-wallpapers:
https://www.reddit.com/r/wallpapers/
/r/BackgroundArt/
nature_pics:
http://www.reddit.com/r/EarthPorn/
:
Mountain
"

parse_subreddit_list('/MyPath/subreddits.txt', '/MyPath/') =
[('wallpapers', '/MyPath/pc-wallpapers/wallpapers'),
 ('BackgroundArt', '/MyPath/pc-wallpapers/BackgroundArt'),
 ('EarthPorn', '/MyPath/nature_pics/EarthPorn'),
 ('Mountain', '/MyPath/Mountain')]
"""

import re
import os
from os import getcwd, mkdir


def parse_subreddit_list(file_path, base_path=getcwd()):
    """Gets a list of subreddits from a file & returns a folder for the media
    of each subreddit.

    :param file_path: path of the text file to load subreddits from
        (relative or full path)
    :param base_path: base path that gets returned with each subreddit
    :return: list containing tuples of each subreddit & the folder its media
        gets saved to
    :rtype: list
    """
    try:
        file = open(file_path, 'r')
    except IOError as e:
        print(e)
        raise

    output = []
    folder_regex = re.compile(r'([a-zA-Z0-9_\- ]*):\n')
    subreddit_regex = re.compile(
        r'(?:https?://)?(?:www\.)?reddit\.com/r/([a-zA-Z0-9_]*)')
    subreddit_regex2 = re.compile(r'(?:/r/)?([a-zA-Z0-9_]*)')

    if not os.path.isdir(base_path):
        mkdir(base_path)

    # Iterate through the lines, using regex to check whether each line is a
    # subreddit or a folder title.
    path = base_path
    for line in file:
        if line == '\n':
            continue

        folder_match = re.match(folder_regex, line)
        if folder_match:
            if folder_match.group(1) != '':
                path = os.path.join(base_path, line[:-2])
                if not os.path.isdir(path):
                    mkdir(path)
            else:
                path = base_path
            continue

        subreddit_match = re.match(subreddit_regex, line)
        if not subreddit_match:
            subreddit_match = re.match(subreddit_regex2, line)
            if not subreddit_match:
                print('No match at position %s' % file.tell())
                print('parse_subreddit_list Error: No match found, '
                      'skipping this iteration.')
                continue

        subreddit = subreddit_match.group(1)
        final_path = os.path.join(path, subreddit)
        if not os.path.isdir(final_path):
            mkdir(final_path)
        output.append((subreddit, final_path))

    return output
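

if __name__ == '__main__':
    # A minimal, self-contained demo (not part of the original script):
    # builds a tiny subreddits.txt in a temporary folder and parses it,
    # mirroring the example in the module docstring above.
    import tempfile
    demo_base = tempfile.mkdtemp()
    demo_file = os.path.join(demo_base, 'subreddits.txt')
    with open(demo_file, 'w') as f:
        f.write('pc-wallpapers:\n'
                'https://www.reddit.com/r/wallpapers/\n'
                '/r/BackgroundArt/\n')
    for subreddit, folder in parse_subreddit_list(demo_file, demo_base):
        print(subreddit, folder)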
gpl-3.0
-7,828,582,792,004,484,000
28.880435
111
0.628592
false
ibethune/lammps
tools/i-pi/ipi/inputs/normalmodes.py
41
3951
"""Deals with creating the normal mode representation arrays. Copyright (C) 2013, Joshua More and Michele Ceriotti This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http.//www.gnu.org/licenses/>. Classes: InputNormalModes: Deals with creating the normal mode objects. """ import numpy as np from copy import copy from ipi.engine.normalmodes import * from ipi.utils.inputvalue import * from ipi.utils.units import * __all__ = ['InputNormalModes'] class InputNormalModes(InputArray): """ Storage class for NormalModes engine. Describes how normal-modes transformation and integration should be performed. Attributes: mode: Specifies the method by which the dynamical masses are created. transform: Specifies whether the normal mode calculation will be done using a FFT transform or a matrix multiplication. """ attribs = copy(InputArray.attribs) attribs["mode"] = (InputAttribute, {"dtype" : str, "default" : "rpmd", "help" : "Specifies the technique to be used to calculate the dynamical masses. 'rpmd' simply assigns the bead masses the physical mass. 'manual' sets all the normal mode frequencies except the centroid normal mode manually. 'pa-cmd' takes an argument giving the frequency to set all the non-centroid normal modes to. 'wmax-cmd' is similar to 'pa-cmd', except instead of taking one argument it takes two ([wmax,wtarget]). The lowest-lying normal mode will be set to wtarget for a free particle, and all the normal modes will coincide at frequency wmax. ", "options" : ['pa-cmd', 'wmax-cmd', 'manual', 'rpmd']}) attribs["transform"] = (InputValue,{"dtype" : str, "default" : "fft", "help" : "Specifies whether to calculate the normal mode transform using a fast Fourier transform or a matrix multiplication. For small numbers of beads the matrix multiplication may be faster.", "options" : ['fft', 'matrix']}) default_help = "Deals with the normal mode transformations, including the adjustment of bead masses to give the desired ring polymer normal mode frequencies if appropriate. Takes as arguments frequencies, of which different numbers must be specified and which are used to scale the normal mode frequencies in different ways depending on which 'mode' is specified." default_label = "NORMALMODES" def __init__(self, help=None, dimension=None, default=None, dtype=None): """ Initializes InputNormalModes. Just calls the parent initialization function with appropriate arguments. """ super(InputNormalModes,self).__init__(help=help, default=default, dtype=float, dimension="frequency") def store(self, nm): """Takes a normal modes instance and stores a minimal representation of it. Args: nm: A normal modes object. """ super(InputNormalModes,self).store(nm.nm_freqs) self.mode.store(nm.mode) self.transform.store(nm.transform_method) def fetch(self): """Creates a normal modes object. Returns: A normal modes object. """ super(InputNormalModes,self).check() return NormalModes(self.mode.fetch(), self.transform.fetch(), super(InputNormalModes,self).fetch() )
gpl-2.0
5,744,279,130,182,483,000
46.035714
597
0.690205
false
jamslevy/gsoc
thirdparty/google_appengine/google/appengine/tools/appcfg.py
1
74994
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Tool for deploying apps to an app server. Currently, the application only uploads new appversions. To do this, it first walks the directory tree rooted at the path the user specifies, adding all the files it finds to a list. It then uploads the application configuration (app.yaml) to the server using HTTP, followed by uploading each of the files. It then commits the transaction with another request. The bulk of this work is handled by the AppVersionUpload class, which exposes methods to add to the list of files, fetch a list of modified files, upload files, and commit or rollback the transaction. """ import calendar import datetime import getpass import logging import mimetypes import optparse import os import re import sha import sys import tempfile import time import urllib2 import google import yaml from google.appengine.cron import groctimespecification from google.appengine.api import appinfo from google.appengine.api import croninfo from google.appengine.api import validation from google.appengine.api import yaml_errors from google.appengine.api import yaml_object from google.appengine.datastore import datastore_index from google.appengine.tools import appengine_rpc from google.appengine.tools import bulkloader MAX_FILES_TO_CLONE = 100 LIST_DELIMITER = "\n" TUPLE_DELIMITER = "|" VERSION_FILE = "../VERSION" UPDATE_CHECK_TIMEOUT = 3 NAG_FILE = ".appcfg_nag" MAX_LOG_LEVEL = 4 verbosity = 1 appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = "python" _api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1') _options = validation.Options(*_api_versions.split(',')) appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options del _api_versions, _options def StatusUpdate(msg): """Print a status message to stderr. If 'verbosity' is greater than 0, print the message. Args: msg: The string to print. """ if verbosity > 0: print >>sys.stderr, msg def GetMimeTypeIfStaticFile(config, filename): """Looks up the mime type for 'filename'. Uses the handlers in 'config' to determine if the file should be treated as a static file. Args: config: The app.yaml object to check the filename against. filename: The name of the file. Returns: The mime type string. For example, 'text/plain' or 'image/gif'. None if this is not a static file. """ for handler in config.handlers: handler_type = handler.GetHandlerType() if handler_type in ("static_dir", "static_files"): if handler_type == "static_dir": regex = os.path.join(re.escape(handler.GetHandler()), ".*") else: regex = handler.upload if re.match(regex, filename): if handler.mime_type is not None: return handler.mime_type else: guess = mimetypes.guess_type(filename)[0] if guess is None: default = "application/octet-stream" print >>sys.stderr, ("Could not guess mimetype for %s. Using %s." % (filename, default)) return default return guess return None def BuildClonePostBody(file_tuples): """Build the post body for the /api/clone{files,blobs} urls. 
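  Each tuple is joined with TUPLE_DELIMITER, and the resulting records are
  joined with LIST_DELIMITER (the module-level constants defined above).
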
  Args:
    file_tuples: A list of tuples. Each tuple should contain the entries
      appropriate for the endpoint in question.

  Returns:
    A string containing the properly delimited tuples.
  """
  file_list = []
  for tup in file_tuples:
    path = tup[0]
    tup = tup[1:]
    file_list.append(TUPLE_DELIMITER.join([path] + list(tup)))

  return LIST_DELIMITER.join(file_list)


class NagFile(validation.Validated):
  """A validated YAML class to represent the user's nag preferences.

  Attributes:
    timestamp: The timestamp of the last nag.
    opt_in: True if the user wants to check for updates on dev_appserver
      start. False if not. May be None if we have not asked the user yet.
  """

  ATTRIBUTES = {
      "timestamp": validation.TYPE_FLOAT,
      "opt_in": validation.Optional(validation.TYPE_BOOL),
  }

  @staticmethod
  def Load(nag_file):
    """Load a single NagFile object where one and only one is expected.

    Args:
      nag_file: A file-like object or string containing the yaml data to
        parse.

    Returns:
      A NagFile instance.
    """
    return yaml_object.BuildSingleObject(NagFile, nag_file)


def GetVersionObject(isfile=os.path.isfile, open_fn=open):
  """Gets the version of the SDK by parsing the VERSION file.

  Args:
    isfile: Used for testing.
    open_fn: Used for testing.

  Returns:
    A Yaml object or None if the VERSION file does not exist.
  """
  version_filename = os.path.join(os.path.dirname(google.__file__),
                                  VERSION_FILE)
  if not isfile(version_filename):
    logging.error("Could not find version file at %s", version_filename)
    return None

  version_fh = open_fn(version_filename, "r")
  try:
    version = yaml.safe_load(version_fh)
  finally:
    version_fh.close()

  return version


def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable):
  """Calls a function multiple times, backing off more and more each time.

  Args:
    initial_delay: Initial delay after first try, in seconds.
    backoff_factor: Delay will be multiplied by this factor after each try.
    max_tries: Maximum number of tries.
    callable: The method to call, will pass no arguments.

  Returns:
    True if the function succeeded in one of its tries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  delay = initial_delay
  while not callable() and max_tries > 0:
    StatusUpdate("Will check again in %s seconds." % delay)
    time.sleep(delay)
    delay *= backoff_factor
    max_tries -= 1
  return max_tries > 0


class UpdateCheck(object):
  """Determines if the local SDK is the latest version.

  Nags the user when there are updates to the SDK. As the SDK becomes
  more out of date, the language in the nagging gets stronger. We
  store a little yaml file in the user's home directory so that we nag
  the user only once a week.

  The yaml file has the following field:
    'timestamp': Last time we nagged the user in seconds since the epoch.

  Attributes:
    server: An AbstractRpcServer instance used to check for the latest SDK.
    config: The app's AppInfoExternal. Needed to determine which api_version
      the app is using.
  """

  def __init__(self, server, config, isdir=os.path.isdir,
               isfile=os.path.isfile, open_fn=open):
    """Create a new UpdateCheck.

    Args:
      server: The AbstractRpcServer to use.
      config: The yaml object that specifies the configuration of this
        application.
      isdir: Replacement for os.path.isdir (for testing).
      isfile: Replacement for os.path.isfile (for testing).
      open_fn: Replacement for the open builtin (for testing).
""" self.server = server self.config = config self.isdir = isdir self.isfile = isfile self.open = open_fn @staticmethod def MakeNagFilename(): """Returns the filename for the nag file for this user.""" user_homedir = os.path.expanduser("~/") if not os.path.isdir(user_homedir): drive, unused_tail = os.path.splitdrive(os.__file__) if drive: os.environ["HOMEDRIVE"] = drive return os.path.expanduser("~/" + NAG_FILE) def _ParseVersionFile(self): """Parse the local VERSION file. Returns: A Yaml object or None if the file does not exist. """ return GetVersionObject(isfile=self.isfile, open_fn=self.open) def CheckSupportedVersion(self): """Determines if the app's api_version is supported by the SDK. Uses the api_version field from the AppInfoExternal to determine if the SDK supports that api_version. Raises: SystemExit if the api_version is not supported. """ version = self._ParseVersionFile() if version is None: logging.error("Could not determine if the SDK supports the api_version " "requested in app.yaml.") return if self.config.api_version not in version["api_versions"]: logging.critical("The api_version specified in app.yaml (%s) is not " "supported by this release of the SDK. The supported " "api_versions are %s.", self.config.api_version, version["api_versions"]) sys.exit(1) def CheckForUpdates(self): """Queries the server for updates and nags the user if appropriate. Queries the server for the latest SDK version at the same time reporting the local SDK version. The server will respond with a yaml document containing the fields: "release": The name of the release (e.g. 1.2). "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ). "api_versions": A list of api_version strings (e.g. ['1', 'beta']). We will nag the user with increasing severity if: - There is a new release. - There is a new release with a new api_version. - There is a new release that does not support the api_version named in self.config. """ version = self._ParseVersionFile() if version is None: logging.info("Skipping update check") return logging.info("Checking for updates to the SDK.") try: response = self.server.Send("/api/updatecheck", timeout=UPDATE_CHECK_TIMEOUT, release=version["release"], timestamp=version["timestamp"], api_versions=version["api_versions"]) except urllib2.URLError, e: logging.info("Update check failed: %s", e) return latest = yaml.safe_load(response) if latest["release"] == version["release"]: logging.info("The SDK is up to date.") return api_versions = latest["api_versions"] if self.config.api_version not in api_versions: self._Nag( "The api version you are using (%s) is obsolete! You should\n" "upgrade your SDK and test that your code works with the new\n" "api version." % self.config.api_version, latest, version, force=True) return if self.config.api_version != api_versions[len(api_versions) - 1]: self._Nag( "The api version you are using (%s) is deprecated. You should\n" "upgrade your SDK to try the new functionality." % self.config.api_version, latest, version) return self._Nag("There is a new release of the SDK available.", latest, version) def _ParseNagFile(self): """Parses the nag file. Returns: A NagFile if the file was present else None. """ nag_filename = UpdateCheck.MakeNagFilename() if self.isfile(nag_filename): fh = self.open(nag_filename, "r") try: nag = NagFile.Load(fh) finally: fh.close() return nag return None def _WriteNagFile(self, nag): """Writes the NagFile to the user's nag file. 
    If the destination path does not exist, this method will log an error
    and fail silently.

    Args:
      nag: The NagFile to write.
    """
    nagfilename = UpdateCheck.MakeNagFilename()
    try:
      fh = self.open(nagfilename, "w")
      try:
        fh.write(nag.ToYAML())
      finally:
        fh.close()
    except (OSError, IOError), e:
      logging.error("Could not write nag file to %s. Error: %s",
                    nagfilename, e)

  def _Nag(self, msg, latest, version, force=False):
    """Prints a nag message and updates the nag file's timestamp.

    Because we don't want to nag the user every time, we store a simple
    yaml document in the user's home directory. If the timestamp in this
    doc is over a week old, we'll nag the user. And when we nag the
    user, we update the timestamp in this doc.

    Args:
      msg: The formatted message to print to the user.
      latest: The yaml document received from the server.
      version: The local yaml version document.
      force: If True, always nag the user, ignoring the nag file.
    """
    nag = self._ParseNagFile()
    if nag and not force:
      last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
      if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
        logging.debug("Skipping nag message")
        return

    if nag is None:
      nag = NagFile()
    nag.timestamp = time.time()
    self._WriteNagFile(nag)

    print "****************************************************************"
    print msg
    print "-----------"
    print "Latest SDK:"
    print yaml.dump(latest)
    print "-----------"
    print "Your SDK:"
    print yaml.dump(version)
    print "-----------"
    print "Please visit http://code.google.com/appengine for the latest SDK"
    print "****************************************************************"

  def AllowedToCheckForUpdates(self, input_fn=raw_input):
    """Determines if the user wants to check for updates.

    On startup, the dev_appserver wants to check for updates to the SDK.
    Because this action reports usage to Google when the user is not
    otherwise communicating with Google (e.g. pushing a new app version),
    the user must opt in.

    If the user does not have a nag file, we will query the user and
    save the response in the nag file. Subsequent calls to this function
    will re-use that response.

    Args:
      input_fn: Used to collect user input. This is for testing only.

    Returns:
      True if the user wants to check for updates. False otherwise.
    """
    nag = self._ParseNagFile()
    if nag is None:
      nag = NagFile()
      nag.timestamp = time.time()

    if nag.opt_in is None:
      answer = input_fn("Allow dev_appserver to check for updates on startup? "
                        "(Y/n): ")
      answer = answer.strip().lower()
      if answer == "n" or answer == "no":
        print ("dev_appserver will not check for updates on startup. To "
               "change this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = False
      else:
        print ("dev_appserver will check for updates on startup. To change "
               "this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = True
      self._WriteNagFile(nag)
    return nag.opt_in


class IndexDefinitionUpload(object):
  """Provides facilities to upload index definitions to the hosting service."""

  def __init__(self, server, config, definitions):
    """Creates a new IndexDefinitionUpload.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      definitions: An IndexDefinitions object.
""" self.server = server self.config = config self.definitions = definitions def DoUpload(self): """Uploads the index definitions.""" StatusUpdate("Uploading index definitions.") self.server.Send("/api/datastore/index/add", app_id=self.config.application, version=self.config.version, payload=self.definitions.ToYAML()) class CronEntryUpload(object): """Provides facilities to upload cron entries to the hosting service.""" def __init__(self, server, config, cron): """Creates a new CronEntryUpload. Args: server: The RPC server to use. Should be an instance of a subclass of AbstractRpcServer config: The AppInfoExternal object derived from the app.yaml file. cron: The CronInfoExternal object loaded from the cron.yaml file. """ self.server = server self.config = config self.cron = cron def DoUpload(self): """Uploads the cron entries.""" StatusUpdate("Uploading cron entries.") self.server.Send("/api/datastore/cron/update", app_id=self.config.application, version=self.config.version, payload=self.cron.ToYAML()) class IndexOperation(object): """Provide facilities for writing Index operation commands.""" def __init__(self, server, config): """Creates a new IndexOperation. Args: server: The RPC server to use. Should be an instance of HttpRpcServer or TestRpcServer. config: appinfo.AppInfoExternal configuration object. """ self.server = server self.config = config def DoDiff(self, definitions): """Retrieve diff file from the server. Args: definitions: datastore_index.IndexDefinitions as loaded from users index.yaml file. Returns: A pair of datastore_index.IndexDefinitions objects. The first record is the set of indexes that are present in the index.yaml file but missing from the server. The second record is the set of indexes that are present on the server but missing from the index.yaml file (indicating that these indexes should probably be vacuumed). """ StatusUpdate("Fetching index definitions diff.") response = self.server.Send("/api/datastore/index/diff", app_id=self.config.application, payload=definitions.ToYAML()) return datastore_index.ParseMultipleIndexDefinitions(response) def DoDelete(self, definitions): """Delete indexes from the server. Args: definitions: Index definitions to delete from datastore. Returns: A single datstore_index.IndexDefinitions containing indexes that were not deleted, probably because they were already removed. This may be normal behavior as there is a potential race condition between fetching the index-diff and sending deletion confirmation through. """ StatusUpdate("Deleting selected index definitions.") response = self.server.Send("/api/datastore/index/delete", app_id=self.config.application, payload=definitions.ToYAML()) return datastore_index.ParseIndexDefinitions(response) class VacuumIndexesOperation(IndexOperation): """Provide facilities to request the deletion of datastore indexes.""" def __init__(self, server, config, force, confirmation_fn=raw_input): """Creates a new VacuumIndexesOperation. Args: server: The RPC server to use. Should be an instance of HttpRpcServer or TestRpcServer. config: appinfo.AppInfoExternal configuration object. force: True to force deletion of indexes, else False. confirmation_fn: Function used for getting input form user. """ super(VacuumIndexesOperation, self).__init__(server, config) self.force = force self.confirmation_fn = confirmation_fn def GetConfirmation(self, index): """Get confirmation from user to delete an index. This method will enter an input loop until the user provides a response it is expecting. 
Valid input is one of three responses: y: Confirm deletion of index. n: Do not delete index. a: Delete all indexes without asking for further confirmation. If the user enters nothing at all, the default action is to skip that index and do not delete. If the user selects 'a', as a side effect, the 'force' flag is set. Args: index: Index to confirm. Returns: True if user enters 'y' or 'a'. False if user enter 'n'. """ while True: print "This index is no longer defined in your index.yaml file." print print index.ToYAML() print confirmation = self.confirmation_fn( "Are you sure you want to delete this index? (N/y/a): ") confirmation = confirmation.strip().lower() if confirmation == "y": return True elif confirmation == "n" or not confirmation: return False elif confirmation == "a": self.force = True return True else: print "Did not understand your response." def DoVacuum(self, definitions): """Vacuum indexes in datastore. This method will query the server to determine which indexes are not being used according to the user's local index.yaml file. Once it has made this determination, it confirms with the user which unused indexes should be deleted. Once confirmation for each index is receives, it deletes those indexes. Because another user may in theory delete the same indexes at the same time as the user, there is a potential race condition. In this rare cases, some of the indexes previously confirmed for deletion will not be found. The user is notified which indexes these were. Args: definitions: datastore_index.IndexDefinitions as loaded from users index.yaml file. """ unused_new_indexes, notused_indexes = self.DoDiff(definitions) deletions = datastore_index.IndexDefinitions(indexes=[]) if notused_indexes.indexes is not None: for index in notused_indexes.indexes: if self.force or self.GetConfirmation(index): deletions.indexes.append(index) if deletions.indexes: not_deleted = self.DoDelete(deletions) if not_deleted.indexes: not_deleted_count = len(not_deleted.indexes) if not_deleted_count == 1: warning_message = ("An index was not deleted. Most likely this is " "because it no longer exists.\n\n") else: warning_message = ("%d indexes were not deleted. Most likely this " "is because they no longer exist.\n\n" % not_deleted_count) for index in not_deleted.indexes: warning_message += index.ToYAML() logging.warning(warning_message) class LogsRequester(object): """Provide facilities to export request logs.""" def __init__(self, server, config, output_file, num_days, append, severity, now): """Constructor. Args: server: The RPC server to use. Should be an instance of HttpRpcServer or TestRpcServer. config: appinfo.AppInfoExternal configuration object. output_file: Output file name. num_days: Number of days worth of logs to export; 0 for all available. append: True if appending to an existing file. severity: App log severity to request (0-4); None for no app logs. now: POSIX timestamp used for calculating valid dates for num_days. 
""" self.server = server self.config = config self.output_file = output_file self.append = append self.num_days = num_days self.severity = severity self.version_id = self.config.version + ".1" self.sentinel = None self.write_mode = "w" if self.append: self.sentinel = FindSentinel(self.output_file) self.write_mode = "a" self.valid_dates = None if self.num_days: patterns = [] now = PacificTime(now) for i in xrange(self.num_days): then = time.gmtime(now - 24*3600 * i) patterns.append(re.escape(time.strftime("%d/%m/%Y", then))) patterns.append(re.escape(time.strftime("%d/%b/%Y", then))) self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):") def DownloadLogs(self): """Download the requested logs. This will write the logs to the file designated by self.output_file, or to stdout if the filename is '-'. Multiple roundtrips to the server may be made. """ StatusUpdate("Downloading request logs for %s %s." % (self.config.application, self.version_id)) tf = tempfile.TemporaryFile() offset = None try: while True: try: offset = self.RequestLogLines(tf, offset) if not offset: break except KeyboardInterrupt: StatusUpdate("Keyboard interrupt; saving data downloaded so far.") break StatusUpdate("Copying request logs to %r." % self.output_file) if self.output_file == "-": of = sys.stdout else: try: of = open(self.output_file, self.write_mode) except IOError, err: StatusUpdate("Can't write %r: %s." % (self.output_file, err)) sys.exit(1) try: line_count = CopyReversedLines(tf, of) finally: of.flush() if of is not sys.stdout: of.close() finally: tf.close() StatusUpdate("Copied %d records." % line_count) def RequestLogLines(self, tf, offset): """Make a single roundtrip to the server. Args: tf: Writable binary stream to which the log lines returned by the server are written, stripped of headers, and excluding lines skipped due to self.sentinel or self.valid_dates filtering. offset: Offset string for a continued request; None for the first. Returns: The offset string to be used for the next request, if another request should be issued; or None, if not. """ logging.info("Request with offset %r.", offset) kwds = {"app_id": self.config.application, "version": self.version_id, "limit": 100, } if offset: kwds["offset"] = offset if self.severity is not None: kwds["severity"] = str(self.severity) response = self.server.Send("/api/request_logs", payload=None, **kwds) response = response.replace("\r", "\0") lines = response.splitlines() logging.info("Received %d bytes, %d records.", len(response), len(lines)) offset = None if lines and lines[0].startswith("#"): match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0]) del lines[0] if match: offset = match.group(1) if lines and lines[-1].startswith("#"): del lines[-1] valid_dates = self.valid_dates sentinel = self.sentinel len_sentinel = None if sentinel: len_sentinel = len(sentinel) for line in lines: if ((sentinel and line.startswith(sentinel) and line[len_sentinel : len_sentinel+1] in ("", "\0")) or (valid_dates and not valid_dates.match(line))): return None tf.write(line + "\n") if not lines: return None return offset def PacificTime(now): """Helper to return the number of seconds between UTC and Pacific time. This is needed to compute today's date in Pacific time (more specifically: Mountain View local time), which is how request logs are reported. (Google servers always report times in Mountain View local time, regardless of where they are physically located.) This takes (post-2006) US DST into account. 
Pacific time is either 8 hours or 7 hours west of UTC, depending on whether DST is in effect. Since 2007, US DST starts on the second Sunday in March and ends on the first Sunday in November. (Reference: http://aa.usno.navy.mil/faq/docs/daylight_time.php.) Note that the server doesn't report its local time (the HTTP Date header uses UTC), and the client's local time is irrelevant. Args: now: A posix timestamp giving current UTC time. Returns: A pseudo-posix timestamp giving current Pacific time. Passing this through time.gmtime() will produce a tuple in Pacific local time. """ now -= 8*3600 if IsPacificDST(now): now += 3600 return now def IsPacificDST(now): """Helper for PacificTime to decide whether now is Pacific DST (PDT). Args: now: A pseudo-posix timestamp giving current time in PST. Returns: True if now falls within the range of DST, False otherwise. """ DAY = 24*3600 SUNDAY = 6 pst = time.gmtime(now) year = pst[0] assert year >= 2007 begin = calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0)) while time.gmtime(begin).tm_wday != SUNDAY: begin += DAY end = calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0)) while time.gmtime(end).tm_wday != SUNDAY: end += DAY return begin <= now < end def CopyReversedLines(instream, outstream, blocksize=2**16): r"""Copy lines from input stream to output stream in reverse order. As a special feature, null bytes in the input are turned into newlines followed by tabs in the output, but these "sub-lines" separated by null bytes are not reversed. E.g., if the input is "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n". Args: instream: A seekable stream open for reading in binary mode. outstream: A stream open for writing; doesn't have to be seekable or binary. blocksize: Optional block size for buffering, for unit testing. Returns: The number of lines copied. """ line_count = 0 instream.seek(0, 2) last_block = instream.tell() // blocksize spillover = "" for iblock in xrange(last_block + 1, -1, -1): instream.seek(iblock * blocksize) data = instream.read(blocksize) lines = data.splitlines(True) lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True) if lines and not lines[-1].endswith("\n"): lines[-1] += "\n" lines.reverse() if lines and iblock > 0: spillover = lines.pop() if lines: line_count += len(lines) data = "".join(lines).replace("\0", "\n\t") outstream.write(data) return line_count def FindSentinel(filename, blocksize=2**16): """Return the sentinel line from the output file. Args: filename: The filename of the output file. (We'll read this file.) blocksize: Optional block size for buffering, for unit testing. Returns: The contents of the last line in the file that doesn't start with a tab, with its trailing newline stripped; or None if the file couldn't be opened or no such line could be found by inspecting the last 'blocksize' bytes of the file. """ if filename == "-": StatusUpdate("Can't combine --append with output to stdout.") sys.exit(2) try: fp = open(filename, "rb") except IOError, err: StatusUpdate("Append mode disabled: can't read %r: %s." % (filename, err)) return None try: fp.seek(0, 2) fp.seek(max(0, fp.tell() - blocksize)) lines = fp.readlines() del lines[:1] sentinel = None for line in lines: if not line.startswith("\t"): sentinel = line if not sentinel: StatusUpdate("Append mode disabled: can't find sentinel in %r." % filename) return None return sentinel.rstrip("\n") finally: fp.close() class AppVersionUpload(object): """Provides facilities to upload a new appversion to the hosting service. 
Attributes: server: The AbstractRpcServer to use for the upload. config: The AppInfoExternal object derived from the app.yaml file. app_id: The application string from 'config'. version: The version string from 'config'. files: A dictionary of files to upload to the server, mapping path to hash of the file contents. in_transaction: True iff a transaction with the server has started. An AppVersionUpload can do only one transaction at a time. deployed: True iff the Deploy method has been called. """ def __init__(self, server, config): """Creates a new AppVersionUpload. Args: server: The RPC server to use. Should be an instance of HttpRpcServer or TestRpcServer. config: An AppInfoExternal object that specifies the configuration for this application. """ self.server = server self.config = config self.app_id = self.config.application self.version = self.config.version self.files = {} self.in_transaction = False self.deployed = False def _Hash(self, content): """Compute the hash of the content. Args: content: The data to hash as a string. Returns: The string representation of the hash. """ h = sha.new(content).hexdigest() return "%s_%s_%s_%s_%s" % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40]) def AddFile(self, path, file_handle): """Adds the provided file to the list to be pushed to the server. Args: path: The path the file should be uploaded as. file_handle: A stream containing data to upload. """ assert not self.in_transaction, "Already in a transaction." assert file_handle is not None reason = appinfo.ValidFilename(path) if reason: logging.error(reason) return pos = file_handle.tell() content_hash = self._Hash(file_handle.read()) file_handle.seek(pos, 0) self.files[path] = content_hash def Begin(self): """Begins the transaction, returning a list of files that need uploading. All calls to AddFile must be made before calling Begin(). Returns: A list of pathnames for files that should be uploaded using UploadFile() before Commit() can be called. """ assert not self.in_transaction, "Already in a transaction." StatusUpdate("Initiating update.") self.server.Send("/api/appversion/create", app_id=self.app_id, version=self.version, payload=self.config.ToYAML()) self.in_transaction = True files_to_clone = [] blobs_to_clone = [] for path, content_hash in self.files.iteritems(): mime_type = GetMimeTypeIfStaticFile(self.config, path) if mime_type is not None: blobs_to_clone.append((path, content_hash, mime_type)) else: files_to_clone.append((path, content_hash)) files_to_upload = {} def CloneFiles(url, files, file_type): """Sends files to the given url. Args: url: the server URL to use. files: a list of files file_type: the type of the files """ if not files: return StatusUpdate("Cloning %d %s file%s." % (len(files), file_type, len(files) != 1 and "s" or "")) for i in xrange(0, len(files), MAX_FILES_TO_CLONE): if i > 0 and i % MAX_FILES_TO_CLONE == 0: StatusUpdate("Cloned %d files." % i) chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)] result = self.server.Send(url, app_id=self.app_id, version=self.version, payload=BuildClonePostBody(chunk)) if result: files_to_upload.update(dict( (f, self.files[f]) for f in result.split(LIST_DELIMITER))) CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static") CloneFiles("/api/appversion/clonefiles", files_to_clone, "application") logging.info("Files to upload: " + str(files_to_upload)) self.files = files_to_upload return sorted(files_to_upload.iterkeys()) def UploadFile(self, path, file_handle): """Uploads a file to the hosting service. 
Must only be called after Begin(). The path provided must be one of those that were returned by Begin(). Args: path: The path the file is being uploaded as. file_handle: A file-like object containing the data to upload. Raises: KeyError: The provided file is not amongst those to be uploaded. """ assert self.in_transaction, "Begin() must be called before UploadFile()." if path not in self.files: raise KeyError("File '%s' is not in the list of files to be uploaded." % path) del self.files[path] mime_type = GetMimeTypeIfStaticFile(self.config, path) if mime_type is not None: self.server.Send("/api/appversion/addblob", app_id=self.app_id, version=self.version, path=path, content_type=mime_type, payload=file_handle.read()) else: self.server.Send("/api/appversion/addfile", app_id=self.app_id, version=self.version, path=path, payload=file_handle.read()) def Commit(self): """Commits the transaction, making the new app version available. All the files returned by Begin() must have been uploaded with UploadFile() before Commit() can be called. This tries the new 'deploy' method; if that fails it uses the old 'commit'. Raises: Exception: Some required files were not uploaded. """ assert self.in_transaction, "Begin() must be called before Commit()." if self.files: raise Exception("Not all required files have been uploaded.") try: self.Deploy() if not RetryWithBackoff(1, 2, 8, self.IsReady): logging.warning("Version still not ready to serve, aborting.") raise Exception("Version not ready.") self.StartServing() except urllib2.HTTPError, e: if e.code != 404: raise StatusUpdate("Closing update.") self.server.Send("/api/appversion/commit", app_id=self.app_id, version=self.version) self.in_transaction = False def Deploy(self): """Deploys the new app version but does not make it default. All the files returned by Begin() must have been uploaded with UploadFile() before Deploy() can be called. Raises: Exception: Some required files were not uploaded. """ assert self.in_transaction, "Begin() must be called before Deploy()." if self.files: raise Exception("Not all required files have been uploaded.") StatusUpdate("Deploying new version.") self.server.Send("/api/appversion/deploy", app_id=self.app_id, version=self.version) self.deployed = True def IsReady(self): """Check if the new app version is ready to serve traffic. Raises: Exception: Deploy has not yet been called. Returns: True if the server returned the app is ready to serve. """ assert self.deployed, "Deploy() must be called before IsReady()." StatusUpdate("Checking if new version is ready to serve.") result = self.server.Send("/api/appversion/isready", app_id=self.app_id, version=self.version) return result == "1" def StartServing(self): """Start serving with the newly created version. Raises: Exception: Deploy has not yet been called. """ assert self.deployed, "Deploy() must be called before StartServing()." StatusUpdate("Closing update: new version is ready to start serving.") self.server.Send("/api/appversion/startserving", app_id=self.app_id, version=self.version) self.in_transaction = False def Rollback(self): """Rolls back the transaction if one is in progress.""" if not self.in_transaction: return StatusUpdate("Rolling back the update.") self.server.Send("/api/appversion/rollback", app_id=self.app_id, version=self.version) self.in_transaction = False self.files = {} def DoUpload(self, paths, max_size, openfunc): """Uploads a new appversion with the given config and files to the server. 
Args: paths: An iterator that yields the relative paths of the files to upload. max_size: The maximum size file to upload. openfunc: A function that takes a path and returns a file-like object. """ logging.info("Reading app configuration.") path = "" try: StatusUpdate("Scanning files on local disk.") num_files = 0 for path in paths: file_handle = openfunc(path) try: if self.config.skip_files.match(path): logging.info("Ignoring file '%s': File matches ignore regex.", path) else: file_length = GetFileLength(file_handle) if file_length > max_size: logging.error("Ignoring file '%s': Too long " "(max %d bytes, file is %d bytes)", path, max_size, file_length) else: logging.info("Processing file '%s'", path) self.AddFile(path, file_handle) finally: file_handle.close() num_files += 1 if num_files % 500 == 0: StatusUpdate("Scanned %d files." % num_files) except KeyboardInterrupt: logging.info("User interrupted. Aborting.") raise except EnvironmentError, e: logging.error("An error occurred processing file '%s': %s. Aborting.", path, e) raise try: missing_files = self.Begin() if missing_files: StatusUpdate("Uploading %d files." % len(missing_files)) num_files = 0 for missing_file in missing_files: logging.info("Uploading file '%s'" % missing_file) file_handle = openfunc(missing_file) try: self.UploadFile(missing_file, file_handle) finally: file_handle.close() num_files += 1 if num_files % 500 == 0: StatusUpdate("Uploaded %d files." % num_files) self.Commit() except KeyboardInterrupt: logging.info("User interrupted. Aborting.") self.Rollback() raise except: logging.exception("An unexpected error occurred. Aborting.") self.Rollback() raise logging.info("Done!") def FileIterator(base, separator=os.path.sep): """Walks a directory tree, returning all the files. Follows symlinks. Args: base: The base path to search for files under. separator: Path separator used by the running system's platform. Yields: Paths of files found, relative to base. """ dirs = [""] while dirs: current_dir = dirs.pop() for entry in os.listdir(os.path.join(base, current_dir)): name = os.path.join(current_dir, entry) fullname = os.path.join(base, name) if os.path.isfile(fullname): if separator == "\\": name = name.replace("\\", "/") yield name elif os.path.isdir(fullname): dirs.append(name) def GetFileLength(fh): """Returns the length of the file represented by fh. This function is capable of finding the length of any seekable stream, unlike os.fstat, which only works on file streams. Args: fh: The stream to get the length of. Returns: The length of the stream. """ pos = fh.tell() fh.seek(0, 2) length = fh.tell() fh.seek(pos, 0) return length def GetUserAgent(get_version=GetVersionObject, get_platform=appengine_rpc.GetPlatformToken): """Determines the value of the 'User-agent' header to use for HTTP requests. If the 'APPCFG_SDK_NAME' environment variable is present, that will be used as the first product token in the user-agent. Args: get_version: Used for testing. get_platform: Used for testing. Returns: String containing the 'user-agent' header value, which includes the SDK version, the platform information, and the version of Python; e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2". 
""" product_tokens = [] sdk_name = os.environ.get("APPCFG_SDK_NAME") if sdk_name: product_tokens.append(sdk_name) else: version = get_version() if version is None: release = "unknown" else: release = version["release"] product_tokens.append("appcfg_py/%s" % release) product_tokens.append(get_platform()) python_version = ".".join(str(i) for i in sys.version_info) product_tokens.append("Python/%s" % python_version) return " ".join(product_tokens) def GetSourceName(get_version=GetVersionObject): """Gets the name of this source version.""" version = get_version() if version is None: release = "unknown" else: release = version["release"] return "Google-appcfg-%s" % (release,) class AppCfgApp(object): """Singleton class to wrap AppCfg tool functionality. This class is responsible for parsing the command line and executing the desired action on behalf of the user. Processing files and communicating with the server is handled by other classes. Attributes: actions: A dictionary mapping action names to Action objects. action: The Action specified on the command line. parser: An instance of optparse.OptionParser. options: The command line options parsed by 'parser'. argv: The original command line as a list. args: The positional command line args left over after parsing the options. raw_input_fn: Function used for getting raw user input, like email. password_input_fn: Function used for getting user password. error_fh: Unexpected HTTPErrors are printed to this file handle. Attributes for testing: parser_class: The class to use for parsing the command line. Because OptionsParser will exit the program when there is a parse failure, it is nice to subclass OptionsParser and catch the error before exiting. """ def __init__(self, argv, parser_class=optparse.OptionParser, rpc_server_class=appengine_rpc.HttpRpcServer, raw_input_fn=raw_input, password_input_fn=getpass.getpass, error_fh=sys.stderr, update_check_class=UpdateCheck): """Initializer. Parses the cmdline and selects the Action to use. Initializes all of the attributes described in the class docstring. Prints help or error messages if there is an error parsing the cmdline. Args: argv: The list of arguments passed to this program. parser_class: Options parser to use for this application. rpc_server_class: RPC server class to use for this application. raw_input_fn: Function used for getting user email. password_input_fn: Function used for getting user password. error_fh: Unexpected HTTPErrors are printed to this file handle. update_check_class: UpdateCheck class (can be replaced for testing). 
""" self.parser_class = parser_class self.argv = argv self.rpc_server_class = rpc_server_class self.raw_input_fn = raw_input_fn self.password_input_fn = password_input_fn self.error_fh = error_fh self.update_check_class = update_check_class self.parser = self._GetOptionParser() for action in self.actions.itervalues(): action.options(self, self.parser) self.options, self.args = self.parser.parse_args(argv[1:]) if len(self.args) < 1: self._PrintHelpAndExit() if self.args[0] not in self.actions: self.parser.error("Unknown action '%s'\n%s" % (self.args[0], self.parser.get_description())) action_name = self.args.pop(0) self.action = self.actions[action_name] self.parser, self.options = self._MakeSpecificParser(self.action) if self.options.help: self._PrintHelpAndExit() if self.options.verbose == 2: logging.getLogger().setLevel(logging.INFO) elif self.options.verbose == 3: logging.getLogger().setLevel(logging.DEBUG) global verbosity verbosity = self.options.verbose def Run(self): """Executes the requested action. Catches any HTTPErrors raised by the action and prints them to stderr. """ try: self.action(self) except urllib2.HTTPError, e: body = e.read() print >>self.error_fh, ("Error %d: --- begin server output ---\n" "%s\n--- end server output ---" % (e.code, body.rstrip("\n"))) return 1 except yaml_errors.EventListenerError, e: print >>self.error_fh, ("Error parsing yaml file:\n%s" % e) return 1 return 0 def _GetActionDescriptions(self): """Returns a formatted string containing the short_descs for all actions.""" action_names = self.actions.keys() action_names.sort() desc = "" for action_name in action_names: desc += " %s: %s\n" % (action_name, self.actions[action_name].short_desc) return desc def _GetOptionParser(self): """Creates an OptionParser with generic usage and description strings. Returns: An OptionParser instance. """ class Formatter(optparse.IndentedHelpFormatter): """Custom help formatter that does not reformat the description.""" def format_description(self, description): """Very simple formatter.""" return description + "\n" desc = self._GetActionDescriptions() desc = ("Action must be one of:\n%s" "Use 'help <action>' for a detailed description.") % desc parser = self.parser_class(usage="%prog [options] <action>", description=desc, formatter=Formatter(), conflict_handler="resolve") parser.add_option("-h", "--help", action="store_true", dest="help", help="Show the help message and exit.") parser.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", help="Print errors only.") parser.add_option("-v", "--verbose", action="store_const", const=2, dest="verbose", default=1, help="Print info level logs.") parser.add_option("--noisy", action="store_const", const=3, dest="verbose", help="Print all logs.") parser.add_option("-s", "--server", action="store", dest="server", default="appengine.google.com", metavar="SERVER", help="The server to connect to.") parser.add_option("--secure", action="store_true", dest="secure", default=False, help="Use SSL when communicating with the server.") parser.add_option("-e", "--email", action="store", dest="email", metavar="EMAIL", default=None, help="The username to use. 
Will prompt if omitted.") parser.add_option("-H", "--host", action="store", dest="host", metavar="HOST", default=None, help="Overrides the Host header sent with all RPCs.") parser.add_option("--no_cookies", action="store_false", dest="save_cookies", default=True, help="Do not save authentication cookies to local disk.") parser.add_option("--passin", action="store_true", dest="passin", default=False, help="Read the login password from stdin.") return parser def _MakeSpecificParser(self, action): """Creates a new parser with documentation specific to 'action'. Args: action: An Action instance to be used when initializing the new parser. Returns: A tuple containing: parser: An instance of OptionsParser customized to 'action'. options: The command line options after re-parsing. """ parser = self._GetOptionParser() parser.set_usage(action.usage) parser.set_description("%s\n%s" % (action.short_desc, action.long_desc)) action.options(self, parser) options, unused_args = parser.parse_args(self.argv[1:]) return parser, options def _PrintHelpAndExit(self, exit_code=2): """Prints the parser's help message and exits the program. Args: exit_code: The integer code to pass to sys.exit(). """ self.parser.print_help() sys.exit(exit_code) def _GetRpcServer(self): """Returns an instance of an AbstractRpcServer. Returns: A new AbstractRpcServer, on which RPC calls can be made. """ def GetUserCredentials(): """Prompts the user for a username and password.""" email = self.options.email if email is None: email = self.raw_input_fn("Email: ") password_prompt = "Password for %s: " % email if self.options.passin: password = self.raw_input_fn(password_prompt) else: password = self.password_input_fn(password_prompt) return (email, password) if self.options.host and self.options.host == "localhost": email = self.options.email if email is None: email = "[email protected]" logging.info("Using debug user %s. Override with --email" % email) server = self.rpc_server_class( self.options.server, lambda: (email, "password"), GetUserAgent(), GetSourceName(), host_override=self.options.host, save_cookies=self.options.save_cookies) server.authenticated = True return server if self.options.passin: auth_tries = 1 else: auth_tries = 3 return self.rpc_server_class(self.options.server, GetUserCredentials, GetUserAgent(), GetSourceName(), host_override=self.options.host, save_cookies=self.options.save_cookies, auth_tries=auth_tries, account_type="HOSTED_OR_GOOGLE", secure=self.options.secure) def _FindYaml(self, basepath, file_name): """Find yaml files in application directory. Args: basepath: Base application directory. file_name: Filename without extension to search for. Returns: Path to located yaml file if one exists, else None. """ if not os.path.isdir(basepath): self.parser.error("Not a directory: %s" % basepath) for yaml_file in (file_name + ".yaml", file_name + ".yml"): yaml_path = os.path.join(basepath, yaml_file) if os.path.isfile(yaml_path): return yaml_path return None def _ParseAppYaml(self, basepath): """Parses the app.yaml file. Args: basepath: the directory of the application. Returns: An AppInfoExternal object. """ appyaml_filename = self._FindYaml(basepath, "app") if appyaml_filename is None: self.parser.error("Directory does not contain an app.yaml " "configuration file.") fh = open(appyaml_filename, "r") try: appyaml = appinfo.LoadSingleAppInfo(fh) finally: fh.close() return appyaml def _ParseIndexYaml(self, basepath): """Parses the index.yaml file. Args: basepath: the directory of the application. 
Returns: A single parsed yaml file or None if the file does not exist. """ file_name = self._FindYaml(basepath, "index") if file_name is not None: fh = open(file_name, "r") try: index_defs = datastore_index.ParseIndexDefinitions(fh) finally: fh.close() return index_defs return None def _ParseCronYaml(self, basepath): """Parses the cron.yaml file. Args: basepath: the directory of the application. Returns: A CronInfoExternal object. """ file_name = self._FindYaml(basepath, "cron") if file_name is not None: fh = open(file_name, "r") try: cron_info = croninfo.LoadSingleCron(fh) finally: fh.close() return cron_info return None def Help(self): """Prints help for a specific action. Expects self.args[0] to contain the name of the action in question. Exits the program after printing the help message. """ if len(self.args) != 1 or self.args[0] not in self.actions: self.parser.error("Expected a single action argument. Must be one of:\n" + self._GetActionDescriptions()) action = self.actions[self.args[0]] self.parser, unused_options = self._MakeSpecificParser(action) self._PrintHelpAndExit(exit_code=0) def Update(self): """Updates and deploys a new appversion.""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) rpc_server = self._GetRpcServer() updatecheck = self.update_check_class(rpc_server, appyaml) updatecheck.CheckForUpdates() appversion = AppVersionUpload(rpc_server, appyaml) appversion.DoUpload(FileIterator(basepath), self.options.max_size, lambda path: open(os.path.join(basepath, path), "rb")) index_defs = self._ParseIndexYaml(basepath) if index_defs: index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) try: index_upload.DoUpload() except urllib2.HTTPError, e: StatusUpdate("Error %d: --- begin server output ---\n" "%s\n--- end server output ---" % (e.code, e.read().rstrip("\n"))) print >> self.error_fh, ( "Your app was updated, but there was an error updating your " "indexes. Please retry later with appcfg.py update_indexes.") cron_entries = self._ParseCronYaml(basepath) if cron_entries: cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) cron_upload.DoUpload() def _UpdateOptions(self, parser): """Adds update-specific options to 'parser'. Args: parser: An instance of OptionsParser. """ parser.add_option("-S", "--max_size", type="int", dest="max_size", default=10485760, metavar="SIZE", help="Maximum size of a file to upload.") def VacuumIndexes(self): """Deletes unused indexes.""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") basepath = self.args[0] config = self._ParseAppYaml(basepath) index_defs = self._ParseIndexYaml(basepath) if index_defs is None: index_defs = datastore_index.IndexDefinitions() rpc_server = self._GetRpcServer() vacuum = VacuumIndexesOperation(rpc_server, config, self.options.force_delete) vacuum.DoVacuum(index_defs) def _VacuumIndexesOptions(self, parser): """Adds vacuum_indexes-specific options to 'parser'. Args: parser: An instance of OptionsParser. 
""" parser.add_option("-f", "--force", action="store_true", dest="force_delete", default=False, help="Force deletion without being prompted.") def UpdateCron(self): """Updates any new or changed cron definitions.""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) rpc_server = self._GetRpcServer() cron_entries = self._ParseCronYaml(basepath) if cron_entries: cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries) cron_upload.DoUpload() def UpdateIndexes(self): """Updates indexes.""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) rpc_server = self._GetRpcServer() index_defs = self._ParseIndexYaml(basepath) if index_defs: index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs) index_upload.DoUpload() def Rollback(self): """Does a rollback of any existing transaction for this app version.""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) appversion = AppVersionUpload(self._GetRpcServer(), appyaml) appversion.in_transaction = True appversion.Rollback() def RequestLogs(self): """Write request logs to a file.""" if len(self.args) != 2: self.parser.error( "Expected a <directory> argument and an <output_file> argument.") if (self.options.severity is not None and not 0 <= self.options.severity <= MAX_LOG_LEVEL): self.parser.error( "Severity range is 0 (DEBUG) through %s (CRITICAL)." % MAX_LOG_LEVEL) if self.options.num_days is None: self.options.num_days = int(not self.options.append) basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) rpc_server = self._GetRpcServer() logs_requester = LogsRequester(rpc_server, appyaml, self.args[1], self.options.num_days, self.options.append, self.options.severity, time.time()) logs_requester.DownloadLogs() def _RequestLogsOptions(self, parser): """Adds request_logs-specific options to 'parser'. Args: parser: An instance of OptionsParser. """ parser.add_option("-n", "--num_days", type="int", dest="num_days", action="store", default=None, help="Number of days worth of log data to get. " "The cut-off point is midnight UTC. " "Use 0 to get all available logs. " "Default is 1, unless --append is also given; " "then the default is 0.") parser.add_option("-a", "--append", dest="append", action="store_true", default=False, help="Append to existing file.") parser.add_option("--severity", type="int", dest="severity", action="store", default=None, help="Severity of app-level log messages to get. " "The range is 0 (DEBUG) through 4 (CRITICAL). " "If omitted, only request logs are returned.") def CronInfo(self, now=None, output=sys.stdout): """Displays information about cron definitions. Args: now: used for testing. output: Used for testing. 
""" if len(self.args) != 1: self.parser.error("Expected a single <directory> argument.") if now is None: now = datetime.datetime.now() basepath = self.args[0] cron_entries = self._ParseCronYaml(basepath) if cron_entries and cron_entries.cron: for entry in cron_entries.cron: description = entry.description if not description: description = "<no description>" print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description, entry.schedule, entry.url) schedule = groctimespecification.GrocTimeSpecification(entry.schedule) matches = schedule.GetMatches(now, self.options.num_runs) for match in matches: print >>output, "%s, %s from now" % ( match.strftime("%Y-%m-%d %H:%M:%S"), match - now) def _CronInfoOptions(self, parser): """Adds cron_info-specific options to 'parser'. Args: parser: An instance of OptionsParser. """ parser.add_option("-n", "--num_runs", type="int", dest="num_runs", action="store", default=5, help="Number of runs of each cron job to display" "Default is 5") def _CheckRequiredUploadOptions(self): """Checks that upload options are present.""" for option in ["filename", "kind", "config_file"]: if getattr(self.options, option) is None: self.parser.error("Option '%s' is required." % option) if not self.options.url: self.parser.error("You must have google.appengine.ext.remote_api.handler " "assigned to an endpoint in app.yaml, or provide " "the url of the handler via the 'url' option.") def InferUploadUrl(self, appyaml): """Uses app.yaml to determine the remote_api endpoint. Args: appyaml: A parsed app.yaml file. Returns: The url of the remote_api endpoint as a string, or None """ handlers = appyaml.handlers handler_suffix = "remote_api/handler.py" app_id = appyaml.application for handler in handlers: if hasattr(handler, "script") and handler.script: if handler.script.endswith(handler_suffix): server = self.options.server if server == "appengine.google.com": return "http://%s.appspot.com%s" % (app_id, handler.url) else: return "http://%s%s" % (server, handler.url) return None def RunBulkloader(self, **kwargs): """Invokes the bulkloader with the given keyword arguments. Args: kwargs: Keyword arguments to pass to bulkloader.Run(). """ try: import sqlite3 except ImportError: logging.error("upload_data action requires SQLite3 and the python " "sqlite3 module (included in python since 2.5).") sys.exit(1) sys.exit(bulkloader.Run(kwargs)) def PerformUpload(self, run_fn=None): """Performs a datastore upload via the bulkloader. Args: run_fn: Function to invoke the bulkloader, used for testing. 
""" if run_fn is None: run_fn = self.RunBulkloader if len(self.args) != 1: self.parser.error("Expected <directory> argument.") basepath = self.args[0] appyaml = self._ParseAppYaml(basepath) self.options.app_id = appyaml.application if not self.options.url: url = self.InferUploadUrl(appyaml) if url is not None: self.options.url = url self._CheckRequiredUploadOptions() if self.options.batch_size < 1: self.parser.error("batch_size must be 1 or larger.") if verbosity == 1: logging.getLogger().setLevel(logging.INFO) self.options.debug = False else: logging.getLogger().setLevel(logging.DEBUG) self.options.debug = True StatusUpdate("Uploading data records.") run_fn(app_id=self.options.app_id, url=self.options.url, filename=self.options.filename, batch_size=self.options.batch_size, kind=self.options.kind, num_threads=self.options.num_threads, bandwidth_limit=self.options.bandwidth_limit, rps_limit=self.options.rps_limit, http_limit=self.options.http_limit, db_filename=self.options.db_filename, config_file=self.options.config_file, auth_domain=self.options.auth_domain, has_header=self.options.has_header, loader_opts=self.options.loader_opts, log_file=self.options.log_file, passin=self.options.passin, email=self.options.email, debug=self.options.debug, exporter_opts=None, download=False, result_db_filename=None, ) def _PerformUploadOptions(self, parser): """Adds 'upload_data' specific options to the 'parser' passed in. Args: parser: An instance of OptionsParser. """ parser.add_option("--filename", type="string", dest="filename", action="store", help="The name of the file containing the input data." " (Required)") parser.add_option("--config_file", type="string", dest="config_file", action="store", help="Name of the configuration file. (Required)") parser.add_option("--kind", type="string", dest="kind", action="store", help="The kind of the entities to store. (Required)") parser.add_option("--url", type="string", dest="url", action="store", help="The location of the remote_api endpoint.") parser.add_option("--num_threads", type="int", dest="num_threads", action="store", default=10, help="Number of threads to upload records with.") parser.add_option("--batch_size", type="int", dest="batch_size", action="store", default=10, help="Number of records to post in each request.") parser.add_option("--bandwidth_limit", type="int", dest="bandwidth_limit", action="store", default=250000, help="The maximum bytes/second bandwidth for transfers.") parser.add_option("--rps_limit", type="int", dest="rps_limit", action="store", default=20, help="The maximum records/second for transfers.") parser.add_option("--http_limit", type="int", dest="http_limit", action="store", default=8, help="The maximum requests/second for transfers.") parser.add_option("--db_filename", type="string", dest="db_filename", action="store", help="Name of the progress database file.") parser.add_option("--auth_domain", type="string", dest="auth_domain", action="store", default="gmail.com", help="The name of the authorization domain to use.") parser.add_option("--has_header", dest="has_header", action="store_true", default=False, help="Whether the first line of the input file should be" " skipped") parser.add_option("--loader_opts", type="string", dest="loader_opts", help="A string to pass to the Loader.Initialize method.") parser.add_option("--log_file", type="string", dest="log_file", help="File to write bulkloader logs. 
If not supplied " "then a new log file will be created, named: " "bulkloader-log-TIMESTAMP.") class Action(object): """Contains information about a command line action. Attributes: function: The name of a function defined on AppCfg or its subclasses that will perform the appropriate action. usage: A command line usage string. short_desc: A one-line description of the action. long_desc: A detailed description of the action. Whitespace and formatting will be preserved. options: A function that will add extra options to a given OptionParser object. """ def __init__(self, function, usage, short_desc, long_desc="", options=lambda obj, parser: None): """Initializer for the class attributes.""" self.function = function self.usage = usage self.short_desc = short_desc self.long_desc = long_desc self.options = options def __call__(self, appcfg): """Invoke this Action on the specified AppCfg. This calls the function of the appropriate name on AppCfg, and respects polymorphic overrides.""" method = getattr(appcfg, self.function) return method() actions = { "help": Action( function="Help", usage="%prog help <action>", short_desc="Print help for a specific action."), "update": Action( function="Update", usage="%prog [options] update <directory>", options=_UpdateOptions, short_desc="Create or update an app version.", long_desc=""" Specify a directory that contains all of the files required by the app, and appcfg.py will create/update the app version referenced in the app.yaml file at the top level of that directory. appcfg.py will follow symlinks and recursively upload all files to the server. Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""), "update_cron": Action( function="UpdateCron", usage="%prog [options] update_cron <directory>", short_desc="Update application cron definitions.", long_desc=""" The 'update_cron' command will update any new, removed or changed cron definitions from the cron.yaml file."""), "update_indexes": Action( function="UpdateIndexes", usage="%prog [options] update_indexes <directory>", short_desc="Update application indexes.", long_desc=""" The 'update_indexes' command will add additional indexes which are not currently in production as well as restart any indexes that were not completed."""), "vacuum_indexes": Action( function="VacuumIndexes", usage="%prog [options] vacuum_indexes <directory>", options=_VacuumIndexesOptions, short_desc="Delete unused indexes from application.", long_desc=""" The 'vacuum_indexes' command will help clean up indexes which are no longer in use. It does this by comparing the local index configuration with indexes that are actually defined on the server. If any indexes on the server do not exist in the index configuration file, the user is given the option to delete them."""), "rollback": Action( function="Rollback", usage="%prog [options] rollback <directory>", short_desc="Rollback an in-progress update.", long_desc=""" The 'update' command requires a server-side transaction. Use 'rollback' if you get an error message about another transaction being in progress and you are sure that there is no such transaction."""), "request_logs": Action( function="RequestLogs", usage="%prog [options] request_logs <directory> <output_file>", options=_RequestLogsOptions, short_desc="Write request logs in Apache common log format.", long_desc=""" The 'request_logs' command exports the request logs from your application to a file. It will write Apache common log format records ordered chronologically. 
If the output file is '-', the logs are written to stdout."""), "cron_info": Action( function="CronInfo", usage="%prog [options] cron_info <directory>", options=_CronInfoOptions, short_desc="Display information about cron jobs.", long_desc=""" The 'cron_info' command will display the next 'number' runs (default 5) for each cron job defined in the cron.yaml file."""), "upload_data": Action( function="PerformUpload", usage="%prog [options] upload_data <directory>", options=_PerformUploadOptions, short_desc="Upload CSV records to the datastore.", long_desc=""" The 'upload_data' command translates CSV records into datastore entities and uploads them into your application's datastore."""), } def main(argv): logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:" "%(lineno)s %(message)s ")) try: result = AppCfgApp(argv).Run() if result: sys.exit(result) except KeyboardInterrupt: StatusUpdate("Interrupted.") sys.exit(1) if __name__ == "__main__": main(sys.argv)
apache-2.0
-337,467,472,029,764,500
33.929669
80
0.631957
false
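# A minimal standalone sketch (ours, not part of the repository above) of the
# DST-window computation used by IsPacificDST in the appcfg.py fragment: since
# 2007, US DST runs from the second Sunday in March to the first Sunday in
# November, with transitions at 02:00 local time. The function name is our own;
# single-argument print() keeps it runnable under both Python 2 and 3.
import calendar
import time

DAY = 24 * 3600
SUNDAY = 6  # time.struct_time weekday numbering: Monday == 0, Sunday == 6

def pacific_dst_window(year):
    # Start from the earliest possible transition dates (March 8, November 1)
    # and walk forward a day at a time until a Sunday is hit, exactly as
    # IsPacificDST does.
    begin = calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0))
    while time.gmtime(begin).tm_wday != SUNDAY:
        begin += DAY
    end = calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0))
    while time.gmtime(end).tm_wday != SUNDAY:
        end += DAY
    return begin, end

begin, end = pacific_dst_window(2016)
print(time.strftime("%Y-%m-%d", time.gmtime(begin)))  # 2016-03-13 (second Sunday in March)
print(time.strftime("%Y-%m-%d", time.gmtime(end)))    # 2016-11-06 (first Sunday in November)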
olea/PyConES-2016
pycones/schedule/migrations/0007_auto_20150930_1149.py
2
4593
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import markupfield.fields class Migration(migrations.Migration): dependencies = [ ('schedule', '0006_slotkind_plenary'), ] operations = [ migrations.AlterField( model_name='presentation', name='abstract', field=markupfield.fields.MarkupField(rendered_field=True, default='', blank=True), ), migrations.AlterField( model_name='presentation', name='abstract_ca', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='abstract_en', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='abstract_es', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='abstract_eu', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='abstract_gl', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='abstract_markup_type', field=models.CharField(blank=True, default=None, choices=[('', '--'), ('markdown', 'markdown'), ('ReST', 'ReST')], max_length=30), ), migrations.AlterField( model_name='presentation', name='description', field=markupfield.fields.MarkupField(rendered_field=True, default='', blank=True), ), migrations.AlterField( model_name='presentation', name='description_ca', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='description_en', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='description_es', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='description_eu', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='description_gl', field=markupfield.fields.MarkupField(null=True, default='', blank=True, rendered_field=True), ), migrations.AlterField( model_name='presentation', name='description_markup_type', field=models.CharField(blank=True, default=None, choices=[('', '--'), ('markdown', 'markdown'), ('ReST', 'ReST')], max_length=30), ), migrations.AlterField( model_name='presentation', name='title', field=models.CharField(default='', blank=True, max_length=100), ), migrations.AlterField( model_name='presentation', name='title_ca', field=models.CharField(null=True, default='', blank=True, max_length=100), ), migrations.AlterField( model_name='presentation', name='title_en', field=models.CharField(null=True, default='', blank=True, max_length=100), ), migrations.AlterField( model_name='presentation', name='title_es', field=models.CharField(null=True, default='', blank=True, max_length=100), ), migrations.AlterField( model_name='presentation', name='title_eu', field=models.CharField(null=True, default='', blank=True, max_length=100), ), migrations.AlterField( model_name='presentation', name='title_gl', field=models.CharField(null=True, default='', blank=True, max_length=100), ), ]
mit
-2,997,923,581,167,101,400
38.93913
142
0.578489
false
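# A hypothetical model sketch consistent with the migration above, meant to
# live in the app's models.py: each AlterField targets a field of this shape on
# Presentation. Field names and keyword arguments are taken verbatim from the
# migration; the language-suffixed variants (_ca, _en, _es, _eu, _gl) are
# typically generated by a translation package such as django-modeltranslation,
# so the real project model likely declares only the base fields and may use
# additional MarkupField options.
from django.db import models
import markupfield.fields

class Presentation(models.Model):
    # Base fields; translated columns like title_es are added by tooling.
    title = models.CharField(default='', blank=True, max_length=100)
    abstract = markupfield.fields.MarkupField(rendered_field=True, default='',
                                              blank=True)
    description = markupfield.fields.MarkupField(rendered_field=True, default='',
                                                 blank=True)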
jyotikamboj/container
django/contrib/gis/tests/geoapp/test_sitemaps.py
8
3286
from __future__ import unicode_literals from io import BytesIO from xml.dom import minidom import zipfile from django.conf import settings from django.contrib.gis.geos import HAS_GEOS from django.contrib.sites.models import Site from django.test import ( TestCase, ignore_warnings, modify_settings, override_settings, skipUnlessDBFeature ) from django.utils.deprecation import RemovedInDjango20Warning if HAS_GEOS: from .models import City, Country @modify_settings(INSTALLED_APPS={'append': ['django.contrib.sites', 'django.contrib.sitemaps']}) @override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoapp.urls') @skipUnlessDBFeature("gis_enabled") class GeoSitemapTest(TestCase): def setUp(self): super(GeoSitemapTest, self).setUp() Site(id=settings.SITE_ID, domain="example.com", name="example.com").save() def assertChildNodes(self, elem, expected): "Taken from syndication/tests.py." actual = set(n.nodeName for n in elem.childNodes) expected = set(expected) self.assertEqual(actual, expected) @ignore_warnings(category=RemovedInDjango20Warning) def test_geositemap_kml(self): "Tests KML/KMZ geographic sitemaps." for kml_type in ('kml', 'kmz'): # The URL for the sitemaps in urls.py have been updated # with a name but since reversing by Python path is tried first # before reversing by name and works since we're giving # name='django.contrib.gis.sitemaps.views.(kml|kmz)', we need # to silence the erroneous warning until reversing by dotted # path is removed. The test will work without modification when # it's removed. doc = minidom.parseString(self.client.get('/sitemaps/%s.xml' % kml_type).content) # Ensuring the right sitemaps namespace is present. urlset = doc.firstChild self.assertEqual(urlset.getAttribute('xmlns'), 'http://www.sitemaps.org/schemas/sitemap/0.9') urls = urlset.getElementsByTagName('url') self.assertEqual(2, len(urls)) # Should only be 2 sitemaps. for url in urls: self.assertChildNodes(url, ['loc']) # Getting the relative URL since we don't have a real site. kml_url = url.getElementsByTagName('loc')[0].childNodes[0].data.split('http://example.com')[1] if kml_type == 'kml': kml_doc = minidom.parseString(self.client.get(kml_url).content) elif kml_type == 'kmz': # Have to decompress KMZ before parsing. buf = BytesIO(self.client.get(kml_url).content) zf = zipfile.ZipFile(buf) self.assertEqual(1, len(zf.filelist)) self.assertEqual('doc.kml', zf.filelist[0].filename) kml_doc = minidom.parseString(zf.read('doc.kml')) # Ensuring the correct number of placemarks are in the KML doc. if 'city' in kml_url: model = City elif 'country' in kml_url: model = Country self.assertEqual(model.objects.count(), len(kml_doc.getElementsByTagName('Placemark')))
mit
-1,752,181,218,168,328,000
43.405405
110
0.630858
false
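# A standalone sketch (ours) of the KMZ-handling pattern exercised by
# test_geositemap_kml above: a KMZ payload is a zip archive whose single member,
# doc.kml, holds the KML document. Only the standard library is used; the
# helper name is our own.
import io
import zipfile
from xml.dom import minidom

def count_placemarks_in_kmz(raw_bytes):
    # Decompress the KMZ in memory, then parse the embedded doc.kml.
    zf = zipfile.ZipFile(io.BytesIO(raw_bytes))
    kml_doc = minidom.parseString(zf.read('doc.kml'))
    return len(kml_doc.getElementsByTagName('Placemark'))

# Build a tiny KMZ in memory to exercise the helper.
buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf_out:
    zf_out.writestr('doc.kml', '<kml><Placemark/><Placemark/></kml>')
print(count_placemarks_in_kmz(buf.getvalue()))  # 2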
jhonnyam123/hangoutsbot
hangupsbot/plugins/xkcd.py
3
1350
import aiohttp, asyncio, io, logging, os, re, urllib.request, urllib.error from bs4 import BeautifulSoup import plugins logger = logging.getLogger(__name__) def _initialise(bot): plugins.register_handler(_watch_xkcd_link, type="message") @asyncio.coroutine def _watch_xkcd_link(bot, event, command): if event.user.is_self: return if " " in event.text: return if re.match(r"^https?://(www\.)?xkcd\.com(/([0-9]+/)?)?$", event.text.lower(), re.IGNORECASE): url = event.text.lower() try: response = urllib.request.urlopen(url) except urllib.error.URLError as e: logger.info("Tried and failed to get the xkcd comic :(") logger.info(e.read()) return body = response.read() soup = BeautifulSoup(body.decode("utf-8"), "lxml") comic = soup.find(src=re.compile(r'//imgs\.xkcd\.com/comics/.+')) alttext = comic.attrs['title'] imgurl = comic.attrs['src'] title = comic.attrs['alt'] link_image = "http:{}".format(imgurl) filename = os.path.basename(link_image) r = yield from aiohttp.request('get', link_image) raw = yield from r.read() image_data = io.BytesIO(raw) image_id = yield from bot._client.upload_image(image_data, filename=filename) yield from bot.coro_send_message(event.conv, "<b><u>{}</u></b><br>{}".format(title, alttext), image_id=image_id)
agpl-3.0
-8,259,803,688,609,911,000
31.926829
116
0.654074
false
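# A standalone sketch (ours) of the URL filter used by _watch_xkcd_link above,
# with the hostname dots escaped so look-alike hosts are rejected. The accepted
# shapes are the bare site root and comic permalinks such as
# https://xkcd.com/353/; the constant name is our own.
import re

XKCD_URL = re.compile(r"^https?://(www\.)?xkcd\.com(/([0-9]+/)?)?$", re.IGNORECASE)

for candidate in ("https://xkcd.com/353/", "http://www.xkcd.com",
                  "https://xkcdxcom/353/"):
    print(candidate, bool(XKCD_URL.match(candidate)))
# https://xkcd.com/353/ True
# http://www.xkcd.com True
# https://xkcdxcom/353/ False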
PySCeS/PyscesToolbox
psctb/analyse/_symca/ccobjects.py
1
18252
from __future__ import division, print_function from __future__ import absolute_import from __future__ import unicode_literals import numpy as np from numpy import array, nanmin, nanmax from sympy import Symbol from pysces import ModelMap, Scanner, ParScanner from numpy import NaN, abs from ...utils.model_graph import ModelGraph from ...utils.misc import silence_print, DotDict, formatter_factory, \ do_safe_state, find_min, find_max, get_value, stringify, \ scanner_range_setup from ...utils.plotting import Data2D def cctype(obj): return 'ccobjects' in str(type(obj)) @silence_print def get_state(mod, do_state=False): if do_state: mod.doState() ss = [getattr(mod, 'J_' + r) for r in mod.reactions] + \ [getattr(mod, s + '_ss') for s in mod.species] return ss class CCBase(object): """The base object for the control coefficients and control patterns""" def __init__(self, mod, name, expression, ltxe): super(CCBase, self).__init__() self.expression = expression self.mod = mod self._ltxe = ltxe self.name = name self._latex_name = '\\Sigma' self._analysis_method = 'symca' self._str_expression_ = None self._value = None self._latex_expression = None @property def latex_expression(self): if not self._latex_expression: self._latex_expression = self._ltxe.expression_to_latex( self.expression ) return self._latex_expression @property def latex_name(self): return self._latex_name @property def _str_expression(self): if not self._str_expression_: self._str_expression_ = str(self.expression) return self._str_expression_ @property def value(self): """The value property. Calls self._calc_value() when self._value is None and returns self._value""" self._calc_value() return self._value def _repr_latex_(self): return '$%s = %s = %.3f$' % (self.latex_name, self.latex_expression, self.value) def _calc_value(self): """Calculates the value of the expression""" keys = self.expression.atoms(Symbol) subsdict = {} for key in keys: str_key = str(key) subsdict[str_key] = getattr(self.mod, str_key) self._value = get_value(self._str_expression, subsdict) def __repr__(self): return self.expression.__repr__() def __add__(self, other): if cctype(other): return self.expression.__add__(other.expression) else: return self.expression.__add__(other) def __mul__(self, other): if cctype(other): return self.expression.__mul__(other.expression) else: return self.expression.__mul__(other) def __div__(self, other): if cctype(other): return self.expression.__div__(other.expression) else: return self.expression.__div__(other) def __pow__(self, other): if cctype(other): return self.expression.__pow__(other.expression) else: return self.expression.__pow__(other) class CCoef(CCBase): """The object that stores control coefficients. 
Inherits from CCBase""" def __init__(self, mod, name, expression, denominator, ltxe): super(CCoef, self).__init__(mod, name, expression, ltxe) self.numerator = expression self.denominator = denominator.expression self.expression = self.numerator / denominator.expression self.denominator_object = denominator self._latex_numerator = None self._latex_expression_full = None self._latex_expression = None self._latex_name = None self._abs_value = None self.control_patterns = None self._set_control_patterns() @property def abs_value(self): self._calc_abs_value() return self._abs_value @property def latex_numerator(self): if not self._latex_numerator: self._latex_numerator = self._ltxe.expression_to_latex( self.numerator ) return self._latex_numerator @property def latex_expression_full(self): if not self._latex_expression_full: full_expr = '\\frac{' + self.latex_numerator + '}{' \ + self.denominator_object.latex_expression + '}' self._latex_expression_full = full_expr return self._latex_expression_full @property def latex_expression(self): if not self._latex_expression: self._latex_expression = '(' + \ self.latex_numerator + ')' + '/~\\Sigma' return self._latex_expression @property def latex_name(self): if not self._latex_name: self._latex_name = self._ltxe.expression_to_latex( self.name ) return self._latex_name def _perscan_legacy(self, parameter, scan_range): scan_res = [list() for i in range(len(list(self.control_patterns.values())) + 1)] scan_res[0] = scan_range for parvalue in scan_range: state_valid = do_safe_state( self.mod, parameter, parvalue, type='mca') cc_abs_value = 0 for i, cp in enumerate(self.control_patterns.values()): if state_valid: cp_abs = abs(cp.value) scan_res[i + 1].append(cp_abs) cc_abs_value += cp_abs else: scan_res[i + 1].append(NaN) for i, cp in enumerate(self.control_patterns.values()): if state_valid: scan_res[i + 1][-1] = (scan_res[i + 1] [-1] / cc_abs_value) * 100 return scan_res def _valscan_legacy(self, parameter, scan_range): control_pattern_range = list(range(len(list(self.control_patterns.values())) + 2)) scan_res = [list() for i in control_pattern_range] scan_res[0] = scan_range for parvalue in scan_range: state_valid = do_safe_state(self.mod, parameter, parvalue, type='mca') cc_value = 0 for i, cp in enumerate(self.control_patterns.values()): if state_valid: cp_value = cp.value scan_res[i + 1].append(cp_value) cc_value += cp_value else: scan_res[i + 1].append(NaN) if state_valid: scan_res[i + 2].append(cc_value) else: scan_res[i + 2].append(NaN) return scan_res def _perscan(self, parameter, scan_range, par_scan=False, par_engine='multiproc'): val_scan_res = self._valscan(parameter, scan_range, par_scan, par_engine) points = len(scan_range) parameter = val_scan_res[:, 0].reshape(points, 1) cp_abs_vals = np.abs(val_scan_res[:, 1:-1]) cp_abs_sum = np.sum(cp_abs_vals, 1).reshape(points, 1) cp_abs_perc = (cp_abs_vals / cp_abs_sum) * 100 scan_res = np.hstack([parameter, cp_abs_perc]) return scan_res def _valscan(self, parameter, scan_range, par_scan=False, par_engine='multiproc'): needed_symbols = [parameter] + \ stringify(list(self.expression.atoms(Symbol))) # This is experimental if par_scan: scanner = ParScanner(self.mod, par_engine) else: scanner = Scanner(self.mod) scanner.quietRun = True start, end, points, log = scanner_range_setup(scan_range) scanner.addScanParameter(parameter, start=start, end=end, points=points, log=log) scanner.addUserOutput(*needed_symbols) scanner.Run() subs_dict = {} for i, symbol in enumerate(scanner.UserOutputList): 
subs_dict[symbol] = scanner.UserOutputResults[:, i] control_pattern_names = list(self.control_patterns.keys()) denom_expr = str(self.denominator) cp_numerators = [self.control_patterns[cp_name].numerator for cp_name in control_pattern_names] column_exprs = stringify(cp_numerators) parameter = subs_dict[parameter].reshape(points, 1) scan_res = [] denom_val = get_value(denom_expr, subs_dict) for expr in column_exprs: scan_res.append(get_value(expr, subs_dict) / denom_val) scan_res = np.array(scan_res).transpose() cc_vals = np.sum(scan_res, 1).reshape(points, 1) scan_res = np.hstack([parameter, scan_res, cc_vals]) return scan_res def do_par_scan(self, parameter, scan_range, scan_type='percentage', init_return=True, par_scan=False, par_engine='multiproc', force_legacy=False): assert scan_type in ['percentage', 'value'] init = getattr(self.mod, parameter) column_names = [parameter] + \ [cp.name for cp in list(self.control_patterns.values())] if scan_type == 'percentage': y_label = 'Control pattern percentage contribution' try: assert not force_legacy, 'Legacy scan requested' scan_res = self._perscan(parameter, scan_range, par_scan, par_engine) data_array = scan_res except Exception as exception: print('The parameter scan yielded the following error:') print(exception) print('Switching over to slower scan method and replacing') print('invalid steady states with NaN values.') scan_res = self._perscan_legacy(parameter, scan_range) data_array = array(scan_res, dtype=np.float).transpose() ylim = [nanmin(data_array[:, 1:]), nanmax(data_array[:, 1:]) * 1.1] elif scan_type == 'value': column_names = column_names + [self.name] y_label = 'Control coefficient/pattern value' try: assert not force_legacy, 'Legacy scan requested' scan_res = self._valscan(parameter, scan_range, par_scan, par_engine) data_array = scan_res except Exception as exception: print('The parameter scan yielded the following error:') print(exception) print('Switching over to slower scan method and replacing') print('invalid steady states with NaN values.') scan_res = self._valscan_legacy(parameter, scan_range) data_array = array(scan_res, dtype=np.float).transpose() ylim = [nanmin(data_array[:, 1:]), nanmax(data_array[:, 1:]) * 1.1] # print data_array.shape if init_return: self.mod.SetQuiet() setattr(self.mod, parameter, init) self.mod.doMca() self.mod.SetLoud() mm = ModelMap(self.mod) species = mm.hasSpecies() if parameter in species: x_label = '[%s]' % parameter.replace('_', ' ') else: x_label = parameter ax_properties = {'ylabel': y_label, 'xlabel': x_label, 'xscale': 'linear', 'yscale': 'linear', 'xlim': [find_min(scan_range), find_max(scan_range)], 'ylim': ylim} data = Data2D(mod=self.mod, column_names=column_names, data_array=data_array, ltxe=self._ltxe, analysis_method='symca', ax_properties=ax_properties, file_name=self.name) return data def _calc_abs_value(self): """Calculates the absolute numeric value of the control coefficient from the values of its control patterns.""" keys = self.expression.atoms(Symbol) subsdict = {} if len(keys) == 0: subsdict = None for key in keys: str_key = str(key) subsdict[str_key] = getattr(self.mod, str_key) for pattern in list(self.control_patterns.values()): pattern._calc_value(subsdict) self._abs_value = sum( [abs(pattern._value) for pattern in list(self.control_patterns.values())]) def _calc_value(self): """Calculates the numeric value of the control coefficient from the values of its control patterns.""" keys = self.expression.atoms(Symbol) subsdict = {} if len(keys) == 0: subsdict = None 
for key in keys: str_key = str(key) subsdict[str_key] = getattr(self.mod, str_key) for pattern in list(self.control_patterns.values()): pattern._calc_value(subsdict) self._value = sum( [pattern._value for pattern in list(self.control_patterns.values())]) def _set_control_patterns(self): """Divides the control coefficient into control patterns and saves the results in self.CPx, where x is the number of the control pattern as it appears in the control coefficient expression""" patterns = self.numerator.as_coeff_add()[1] if len(patterns) == 0: patterns = [self.numerator.as_coeff_add()[0]] cps = DotDict() cps._make_repr('v.name', 'v.value', formatter_factory()) for i, pattern in enumerate(patterns): name = 'CP{:3}'.format(i + 1).replace(' ', '0') cp = CPattern(self.mod, name, pattern, self.denominator_object, self, self._ltxe) setattr(self, name, cp) cps[name] = cp self.control_patterns = cps # assert self._check_control_patterns == True def _check_control_patterns(self): """Checks that all control patterns are either positive or negative""" all_same = False poscomp = [i.value > 0 for i in list(self.control_patterns.values())] negcomp = [i.value < 0 for i in list(self.control_patterns.values())] if all(poscomp): all_same = True elif all(negcomp): all_same = True return all_same def highlight_patterns(self, width=None, height=None, show_dummy_sinks=False, show_external_modifier_links=False, pos_dic=None): mg = ModelGraph(mod=self.mod, pos_dic=pos_dic, analysis_method=self._analysis_method) if height: mg.height = height if width: mg.width = width mg.highlight_cc(self, show_dummy_sinks, show_external_modifier_links) class CPattern(CCBase): """A single control pattern: one term of a control coefficient's numerator taken over the common denominator.""" def __init__(self, mod, name, expression, denominator, parent, ltxe): super(CPattern, self).__init__(mod, name, expression, ltxe) self.numerator = expression self.denominator = denominator.expression self.expression = self.numerator / denominator.expression self.denominator_object = denominator self.parent = parent self._latex_numerator = None self._latex_expression_full = None self._latex_expression = None self._latex_name = None self._percentage = None def _calc_value(self, subsdict=None): """Calculates the value of the expression""" if subsdict is None: keys = self.expression.atoms(Symbol) subsdict = {} for key in keys: str_key = str(key) subsdict[str_key] = getattr(self.mod, str_key) self._value = get_value(self._str_expression, subsdict) @property def latex_numerator(self): if not self._latex_numerator: self._latex_numerator = self._ltxe.expression_to_latex( self.numerator ) return self._latex_numerator @property def latex_expression_full(self): if not self._latex_expression_full: full_expr = '\\frac{' + self.latex_numerator + '}{' \ + self.denominator_object.latex_expression + '}' self._latex_expression_full = full_expr return self._latex_expression_full @property def latex_expression(self): if not self._latex_expression: self._latex_expression = self.latex_numerator + '/~\\Sigma' return self._latex_expression @property def latex_name(self): if not self._latex_name: self._latex_name = self.name return self._latex_name @property def percentage(self): self._percentage = (abs(self.value) / self.parent.abs_value) * 100 return self._percentage
bsd-3-clause
4,744,020,672,763,269,000
34.1
90
0.526682
false
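The percentage scan in _perscan above reduces to simple per-row arithmetic on the _valscan output: take the absolute value of each control-pattern column, divide by the row sum of those absolute values, and scale to 100. A minimal NumPy sketch of just that step, using made-up numbers in place of real scanner output:

import numpy as np

# Hypothetical _valscan-style output: column 0 is the scanned parameter,
# columns 1..-2 are control-pattern values, the last column is their sum.
val_scan_res = np.array([
    [0.1, 0.50, -0.20, 0.30],
    [0.2, 0.40, -0.10, 0.30],
    [0.3, 0.25, -0.05, 0.20],
])

points = val_scan_res.shape[0]
parameter = val_scan_res[:, 0].reshape(points, 1)

# Absolute pattern values, normalised per row and scaled to percentages.
cp_abs_vals = np.abs(val_scan_res[:, 1:-1])
cp_abs_sum = np.sum(cp_abs_vals, axis=1).reshape(points, 1)
cp_abs_perc = (cp_abs_vals / cp_abs_sum) * 100

scan_res = np.hstack([parameter, cp_abs_perc])
print(scan_res)  # each row: parameter value, then percentages summing to 100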
alexsmx/djangoAppengineSrcTemplate
django/db/backends/postgresql_psycopg2/base.py
239
8346
""" PostgreSQL database backend for Django. Requires psycopg 2: http://initd.org/projects/psycopg2 """ import sys from django.db import utils from django.db.backends import * from django.db.backends.signals import connection_created from django.db.backends.postgresql.operations import DatabaseOperations as PostgresqlDatabaseOperations from django.db.backends.postgresql.client import DatabaseClient from django.db.backends.postgresql.creation import DatabaseCreation from django.db.backends.postgresql.version import get_version from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection from django.utils.safestring import SafeUnicode, SafeString try: import psycopg2 as Database import psycopg2.extensions except ImportError, e: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e) DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString) psycopg2.extensions.register_adapter(SafeUnicode, psycopg2.extensions.QuotedString) class CursorWrapper(object): """ A thin wrapper around psycopg2's normal cursor class so that we can catch particular exception instances and reraise them with the right types. """ def __init__(self, cursor): self.cursor = cursor def execute(self, query, args=None): try: return self.cursor.execute(query, args) except Database.IntegrityError, e: raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2] except Database.DatabaseError, e: raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2] def executemany(self, query, args): try: return self.cursor.executemany(query, args) except Database.IntegrityError, e: raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2] except Database.DatabaseError, e: raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2] def __getattr__(self, attr): if attr in self.__dict__: return self.__dict__[attr] else: return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) class DatabaseFeatures(BaseDatabaseFeatures): needs_datetime_string_cast = False can_return_id_from_insert = False requires_rollback_on_dirty_transaction = True has_real_datatype = True can_defer_constraint_checks = True class DatabaseOperations(PostgresqlDatabaseOperations): def last_executed_query(self, cursor, sql, params): # With psycopg2, cursor objects have a "query" attribute that is the # exact query sent to the database. 
See docs here: # http://www.initd.org/tracker/psycopg/wiki/psycopg2_documentation#postgresql-status-message-and-executed-query return cursor.query def return_insert_id(self): return "RETURNING %s", () class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'postgresql' operators = { 'exact': '= %s', 'iexact': '= UPPER(%s)', 'contains': 'LIKE %s', 'icontains': 'LIKE UPPER(%s)', 'regex': '~ %s', 'iregex': '~* %s', 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': 'LIKE %s', 'endswith': 'LIKE %s', 'istartswith': 'LIKE UPPER(%s)', 'iendswith': 'LIKE UPPER(%s)', } def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) autocommit = self.settings_dict["OPTIONS"].get('autocommit', False) self.features.uses_autocommit = autocommit self._set_isolation_level(int(not autocommit)) self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = BaseDatabaseValidation(self) def _cursor(self): new_connection = False set_tz = False settings_dict = self.settings_dict if self.connection is None: new_connection = True set_tz = settings_dict.get('TIME_ZONE') if settings_dict['NAME'] == '': from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("You need to specify NAME in your Django settings file.") conn_params = { 'database': settings_dict['NAME'], } conn_params.update(settings_dict['OPTIONS']) if 'autocommit' in conn_params: del conn_params['autocommit'] if settings_dict['USER']: conn_params['user'] = settings_dict['USER'] if settings_dict['PASSWORD']: conn_params['password'] = settings_dict['PASSWORD'] if settings_dict['HOST']: conn_params['host'] = settings_dict['HOST'] if settings_dict['PORT']: conn_params['port'] = settings_dict['PORT'] self.connection = Database.connect(**conn_params) self.connection.set_client_encoding('UTF8') self.connection.set_isolation_level(self.isolation_level) connection_created.send(sender=self.__class__, connection=self) cursor = self.connection.cursor() cursor.tzinfo_factory = None if new_connection: if set_tz: cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']]) if not hasattr(self, '_version'): self.__class__._version = get_version(cursor) if self._version[0:2] < (8, 0): # No savepoint support for earlier version of PostgreSQL. self.features.uses_savepoints = False if self.features.uses_autocommit: if self._version[0:2] < (8, 2): # FIXME: Needs extra code to do reliable model insert # handling, so we forbid it for now. from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("You cannot use autocommit=True with PostgreSQL prior to 8.2 at the moment.") else: # FIXME: Eventually we're enable this by default for # versions that support it, but, right now, that's hard to # do without breaking other things (#10509). self.features.can_return_id_from_insert = True return CursorWrapper(cursor) def _enter_transaction_management(self, managed): """ Switch the isolation level when needing transaction support, so that the same transaction is visible across all the queries. """ if self.features.uses_autocommit and managed and not self.isolation_level: self._set_isolation_level(1) def _leave_transaction_management(self, managed): """ If the normal operating mode is "autocommit", switch back to that when leaving transaction management. 
""" if self.features.uses_autocommit and not managed and self.isolation_level: self._set_isolation_level(0) def _set_isolation_level(self, level): """ Do all the related feature configurations for changing isolation levels. This doesn't touch the uses_autocommit feature, since that controls the movement *between* isolation levels. """ assert level in (0, 1) try: if self.connection is not None: self.connection.set_isolation_level(level) finally: self.isolation_level = level self.features.uses_savepoints = bool(level) def _commit(self): if self.connection is not None: try: return self.connection.commit() except Database.IntegrityError, e: raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
bsd-3-clause
278,535,006,342,271,650
40.316832
124
0.6318
false
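CursorWrapper in the backend above exists for one reason: to catch the driver's psycopg2 exceptions and re-raise them as Django's utils.IntegrityError and utils.DatabaseError while keeping the original traceback. A minimal sketch of the same delegation-plus-translation pattern in modern Python 3 syntax (the source uses the equivalent Python 2 three-argument raise); the exception classes and cursor here are stand-ins, not Django's or psycopg2's:

class DriverError(Exception):
    """Stand-in for the database driver's error type."""


class WrappedError(Exception):
    """Stand-in for the framework-level error callers should catch."""


class CursorProxy(object):
    """Delegates to the real cursor, translating driver exceptions."""

    def __init__(self, cursor):
        self.cursor = cursor

    def execute(self, query, args=None):
        try:
            return self.cursor.execute(query, args)
        except DriverError as e:
            # 'raise ... from' keeps the driver traceback chained, the same
            # intent as the three-argument raise in the Python 2 source.
            raise WrappedError(*e.args) from e

    def __getattr__(self, attr):
        # Everything not overridden falls through to the wrapped cursor.
        return getattr(self.cursor, attr)


class FailingCursor(object):
    """Toy cursor whose execute always fails, to show the translation."""

    def execute(self, query, args=None):
        raise DriverError("duplicate key")


try:
    CursorProxy(FailingCursor()).execute("INSERT ...")
except WrappedError as exc:
    print("caught framework-level error:", exc)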
xiandiancloud/edxplaltfom-xusong
common/lib/extract_tar.py
40
2149
""" Safe version of tarfile.extractall which does not extract any files that would be, or symlink to a file that is, outside of the directory extracted in. Adapted from: http://stackoverflow.com/questions/10060069/safely-extract-zip-or-tar-using-python """ from os.path import abspath, realpath, dirname, join as joinpath from django.core.exceptions import SuspiciousOperation import logging log = logging.getLogger(__name__) # pylint: disable=C0103 def resolved(rpath): """ Returns the canonical absolute path of `rpath`. """ return realpath(abspath(rpath)) def _is_bad_path(path, base): """ Is (the canonical absolute path of) `path` outside `base`? """ return not resolved(joinpath(base, path)).startswith(base) def _is_bad_link(info, base): """ Does the file sym- ord hard-link to files outside `base`? """ # Links are interpreted relative to the directory containing the link tip = resolved(joinpath(base, dirname(info.name))) return _is_bad_path(info.linkname, base=tip) def safemembers(members): """ Check that all elements of a tar file are safe. """ base = resolved(".") for finfo in members: if _is_bad_path(finfo.name, base): log.debug("File %r is blocked (illegal path)", finfo.name) raise SuspiciousOperation("Illegal path") elif finfo.issym() and _is_bad_link(finfo, base): log.debug( "File %r is blocked: Hard link to %r", finfo.name, finfo.linkname) raise SuspiciousOperation("Hard link") elif finfo.islnk() and _is_bad_link(finfo, base): log.debug("File %r is blocked: Symlink to %r", finfo.name, finfo.linkname) raise SuspiciousOperation("Symlink") elif finfo.isdev(): log.debug("File %r is blocked: FIFO, device or character file", finfo.name) raise SuspiciousOperation("Dev file") return members def safetar_extractall(tarf, *args, **kwargs): """ Safe version of `tarf.extractall()`. """ return tarf.extractall(members=safemembers(tarf), *args, **kwargs)
agpl-3.0
5,506,515,926,232,927,000
33.111111
89
0.65007
false
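A usage sketch for the extractor above: extraction has to run with the destination as the current directory, because safemembers() resolves every member against resolved("."). The archive and destination names are hypothetical:

import os
import tarfile

from django.core.exceptions import SuspiciousOperation
from extract_tar import safetar_extractall  # assumes the module above is importable

archive = os.path.abspath("upload.tar.gz")  # hypothetical archive path
dest = "unpacked"                           # hypothetical destination

os.makedirs(dest)
os.chdir(dest)  # safemembers() checks each member against the cwd

with tarfile.open(archive) as tarf:
    try:
        safetar_extractall(tarf)
    except SuspiciousOperation as exc:
        print("Blocked unsafe archive member: %s" % exc)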
voussoir/praw
tests/unit/models/reddit/test_subreddit.py
2
3857
import pickle import pytest from praw.models import Subreddit, WikiPage from ... import UnitTest class TestSubreddit(UnitTest): def test_equality(self): subreddit1 = Subreddit(self.reddit, _data={'display_name': 'dummy1', 'n': 1}) subreddit2 = Subreddit(self.reddit, _data={'display_name': 'Dummy1', 'n': 2}) subreddit3 = Subreddit(self.reddit, _data={'display_name': 'dummy3', 'n': 2}) assert subreddit1 == subreddit1 assert subreddit2 == subreddit2 assert subreddit3 == subreddit3 assert subreddit1 == subreddit2 assert subreddit2 != subreddit3 assert subreddit1 != subreddit3 assert 'dummy1' == subreddit1 assert subreddit2 == 'dummy1' def test_construct_failure(self): message = 'Either `display_name` or `_data` must be provided.' with pytest.raises(TypeError) as excinfo: Subreddit(self.reddit) assert str(excinfo.value) == message with pytest.raises(TypeError) as excinfo: Subreddit(self.reddit, 'dummy', {'id': 'dummy'}) assert str(excinfo.value) == message def test_fullname(self): subreddit = Subreddit(self.reddit, _data={'display_name': 'name', 'id': 'dummy'}) assert subreddit.fullname == 't5_dummy' def test_hash(self): subreddit1 = Subreddit(self.reddit, _data={'display_name': 'dummy1', 'n': 1}) subreddit2 = Subreddit(self.reddit, _data={'display_name': 'Dummy1', 'n': 2}) subreddit3 = Subreddit(self.reddit, _data={'display_name': 'dummy3', 'n': 2}) assert hash(subreddit1) == hash(subreddit1) assert hash(subreddit2) == hash(subreddit2) assert hash(subreddit3) == hash(subreddit3) assert hash(subreddit1) == hash(subreddit2) assert hash(subreddit2) != hash(subreddit3) assert hash(subreddit1) != hash(subreddit3) def test_pickle(self): subreddit = Subreddit(self.reddit, _data={'display_name': 'name', 'id': 'dummy'}) for level in range(pickle.HIGHEST_PROTOCOL + 1): other = pickle.loads(pickle.dumps(subreddit, protocol=level)) assert subreddit == other def test_repr(self): subreddit = Subreddit(self.reddit, display_name='name') assert repr(subreddit) == 'Subreddit(display_name=\'name\')' def test_search__params_not_modified(self): params = {'dummy': 'value'} subreddit = Subreddit(self.reddit, display_name='name') generator = subreddit.search(None, params=params) assert generator.params['dummy'] == 'value' assert params == {'dummy': 'value'} def test_str(self): subreddit = Subreddit(self.reddit, _data={'display_name': 'name', 'id': 'dummy'}) assert str(subreddit) == 'name' def test_submit_failure(self): message = 'Either `selftext` or `url` must be provided.' subreddit = Subreddit(self.reddit, display_name='name') with pytest.raises(TypeError) as excinfo: subreddit.submit('Cool title') assert str(excinfo.value) == message with pytest.raises(TypeError) as excinfo: subreddit.submit('Cool title', selftext='a', url='b') assert str(excinfo.value) == message class TestSubredditWiki(UnitTest): def test__getitem(self): subreddit = Subreddit(self.reddit, display_name='name') wikipage = subreddit.wiki['Foo'] assert isinstance(wikipage, WikiPage) assert 'foo' == wikipage.name
gpl-3.0
8,130,608,840,796,770,000
39.177083
73
0.574799
false
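The equality and hashing tests above pin down a specific contract: Subreddit instances compare equal to each other and to plain strings case-insensitively on display_name, and hash consistently with that equality. A minimal sketch of a class satisfying the same contract; the class is illustrative, not PRAW's actual implementation:

class CaseInsensitiveName(object):
    """Equality and hashing keyed on a lowercased name, as the tests expect."""

    def __init__(self, display_name):
        self.display_name = display_name

    def __eq__(self, other):
        if isinstance(other, str):
            return other.lower() == self.display_name.lower()
        return (isinstance(other, CaseInsensitiveName)
                and other.display_name.lower() == self.display_name.lower())

    def __ne__(self, other):
        # Required on Python 2; derived automatically on Python 3.
        return not self == other

    def __hash__(self):
        return hash(self.display_name.lower())


assert CaseInsensitiveName('dummy1') == CaseInsensitiveName('Dummy1')
assert CaseInsensitiveName('dummy1') == 'dummy1'
assert hash(CaseInsensitiveName('dummy1')) == hash(CaseInsensitiveName('Dummy1'))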
CXQERP/ODOOERP
openerp/report/render/rml2txt/__init__.py
381
1351
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from rml2txt import parseString, parseNode """ This engine is the minimalistic renderer of RML documents into text files, using spaces and newlines to format. It was needed in some special applications, where legal reports need to be printed in special (dot-matrix) printers. """ # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
-7,856,761,138,642,566,000
39.939394
79
0.638046
false
gmt/portage
pym/_emerge/BinpkgPrefetcher.py
8
1240
# Copyright 1999-2009 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from _emerge.BinpkgFetcher import BinpkgFetcher from _emerge.CompositeTask import CompositeTask from _emerge.BinpkgVerifier import BinpkgVerifier from portage import os class BinpkgPrefetcher(CompositeTask): __slots__ = ("pkg",) + \ ("pkg_path", "_bintree",) def _start(self): self._bintree = self.pkg.root_config.trees["bintree"] fetcher = BinpkgFetcher(background=self.background, logfile=self.scheduler.fetch.log_file, pkg=self.pkg, scheduler=self.scheduler) self.pkg_path = fetcher.pkg_path self._start_task(fetcher, self._fetcher_exit) def _fetcher_exit(self, fetcher): if self._default_exit(fetcher) != os.EX_OK: self.wait() return verifier = BinpkgVerifier(background=self.background, logfile=self.scheduler.fetch.log_file, pkg=self.pkg, scheduler=self.scheduler, _pkg_path=self.pkg_path) self._start_task(verifier, self._verifier_exit) def _verifier_exit(self, verifier): if self._default_exit(verifier) != os.EX_OK: self.wait() return self._bintree.inject(self.pkg.cpv, filename=self.pkg_path) self._current_task = None self.returncode = os.EX_OK self.wait()
gpl-2.0
1,846,980,671,108,573,200
27.837209
66
0.733065
false
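BinpkgPrefetcher above chains its sub-tasks through exit callbacks: fetch, then verify on success, then injection into the binary tree, aborting whenever a step's exit code is not os.EX_OK. A stripped-down sketch of that stop-on-failure sequencing with plain callables; the Pipeline class is illustrative, not part of portage's task framework:

import os


class Pipeline(object):
    """Run steps in order; stop at the first non-zero exit code."""

    def __init__(self, steps):
        self.steps = list(steps)
        self.returncode = None

    def run(self):
        for step in self.steps:
            rc = step()
            if rc != os.EX_OK:  # mirrors the _default_exit() checks above
                self.returncode = rc
                return rc
        self.returncode = os.EX_OK
        return os.EX_OK


def fetch():
    print("fetching binary package")
    return os.EX_OK


def verify():
    print("verifying checksums")
    return os.EX_OK


def inject():
    print("injecting package into the binary tree")
    return os.EX_OK


Pipeline([fetch, verify, inject]).run()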
seann1/portfolio5
.meteor/dev_bundle/python/Lib/lib-tk/test/test_ttk/test_widgets.py
13
57791
import unittest import Tkinter as tkinter from Tkinter import TclError import ttk from test.test_support import requires, run_unittest import sys from test_functions import MockTclObj from support import (AbstractTkTest, tcl_version, get_tk_patchlevel, simulate_mouse_click) from widget_tests import (add_standard_options, noconv, noconv_meth, AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests, setUpModule) requires('gui') class StandardTtkOptionsTests(StandardOptionsTests): def test_class(self): widget = self.create() self.assertEqual(widget['class'], '') errmsg='attempt to change read-only option' if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): errmsg='Attempt to change read-only option' self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg) widget2 = self.create(class_='Foo') self.assertEqual(widget2['class'], 'Foo') def test_padding(self): widget = self.create() self.checkParam(widget, 'padding', 0, expected=('0',)) self.checkParam(widget, 'padding', 5, expected=('5',)) self.checkParam(widget, 'padding', (5, 6), expected=('5', '6')) self.checkParam(widget, 'padding', (5, 6, 7), expected=('5', '6', '7')) self.checkParam(widget, 'padding', (5, 6, 7, 8), expected=('5', '6', '7', '8')) self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p')) self.checkParam(widget, 'padding', (), expected='') def test_style(self): widget = self.create() self.assertEqual(widget['style'], '') errmsg = 'Layout Foo not found' if hasattr(self, 'default_orient'): errmsg = ('Layout %s.Foo not found' % getattr(self, 'default_orient').title()) self.checkInvalidParam(widget, 'style', 'Foo', errmsg=errmsg) widget2 = self.create(class_='Foo') self.assertEqual(widget2['class'], 'Foo') # XXX pass class WidgetTest(AbstractTkTest, unittest.TestCase): """Tests methods available in every ttk widget.""" def setUp(self): super(WidgetTest, self).setUp() self.widget = ttk.Button(self.root, width=0, text="Text") self.widget.pack() self.widget.wait_visibility() def test_identify(self): self.widget.update_idletasks() self.assertEqual(self.widget.identify( self.widget.winfo_width() // 2, self.widget.winfo_height() // 2 ), "label") self.assertEqual(self.widget.identify(-1, -1), "") self.assertRaises(tkinter.TclError, self.widget.identify, None, 5) self.assertRaises(tkinter.TclError, self.widget.identify, 5, None) self.assertRaises(tkinter.TclError, self.widget.identify, 5, '') def test_widget_state(self): # XXX not sure about the portability of all these tests self.assertEqual(self.widget.state(), ()) self.assertEqual(self.widget.instate(['!disabled']), True) # changing from !disabled to disabled self.assertEqual(self.widget.state(['disabled']), ('!disabled', )) # no state change self.assertEqual(self.widget.state(['disabled']), ()) # change back to !disable but also active self.assertEqual(self.widget.state(['!disabled', 'active']), ('!active', 'disabled')) # no state changes, again self.assertEqual(self.widget.state(['!disabled', 'active']), ()) self.assertEqual(self.widget.state(['active', '!disabled']), ()) def test_cb(arg1, **kw): return arg1, kw self.assertEqual(self.widget.instate(['!disabled'], test_cb, "hi", **{"msg": "there"}), ('hi', {'msg': 'there'})) # attempt to set invalid statespec currstate = self.widget.state() self.assertRaises(tkinter.TclError, self.widget.instate, ['badstate']) self.assertRaises(tkinter.TclError, self.widget.instate, ['disabled', 'badstate']) # verify that widget didn't change its state self.assertEqual(currstate, self.widget.state()) # ensuring that passing None 
as state doesn't modify current state self.widget.state(['active', '!disabled']) self.assertEqual(self.widget.state(), ('active', )) class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests): _conv_pixels = noconv_meth @add_standard_options(StandardTtkOptionsTests) class FrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'borderwidth', 'class', 'cursor', 'height', 'padding', 'relief', 'style', 'takefocus', 'width', ) def create(self, **kwargs): return ttk.Frame(self.root, **kwargs) @add_standard_options(StandardTtkOptionsTests) class LabelFrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'borderwidth', 'class', 'cursor', 'height', 'labelanchor', 'labelwidget', 'padding', 'relief', 'style', 'takefocus', 'text', 'underline', 'width', ) def create(self, **kwargs): return ttk.LabelFrame(self.root, **kwargs) def test_labelanchor(self): widget = self.create() self.checkEnumParam(widget, 'labelanchor', 'e', 'en', 'es', 'n', 'ne', 'nw', 's', 'se', 'sw', 'w', 'wn', 'ws', errmsg='Bad label anchor specification {}') self.checkInvalidParam(widget, 'labelanchor', 'center') def test_labelwidget(self): widget = self.create() label = ttk.Label(self.root, text='Mupp', name='foo') self.checkParam(widget, 'labelwidget', label, expected='.foo') label.destroy() class AbstractLabelTest(AbstractWidgetTest): def checkImageParam(self, widget, name): image = tkinter.PhotoImage(master=self.root, name='image1') image2 = tkinter.PhotoImage(master=self.root, name='image2') self.checkParam(widget, name, image, expected=('image1',)) self.checkParam(widget, name, 'image1', expected=('image1',)) self.checkParam(widget, name, (image,), expected=('image1',)) self.checkParam(widget, name, (image, 'active', image2), expected=('image1', 'active', 'image2')) self.checkParam(widget, name, 'image1 active image2', expected=('image1', 'active', 'image2')) self.checkInvalidParam(widget, name, 'spam', errmsg='image "spam" doesn\'t exist') def test_compound(self): widget = self.create() self.checkEnumParam(widget, 'compound', 'none', 'text', 'image', 'center', 'top', 'bottom', 'left', 'right') def test_state(self): widget = self.create() self.checkParams(widget, 'state', 'active', 'disabled', 'normal') def test_width(self): widget = self.create() self.checkParams(widget, 'width', 402, -402, 0) @add_standard_options(StandardTtkOptionsTests) class LabelTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'anchor', 'background', 'class', 'compound', 'cursor', 'font', 'foreground', 'image', 'justify', 'padding', 'relief', 'state', 'style', 'takefocus', 'text', 'textvariable', 'underline', 'width', 'wraplength', ) _conv_pixels = noconv_meth def create(self, **kwargs): return ttk.Label(self.root, **kwargs) def test_font(self): widget = self.create() self.checkParam(widget, 'font', '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') @add_standard_options(StandardTtkOptionsTests) class ButtonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'compound', 'cursor', 'default', 'image', 'state', 'style', 'takefocus', 'text', 'textvariable', 'underline', 'width', ) def create(self, **kwargs): return ttk.Button(self.root, **kwargs) def test_default(self): widget = self.create() self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled') def test_invoke(self): success = [] btn = ttk.Button(self.root, command=lambda: success.append(1)) btn.invoke() self.assertTrue(success) @add_standard_options(StandardTtkOptionsTests) class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): 
OPTIONS = ( 'class', 'command', 'compound', 'cursor', 'image', 'offvalue', 'onvalue', 'state', 'style', 'takefocus', 'text', 'textvariable', 'underline', 'variable', 'width', ) def create(self, **kwargs): return ttk.Checkbutton(self.root, **kwargs) def test_offvalue(self): widget = self.create() self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string') def test_onvalue(self): widget = self.create() self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string') def test_invoke(self): success = [] def cb_test(): success.append(1) return "cb test called" cbtn = ttk.Checkbutton(self.root, command=cb_test) # the variable automatically created by ttk.Checkbutton is actually # undefined till we invoke the Checkbutton self.assertEqual(cbtn.state(), ('alternate', )) self.assertRaises(tkinter.TclError, cbtn.tk.globalgetvar, cbtn['variable']) res = cbtn.invoke() self.assertEqual(res, "cb test called") self.assertEqual(cbtn['onvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) self.assertTrue(success) cbtn['command'] = '' res = cbtn.invoke() self.assertFalse(str(res)) self.assertLessEqual(len(success), 1) self.assertEqual(cbtn['offvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class ComboboxTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'exportselection', 'height', 'justify', 'postcommand', 'state', 'style', 'takefocus', 'textvariable', 'values', 'width', ) def setUp(self): super(ComboboxTest, self).setUp() self.combo = self.create() def create(self, **kwargs): return ttk.Combobox(self.root, **kwargs) def test_height(self): widget = self.create() self.checkParams(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i') def test_state(self): widget = self.create() self.checkParams(widget, 'state', 'active', 'disabled', 'normal') def _show_drop_down_listbox(self): width = self.combo.winfo_width() self.combo.event_generate('<ButtonPress-1>', x=width - 5, y=5) self.combo.event_generate('<ButtonRelease-1>', x=width - 5, y=5) self.combo.update_idletasks() def test_virtual_event(self): success = [] self.combo['values'] = [1] self.combo.bind('<<ComboboxSelected>>', lambda evt: success.append(True)) self.combo.pack() self.combo.wait_visibility() height = self.combo.winfo_height() self._show_drop_down_listbox() self.combo.update() self.combo.event_generate('<Return>') self.combo.update() self.assertTrue(success) def test_postcommand(self): success = [] self.combo['postcommand'] = lambda: success.append(True) self.combo.pack() self.combo.wait_visibility() self._show_drop_down_listbox() self.assertTrue(success) # testing postcommand removal self.combo['postcommand'] = '' self._show_drop_down_listbox() self.assertEqual(len(success), 1) def test_values(self): def check_get_current(getval, currval): self.assertEqual(self.combo.get(), getval) self.assertEqual(self.combo.current(), currval) self.assertEqual(self.combo['values'], () if tcl_version < (8, 5) else '') check_get_current('', -1) self.checkParam(self.combo, 'values', 'mon tue wed thur', expected=('mon', 'tue', 'wed', 'thur')) self.checkParam(self.combo, 'values', ('mon', 'tue', 'wed', 'thur')) self.checkParam(self.combo, 'values', (42, 3.14, '', 'any string')) self.checkParam(self.combo, 'values', () if tcl_version < (8, 5) else '') self.combo['values'] = ['a', 1, 'c'] self.combo.set('c') check_get_current('c', 2) self.combo.current(0) check_get_current('a', 0) self.combo.set('d') check_get_current('d', -1) # testing values with empty string self.combo.set('') 
self.combo['values'] = (1, 2, '', 3) check_get_current('', 2) # testing values with empty string set through configure self.combo.configure(values=[1, '', 2]) self.assertEqual(self.combo['values'], ('1', '', '2') if self.wantobjects else '1 {} 2') # testing values with spaces self.combo['values'] = ['a b', 'a\tb', 'a\nb'] self.assertEqual(self.combo['values'], ('a b', 'a\tb', 'a\nb') if self.wantobjects else '{a b} {a\tb} {a\nb}') # testing values with special characters self.combo['values'] = [r'a\tb', '"a"', '} {'] self.assertEqual(self.combo['values'], (r'a\tb', '"a"', '} {') if self.wantobjects else r'a\\tb {"a"} \}\ \{') # out of range self.assertRaises(tkinter.TclError, self.combo.current, len(self.combo['values'])) # it expects an integer (or something that can be converted to int) self.assertRaises(tkinter.TclError, self.combo.current, '') # testing creating combobox with empty string in values combo2 = ttk.Combobox(self.root, values=[1, 2, '']) self.assertEqual(combo2['values'], ('1', '2', '') if self.wantobjects else '1 2 {}') combo2.destroy() @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class EntryTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'background', 'class', 'cursor', 'exportselection', 'font', 'invalidcommand', 'justify', 'show', 'state', 'style', 'takefocus', 'textvariable', 'validate', 'validatecommand', 'width', 'xscrollcommand', ) def setUp(self): super(EntryTest, self).setUp() self.entry = self.create() def create(self, **kwargs): return ttk.Entry(self.root, **kwargs) def test_invalidcommand(self): widget = self.create() self.checkCommandParam(widget, 'invalidcommand') def test_show(self): widget = self.create() self.checkParam(widget, 'show', '*') self.checkParam(widget, 'show', '') self.checkParam(widget, 'show', ' ') def test_state(self): widget = self.create() self.checkParams(widget, 'state', 'disabled', 'normal', 'readonly') def test_validate(self): widget = self.create() self.checkEnumParam(widget, 'validate', 'all', 'key', 'focus', 'focusin', 'focusout', 'none') def test_validatecommand(self): widget = self.create() self.checkCommandParam(widget, 'validatecommand') def test_bbox(self): self.assertIsBoundingBox(self.entry.bbox(0)) self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex') self.assertRaises(tkinter.TclError, self.entry.bbox, None) def test_identify(self): self.entry.pack() self.entry.wait_visibility() self.entry.update_idletasks() self.assertEqual(self.entry.identify(5, 5), "textarea") self.assertEqual(self.entry.identify(-1, -1), "") self.assertRaises(tkinter.TclError, self.entry.identify, None, 5) self.assertRaises(tkinter.TclError, self.entry.identify, 5, None) self.assertRaises(tkinter.TclError, self.entry.identify, 5, '') def test_validation_options(self): success = [] test_invalid = lambda: success.append(True) self.entry['validate'] = 'none' self.entry['validatecommand'] = lambda: False self.entry['invalidcommand'] = test_invalid self.entry.validate() self.assertTrue(success) self.entry['invalidcommand'] = '' self.entry.validate() self.assertEqual(len(success), 1) self.entry['invalidcommand'] = test_invalid self.entry['validatecommand'] = lambda: True self.entry.validate() self.assertEqual(len(success), 1) self.entry['validatecommand'] = '' self.entry.validate() self.assertEqual(len(success), 1) self.entry['validatecommand'] = True self.assertRaises(tkinter.TclError, self.entry.validate) def test_validation(self): validation = [] def validate(to_insert): if not 'a' <= to_insert.lower() <= 'z': 
validation.append(False) return False validation.append(True) return True self.entry['validate'] = 'key' self.entry['validatecommand'] = self.entry.register(validate), '%S' self.entry.insert('end', 1) self.entry.insert('end', 'a') self.assertEqual(validation, [False, True]) self.assertEqual(self.entry.get(), 'a') def test_revalidation(self): def validate(content): for letter in content: if not 'a' <= letter.lower() <= 'z': return False return True self.entry['validatecommand'] = self.entry.register(validate), '%P' self.entry.insert('end', 'avocado') self.assertEqual(self.entry.validate(), True) self.assertEqual(self.entry.state(), ()) self.entry.delete(0, 'end') self.assertEqual(self.entry.get(), '') self.entry.insert('end', 'a1b') self.assertEqual(self.entry.validate(), False) self.assertEqual(self.entry.state(), ('invalid', )) self.entry.delete(1) self.assertEqual(self.entry.validate(), True) self.assertEqual(self.entry.state(), ()) @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class PanedWindowTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'orient', 'style', 'takefocus', 'width', ) def setUp(self): super(PanedWindowTest, self).setUp() self.paned = self.create() def create(self, **kwargs): return ttk.PanedWindow(self.root, **kwargs) def test_orient(self): widget = self.create() self.assertEqual(str(widget['orient']), 'vertical') errmsg='attempt to change read-only option' if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): errmsg='Attempt to change read-only option' self.checkInvalidParam(widget, 'orient', 'horizontal', errmsg=errmsg) widget2 = self.create(orient='horizontal') self.assertEqual(str(widget2['orient']), 'horizontal') def test_add(self): # attempt to add a child that is not a direct child of the paned window label = ttk.Label(self.paned) child = ttk.Label(label) self.assertRaises(tkinter.TclError, self.paned.add, child) label.destroy() child.destroy() # another attempt label = ttk.Label(self.root) child = ttk.Label(label) self.assertRaises(tkinter.TclError, self.paned.add, child) child.destroy() label.destroy() good_child = ttk.Label(self.root) self.paned.add(good_child) # re-adding a child is not accepted self.assertRaises(tkinter.TclError, self.paned.add, good_child) other_child = ttk.Label(self.paned) self.paned.add(other_child) self.assertEqual(self.paned.pane(0), self.paned.pane(1)) self.assertRaises(tkinter.TclError, self.paned.pane, 2) good_child.destroy() other_child.destroy() self.assertRaises(tkinter.TclError, self.paned.pane, 0) def test_forget(self): self.assertRaises(tkinter.TclError, self.paned.forget, None) self.assertRaises(tkinter.TclError, self.paned.forget, 0) self.paned.add(ttk.Label(self.root)) self.paned.forget(0) self.assertRaises(tkinter.TclError, self.paned.forget, 0) def test_insert(self): self.assertRaises(tkinter.TclError, self.paned.insert, None, 0) self.assertRaises(tkinter.TclError, self.paned.insert, 0, None) self.assertRaises(tkinter.TclError, self.paned.insert, 0, 0) child = ttk.Label(self.root) child2 = ttk.Label(self.root) child3 = ttk.Label(self.root) self.assertRaises(tkinter.TclError, self.paned.insert, 0, child) self.paned.insert('end', child2) self.paned.insert(0, child) self.assertEqual(self.paned.panes(), (str(child), str(child2))) self.paned.insert(0, child2) self.assertEqual(self.paned.panes(), (str(child2), str(child))) self.paned.insert('end', child3) self.assertEqual(self.paned.panes(), (str(child2), str(child), str(child3))) # reinserting a child should move it to its 
current position panes = self.paned.panes() self.paned.insert('end', child3) self.assertEqual(panes, self.paned.panes()) # moving child3 to child2 position should result in child2 ending up # in previous child position and child ending up in previous child3 # position self.paned.insert(child2, child3) self.assertEqual(self.paned.panes(), (str(child3), str(child2), str(child))) def test_pane(self): self.assertRaises(tkinter.TclError, self.paned.pane, 0) child = ttk.Label(self.root) self.paned.add(child) self.assertIsInstance(self.paned.pane(0), dict) self.assertEqual(self.paned.pane(0, weight=None), 0 if self.wantobjects else '0') # newer form for querying a single option self.assertEqual(self.paned.pane(0, 'weight'), 0 if self.wantobjects else '0') self.assertEqual(self.paned.pane(0), self.paned.pane(str(child))) self.assertRaises(tkinter.TclError, self.paned.pane, 0, badoption='somevalue') def test_sashpos(self): self.assertRaises(tkinter.TclError, self.paned.sashpos, None) self.assertRaises(tkinter.TclError, self.paned.sashpos, '') self.assertRaises(tkinter.TclError, self.paned.sashpos, 0) child = ttk.Label(self.paned, text='a') self.paned.add(child, weight=1) self.assertRaises(tkinter.TclError, self.paned.sashpos, 0) child2 = ttk.Label(self.paned, text='b') self.paned.add(child2) self.assertRaises(tkinter.TclError, self.paned.sashpos, 1) self.paned.pack(expand=True, fill='both') self.paned.wait_visibility() curr_pos = self.paned.sashpos(0) self.paned.sashpos(0, 1000) self.assertNotEqual(curr_pos, self.paned.sashpos(0)) self.assertIsInstance(self.paned.sashpos(0), int) @add_standard_options(StandardTtkOptionsTests) class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'compound', 'cursor', 'image', 'state', 'style', 'takefocus', 'text', 'textvariable', 'underline', 'value', 'variable', 'width', ) def create(self, **kwargs): return ttk.Radiobutton(self.root, **kwargs) def test_value(self): widget = self.create() self.checkParams(widget, 'value', 1, 2.3, '', 'any string') def test_invoke(self): success = [] def cb_test(): success.append(1) return "cb test called" myvar = tkinter.IntVar(self.root) cbtn = ttk.Radiobutton(self.root, command=cb_test, variable=myvar, value=0) cbtn2 = ttk.Radiobutton(self.root, command=cb_test, variable=myvar, value=1) if self.wantobjects: conv = lambda x: x else: conv = int res = cbtn.invoke() self.assertEqual(res, "cb test called") self.assertEqual(conv(cbtn['value']), myvar.get()) self.assertEqual(myvar.get(), conv(cbtn.tk.globalgetvar(cbtn['variable']))) self.assertTrue(success) cbtn2['command'] = '' res = cbtn2.invoke() self.assertEqual(str(res), '') self.assertLessEqual(len(success), 1) self.assertEqual(conv(cbtn2['value']), myvar.get()) self.assertEqual(myvar.get(), conv(cbtn.tk.globalgetvar(cbtn['variable']))) self.assertEqual(str(cbtn['variable']), str(cbtn2['variable'])) class MenubuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'compound', 'cursor', 'direction', 'image', 'menu', 'state', 'style', 'takefocus', 'text', 'textvariable', 'underline', 'width', ) def create(self, **kwargs): return ttk.Menubutton(self.root, **kwargs) def test_direction(self): widget = self.create() self.checkEnumParam(widget, 'direction', 'above', 'below', 'left', 'right', 'flush') def test_menu(self): widget = self.create() menu = tkinter.Menu(widget, name='menu') self.checkParam(widget, 'menu', menu, conv=str) menu.destroy() @add_standard_options(StandardTtkOptionsTests) class ScaleTest(AbstractWidgetTest, 
unittest.TestCase): OPTIONS = ( 'class', 'command', 'cursor', 'from', 'length', 'orient', 'style', 'takefocus', 'to', 'value', 'variable', ) _conv_pixels = noconv_meth default_orient = 'horizontal' def setUp(self): super(ScaleTest, self).setUp() self.scale = self.create() self.scale.pack() self.scale.update() def create(self, **kwargs): return ttk.Scale(self.root, **kwargs) def test_from(self): widget = self.create() self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=False) def test_length(self): widget = self.create() self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i') def test_to(self): widget = self.create() self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False) def test_value(self): widget = self.create() self.checkFloatParam(widget, 'value', 300, 14.9, 15.1, -10, conv=False) def test_custom_event(self): failure = [1, 1, 1] # will need to be empty funcid = self.scale.bind('<<RangeChanged>>', lambda evt: failure.pop()) self.scale['from'] = 10 self.scale['from_'] = 10 self.scale['to'] = 3 self.assertFalse(failure) failure = [1, 1, 1] self.scale.configure(from_=2, to=5) self.scale.configure(from_=0, to=-2) self.scale.configure(to=10) self.assertFalse(failure) def test_get(self): if self.wantobjects: conv = lambda x: x else: conv = float scale_width = self.scale.winfo_width() self.assertEqual(self.scale.get(scale_width, 0), self.scale['to']) self.assertEqual(conv(self.scale.get(0, 0)), conv(self.scale['from'])) self.assertEqual(self.scale.get(), self.scale['value']) self.scale['value'] = 30 self.assertEqual(self.scale.get(), self.scale['value']) self.assertRaises(tkinter.TclError, self.scale.get, '', 0) self.assertRaises(tkinter.TclError, self.scale.get, 0, '') def test_set(self): if self.wantobjects: conv = lambda x: x else: conv = float # set restricts the max/min values according to the current range max = conv(self.scale['to']) new_max = max + 10 self.scale.set(new_max) self.assertEqual(conv(self.scale.get()), max) min = conv(self.scale['from']) self.scale.set(min - 1) self.assertEqual(conv(self.scale.get()), min) # changing directly the variable doesn't impose this limitation tho var = tkinter.DoubleVar(self.root) self.scale['variable'] = var var.set(max + 5) self.assertEqual(conv(self.scale.get()), var.get()) self.assertEqual(conv(self.scale.get()), max + 5) del var # the same happens with the value option self.scale['value'] = max + 10 self.assertEqual(conv(self.scale.get()), max + 10) self.assertEqual(conv(self.scale.get()), conv(self.scale['value'])) # nevertheless, note that the max/min values we can get specifying # x, y coords are the ones according to the current range self.assertEqual(conv(self.scale.get(0, 0)), min) self.assertEqual(conv(self.scale.get(self.scale.winfo_width(), 0)), max) self.assertRaises(tkinter.TclError, self.scale.set, None) @add_standard_options(StandardTtkOptionsTests) class ProgressbarTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'orient', 'length', 'mode', 'maximum', 'phase', 'style', 'takefocus', 'value', 'variable', ) _conv_pixels = noconv_meth default_orient = 'horizontal' def create(self, **kwargs): return ttk.Progressbar(self.root, **kwargs) def test_length(self): widget = self.create() self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i') def test_maximum(self): widget = self.create() self.checkFloatParam(widget, 'maximum', 150.2, 77.7, 0, -10, conv=False) def test_mode(self): widget = self.create() self.checkEnumParam(widget, 'mode', 'determinate', 'indeterminate') def 
test_phase(self): # XXX pass def test_value(self): widget = self.create() self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10, conv=False) @unittest.skipIf(sys.platform == 'darwin', 'ttk.Scrollbar is special on MacOSX') @add_standard_options(StandardTtkOptionsTests) class ScrollbarTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'cursor', 'orient', 'style', 'takefocus', ) default_orient = 'vertical' def create(self, **kwargs): return ttk.Scrollbar(self.root, **kwargs) @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class NotebookTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', ) def setUp(self): super(NotebookTest, self).setUp() self.nb = self.create(padding=0) self.child1 = ttk.Label(self.root) self.child2 = ttk.Label(self.root) self.nb.add(self.child1, text='a') self.nb.add(self.child2, text='b') def create(self, **kwargs): return ttk.Notebook(self.root, **kwargs) def test_tab_identifiers(self): self.nb.forget(0) self.nb.hide(self.child2) self.assertRaises(tkinter.TclError, self.nb.tab, self.child1) self.assertEqual(self.nb.index('end'), 1) self.nb.add(self.child2) self.assertEqual(self.nb.index('end'), 1) self.nb.select(self.child2) self.assertTrue(self.nb.tab('current')) self.nb.add(self.child1, text='a') self.nb.pack() self.nb.wait_visibility() if sys.platform == 'darwin': tb_idx = "@20,5" else: tb_idx = "@5,5" self.assertEqual(self.nb.tab(tb_idx), self.nb.tab('current')) for i in range(5, 100, 5): try: if self.nb.tab('@%d, 5' % i, text=None) == 'a': break except tkinter.TclError: pass else: self.fail("Tab with text 'a' not found") def test_add_and_hidden(self): self.assertRaises(tkinter.TclError, self.nb.hide, -1) self.assertRaises(tkinter.TclError, self.nb.hide, 'hi') self.assertRaises(tkinter.TclError, self.nb.hide, None) self.assertRaises(tkinter.TclError, self.nb.add, None) self.assertRaises(tkinter.TclError, self.nb.add, ttk.Label(self.root), unknown='option') tabs = self.nb.tabs() self.nb.hide(self.child1) self.nb.add(self.child1) self.assertEqual(self.nb.tabs(), tabs) child = ttk.Label(self.root) self.nb.add(child, text='c') tabs = self.nb.tabs() curr = self.nb.index('current') # verify that the tab gets readded at its previous position child2_index = self.nb.index(self.child2) self.nb.hide(self.child2) self.nb.add(self.child2) self.assertEqual(self.nb.tabs(), tabs) self.assertEqual(self.nb.index(self.child2), child2_index) self.assertEqual(str(self.child2), self.nb.tabs()[child2_index]) # but the tab next to it (not hidden) is the one selected now self.assertEqual(self.nb.index('current'), curr + 1) def test_forget(self): self.assertRaises(tkinter.TclError, self.nb.forget, -1) self.assertRaises(tkinter.TclError, self.nb.forget, 'hi') self.assertRaises(tkinter.TclError, self.nb.forget, None) tabs = self.nb.tabs() child1_index = self.nb.index(self.child1) self.nb.forget(self.child1) self.assertNotIn(str(self.child1), self.nb.tabs()) self.assertEqual(len(tabs) - 1, len(self.nb.tabs())) self.nb.add(self.child1) self.assertEqual(self.nb.index(self.child1), 1) self.assertNotEqual(child1_index, self.nb.index(self.child1)) def test_index(self): self.assertRaises(tkinter.TclError, self.nb.index, -1) self.assertRaises(tkinter.TclError, self.nb.index, None) self.assertIsInstance(self.nb.index('end'), int) self.assertEqual(self.nb.index(self.child1), 0) self.assertEqual(self.nb.index(self.child2), 1) self.assertEqual(self.nb.index('end'), 2) def test_insert(self): # 
moving tabs tabs = self.nb.tabs() self.nb.insert(1, tabs[0]) self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0])) self.nb.insert(self.child1, self.child2) self.assertEqual(self.nb.tabs(), tabs) self.nb.insert('end', self.child1) self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0])) self.nb.insert('end', 0) self.assertEqual(self.nb.tabs(), tabs) # bad moves self.assertRaises(tkinter.TclError, self.nb.insert, 2, tabs[0]) self.assertRaises(tkinter.TclError, self.nb.insert, -1, tabs[0]) # new tab child3 = ttk.Label(self.root) self.nb.insert(1, child3) self.assertEqual(self.nb.tabs(), (tabs[0], str(child3), tabs[1])) self.nb.forget(child3) self.assertEqual(self.nb.tabs(), tabs) self.nb.insert(self.child1, child3) self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs) self.nb.forget(child3) self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3) self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3) # bad inserts self.assertRaises(tkinter.TclError, self.nb.insert, 'end', None) self.assertRaises(tkinter.TclError, self.nb.insert, None, 0) self.assertRaises(tkinter.TclError, self.nb.insert, None, None) def test_select(self): self.nb.pack() self.nb.wait_visibility() success = [] tab_changed = [] self.child1.bind('<Unmap>', lambda evt: success.append(True)) self.nb.bind('<<NotebookTabChanged>>', lambda evt: tab_changed.append(True)) self.assertEqual(self.nb.select(), str(self.child1)) self.nb.select(self.child2) self.assertTrue(success) self.assertEqual(self.nb.select(), str(self.child2)) self.nb.update() self.assertTrue(tab_changed) def test_tab(self): self.assertRaises(tkinter.TclError, self.nb.tab, -1) self.assertRaises(tkinter.TclError, self.nb.tab, 'notab') self.assertRaises(tkinter.TclError, self.nb.tab, None) self.assertIsInstance(self.nb.tab(self.child1), dict) self.assertEqual(self.nb.tab(self.child1, text=None), 'a') # newer form for querying a single option self.assertEqual(self.nb.tab(self.child1, 'text'), 'a') self.nb.tab(self.child1, text='abc') self.assertEqual(self.nb.tab(self.child1, text=None), 'abc') self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc') def test_tabs(self): self.assertEqual(len(self.nb.tabs()), 2) self.nb.forget(self.child1) self.nb.forget(self.child2) self.assertEqual(self.nb.tabs(), ()) def test_traversal(self): self.nb.pack() self.nb.wait_visibility() self.nb.select(0) simulate_mouse_click(self.nb, 5, 5) self.nb.focus_force() self.nb.event_generate('<Control-Tab>') self.assertEqual(self.nb.select(), str(self.child2)) self.nb.focus_force() self.nb.event_generate('<Shift-Control-Tab>') self.assertEqual(self.nb.select(), str(self.child1)) self.nb.focus_force() self.nb.event_generate('<Shift-Control-Tab>') self.assertEqual(self.nb.select(), str(self.child2)) self.nb.tab(self.child1, text='a', underline=0) self.nb.enable_traversal() self.nb.focus_force() simulate_mouse_click(self.nb, 5, 5) if sys.platform == 'darwin': self.nb.event_generate('<Option-a>') else: self.nb.event_generate('<Alt-a>') self.assertEqual(self.nb.select(), str(self.child1)) @add_standard_options(StandardTtkOptionsTests) class TreeviewTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'columns', 'cursor', 'displaycolumns', 'height', 'padding', 'selectmode', 'show', 'style', 'takefocus', 'xscrollcommand', 'yscrollcommand', ) def setUp(self): super(TreeviewTest, self).setUp() self.tv = self.create(padding=0) def create(self, **kwargs): return ttk.Treeview(self.root, **kwargs) def test_columns(self): widget = self.create() self.checkParam(widget, 'columns', 'a b 
c', expected=('a', 'b', 'c')) self.checkParam(widget, 'columns', ('a', 'b', 'c')) self.checkParam(widget, 'columns', () if tcl_version < (8, 5) else '') def test_displaycolumns(self): widget = self.create() widget['columns'] = ('a', 'b', 'c') self.checkParam(widget, 'displaycolumns', 'b a c', expected=('b', 'a', 'c')) self.checkParam(widget, 'displaycolumns', ('b', 'a', 'c')) self.checkParam(widget, 'displaycolumns', '#all', expected=('#all',)) self.checkParam(widget, 'displaycolumns', (2, 1, 0)) self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'), errmsg='Invalid column index d') self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3), errmsg='Column index 3 out of bounds') self.checkInvalidParam(widget, 'displaycolumns', (1, -2), errmsg='Column index -2 out of bounds') def test_height(self): widget = self.create() self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False) self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=noconv) def test_selectmode(self): widget = self.create() self.checkEnumParam(widget, 'selectmode', 'none', 'browse', 'extended') def test_show(self): widget = self.create() self.checkParam(widget, 'show', 'tree headings', expected=('tree', 'headings')) self.checkParam(widget, 'show', ('tree', 'headings')) self.checkParam(widget, 'show', ('headings', 'tree')) self.checkParam(widget, 'show', 'tree', expected=('tree',)) self.checkParam(widget, 'show', 'headings', expected=('headings',)) def test_bbox(self): self.tv.pack() self.assertEqual(self.tv.bbox(''), '') self.tv.wait_visibility() self.tv.update() item_id = self.tv.insert('', 'end') children = self.tv.get_children() self.assertTrue(children) bbox = self.tv.bbox(children[0]) self.assertIsBoundingBox(bbox) # compare width in bboxes self.tv['columns'] = ['test'] self.tv.column('test', width=50) bbox_column0 = self.tv.bbox(children[0], 0) root_width = self.tv.column('#0', width=None) if not self.wantobjects: root_width = int(root_width) self.assertEqual(bbox_column0[0], bbox[0] + root_width) # verify that bbox of a closed item is the empty string child1 = self.tv.insert(item_id, 'end') self.assertEqual(self.tv.bbox(child1), '') def test_children(self): # no children yet, should get an empty tuple self.assertEqual(self.tv.get_children(), ()) item_id = self.tv.insert('', 'end') self.assertIsInstance(self.tv.get_children(), tuple) self.assertEqual(self.tv.get_children()[0], item_id) # add item_id and child3 as children of child2 child2 = self.tv.insert('', 'end') child3 = self.tv.insert('', 'end') self.tv.set_children(child2, item_id, child3) self.assertEqual(self.tv.get_children(child2), (item_id, child3)) # child3 has child2 as parent, thus trying to set child2 as a children # of child3 should result in an error self.assertRaises(tkinter.TclError, self.tv.set_children, child3, child2) # remove child2 children self.tv.set_children(child2) self.assertEqual(self.tv.get_children(child2), ()) # remove root's children self.tv.set_children('') self.assertEqual(self.tv.get_children(), ()) def test_column(self): # return a dict with all options/values self.assertIsInstance(self.tv.column('#0'), dict) # return a single value of the given option if self.wantobjects: self.assertIsInstance(self.tv.column('#0', width=None), int) # set a new value for an option self.tv.column('#0', width=10) # testing new way to get option value self.assertEqual(self.tv.column('#0', 'width'), 10 if self.wantobjects else '10') self.assertEqual(self.tv.column('#0', width=None), 10 if self.wantobjects else '10') # check 
read-only option self.assertRaises(tkinter.TclError, self.tv.column, '#0', id='X') self.assertRaises(tkinter.TclError, self.tv.column, 'invalid') invalid_kws = [ {'unknown_option': 'some value'}, {'stretch': 'wrong'}, {'anchor': 'wrong'}, {'width': 'wrong'}, {'minwidth': 'wrong'} ] for kw in invalid_kws: self.assertRaises(tkinter.TclError, self.tv.column, '#0', **kw) def test_delete(self): self.assertRaises(tkinter.TclError, self.tv.delete, '#0') item_id = self.tv.insert('', 'end') item2 = self.tv.insert(item_id, 'end') self.assertEqual(self.tv.get_children(), (item_id, )) self.assertEqual(self.tv.get_children(item_id), (item2, )) self.tv.delete(item_id) self.assertFalse(self.tv.get_children()) # reattach should fail self.assertRaises(tkinter.TclError, self.tv.reattach, item_id, '', 'end') # test multiple item delete item1 = self.tv.insert('', 'end') item2 = self.tv.insert('', 'end') self.assertEqual(self.tv.get_children(), (item1, item2)) self.tv.delete(item1, item2) self.assertFalse(self.tv.get_children()) def test_detach_reattach(self): item_id = self.tv.insert('', 'end') item2 = self.tv.insert(item_id, 'end') # calling detach without items is valid, although it does nothing prev = self.tv.get_children() self.tv.detach() # this should do nothing self.assertEqual(prev, self.tv.get_children()) self.assertEqual(self.tv.get_children(), (item_id, )) self.assertEqual(self.tv.get_children(item_id), (item2, )) # detach item with children self.tv.detach(item_id) self.assertFalse(self.tv.get_children()) # reattach item with children self.tv.reattach(item_id, '', 'end') self.assertEqual(self.tv.get_children(), (item_id, )) self.assertEqual(self.tv.get_children(item_id), (item2, )) # move a children to the root self.tv.move(item2, '', 'end') self.assertEqual(self.tv.get_children(), (item_id, item2)) self.assertEqual(self.tv.get_children(item_id), ()) # bad values self.assertRaises(tkinter.TclError, self.tv.reattach, 'nonexistent', '', 'end') self.assertRaises(tkinter.TclError, self.tv.detach, 'nonexistent') self.assertRaises(tkinter.TclError, self.tv.reattach, item2, 'otherparent', 'end') self.assertRaises(tkinter.TclError, self.tv.reattach, item2, '', 'invalid') # multiple detach self.tv.detach(item_id, item2) self.assertEqual(self.tv.get_children(), ()) self.assertEqual(self.tv.get_children(item_id), ()) def test_exists(self): self.assertEqual(self.tv.exists('something'), False) self.assertEqual(self.tv.exists(''), True) self.assertEqual(self.tv.exists({}), False) # the following will make a tk.call equivalent to # tk.call(treeview, "exists") which should result in an error # in the tcl interpreter since tk requires an item. 
self.assertRaises(tkinter.TclError, self.tv.exists, None) def test_focus(self): # nothing is focused right now self.assertEqual(self.tv.focus(), '') item1 = self.tv.insert('', 'end') self.tv.focus(item1) self.assertEqual(self.tv.focus(), item1) self.tv.delete(item1) self.assertEqual(self.tv.focus(), '') # try focusing inexistent item self.assertRaises(tkinter.TclError, self.tv.focus, 'hi') def test_heading(self): # check a dict is returned self.assertIsInstance(self.tv.heading('#0'), dict) # check a value is returned self.tv.heading('#0', text='hi') self.assertEqual(self.tv.heading('#0', 'text'), 'hi') self.assertEqual(self.tv.heading('#0', text=None), 'hi') # invalid option self.assertRaises(tkinter.TclError, self.tv.heading, '#0', background=None) # invalid value self.assertRaises(tkinter.TclError, self.tv.heading, '#0', anchor=1) def test_heading_callback(self): def simulate_heading_click(x, y): simulate_mouse_click(self.tv, x, y) self.tv.update() success = [] # no success for now self.tv.pack() self.tv.wait_visibility() self.tv.heading('#0', command=lambda: success.append(True)) self.tv.column('#0', width=100) self.tv.update() # assuming that the coords (5, 5) fall into heading #0 simulate_heading_click(5, 5) if not success: self.fail("The command associated to the treeview heading wasn't " "invoked.") success = [] commands = self.tv.master._tclCommands self.tv.heading('#0', command=str(self.tv.heading('#0', command=None))) self.assertEqual(commands, self.tv.master._tclCommands) simulate_heading_click(5, 5) if not success: self.fail("The command associated to the treeview heading wasn't " "invoked.") # XXX The following raises an error in a tcl interpreter, but not in # Python #self.tv.heading('#0', command='I dont exist') #simulate_heading_click(5, 5) def test_index(self): # item 'what' doesn't exist self.assertRaises(tkinter.TclError, self.tv.index, 'what') self.assertEqual(self.tv.index(''), 0) item1 = self.tv.insert('', 'end') item2 = self.tv.insert('', 'end') c1 = self.tv.insert(item1, 'end') c2 = self.tv.insert(item1, 'end') self.assertEqual(self.tv.index(item1), 0) self.assertEqual(self.tv.index(c1), 0) self.assertEqual(self.tv.index(c2), 1) self.assertEqual(self.tv.index(item2), 1) self.tv.move(item2, '', 0) self.assertEqual(self.tv.index(item2), 0) self.assertEqual(self.tv.index(item1), 1) # check that index still works even after its parent and siblings # have been detached self.tv.detach(item1) self.assertEqual(self.tv.index(c2), 1) self.tv.detach(c1) self.assertEqual(self.tv.index(c2), 0) # but it fails after item has been deleted self.tv.delete(item1) self.assertRaises(tkinter.TclError, self.tv.index, c2) def test_insert_item(self): # parent 'none' doesn't exist self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end') # open values self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', open='') self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', open='please') self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=True))) self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=False))) # invalid index self.assertRaises(tkinter.TclError, self.tv.insert, '', 'middle') # trying to duplicate item id is invalid itemid = self.tv.insert('', 'end', 'first-item') self.assertEqual(itemid, 'first-item') self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', 'first-item') self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end', MockTclObj('first-item')) # unicode values value = u'\xe1ba' item = self.tv.insert('', 'end', 
values=(value, )) self.assertEqual(self.tv.item(item, 'values'), (value,) if self.wantobjects else value) self.assertEqual(self.tv.item(item, values=None), (value,) if self.wantobjects else value) self.tv.item(item, values=self.root.splitlist(self.tv.item(item, values=None))) self.assertEqual(self.tv.item(item, values=None), (value,) if self.wantobjects else value) self.assertIsInstance(self.tv.item(item), dict) # erase item values self.tv.item(item, values='') self.assertFalse(self.tv.item(item, values=None)) # item tags item = self.tv.insert('', 'end', tags=[1, 2, value]) self.assertEqual(self.tv.item(item, tags=None), ('1', '2', value) if self.wantobjects else '1 2 %s' % value) self.tv.item(item, tags=[]) self.assertFalse(self.tv.item(item, tags=None)) self.tv.item(item, tags=(1, 2)) self.assertEqual(self.tv.item(item, tags=None), ('1', '2') if self.wantobjects else '1 2') # values with spaces item = self.tv.insert('', 'end', values=('a b c', '%s %s' % (value, value))) self.assertEqual(self.tv.item(item, values=None), ('a b c', '%s %s' % (value, value)) if self.wantobjects else '{a b c} {%s %s}' % (value, value)) # text self.assertEqual(self.tv.item( self.tv.insert('', 'end', text="Label here"), text=None), "Label here") self.assertEqual(self.tv.item( self.tv.insert('', 'end', text=value), text=None), value) def test_set(self): self.tv['columns'] = ['A', 'B'] item = self.tv.insert('', 'end', values=['a', 'b']) self.assertEqual(self.tv.set(item), {'A': 'a', 'B': 'b'}) self.tv.set(item, 'B', 'a') self.assertEqual(self.tv.item(item, values=None), ('a', 'a') if self.wantobjects else 'a a') self.tv['columns'] = ['B'] self.assertEqual(self.tv.set(item), {'B': 'a'}) self.tv.set(item, 'B', 'b') self.assertEqual(self.tv.set(item, column='B'), 'b') self.assertEqual(self.tv.item(item, values=None), ('b', 'a') if self.wantobjects else 'b a') self.tv.set(item, 'B', 123) self.assertEqual(self.tv.set(item, 'B'), 123 if self.wantobjects else '123') self.assertEqual(self.tv.item(item, values=None), (123, 'a') if self.wantobjects else '123 a') self.assertEqual(self.tv.set(item), {'B': 123} if self.wantobjects else {'B': '123'}) # inexistent column self.assertRaises(tkinter.TclError, self.tv.set, item, 'A') self.assertRaises(tkinter.TclError, self.tv.set, item, 'A', 'b') # inexistent item self.assertRaises(tkinter.TclError, self.tv.set, 'notme') def test_tag_bind(self): events = [] item1 = self.tv.insert('', 'end', tags=['call']) item2 = self.tv.insert('', 'end', tags=['call']) self.tv.tag_bind('call', '<ButtonPress-1>', lambda evt: events.append(1)) self.tv.tag_bind('call', '<ButtonRelease-1>', lambda evt: events.append(2)) self.tv.pack() self.tv.wait_visibility() self.tv.update() pos_y = set() found = set() for i in range(0, 100, 10): if len(found) == 2: # item1 and item2 already found break item_id = self.tv.identify_row(i) if item_id and item_id not in found: pos_y.add(i) found.add(item_id) self.assertEqual(len(pos_y), 2) # item1 and item2 y pos for y in pos_y: simulate_mouse_click(self.tv, 0, y) # by now there should be 4 things in the events list, since each # item had a bind for two events that were simulated above self.assertEqual(len(events), 4) for evt in zip(events[::2], events[1::2]): self.assertEqual(evt, (1, 2)) def test_tag_configure(self): # Just testing parameter passing for now self.assertRaises(TypeError, self.tv.tag_configure) self.assertRaises(tkinter.TclError, self.tv.tag_configure, 'test', sky='blue') self.tv.tag_configure('test', foreground='blue') 
self.assertEqual(str(self.tv.tag_configure('test', 'foreground')), 'blue') self.assertEqual(str(self.tv.tag_configure('test', foreground=None)), 'blue') self.assertIsInstance(self.tv.tag_configure('test'), dict) def test_tag_has(self): item1 = self.tv.insert('', 'end', text='Item 1', tags=['tag1']) item2 = self.tv.insert('', 'end', text='Item 2', tags=['tag2']) self.assertRaises(TypeError, self.tv.tag_has) self.assertRaises(TclError, self.tv.tag_has, 'tag1', 'non-existing') self.assertTrue(self.tv.tag_has('tag1', item1)) self.assertFalse(self.tv.tag_has('tag1', item2)) self.assertFalse(self.tv.tag_has('tag2', item1)) self.assertTrue(self.tv.tag_has('tag2', item2)) self.assertFalse(self.tv.tag_has('tag3', item1)) self.assertFalse(self.tv.tag_has('tag3', item2)) self.assertEqual(self.tv.tag_has('tag1'), (item1,)) self.assertEqual(self.tv.tag_has('tag2'), (item2,)) self.assertEqual(self.tv.tag_has('tag3'), ()) @add_standard_options(StandardTtkOptionsTests) class SeparatorTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'orient', 'style', 'takefocus', # 'state'? ) default_orient = 'horizontal' def create(self, **kwargs): return ttk.Separator(self.root, **kwargs) @add_standard_options(StandardTtkOptionsTests) class SizegripTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'style', 'takefocus', # 'state'? ) def create(self, **kwargs): return ttk.Sizegrip(self.root, **kwargs) tests_gui = ( ButtonTest, CheckbuttonTest, ComboboxTest, EntryTest, FrameTest, LabelFrameTest, LabelTest, MenubuttonTest, NotebookTest, PanedWindowTest, ProgressbarTest, RadiobuttonTest, ScaleTest, ScrollbarTest, SeparatorTest, SizegripTest, TreeviewTest, WidgetTest, ) if __name__ == "__main__": run_unittest(*tests_gui)
gpl-2.0
2,572,791,768,090,678,000
34.761757
87
0.589711
false
codesparkle/youtube-dl
youtube_dl/extractor/addanime.py
29
3281
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..compat import (
    compat_HTTPError,
    compat_str,
    compat_urllib_parse_urlencode,
    compat_urllib_parse_urlparse,
)
from ..utils import (
    ExtractorError,
    qualities,
)


class AddAnimeIE(InfoExtractor):
    _VALID_URL = r'https?://(?:\w+\.)?add-anime\.net/(?:watch_video\.php\?(?:.*?)v=|video/)(?P<id>[\w_]+)'
    _TESTS = [{
        'url': 'http://www.add-anime.net/watch_video.php?v=24MR3YO5SAS9',
        'md5': '72954ea10bc979ab5e2eb288b21425a0',
        'info_dict': {
            'id': '24MR3YO5SAS9',
            'ext': 'mp4',
            'description': 'One Piece 606',
            'title': 'One Piece 606',
        }
    }, {
        'url': 'http://add-anime.net/video/MDUGWYKNGBD8/One-Piece-687',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        try:
            webpage = self._download_webpage(url, video_id)
        except ExtractorError as ee:
            if not isinstance(ee.cause, compat_HTTPError) or \
               ee.cause.code != 503:
                raise

            redir_webpage = ee.cause.read().decode('utf-8')
            action = self._search_regex(
                r'<form id="challenge-form" action="([^"]+)"',
                redir_webpage, 'Redirect form')
            vc = self._search_regex(
                r'<input type="hidden" name="jschl_vc" value="([^"]+)"/>',
                redir_webpage, 'redirect vc value')
            av = re.search(
                r'a\.value = ([0-9]+)[+]([0-9]+)[*]([0-9]+);',
                redir_webpage)
            if av is None:
                raise ExtractorError('Cannot find redirect math task')
            av_res = int(av.group(1)) + int(av.group(2)) * int(av.group(3))

            parsed_url = compat_urllib_parse_urlparse(url)
            av_val = av_res + len(parsed_url.netloc)
            confirm_url = (
                parsed_url.scheme + '://' + parsed_url.netloc +
                action + '?' +
                compat_urllib_parse_urlencode({
                    'jschl_vc': vc, 'jschl_answer': compat_str(av_val)}))
            self._download_webpage(
                confirm_url, video_id,
                note='Confirming after redirect')
            webpage = self._download_webpage(url, video_id)

        FORMATS = ('normal', 'hq')
        quality = qualities(FORMATS)
        formats = []
        for format_id in FORMATS:
            rex = r"var %s_video_file = '(.*?)';" % re.escape(format_id)
            video_url = self._search_regex(rex, webpage, 'video file URL',
                                           fatal=False)
            if not video_url:
                continue
            formats.append({
                'format_id': format_id,
                'url': video_url,
                'quality': quality(format_id),
            })
        self._sort_formats(formats)
        video_title = self._og_search_title(webpage)
        video_description = self._og_search_description(webpage)

        return {
            '_type': 'video',
            'id': video_id,
            'formats': formats,
            'title': video_title,
            'description': video_description
        }
unlicense
4,205,717,122,326,895,600
33.904255
106
0.502591
false
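Note on the 503 challenge handled in addanime.py above: the extractor solves a small arithmetic task embedded in the redirect page and offsets the result by the hostname length before posting it back. A standalone sketch of that computation; the challenge string and URL below are made up for illustration and urllib.parse stands in for the extractor's compat_urllib_parse_urlparse:

import re
from urllib.parse import urlparse  # stands in for compat_urllib_parse_urlparse

redir_webpage = "a.value = 3+5*7;"  # hypothetical challenge, not a real page
av = re.search(r'a\.value = ([0-9]+)[+]([0-9]+)[*]([0-9]+);', redir_webpage)
av_res = int(av.group(1)) + int(av.group(2)) * int(av.group(3))  # 3 + 5*7 = 38
netloc = urlparse('http://www.add-anime.net/watch_video.php?v=X').netloc
jschl_answer = av_res + len(netloc)  # 38 + len('www.add-anime.net') = 55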
irees/emdash
emdash/ui/Ui_Wizard.py
1
1634
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'Ui_Wizard.ui' # # Created: Tue Jul 31 04:19:55 2012 # by: PyQt4 UI code generator 4.9.1 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_Wizard(object): def setupUi(self, Wizard): Wizard.setObjectName(_fromUtf8("Wizard")) Wizard.resize(329, 355) self.verticalLayout = QtGui.QVBoxLayout(Wizard) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.label_help = QtGui.QLabel(Wizard) self.label_help.setText(_fromUtf8("")) self.label_help.setTextFormat(QtCore.Qt.RichText) self.label_help.setWordWrap(True) self.label_help.setOpenExternalLinks(True) self.label_help.setObjectName(_fromUtf8("label_help")) self.verticalLayout.addWidget(self.label_help) self.line = QtGui.QFrame(Wizard) self.line.setFrameShape(QtGui.QFrame.HLine) self.line.setFrameShadow(QtGui.QFrame.Sunken) self.line.setObjectName(_fromUtf8("line")) self.verticalLayout.addWidget(self.line) self.layout = QtGui.QVBoxLayout() self.layout.setObjectName(_fromUtf8("layout")) self.verticalLayout.addLayout(self.layout) self.retranslateUi(Wizard) QtCore.QMetaObject.connectSlotsByName(Wizard) def retranslateUi(self, Wizard): Wizard.setWindowTitle(QtGui.QApplication.translate("Wizard", "WizardPage", None, QtGui.QApplication.UnicodeUTF8))
bsd-3-clause
5,817,218,018,311,960,000
36.136364
121
0.69339
false
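The generated Ui_Wizard class above follows the usual PyQt4 two-step pattern: instantiate a plain widget, then let setupUi build the children onto it. A minimal, hypothetical driver (not part of the emdash project):

import sys
from PyQt4 import QtGui
# from emdash.ui.Ui_Wizard import Ui_Wizard  # as laid out in the repo above

app = QtGui.QApplication(sys.argv)
page = QtGui.QWidget()
ui = Ui_Wizard()
ui.setupUi(page)  # creates label_help, the separator line and the 'layout' box
ui.label_help.setText('<b>Step 1:</b> choose a session')  # label is RichText
page.show()
sys.exit(app.exec_())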
str4d/i2p-tools
netdb/tests/test_netdb.py
2
1894
# test_netdb.py - Test netdb.py
# Author: Chris Barry <[email protected]>
# License: MIT
# Note: this uses py.test.

import netdb,os,random

'''
def test_inspect():
    netdb.inspect()
'''

def test_sha256():
    assert('d2f4e10adac32aeb600c2f57ba2bac1019a5c76baa65042714ed2678844320d0' == netdb.netdb.sha256('i2p is cool', raw=False))

def test_address_valid():
    invalid = netdb.netdb.Address()
    valid = netdb.netdb.Address()
    valid.cost = 10
    valid.transport = 'SSU'
    valid.options = {'host': '0.0.0.0', 'port': '1234', 'key': '', 'caps': ''}
    valid.expire = 0
    valid.firewalled = False
    assert(valid.valid() and not invalid.valid())

def test_address_repr():
    valid = netdb.netdb.Address()
    valid.cost = 10
    valid.transport = 'SSU'
    valid.options = {'host': '0.0.0.0', 'port': '1234', 'key': '', 'caps': ''}
    valid.expire = 0
    valid.firewalled = False
    assert(repr(valid) == 'Address: transport=SSU cost=10 expire=0 options={\'host\': \'0.0.0.0\', \'port\': \'1234\', \'key\': \'\', \'caps\': \'\'} location=None firewalled=False')

# TODO: test_entry*
def test_entry_read_short():
    assert(True)
def test_entry_read_mapping():
    assert(True)
def test_entry_read():
    assert(True)
def test_entry_read_byte():
    assert(True)
def test_entry_read_string():
    assert(True)
def test_entry_init():
    assert(True)
def test_entry_load():
    assert(True)
def test_entry_verify():
    assert(True)
def test_entry_repr():
    assert(True)
def test_entry_dict():
    assert(True)

# Make some garbage files and hope they break things.
def test_fuzz():
    pwd = os.environ['PWD']
    for i in range(1,100):
        with open('{}/tests/fuzzdb/{}.dat'.format(pwd, i), 'wb') as fout:
            fout.write(os.urandom(random.randint(2,400))) # replace 1024 with size_kb if not unreasonably large
    # Now let's inspect the garbage.
    netdb.inspect(netdb_dir='{}/fuzzdb/'.format(pwd))
mit
-4,783,902,398,608,048,000
26.852941
179
0.665259
false
kmarius/qutebrowser
tests/unit/utils/test_error.py
4
3334
# Copyright 2015-2018 Florian Bruhin (The Compiler) <[email protected]> # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Tests for qutebrowser.utils.error.""" import logging import pytest from PyQt5.QtCore import QTimer from PyQt5.QtWidgets import QMessageBox from qutebrowser.utils import error, utils from qutebrowser.misc import ipc class Error(Exception): pass @pytest.mark.parametrize('exc, name, exc_text', [ # "builtins." stripped (ValueError('exception'), 'ValueError', 'exception'), (ValueError, 'ValueError', 'none'), # "qutebrowser." stripped (ipc.Error, 'misc.ipc.Error', 'none'), (Error, 'test_error.Error', 'none'), ]) def test_no_err_windows(caplog, exc, name, exc_text, fake_args): """Test handle_fatal_exc with no_err_windows = True.""" fake_args.no_err_windows = True try: raise exc except Exception as e: with caplog.at_level(logging.ERROR): error.handle_fatal_exc(e, fake_args, 'title', pre_text='pre', post_text='post') assert len(caplog.records) == 1 expected = [ 'Handling fatal {} with --no-err-windows!'.format(name), '', 'title: title', 'pre_text: pre', 'post_text: post', 'exception text: {}'.format(exc_text), ] assert caplog.records[0].msg == '\n'.join(expected) # This happens on Xvfb for some reason # See https://github.com/qutebrowser/qutebrowser/issues/984 @pytest.mark.qt_log_ignore(r'^QXcbConnection: XCB error: 8 \(BadMatch\), ' r'sequence: \d+, resource id: \d+, major code: 42 ' r'\(SetInputFocus\), minor code: 0$', r'^QIODevice::write: device not open') @pytest.mark.parametrize('pre_text, post_text, expected', [ ('', '', 'exception'), ('foo', '', 'foo: exception'), ('foo', 'bar', 'foo: exception\n\nbar'), ('', 'bar', 'exception\n\nbar'), ], ids=repr) def test_err_windows(qtbot, qapp, fake_args, pre_text, post_text, expected): def err_window_check(): w = qapp.activeModalWidget() try: qtbot.add_widget(w) if not utils.is_mac: assert w.windowTitle() == 'title' assert w.icon() == QMessageBox.Critical assert w.standardButtons() == QMessageBox.Ok assert w.text() == expected finally: w.close() fake_args.no_err_windows = False QTimer.singleShot(0, err_window_check) error.handle_fatal_exc(ValueError("exception"), fake_args, 'title', pre_text=pre_text, post_text=post_text)
gpl-3.0
-5,039,534,839,097,219,000
33.729167
78
0.627475
false
deepmind/acme
acme/agents/tf/bcq/discrete_learning.py
1
9377
# python3 # Copyright 2018 DeepMind Technologies Limited. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Discrete BCQ learner implementation. As described in https://arxiv.org/pdf/1910.01708.pdf. """ import copy from typing import Dict, List, Optional from acme import core from acme import types from acme.adders import reverb as adders from acme.agents.tf import bc from acme.tf import losses from acme.tf import savers as tf2_savers from acme.tf import utils as tf2_utils from acme.tf.networks import discrete as discrete_networks from acme.utils import counting from acme.utils import loggers import numpy as np import reverb import sonnet as snt import tensorflow as tf import trfl class _InternalBCQLearner(core.Learner, tf2_savers.TFSaveable): """Internal BCQ learner. This implements the Q-learning component in the discrete BCQ algorithm. """ def __init__( self, network: discrete_networks.DiscreteFilteredQNetwork, discount: float, importance_sampling_exponent: float, learning_rate: float, target_update_period: int, dataset: tf.data.Dataset, huber_loss_parameter: float = 1., replay_client: Optional[reverb.TFClient] = None, counter: Optional[counting.Counter] = None, logger: Optional[loggers.Logger] = None, checkpoint: bool = False, ): """Initializes the learner. Args: network: BCQ network discount: discount to use for TD updates. importance_sampling_exponent: power to which importance weights are raised before normalizing. learning_rate: learning rate for the q-network update. target_update_period: number of learner steps to perform before updating the target networks. dataset: dataset to learn from, whether fixed or from a replay buffer (see `acme.datasets.reverb.make_dataset` documentation). huber_loss_parameter: Quadratic-linear boundary for Huber loss. replay_client: client to replay to allow for updating priorities. counter: Counter object for (potentially distributed) counting. logger: Logger object for writing logs to. checkpoint: boolean indicating whether to checkpoint the learner. """ # Internalise agent components (replay buffer, networks, optimizer). # TODO(b/155086959): Fix type stubs and remove. self._iterator = iter(dataset) # pytype: disable=wrong-arg-types self._network = network self._q_network = network.q_network self._target_q_network = copy.deepcopy(network.q_network) self._optimizer = snt.optimizers.Adam(learning_rate) self._replay_client = replay_client # Internalise the hyperparameters. self._discount = discount self._target_update_period = target_update_period self._importance_sampling_exponent = importance_sampling_exponent self._huber_loss_parameter = huber_loss_parameter # Learner state. self._variables = [self._network.trainable_variables] self._num_steps = tf.Variable(0, dtype=tf.int32) # Internalise logging/counting objects. self._counter = counter or counting.Counter() self._logger = logger or loggers.make_default_logger('learner', save_data=False) # Create a snapshotter object. 
if checkpoint: self._snapshotter = tf2_savers.Snapshotter( objects_to_save={'network': network}, time_delta_minutes=60.) else: self._snapshotter = None @tf.function def _step(self) -> Dict[str, tf.Tensor]: """Do a step of SGD and update the priorities.""" # Pull out the data needed for updates/priorities. inputs = next(self._iterator) transitions: types.Transition = inputs.data keys, probs = inputs.info[:2] with tf.GradientTape() as tape: # Evaluate our networks. q_tm1 = self._q_network(transitions.observation) q_t_value = self._target_q_network(transitions.next_observation) q_t_selector = self._network(transitions.next_observation) # The rewards and discounts have to have the same type as network values. r_t = tf.cast(transitions.reward, q_tm1.dtype) r_t = tf.clip_by_value(r_t, -1., 1.) d_t = tf.cast(transitions.discount, q_tm1.dtype) * tf.cast( self._discount, q_tm1.dtype) # Compute the loss. _, extra = trfl.double_qlearning(q_tm1, transitions.action, r_t, d_t, q_t_value, q_t_selector) loss = losses.huber(extra.td_error, self._huber_loss_parameter) # Get the importance weights. importance_weights = 1. / probs # [B] importance_weights **= self._importance_sampling_exponent importance_weights /= tf.reduce_max(importance_weights) # Reweight. loss *= tf.cast(importance_weights, loss.dtype) # [B] loss = tf.reduce_mean(loss, axis=[0]) # [] # Do a step of SGD. gradients = tape.gradient(loss, self._network.trainable_variables) self._optimizer.apply(gradients, self._network.trainable_variables) # Update the priorities in the replay buffer. if self._replay_client: priorities = tf.cast(tf.abs(extra.td_error), tf.float64) self._replay_client.update_priorities( table=adders.DEFAULT_PRIORITY_TABLE, keys=keys, priorities=priorities) # Periodically update the target network. if tf.math.mod(self._num_steps, self._target_update_period) == 0: for src, dest in zip(self._q_network.variables, self._target_q_network.variables): dest.assign(src) self._num_steps.assign_add(1) # Compute the global norm of the gradients for logging. global_gradient_norm = tf.linalg.global_norm(gradients) # Compute statistics of the Q-values for logging. max_q = tf.reduce_max(q_t_value) min_q = tf.reduce_min(q_t_value) mean_q, var_q = tf.nn.moments(q_t_value, [0, 1]) # Report loss & statistics for logging. fetches = { 'gradient_norm': global_gradient_norm, 'loss': loss, 'max_q': max_q, 'mean_q': mean_q, 'min_q': min_q, 'var_q': var_q, } return fetches def step(self): # Do a batch of SGD. result = self._step() # Update our counts and record it. counts = self._counter.increment(steps=1) result.update(counts) # Snapshot and attempt to write logs. if self._snapshotter is not None: self._snapshotter.save() self._logger.write(result) def get_variables(self, names: List[str]) -> List[np.ndarray]: return tf2_utils.to_numpy(self._variables) @property def state(self): """Returns the stateful parts of the learner for checkpointing.""" return { 'network': self._network, 'target_q_network': self._target_q_network, 'optimizer': self._optimizer, 'num_steps': self._num_steps } class DiscreteBCQLearner(core.Learner, tf2_savers.TFSaveable): """Discrete BCQ learner. This learner combines supervised BC learning and Q learning to implement the discrete BCQ algorithm as described in https://arxiv.org/pdf/1910.01708.pdf. 
""" def __init__(self, network: discrete_networks.DiscreteFilteredQNetwork, dataset: tf.data.Dataset, learning_rate: float, counter: Optional[counting.Counter] = None, bc_logger: Optional[loggers.Logger] = None, bcq_logger: Optional[loggers.Logger] = None, **bcq_learner_kwargs): counter = counter or counting.Counter() self._bc_logger = bc_logger or loggers.TerminalLogger('bc_learner', time_delta=1.) self._bcq_logger = bcq_logger or loggers.TerminalLogger('bcq_learner', time_delta=1.) self._bc_learner = bc.BCLearner( network=network.g_network, learning_rate=learning_rate, dataset=dataset, counter=counting.Counter(counter, 'bc'), logger=self._bc_logger, checkpoint=False) self._bcq_learner = _InternalBCQLearner( network=network, learning_rate=learning_rate, dataset=dataset, counter=counting.Counter(counter, 'bcq'), logger=self._bcq_logger, **bcq_learner_kwargs) def get_variables(self, names): return self._bcq_learner.get_variables(names) @property def state(self): bc_state = self._bc_learner.state bc_state.pop('network') # No need to checkpoint the BC network. bcq_state = self._bcq_learner.state state = dict() state.update({f'bc_{k}': v for k, v in bc_state.items()}) state.update({f'bcq_{k}': v for k, v in bcq_state.items()}) return state def step(self): self._bc_learner.step() self._bcq_learner.step()
apache-2.0
2,976,378,094,815,298,000
34.927203
80
0.660126
false
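A minimal driver loop for the DiscreteBCQLearner above. The network and dataset construction are elided (their constructors are not shown in this file), and num_learner_steps is a placeholder; only methods defined above are used:

# learner = DiscreteBCQLearner(network=..., dataset=..., learning_rate=1e-4,
#                              discount=0.99, importance_sampling_exponent=0.2,
#                              target_update_period=100)  # extra kwargs go to _InternalBCQLearner
for _ in range(num_learner_steps):
    learner.step()  # one supervised BC step, then one filtered Q-learning step
variables = learner.get_variables([])  # numpy copies of the trainable variables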
gorjuce/odoo
addons/l10n_in_hr_payroll/wizard/__init__.py
430
1110
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import hr_salary_employee_bymonth import hr_yearly_salary_detail # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
-8,130,922,034,773,170,000
43.44
78
0.625225
false
sbuss/voteswap
lib/networkx/linalg/spectrum.py
42
2793
""" Eigenvalue spectrum of graphs. """ # Copyright (C) 2004-2015 by # Aric Hagberg <[email protected]> # Dan Schult <[email protected]> # Pieter Swart <[email protected]> # All rights reserved. # BSD license. import networkx as nx __author__ = "\n".join(['Aric Hagberg <[email protected]>', 'Pieter Swart ([email protected])', 'Dan Schult([email protected])', 'Jean-Gabriel Young ([email protected])']) __all__ = ['laplacian_spectrum', 'adjacency_spectrum', 'modularity_spectrum'] def laplacian_spectrum(G, weight='weight'): """Return eigenvalues of the Laplacian of G Parameters ---------- G : graph A NetworkX graph weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- evals : NumPy array Eigenvalues Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. See Also -------- laplacian_matrix """ from scipy.linalg import eigvalsh return eigvalsh(nx.laplacian_matrix(G,weight=weight).todense()) def adjacency_spectrum(G, weight='weight'): """Return eigenvalues of the adjacency matrix of G. Parameters ---------- G : graph A NetworkX graph weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- evals : NumPy array Eigenvalues Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. See Also -------- adjacency_matrix """ from scipy.linalg import eigvals return eigvals(nx.adjacency_matrix(G,weight=weight).todense()) def modularity_spectrum(G): """Return eigenvalues of the modularity matrix of G. Parameters ---------- G : Graph A NetworkX Graph or DiGraph Returns ------- evals : NumPy array Eigenvalues See Also -------- modularity_matrix References ---------- .. [1] M. E. J. Newman, "Modularity and community structure in networks", Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006. """ from scipy.linalg import eigvals if G.is_directed(): return eigvals(nx.directed_modularity_matrix(G)) else: return eigvals(nx.modularity_matrix(G)) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import scipy.linalg except: raise SkipTest("scipy.linalg not available")
mit
-4,305,660,519,431,983,600
24.162162
77
0.613319
false
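A quick usage sketch for the three spectrum helpers above (requires scipy, as the imports inside each function indicate):

import networkx as nx

G = nx.path_graph(4)             # simple 0-1-2-3 chain
lap = nx.laplacian_spectrum(G)   # Laplacian eigenvalues; smallest is 0 for a connected graph
adj = nx.adjacency_spectrum(G)   # adjacency eigenvalues (complex dtype in general)
mod = nx.modularity_spectrum(G)  # modularity-matrix eigenvalues per Newman (2006)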
jumpojoy/neutron
neutron/tests/common/base.py
34
3238
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import functools import unittest.case from oslo_db.sqlalchemy import test_base import testtools.testcase from neutron.common import constants as n_const from neutron.tests import base from neutron.tests import tools def create_resource(prefix, creation_func, *args, **kwargs): """Create a new resource that does not already exist. If prefix isn't 'max_length' in size, a random suffix is concatenated to ensure it is random. Otherwise, 'prefix' is used as is. :param prefix: The prefix for a randomly generated name :param creation_func: A function taking the name of the resource to be created as it's first argument. An error is assumed to indicate a name collision. :param *args *kwargs: These will be passed to the create function. """ # Don't generate a random name if prefix is already full-length. if len(prefix) == n_const.DEVICE_NAME_MAX_LEN: return creation_func(prefix, *args, **kwargs) while True: name = base.get_rand_name( max_length=n_const.DEVICE_NAME_MAX_LEN, prefix=prefix) try: return creation_func(name, *args, **kwargs) except RuntimeError: pass def no_skip_on_missing_deps(wrapped): """Do not allow a method/test to skip on missing dependencies. This decorator raises an error if a skip is raised by wrapped method when OS_FAIL_ON_MISSING_DEPS is evaluated to True. This decorator should be used only for missing dependencies (including missing system requirements). """ @functools.wraps(wrapped) def wrapper(*args, **kwargs): try: return wrapped(*args, **kwargs) except (testtools.TestCase.skipException, unittest.case.SkipTest) as e: if base.bool_from_env('OS_FAIL_ON_MISSING_DEPS'): tools.fail( '%s cannot be skipped because OS_FAIL_ON_MISSING_DEPS ' 'is enabled, skip reason: %s' % (wrapped.__name__, e)) raise return wrapper class MySQLTestCase(test_base.MySQLOpportunisticTestCase): """Base test class for MySQL tests. If the MySQL db is unavailable then this test is skipped, unless OS_FAIL_ON_MISSING_DEPS is enabled. """ SKIP_ON_UNAVAILABLE_DB = not base.bool_from_env('OS_FAIL_ON_MISSING_DEPS') class PostgreSQLTestCase(test_base.PostgreSQLOpportunisticTestCase): """Base test class for PostgreSQL tests. If the PostgreSQL db is unavailable then this test is skipped, unless OS_FAIL_ON_MISSING_DEPS is enabled. """ SKIP_ON_UNAVAILABLE_DB = not base.bool_from_env('OS_FAIL_ON_MISSING_DEPS')
apache-2.0
1,768,344,116,201,521,200
35.795455
79
0.683447
false
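Usage sketch for create_resource above; the creation function and the 'tap' prefix are hypothetical, chosen only to show the retry contract (a RuntimeError from the function is treated as a name collision and triggers another random name):

existing = set()

def make_port(name):
    # Pretend collisions raise RuntimeError, as create_resource expects.
    if name in existing:
        raise RuntimeError('name collision: %s' % name)
    existing.add(name)
    return name

port_name = create_resource('tap', make_port)  # retries random suffixes until it succeeds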
pkainz/pylearn2
pylearn2/linear/tests/test_conv2d.py
45
5497
import theano from theano import tensor import numpy from pylearn2.linear.conv2d import Conv2D, make_random_conv2D from pylearn2.space import Conv2DSpace from pylearn2.utils import sharedX import unittest try: scipy_available = True import scipy.ndimage except ImportError: scipy_available = False class TestConv2D(unittest.TestCase): """ Tests for Conv2D code """ def setUp(self): """ Set up a test image and filter to re-use """ self.image = numpy.random.rand(1, 3, 3, 1).astype(theano.config.floatX) self.image_tensor = tensor.tensor4() self.input_space = Conv2DSpace((3, 3), 1) self.filters_values = numpy.ones( (1, 1, 2, 2), dtype=theano.config.floatX ) self.filters = sharedX(self.filters_values, name='filters') self.conv2d = Conv2D(self.filters, 1, self.input_space) def test_value_errors(self): """ Check correct errors are raised when bad input is given """ bad_filters = sharedX(numpy.zeros((1, 3, 2))) self.assertRaises(ValueError, Conv2D, bad_filters, 1, self.input_space) self.assertRaises(AssertionError, Conv2D, self.filters, 0, self.input_space) def test_get_params(self): """ Check whether the conv2d has stored the correct filters """ assert self.conv2d.get_params() == [self.filters] def test_lmul(self): """ Use SciPy's ndimage to check whether the convolution worked correctly """ f = theano.function([self.image_tensor], self.conv2d.lmul(self.image_tensor)) if scipy_available: numpy.allclose( f(self.image).reshape((2, 2)), scipy.ndimage.filters.convolve( self.image.reshape((3, 3)), self.filters_values.reshape((2, 2)) )[:2, :2] ) def test_lmul_T(self): """ Check whether this function outputs the right shape """ conv2d = self.conv2d.lmul(self.image_tensor) f = theano.function([self.image_tensor], self.conv2d.lmul_T(conv2d)) assert f(self.image).shape == self.image.shape def test_lmul_sq_T(self): """ Check whether this function outputs the same values as when taking the square manually """ conv2d_sq = Conv2D(sharedX(numpy.square(self.filters_values)), 1, self.input_space ).lmul(self.image_tensor) conv2d = self.conv2d.lmul(self.image_tensor) f = theano.function([self.image_tensor], self.conv2d.lmul_T(conv2d_sq)) f2 = theano.function([self.image_tensor], self.conv2d.lmul_sq_T(conv2d)) numpy.testing.assert_allclose(f(self.image), f2(self.image)) def test_set_batch_size(self): """ Make sure that setting the batch size actually changes the property """ cur_img_shape = self.conv2d._img_shape cur_batch_size = self.conv2d._img_shape[0] self.conv2d.set_batch_size(cur_batch_size + 10) assert self.conv2d._img_shape[0] == cur_batch_size + 10 assert self.conv2d._img_shape[1:] == cur_img_shape[1:] def test_axes(self): """ Use different output axes and see whether the output is what we expect """ default_axes = ('b', 0, 1, 'c') axes = (0, 'b', 1, 'c') mapping = tuple(axes.index(axis) for axis in default_axes) input_space = Conv2DSpace((3, 3), num_channels=1, axes=axes) conv2d = Conv2D(self.filters, 1, input_space, output_axes=axes) f_axes = theano.function([self.image_tensor], conv2d.lmul(self.image_tensor)) f = theano.function([self.image_tensor], self.conv2d.lmul(self.image_tensor)) output_axes = f_axes(numpy.transpose(self.image, mapping)) output = f(self.image) output_axes = numpy.transpose(output_axes, mapping) numpy.testing.assert_allclose(output, output_axes) assert output.shape == output_axes.shape def test_channels(self): """ Go from 2 to 3 channels and see whether the shape is correct """ input_space = Conv2DSpace((3, 3), num_channels=3) filters_values = numpy.ones( (2, 3, 2, 2), 
dtype=theano.config.floatX ) filters = sharedX(filters_values) image = numpy.random.rand(1, 3, 3, 3).astype(theano.config.floatX) conv2d = Conv2D(filters, 1, input_space) f = theano.function([self.image_tensor], conv2d.lmul(self.image_tensor)) assert f(image).shape == (1, 2, 2, 2) def test_make_random_conv2D(self): """ Create a random convolution and check whether the shape, axes and input space are all what we expect """ output_space = Conv2DSpace((2, 2), 1) conv2d = make_random_conv2D(1, self.input_space, output_space, (2, 2), 1) f = theano.function([self.image_tensor], conv2d.lmul(self.image_tensor)) assert f(self.image).shape == (1, 2, 2, 1) assert conv2d.input_space == self.input_space assert conv2d.output_axes == output_space.axes
bsd-3-clause
7,762,065,973,396,580,000
36.910345
79
0.574313
false
robynbergeron/ansible-modules-extras
cloud/cloudstack/cs_affinitygroup.py
24
7542
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2015, René Moser <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: cs_affinitygroup short_description: Manages affinity groups on Apache CloudStack based clouds. description: - Create and remove affinity groups. version_added: '2.0' author: "René Moser (@resmo)" options: name: description: - Name of the affinity group. required: true affinty_type: description: - Type of the affinity group. If not specified, first found affinity type is used. required: false default: null description: description: - Description of the affinity group. required: false default: null state: description: - State of the affinity group. required: false default: 'present' choices: [ 'present', 'absent' ] domain: description: - Domain the affinity group is related to. required: false default: null account: description: - Account the affinity group is related to. required: false default: null poll_async: description: - Poll async jobs until job has finished. required: false default: true extends_documentation_fragment: cloudstack ''' EXAMPLES = ''' # Create a affinity group - local_action: module: cs_affinitygroup name: haproxy affinty_type: host anti-affinity # Remove a affinity group - local_action: module: cs_affinitygroup name: haproxy state: absent ''' RETURN = ''' --- id: description: UUID of the affinity group. returned: success type: string sample: 87b1e0ce-4e01-11e4-bb66-0050569e64b8 name: description: Name of affinity group. returned: success type: string sample: app description: description: Description of affinity group. returned: success type: string sample: application affinity group affinity_type: description: Type of affinity group. 
returned: success type: string sample: host anti-affinity ''' try: from cs import CloudStack, CloudStackException, read_config has_lib_cs = True except ImportError: has_lib_cs = False # import cloudstack common from ansible.module_utils.cloudstack import * class AnsibleCloudStackAffinityGroup(AnsibleCloudStack): def __init__(self, module): super(AnsibleCloudStackAffinityGroup, self).__init__(module) self.returns = { 'type': 'affinity_type', } self.affinity_group = None def get_affinity_group(self): if not self.affinity_group: affinity_group = self.module.params.get('name') args = {} args['account'] = self.get_account('name') args['domainid'] = self.get_domain('id') affinity_groups = self.cs.listAffinityGroups(**args) if affinity_groups: for a in affinity_groups['affinitygroup']: if affinity_group in [ a['name'], a['id'] ]: self.affinity_group = a break return self.affinity_group def get_affinity_type(self): affinity_type = self.module.params.get('affinty_type') affinity_types = self.cs.listAffinityGroupTypes() if affinity_types: if not affinity_type: return affinity_types['affinityGroupType'][0]['type'] for a in affinity_types['affinityGroupType']: if a['type'] == affinity_type: return a['type'] self.module.fail_json(msg="affinity group type '%s' not found" % affinity_type) def create_affinity_group(self): affinity_group = self.get_affinity_group() if not affinity_group: self.result['changed'] = True args = {} args['name'] = self.module.params.get('name') args['type'] = self.get_affinity_type() args['description'] = self.module.params.get('description') args['account'] = self.get_account('name') args['domainid'] = self.get_domain('id') if not self.module.check_mode: res = self.cs.createAffinityGroup(**args) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if res and poll_async: affinity_group = self._poll_job(res, 'affinitygroup') return affinity_group def remove_affinity_group(self): affinity_group = self.get_affinity_group() if affinity_group: self.result['changed'] = True args = {} args['name'] = self.module.params.get('name') args['account'] = self.get_account('name') args['domainid'] = self.get_domain('id') if not self.module.check_mode: res = self.cs.deleteAffinityGroup(**args) if 'errortext' in res: self.module.fail_json(msg="Failed: '%s'" % res['errortext']) poll_async = self.module.params.get('poll_async') if res and poll_async: res = self._poll_job(res, 'affinitygroup') return affinity_group def main(): module = AnsibleModule( argument_spec = dict( name = dict(required=True), affinty_type = dict(default=None), description = dict(default=None), state = dict(choices=['present', 'absent'], default='present'), domain = dict(default=None), account = dict(default=None), poll_async = dict(choices=BOOLEANS, default=True), api_key = dict(default=None), api_secret = dict(default=None, no_log=True), api_url = dict(default=None), api_http_method = dict(choices=['get', 'post'], default='get'), api_timeout = dict(type='int', default=10), api_region = dict(default='cloudstack'), ), required_together = ( ['api_key', 'api_secret', 'api_url'], ), supports_check_mode=True ) if not has_lib_cs: module.fail_json(msg="python library cs required: pip install cs") try: acs_ag = AnsibleCloudStackAffinityGroup(module) state = module.params.get('state') if state in ['absent']: affinity_group = acs_ag.remove_affinity_group() else: affinity_group = acs_ag.create_affinity_group() result = acs_ag.get_result(affinity_group) 
except CloudStackException, e: module.fail_json(msg='CloudStackException: %s' % str(e)) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
-5,951,637,710,754,166,000
29.403226
88
0.603714
false
erjohnso/ansible
test/units/modules/packaging/os/test_apk.py
137
1157
from ansible.compat.tests import mock from ansible.compat.tests import unittest from ansible.modules.packaging.os import apk class TestApkQueryLatest(unittest.TestCase): def setUp(self): self.module_names = [ 'bash', 'g++', ] @mock.patch('ansible.modules.packaging.os.apk.AnsibleModule') def test_not_latest(self, mock_module): apk.APK_PATH = "" for module_name in self.module_names: command_output = module_name + '-2.0.0-r1 < 3.0.0-r2 ' mock_module.run_command.return_value = (0, command_output, None) command_result = apk.query_latest(mock_module, module_name) self.assertFalse(command_result) @mock.patch('ansible.modules.packaging.os.apk.AnsibleModule') def test_latest(self, mock_module): apk.APK_PATH = "" for module_name in self.module_names: command_output = module_name + '-2.0.0-r1 = 2.0.0-r1 ' mock_module.run_command.return_value = (0, command_output, None) command_result = apk.query_latest(mock_module, module_name) self.assertTrue(command_result)
gpl-3.0
511,169,028,152,327,740
36.322581
76
0.628349
false
Javier-Acosta/meran
dev-plugins/node/lib/node/wafadmin/Logs.py
4
4722
#!/usr/bin/env python
# Meran - MERAN UNLP is a ILS (Integrated Library System) wich provides Catalog,
# Circulation and User's Management. It's written in Perl, and uses Apache2
# Web-Server, MySQL database and Sphinx 2 indexing.
# Copyright (C) 2009-2013 Grupo de desarrollo de Meran CeSPI-UNLP
#
# This file is part of Meran.
#
# Meran is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Meran is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Meran. If not, see <http://www.gnu.org/licenses/>.
# encoding: utf-8
# Thomas Nagy, 2005 (ita) import ansiterm import os, re, logging, traceback, sys from Constants import * zones = '' verbose = 0 colors_lst = { 'USE' : True, 'BOLD' :'\x1b[01;1m', 'RED' :'\x1b[01;31m', 'GREEN' :'\x1b[32m', 'YELLOW':'\x1b[33m', 'PINK' :'\x1b[35m', 'BLUE' :'\x1b[01;34m', 'CYAN' :'\x1b[36m', 'NORMAL':'\x1b[0m', 'cursor_on' :'\x1b[?25h', 'cursor_off' :'\x1b[?25l', } got_tty = False term = os.environ.get('TERM', 'dumb') if not term in ['dumb', 'emacs']: try: got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys']) except AttributeError: pass import Utils if not got_tty or 'NOCOLOR' in os.environ: colors_lst['USE'] = False # test #if sys.platform == 'win32': # colors_lst['USE'] = True def get_color(cl): if not colors_lst['USE']: return '' return colors_lst.get(cl, '') class foo(object): def __getattr__(self, a): return get_color(a) def __call__(self, a): return get_color(a) colors = foo() re_log = re.compile(r'(\w+): (.*)', re.M) class log_filter(logging.Filter): def __init__(self, name=None): pass def filter(self, rec): rec.c1 = colors.PINK rec.c2 = colors.NORMAL rec.zone = rec.module if rec.levelno >= logging.INFO: if rec.levelno >= logging.ERROR: rec.c1 = colors.RED elif rec.levelno >= logging.WARNING: rec.c1 = colors.YELLOW else: rec.c1 = colors.GREEN return True zone = '' m = re_log.match(rec.msg) if m: zone = rec.zone = m.group(1) rec.msg = m.group(2) if zones: return getattr(rec, 'zone', '') in zones or '*' in zones elif not verbose > 2: return False return True class formatter(logging.Formatter): def __init__(self): logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT) def format(self, rec): if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO: try: return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2) except: return rec.c1+rec.msg+rec.c2 return logging.Formatter.format(self, rec) def debug(*k, **kw): if verbose: k = list(k) k[0] = k[0].replace('\n', ' ') logging.debug(*k, **kw) def error(*k, **kw): logging.error(*k, **kw) if verbose > 1: if isinstance(k[0], Utils.WafError): st = k[0].stack else: st = traceback.extract_stack() if st: st = st[:-1] buf = [] for filename, lineno, name, line in st: buf.append(' File "%s", line %d, in %s' % (filename, lineno, name)) if line: buf.append(' %s' % line.strip()) if buf: logging.error("\n".join(buf)) warn = logging.warn info = logging.info def init_log(): log = logging.getLogger() log.handlers = [] log.filters = [] hdlr = logging.StreamHandler() hdlr.setFormatter(formatter()) log.addHandler(hdlr) log.addFilter(log_filter()) log.setLevel(logging.DEBUG) # may be initialized more than once init_log()
gpl-3.0
-7,033,342,781,268,120,000
25.988571
90
0.674926
false
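How the zone filter in Logs.py above is driven: debug messages of the form 'zone: text' are kept only when that zone is listed in Logs.zones (or '*' is), while warnings and errors always pass. A small sketch using only the module-level toggles defined in the file:

import Logs  # the wafadmin module above

Logs.verbose = 1
Logs.zones = ['runner']
Logs.debug('runner: executing gcc -c main.c')  # kept, zone 'runner' is enabled
Logs.debug('env: rescanning cache')            # dropped by log_filter
Logs.error('build failed')                     # always shown, red on a color terminal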
amir343/ansible
test/units/parsing/test_data_loader.py
99
3256
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from six import PY2 from yaml.scanner import ScannerError from ansible.compat.tests import unittest from ansible.compat.tests.mock import patch, mock_open from ansible.errors import AnsibleParserError from ansible.parsing import DataLoader from ansible.parsing.yaml.objects import AnsibleMapping class TestDataLoader(unittest.TestCase): def setUp(self): # FIXME: need to add tests that utilize vault_password self._loader = DataLoader() def tearDown(self): pass @patch.object(DataLoader, '_get_file_contents') def test_parse_json_from_file(self, mock_def): mock_def.return_value = ("""{"a": 1, "b": 2, "c": 3}""", True) output = self._loader.load_from_file('dummy_json.txt') self.assertEqual(output, dict(a=1,b=2,c=3)) @patch.object(DataLoader, '_get_file_contents') def test_parse_yaml_from_file(self, mock_def): mock_def.return_value = (""" a: 1 b: 2 c: 3 """, True) output = self._loader.load_from_file('dummy_yaml.txt') self.assertEqual(output, dict(a=1,b=2,c=3)) @patch.object(DataLoader, '_get_file_contents') def test_parse_fail_from_file(self, mock_def): mock_def.return_value = (""" TEXT: *** NOT VALID """, True) self.assertRaises(AnsibleParserError, self._loader.load_from_file, 'dummy_yaml_bad.txt') class TestDataLoaderWithVault(unittest.TestCase): def setUp(self): self._loader = DataLoader(vault_password='ansible') def tearDown(self): pass @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True) def test_parse_from_vault_1_1_file(self): vaulted_data = """$ANSIBLE_VAULT;1.1;AES256 33343734386261666161626433386662623039356366656637303939306563376130623138626165 6436333766346533353463636566313332623130383662340a393835656134633665333861393331 37666233346464636263636530626332623035633135363732623332313534306438393366323966 3135306561356164310a343937653834643433343734653137383339323330626437313562306630 3035 """ if PY2: builtins_name = '__builtin__' else: builtins_name = 'builtins' with patch(builtins_name + '.open', mock_open(read_data=vaulted_data)): output = self._loader.load_from_file('dummy_vault.txt') self.assertEqual(output, dict(foo='bar'))
gpl-3.0
6,124,409,931,130,497,000
35.177778
96
0.695025
false
fujunwei/chromium-crosswalk
chrome/test/ispy/server/rebaseline_handler.py
100
1208
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Request Handler that updates the Expectation version."""

import json  # needed for the error response below
import webapp2

import ispy_api
from common import constants

import gs_bucket


class RebaselineHandler(webapp2.RequestHandler):
  """Request handler to allow test mask updates."""

  def post(self):
    """Accepts post requests.

    Expects a test_run as a parameter and updates the associated version file
    to use the expectations associated with that test run.
    """
    test_run = self.request.get('test_run')

    # Fail if test_run parameter is missing.
    if not test_run:
      self.response.headers['Content-Type'] = 'json/application'
      self.response.write(json.dumps(
          {'error': '\'test_run\' must be supplied to rebaseline.'}))
      return
    # Otherwise, set up the utilities.
    bucket = gs_bucket.GoogleCloudStorageBucket(constants.BUCKET)
    ispy = ispy_api.ISpyApi(bucket)
    # Update versions file.
    ispy.RebaselineToTestRun(test_run)
    # Redirect back to the sites list for the test run.
    self.redirect('/?test_run=%s' % test_run)
bsd-3-clause
-1,666,669,309,377,522,400
30.789474
80
0.706954
false
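Wiring sketch for RebaselineHandler above, using webapp2's standard route table; the '/rebaseline' path is an assumption, not taken from the project's routing config:

import webapp2
# from rebaseline_handler import RebaselineHandler

app = webapp2.WSGIApplication([
    ('/rebaseline', RebaselineHandler),  # POST with test_run=<name> rebaselines that run
])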
persandstrom/home-assistant
homeassistant/components/switch/orvibo.py
8
3041
""" Support for Orvibo S20 Wifi Smart Switches. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.orvibo/ """ import logging import voluptuous as vol from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_SWITCHES, CONF_MAC, CONF_DISCOVERY) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['orvibo==1.1.1'] _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Orvibo S20 Switch' DEFAULT_DISCOVERY = True PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_SWITCHES, default=[]): vol.All(cv.ensure_list, [{ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_MAC): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string }]), vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean, }) def setup_platform(hass, config, add_entities_callback, discovery_info=None): """Set up S20 switches.""" from orvibo.s20 import discover, S20, S20Exception switch_data = {} switches = [] switch_conf = config.get(CONF_SWITCHES, [config]) if config.get(CONF_DISCOVERY): _LOGGER.info("Discovering S20 switches ...") switch_data.update(discover()) for switch in switch_conf: switch_data[switch.get(CONF_HOST)] = switch for host, data in switch_data.items(): try: switches.append(S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC)))) _LOGGER.info("Initialized S20 at %s", host) except S20Exception: _LOGGER.error("S20 at %s couldn't be initialized", host) add_entities_callback(switches) class S20Switch(SwitchDevice): """Representation of an S20 switch.""" def __init__(self, name, s20): """Initialize the S20 device.""" from orvibo.s20 import S20Exception self._name = name self._s20 = s20 self._state = False self._exc = S20Exception @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the switch.""" return self._name @property def is_on(self): """Return true if device is on.""" return self._state def update(self): """Update device state.""" try: self._state = self._s20.on except self._exc: _LOGGER.exception("Error while fetching S20 state") def turn_on(self, **kwargs): """Turn the device on.""" try: self._s20.on = True except self._exc: _LOGGER.exception("Error while turning on S20") def turn_off(self, **kwargs): """Turn the device off.""" try: self._s20.on = False except self._exc: _LOGGER.exception("Error while turning off S20")
apache-2.0
-7,852,088,250,197,382,000
27.688679
77
0.613614
false
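The switch platform above is a thin wrapper over the orvibo package, and the library calls it relies on can be exercised directly. A sketch, with the IP address purely illustrative:

from orvibo.s20 import discover, S20

found = discover()          # {host: data} for S20 sockets answering on the LAN
plug = S20('192.168.1.50')  # or S20(host, mac=...) as setup_platform does
plug.on = True              # writing the property toggles the relay
state = plug.on             # reading it polls the device, as S20Switch.update does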
bluesabre/melodius
melodius/PreferencesMelodiusDialog.py
1
8596
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*- ### BEGIN LICENSE # Copyright (C) 2012 Sean Davis <[email protected]> # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License version 3, as published # by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranties of # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see <http://www.gnu.org/licenses/>. ### END LICENSE # This is your preferences dialog. # # Define your preferences in # data/glib-2.0/schemas/net.launchpad.melodius.gschema.xml # See http://developer.gnome.org/gio/stable/GSettings.html for more info. from gi.repository import GObject, Gio, Gtk, Notify # pylint: disable=E0611 import locale from locale import gettext as _ locale.textdomain('melodius') import logging logger = logging.getLogger('melodius') from melodius_lib.PreferencesDialog import PreferencesDialog from . import MelodiusLibrary class PreferencesMelodiusDialog(PreferencesDialog): __gtype_name__ = "PreferencesMelodiusDialog" __gsignals__ = { 'library_updated': (GObject.SIGNAL_RUN_FIRST, None, (bool,)), 'show_preview_notification': (GObject.SIGNAL_RUN_FIRST, None, (bool,)) } def finish_initializing(self, builder): # pylint: disable=E1002 """Set up the preferences dialog""" super(PreferencesMelodiusDialog, self).finish_initializing(builder) # Library Settings self.library_treeview = self.builder.get_object('library_treeview') column = self.library_treeview.get_column(0) self.library_treeview.append_column( column) cell = Gtk.CellRendererText() column.pack_start(cell, True) column.add_attribute(cell, 'text', 0) self.library_toolbar = self.builder.get_object('library_toolbar') context = self.library_toolbar.get_style_context() context.add_class("inline-toolbar") self.library_stats = self.builder.get_object('library_stats') # Notification Settings self.show_notifications = self.builder.get_object("checkbutton_show_notifications") self.preview_image = self.builder.get_object("preview_image") self.preview_primary_message = self.builder.get_object("preview_primary_message") self.preview_secondary_message = self.builder.get_object("preview_secondary_message") self.notification_settings = self.builder.get_object("box_notification_settings") self.notifications_coverart = self.builder.get_object("notifications_coverart") self.notifications_primary = self.builder.get_object("notifications_primary") self.notifications_secondary = self.builder.get_object("notifications_secondary") # Bind each preference widget to gsettings self.settings = Gio.Settings("net.launchpad.melodius") model = self.library_treeview.get_model() for folder in self.settings['folders']: model.append([folder]) self.library = MelodiusLibrary.MelodiusLibrary() self.show_notifications.set_active( self.settings["show-notifications"] ) self.notifications_coverart.set_active( self.settings["show-coverart"] ) self.notifications_primary.set_text( self.settings["primary-message"] ) self.notifications_secondary.set_text( self.settings["secondary-message"] ) #widget = self.builder.get_object('example_entry') #settings.bind("example", widget, "text", Gio.SettingsBindFlags.DEFAULT) # Initialize notification previews 
Notify.init("melodius-preview") def on_toolbutton_library_add_clicked(self, widget): dialog = Gtk.FileChooserDialog(title=_("Add a folder to the library"), parent=self, action=Gtk.FileChooserAction.SELECT_FOLDER, buttons=(Gtk.STOCK_CANCEL,Gtk.ResponseType.CANCEL,Gtk.STOCK_ADD,Gtk.ResponseType.OK)) dialog.set_select_multiple(True) dialog.show() response = dialog.run() dialog.hide() if response == Gtk.ResponseType.OK: model = self.library_treeview.get_model() existing = [] iter = model.get_iter_first() while iter: existing.append( model.get_value(iter, 0) ) iter = model.iter_next(iter) for folder in dialog.get_filenames(): if folder not in existing: model.append([folder]) self.library.add_folder(folder) self.on_prefs_library_updated() def on_toolbutton_library_remove_clicked(self, widget): sel = self.library_treeview.get_selection() store, path = sel.get_selected_rows() folder = store[path][0] iter = store.get_iter( path[0] ) store.remove(iter) self.library.remove_folder(folder) self.on_prefs_library_updated() def on_prefs_library_updated(self): model = self.library_treeview.get_model() folders = [] iter = model.get_iter_first() while iter: folders.append( model.get_value(iter, 0) ) iter = model.iter_next(iter) folders.sort() model.clear() for folder in folders: model.append([folder]) self.settings['folders'] = folders self.library = MelodiusLibrary.MelodiusLibrary() self.library_stats.set_label(_('<i>%i songs in library. %s total playtime.</i>') % (len(self.library), '0:00:00')) self.emit("library_updated", len(self.library)) def on_checkbutton_show_notifications_toggled(self, widget): """Toggle the notification settings editable""" self.settings["show-notifications"] = widget.get_active() self.notification_settings.set_sensitive(widget.get_active()) def on_notifications_coverart_toggled(self, widget): self.preview_image.set_visible(widget.get_active()) self.settings["show-coverart"] = widget.get_active() def on_notifications_primary_changed(self, widget): """Update the primary message preview""" text = widget.get_text() self.settings["primary-message"] = text text = text.replace("<", "&lt;").replace(">", "&gt;") text = text.replace("%s", _("Song Title")) text = text.replace("%a", _("Song Artist")) text = text.replace("%l", _("Song Album")) self.preview_primary_message.set_markup("<b>%s</b>" % text) def on_notifications_secondary_changed(self, widget): """Update the secondary message preview""" text = widget.get_text() self.settings["secondary-message"] = text text = text.replace("%s", _("Song Title")) text = text.replace("%a", _("Song Artist")) text = text.replace("%l", _("Song Album")) self.preview_secondary_message.set_markup(text) def on_button_preview_clicked(self, widget): """Show a notification preview""" primary = self.notifications_primary.get_text() primary = primary.replace("<", "&lt;").replace(">", "&gt;") primary = primary.replace("%s", _("Song Title")) primary = primary.replace("%a", _("Song Artist")) primary = primary.replace("%l", _("Song Album")) secondary = self.notifications_secondary.get_text() secondary = secondary.replace("<", "&lt;").replace(">", "&gt;") secondary = secondary.replace("%s", _("Song Title")) secondary = secondary.replace("%a", _("Song Artist")) secondary = secondary.replace("%l", _("Song Album")) if self.notifications_coverart.get_active(): notification = Notify.Notification.new (primary,secondary,"audio-player") else: notification = Notify.Notification.new (primary,secondary,None) notification.show () def on_notifications_revert_clicked(self, widget): 
"""Revert notification settings to defaults.""" self.notifications_coverart.set_active(True) self.notifications_primary.set_text("%s") self.notifications_secondary.set_text("by %a on %l")
gpl-3.0
1,381,353,494,769,310,000
44.005236
221
0.639483
false
corvorepack/REPOIVAN
plugin.video.tv.astra.vip/resources/regex/dinozap.py
3
4487
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# TV Ultra 7K regex for Dinozap
# Version 0.1 (17.10.2014)
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Thanks to the plugintools library by Jesús (www.mimediacenter.info)

import os
import re
import shutil
import sys
import time
import traceback
import urllib
import urllib2
import zipfile

import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin

import plugintools
import scrapertools

addonName = xbmcaddon.Addon().getAddonInfo("name")
addonVersion = xbmcaddon.Addon().getAddonInfo("version")
addonId = xbmcaddon.Addon().getAddonInfo("id")
addonPath = xbmcaddon.Addon().getAddonInfo("path")


def dinozap0(params):
    plugintools.log('[%s %s] Initializing Businessapp regex... %s'
                    % (addonName, addonVersion, repr(params)))

    # Build a dictionary from the space-separated, rtmpdump-style parameters
    url_user = {}
    url = params.get("url")
    for entry in url.split(" "):
        if entry.startswith("rtmp"):
            url_user["rtmp"] = entry.replace("rtmp=", "")
        elif entry.startswith("playpath"):
            url_user["playpath"] = entry.replace("playpath=", "")
        elif entry.startswith("swfUrl"):
            url_user["swfurl"] = entry.replace("swfUrl=", "")
        elif entry.startswith("pageUrl"):
            url_user["pageurl"] = entry.replace("pageUrl=", "")
        elif entry.startswith("token"):
            url_user["token"] = entry.replace("token=", "")
        elif entry.startswith("referer"):
            url_user["referer"] = entry.replace("referer=", "")

    url = url_user.get("pageurl")
    ref = 'http://www.dinozap.info/'
    body = gethttp_referer_headers(url, ref)
    reff = url
    url = plugintools.find_single_match(body, 'iframe\ssrc="([^"]+)')

    # The token endpoint is flaky, so retry up to nine times
    x = 'not_found'
    results = []
    k = url
    for i in range(1, 10):
        k = url
        body = gethttp_referer_headers(url, reff)
        # captured for reference; not used further below
        scrpt = plugintools.find_single_match(body, 'document\.write\(unescape\(\'([^\']+)')
        tok = plugintools.find_single_match(body, 'securetoken([^\n]+)')
        try:
            hidd = plugintools.find_multiple_matches(
                body, 'type="hidden"\sid="([^"]+)"\svalue="([^"]*)')
        except:
            # the original decremented the loop counter here, which has no
            # effect inside a for loop; retrying the iteration is the intent
            continue
        diov = plugintools.find_multiple_matches(body, 'var\s(sUrl|cod1)\s=\s\'([^\']+)')
        # print diov
        Epoc_mil = str(int(time.time() * 1000))
        EpocTime = str(int(time.time()))
        jquery = '%s?callback=jQuery17049106340911455604_%s&v_cod1=%s&v_cod2=%s&_=%s'
        jurl = jquery % (hidd[3][1].decode('base64'), Epoc_mil,
                         urllib.quote_plus(hidd[1][1]),
                         urllib.quote_plus(hidd[2][1]), Epoc_mil)
        body = gethttp_referer_headers(jurl, k)
        results = plugintools.find_multiple_matches(body, '"result\d{1}":"([^"]+)')
        x = results[0]
        print jurl
        if x == 'not_found':
            print 'try ' + str(i) + ' : ' + x
        else:
            print 'try ' + str(i) + ' : OK :)'
            break

    if x == 'not_found':
        # NOTE: the original called eval(nolink) here, but 'nolink' is never
        # defined in this file and the call could only raise; just bail out.
        sys.exit()

    swfUrl = 'http://www.businessapp1.pw/jwplayer5/addplayer/jwplayer.flash.swf'
    app = plugintools.find_single_match(results[1].replace('\\', ''), '1735\/([^"]+)')
    # template kept from the original for reference (unused):
    # '%s app=%s playpath=%s flashver=WIN%5C2017,0,0,134 swfUrl=%s swfVfy=1 pageUrl=%s live=1 timeout=15'  # dzap, tvdirecto
    w = (results[1].replace('\\', '') + ' app=' + app + ' playpath=' + results[0] +
         ' flashver=WIN%5C2017,0,0,134 swfUrl=' + swfUrl + ' swfVfy=1 pageUrl=' + k +
         ' live=1 timeout=15')
    if w:
        plugintools.play_resolved_url(w)
        sys.exit()
    else:
        # see the note above: 'nolink' is undefined, so simply exit
        sys.exit()
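# Illustrative sketch (editor's assumption -- the URL below is hypothetical,
# not a real channel): dinozap0() expects params["url"] to be a
# space-separated, rtmpdump-style string, e.g.
#
#   dinozap0({"url": "rtmp=rtmp://host/live playpath=stream1 "
#                    "swfUrl=http://host/player.swf pageUrl=http://page/ch1"})
#
# Only the pageUrl field is actually consumed by this resolver; the remaining
# fields are parsed into url_user and ignored. The helpers below wrap
# plugintools.read_body_and_headers() with a fixed User-Agent (and optional
# Referer) so every scrape in this module sends the same headers.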
def gethttp_referer_headers(url, ref):
    plugintools.log("url= " + url)
    plugintools.log("ref= " + ref)
    request_headers = []
    request_headers.append(["User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"])
    request_headers.append(["Referer", ref])
    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
    plugintools.log("body= " + body)
    return body


def gethttp_headers(url):
    plugintools.log("url= " + url)
    request_headers = []
    request_headers.append(["User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"])
    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
    plugintools.log("body= " + body)
    return body
gpl-2.0
-8,906,776,713,287,633,000
42.543689
276
0.610033
false
agentfog/qiime
tests/test_pick_rep_set.py
15
20044
#!/usr/bin/env python
"""Tests of code for representative set picking"""

__author__ = "Rob Knight"
__copyright__ = "Copyright 2011, The QIIME Project"
# remember to add yourself if you make changes
__credits__ = ["Rob Knight", "Kyle Bittinger", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Daniel McDonald"
__email__ = "[email protected]"

from os import remove, close
from tempfile import mkstemp
from unittest import TestCase, main

from skbio.util import remove_files
from skbio.parse.sequences import parse_fasta
from skbio.alignment import SequenceCollection
from skbio.sequence import DNA

from qiime.pick_rep_set import (RepSetPicker, GenericRepSetPicker, first_id,
                                first, random_id, longest_id, unique_id_map,
                                label_to_name, make_most_abundant, parse_fasta,
                                ReferenceRepSetPicker)


class RepSetPickerTests(TestCase):

    """Tests of the abstract RepSetPicker class"""

    def test_init(self):
        """Abstract RepSetPicker __init__ should store name, params"""
        p = RepSetPicker({})
        self.assertEqual(p.Name, 'RepSetPicker')
        self.assertEqual(p.Params, {})

    def test_call(self):
        """Abstract RepSetPicker __call__ should raise NotImplementedError"""
        p = RepSetPicker({})
        self.assertRaises(NotImplementedError, p, '/path/to/seqs',
                          '/path/to/otus')


class SharedSetupTestCase(TestCase):

    """Wrapper for shared setup stuff"""

    def setUp(self):
        # create the temporary input files
        fd, self.tmp_seq_filepath = mkstemp(prefix='GenericRepSetPickerTest_',
                                            suffix='.fasta')
        close(fd)
        seq_file = open(self.tmp_seq_filepath, 'w')
        seq_file.write(dna_seqs)
        seq_file.close()

        fd, self.tmp_otu_filepath = mkstemp(prefix='GenericRepSetPickerTest_',
                                            suffix='.otu')
        close(fd)
        otu_file = open(self.tmp_otu_filepath, 'w')
        otu_file.write(otus)
        otu_file.close()

        self.files_to_remove = [self.tmp_seq_filepath, self.tmp_otu_filepath]

        self.params = {'Algorithm': 'first', 'ChoiceF': first_id}

    def tearDown(self):
        remove_files(self.files_to_remove)


class GenericRepSetPickerTests(SharedSetupTestCase):

    """Tests of the generic RepSet picker"""

    def test_call_default_params(self):
        """GenericRepSetPicker.__call__ returns expected clusters default params"""
        # adapted from test_app.test_cd_hit.test_cdhit_clusters_from_seqs
        exp = {'0': 'R27DLI_4812',
               '1': 'U1PLI_7889',
               '2': 'W3Cecum_4858',
               '3': 'R27DLI_3243',
               }
        app = GenericRepSetPicker(params={'Algorithm': 'first',
                                          'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath)
        self.assertEqual(obs, exp)

    def test_call_wrapped_function(self):
        """GenericRepSetPicker.__call__ returns expected clusters with wrapped function"""
        # adapted from test_app.test_cd_hit.test_cdhit_clusters_from_seqs
        exp = {'0': 'R27DLI_4812',
               '1': 'U1PLI_7889',
               '2': 'W3Cecum_4858',
               '3': 'R27DLI_3243',
               }
        app = GenericRepSetPicker(params={'Algorithm': 'most_abundant',
                                          'ChoiceF': make_most_abundant,
                                          'ChoiceFRequiresSeqs': True})
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath)
        self.assertEqual(obs, exp)

    def test_call_output_to_file(self):
        """GenericRepSetPicker.__call__ output to file functions as expected
        """
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_',
            suffix='.txt')
        close(fd)

        app = GenericRepSetPicker(params=self.params)
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath)

        result_file = open(tmp_result_filepath)
        result_file_str = result_file.read()
        result_file.close()
        # remove the result file before running the assertions, so in
        # case they fail the temp file is still cleaned up
        remove(tmp_result_filepath)

        # compare data in result file to fake expected file
        self.assertEqual(result_file_str, rep_seqs_result_file_exp)
        # confirm that nothing is returned when result_path is specified
        self.assertEqual(obs, None)

    def test_call_output_to_file_sorted(self):
        """GenericRepSetPicker.__call__ output to file sorts when requested
        """
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_',
            suffix='.txt')
        close(fd)

        app = GenericRepSetPicker(params=self.params)
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath, sort_by='seq_id')

        result_file = open(tmp_result_filepath)
        result_file_str = result_file.read()
        result_file.close()
        # remove the result file before running the assertions, so in
        # case they fail the temp file is still cleaned up
        remove(tmp_result_filepath)

        # compare data in result file to fake expected file
        self.assertEqual(result_file_str, rep_seqs_result_file_sorted_exp)
        # confirm that nothing is returned when result_path is specified
        self.assertEqual(obs, None)

    def test_call_log_file(self):
        """GenericRepSetPicker.__call__ writes log when expected
        """
        fd, tmp_log_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_l_',
            suffix='.txt')
        close(fd)
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_r_',
            suffix='.txt')
        close(fd)

        app = GenericRepSetPicker(params=self.params)
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath, log_path=tmp_log_filepath)

        log_file = open(tmp_log_filepath)
        log_file_str = log_file.read()
        log_file.close()
        # remove the temp files before running the assertions, so in
        # case they fail the temp files are still cleaned up
        remove(tmp_log_filepath)
        remove(tmp_result_filepath)

        log_file_exp = ["GenericRepSetPicker parameters:",
                        'Algorithm:first',
                        "Application:None",
                        'ChoiceF:first',
                        'ChoiceFRequiresSeqs:False',
                        "Result path: %s" % tmp_result_filepath,
                        ]
        # compare data in log file to fake expected log file
        for i, j in zip(log_file_str.splitlines(), log_file_exp):
            if not i.startswith('ChoiceF:'):  # can't test, different each time
                self.assertEqual(i, j)
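# Editor's note (inferred from rep_seqs_result_file_exp above, not from QIIME
# documentation): a picker's result file is plain FASTA with one record per
# OTU, whose header line is "<otu_id> <representative_seq_id>", e.g.
#
#   >0 R27DLI_4812
#   CTGGGCCGTATCTC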
class ReferenceRepSetPickerTests(SharedSetupTestCase):

    """Tests of the ReferenceRepSetPicker class"""

    def setUp(self):
        # create the temporary input files
        fd, self.tmp_seq_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.fasta')
        close(fd)
        seq_file = open(self.tmp_seq_filepath, 'w')
        seq_file.write(dna_seqs)
        seq_file.close()

        fd, self.ref_seq_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.fasta')
        close(fd)
        seq_file = open(self.ref_seq_filepath, 'w')
        seq_file.write(reference_seqs)
        seq_file.close()

        fd, self.tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.otu')
        close(fd)
        otu_file = open(self.tmp_otu_filepath, 'w')
        otu_file.write(otus_w_ref)
        otu_file.close()

        fd, self.result_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.fasta')
        close(fd)
        otu_file = open(self.result_filepath, 'w')
        otu_file.write(otus_w_ref)
        otu_file.close()

        self.files_to_remove = [self.tmp_seq_filepath,
                                self.tmp_otu_filepath,
                                self.ref_seq_filepath,
                                self.result_filepath]

        self.params = {'Algorithm': 'first', 'ChoiceF': first_id}

    def test_call_default_params(self):
        """ReferenceRepSetPicker.__call__ expected clusters default params"""
        exp = {'0': ('R27DLI_4812', 'CTGGGCCGTATCTC'),
               'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT'),
               }
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath,
                  self.tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)

    def test_call_write_to_file(self):
        """ReferenceRepSetPicker.__call__ otu map correctly written to file"""
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        app(self.tmp_seq_filepath,
            self.tmp_otu_filepath,
            self.ref_seq_filepath,
            result_path=self.result_filepath)
        with open(self.result_filepath) as f:
            actual = SequenceCollection.from_fasta_records(parse_fasta(f), DNA)
        expected = SequenceCollection.from_fasta_records(
            parse_fasta(rep_seqs_reference_result_file_exp.split('\n')), DNA)
        # we don't care about order in the results
        self.assertEqual(set(actual), set(expected))

    def test_non_ref_otus(self):
        """ReferenceRepSetPicker.__call__ same result as Generic when no ref otus
        """
        exp = {'0': ('R27DLI_4812', 'CTGGGCCGTATCTC'),
               '1': ('U1PLI_7889', 'TTGGACCGTG'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               '3': ('R27DLI_3243', 'CTGGACCGTGTCT')}
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        otu_file.write(otus)
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)

        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath,
                  tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)

    def test_call_invalid_id(self):
        """ReferenceRepSetPicker.__call__ raises KeyError on unknown seq id"""
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        # replace a valid sequence identifier with an invalid
        # sequence identifier (i.e., one that we don't have a sequence for)
        otu_file.write(otus_w_ref.replace('R27DLI_4812', 'bad_seq_identifier'))
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)

        # returning in dict
        self.assertRaises(KeyError, app, self.tmp_seq_filepath,
                          tmp_otu_filepath, self.ref_seq_filepath)
        # writing to file
        self.assertRaises(KeyError, app, self.tmp_seq_filepath,
                          tmp_otu_filepath, self.ref_seq_filepath,
                          result_path=self.result_filepath)

    def test_call_ref_only(self):
        """ReferenceRepSetPicker.__call__ functions with no non-refseqs"""
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_', suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        otu_file.write(otus_all_ref)
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)

        exp = {'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT')}

        # passing only reference (not input seqs)
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(None, tmp_otu_filepath, self.ref_seq_filepath)
        self.assertEqual(obs, exp)

        # passing reference and input seqs
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath, tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)

    def test_call_alt_non_ref_picker(self):
        """ReferenceRepSetPicker.__call__ handles alt non-ref picking method"""
        exp = {'0': ('U1PLI_9526',
                     'CTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCG'
                     'GTCTCTCAACCCGGCTACCCATCGCGGGCTAGGTGGGCCGTT'
                     'ACCCCGCCTACTACCTAATGGGCCGCGACCCCATCCCTTGCCGTCTGGGC'
                     'TTTCCCGGGCCCCCCAGGAGGGGGGCGAGGAGTATCCGGTATTAGCCTCGGTT'
                     'TCCCAAGGTTGTCCCGGAGCAAGGGGCAGGTTGGTCACGTGTTACTCACCCGT'
                     'TCGCCACTTCATGTCCGCCCGAGGGCGGTTTCATCG'),
               'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT'),
               }
        app = ReferenceRepSetPicker(params={'Algorithm': 'longest',
                                            'ChoiceF': longest_id})
        obs = app(self.tmp_seq_filepath,
                  self.tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)


class TopLevelTests(SharedSetupTestCase):

    """Tests of top-level functions"""

    def test_first(self):
        """first should always return first item"""
        vals = [3, 4, 2]
        self.assertEqual(first(vals), 3)
        vals.reverse()
        self.assertEqual(first(vals), 2)

    def test_first_id(self):
        """first_id should return first id from list"""
        ids = "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split()
        self.assertEqual(first_id(ids, {}), 'R27DLI_4812')

    def test_random_id(self):
        """random_id should return random id from list"""
        ids = "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split()
        assert random_id(ids, {}) in ids
        # just test we got something from the list, don't add stochastic test

    def test_longest_id(self):
        """longest_id should return id associated with longest seq"""
        ids = "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split()
        seqs = dict(parse_fasta(dna_seqs.splitlines(),
                                label_to_name=label_to_name))
        self.assertEqual(longest_id(ids, seqs), 'U1PLI_403')

    def test_unique_id_map(self):
        """unique_id_map should return map of seqs:unique representatives"""
        seqs = {'a': 'AG', 'b': 'AG', 'c': 'CC', 'd': 'CT'}
        obs = unique_id_map(seqs)
        exp = {'c': ['c'], 'd': ['d'], 'a': ['a', 'b'],
               'b': ['a', 'b']}  # can't predict if a or b
        for k in obs:
            assert obs[k] in exp[k]

    def test_make_most_abundant(self):
        """make_most_abundant should return function with correct behavior"""
        ids = "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split()
        seqs = dict(parse_fasta(dna_seqs.splitlines(),
                                label_to_name=label_to_name))
        f = make_most_abundant(seqs)
        result = f(ids, seqs)
        assert result in ['R27DLI_4812', 'R27DLI_727', 'U1PLI_8969']
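# Editor's note: the test sequences below carry QIIME split_libraries-style
# FASTA labels, "<sample>_<count> <read id> orig_bc=... new_bc=... bc_diffs=0";
# the tests appear to use only the first field as the sequence identifier
# (via the label_to_name callback passed to parse_fasta above).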
dna_seqs = """>R27DLI_4812 FMSX0OV01EIYV5 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTC
>R27DLI_600 FMSX0OV01D110Y orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTCA
>R27DLI_727 FMSX0OV01D5X55 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTC
>U1PLI_403 FMSX0OV01DVG99 orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTCAGTCCCAA
>U1PLI_8969 FMSX0OV01ARWY7 orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTC
>U1PLI_9080 FMSX0OV01C9JUX orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCG
>U1PLI_9526 FMSX0OV01EUN7B orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCGGTCTCTCAACCCGGCTACCCATCGCGGGCTAGGTGGGCCGTTACCCCGCCTACTACCTAATGGGCCGCGACCCCATCCCTTGCCGTCTGGGCTTTCCCGGGCCCCCCAGGAGGGGGGCGAGGAGTATCCGGTATTAGCCTCGGTTTCCCAAGGTTGTCCCGGAGCAAGGGGCAGGTTGGTCACGTGTTACTCACCCGTTCGCCACTTCATGTCCGCCCGAGGGCGGTTTCATCG
>W3Cecum_6642 FMSX0OV01CW7FI orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGGCCGTATCTCAGT
>W3Cecum_8992 FMSX0OV01C3YXK orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGGCCGTGTCTC
>U1PLI_7889 FMSX0OV01C6HRL orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
TTGGACCGTG
>W3Cecum_4858 FMSX0OV01BX4KM orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
TTGGGCCGTGTCTCAGT
>R27DLI_3243 FMSX0OV01DH41R orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_4562 FMSX0OV01EJKLT orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_6828 FMSX0OV01BCWTL orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_9097 FMSX0OV01APUV6 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>U1PLI_2780 FMSX0OV01E2K1S orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGACCGTGTCTC
>U1PLI_67 FMSX0OV01DO1NS orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGACCGTGT
>U9PSI_10475 FMSX0OV01BB4Q3 orig_bc=GATAGCTGTCTT new_bc=GATAGCTGTCTT bc_diffs=0
CTGGACCGTGTCTC
>U9PSI_4341 FMSX0OV01B8SXV orig_bc=GATAGCTGTCTT new_bc=GATAGCTGTCTT bc_diffs=0
CTGGACCGTGTCT
>W3Cecum_5191 FMSX0OV01BMU6R orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGACCGTGTCT
"""

otus = """0 R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969 U1PLI_9080 U1PLI_9526 W3Cecum_6642 W3Cecum_8992
1 U1PLI_7889
2 W3Cecum_4858
3 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""

rep_seqs_result_file_exp = """>0 R27DLI_4812
CTGGGCCGTATCTC
>1 U1PLI_7889
TTGGACCGTG
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>3 R27DLI_3243
CTGGACCGTGTCT
"""

rep_seqs_result_file_sorted_exp = """>3 R27DLI_3243
CTGGACCGTGTCT
>0 R27DLI_4812
CTGGGCCGTATCTC
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>1 U1PLI_7889
TTGGACCGTG
"""

otus_w_ref = """0 R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969 U1PLI_9080 U1PLI_9526 W3Cecum_6642 W3Cecum_8992
ref1 U1PLI_7889
2 W3Cecum_4858
ref0 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""

otus_all_ref = """ref1 U1PLI_7889
ref0 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""

reference_seqs = """>ref0
CCCAAAAAAATTTTTT
>ref1 some comment
GGGGGGGAAAAAAAAAAAAA
>ref2
CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCAAAA
"""

rep_seqs_reference_result_file_exp = """>0 R27DLI_4812
CTGGGCCGTATCTC
>ref1 ref1
GGGGGGGAAAAAAAAAAAAA
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>ref0 ref0
CCCAAAAAAATTTTTT
"""

# run unit tests if run from command-line
if __name__ == '__main__':
    main()
gpl-2.0
-4,466,485,514,499,659,000
36.818868
268
0.611355
false
TheoChevalier/bedrock
bedrock/privacy/views.py
5
3689
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import re

from django.views.decorators.cache import cache_page

from commonware.response.decorators import xframe_allow
from bs4 import BeautifulSoup

from lib import l10n_utils
from bedrock.legal_docs.views import LegalDocView, load_legal_doc

HN_PATTERN = re.compile(r'^h(\d)$')
HREF_PATTERN = re.compile(r'^https?\:\/\/www\.mozilla\.org')


def process_legal_doc(content):
    """
    Take the HTML content of a legal doc and return the document as a
    BeautifulSoup object for easier manipulation.

    :param content: HTML content of the legal doc.
    """
    soup = BeautifulSoup(content)

    # Manipulate the markup
    for section in soup.find_all('section'):
        level = 0
        header = soup.new_tag('header')
        div = soup.new_tag('div')
        section.insert(0, header)
        section.insert(1, div)

        # Append elements to <header> or <div>
        for tag in section.children:
            if not tag.name:
                continue

            match = HN_PATTERN.match(tag.name)
            if match:
                header.append(tag)
                level = int(match.group(1))
            if tag.name == 'p':
                (header if level == 1 else div).append(tag)
            if tag.name in ['ul', 'hr']:
                div.append(tag)

        if level > 3:
            section.parent.div.append(section)

        # Remove empty <div>s
        if len(div.contents) == 0:
            div.extract()

    # Convert the site's full URLs to absolute paths
    for link in soup.find_all(href=HREF_PATTERN):
        link['href'] = HREF_PATTERN.sub('', link['href'])

    # Return the HTML fragment as a BeautifulSoup object
    return soup


class PrivacyDocView(LegalDocView):
    def get_legal_doc(self):
        doc = super(PrivacyDocView, self).get_legal_doc()
        doc['content'] = process_legal_doc(doc['content'])
        return doc


firefox_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/firefox.html',
    legal_doc_name='firefox_privacy_notice')

firefox_os_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/firefox-os.html',
    legal_doc_name='firefox_os_privacy_notice')

firefox_cloud_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/firefox-cloud.html',
    legal_doc_name='firefox_cloud_services_PrivacyNotice')

firefox_hello_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/firefox-hello.html',
    legal_doc_name='WebRTC_PrivacyNotice')

firefox_focus_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/firefox-focus.html',
    legal_doc_name='focus_privacy_notice')

thunderbird_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/thunderbird.html',
    legal_doc_name='thunderbird_privacy_policy')

websites_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/websites.html',
    legal_doc_name='websites_privacy_notice')

facebook_notices = PrivacyDocView.as_view(
    template_name='privacy/notices/facebook.html',
    legal_doc_name='facebook_privacy_info')
facebook_notices = xframe_allow(facebook_notices)


@cache_page(60 * 60)  # cache for 1 hour
def privacy(request):
    doc = load_legal_doc('mozilla_privacy_policy', l10n_utils.get_locale(request))

    template_vars = {
        'doc': process_legal_doc(doc['content']),
        'localized': doc['localized'],
        'translations': doc['translations'],
    }

    return l10n_utils.render(request, 'privacy/index.html', template_vars)
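# Usage sketch (editor's illustration, not part of bedrock): since
# process_legal_doc() is a pure markup transformation, it can be exercised
# directly on an HTML fragment, e.g.
#
#   html = '<section><h1>Title</h1><p>Intro</p><ul><li>Item</li></ul></section>'
#   soup = process_legal_doc(html)
#   # the <h1> and <p> are moved into a new <header>, the <ul> into a <div>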
mpl-2.0
-3,313,188,973,490,286,600
30.529915
82
0.665221
false